mirror of https://github.com/catchorg/Catch2.git (synced 2024-11-25 23:06:10 +01:00)
Exit with non-0 return code if no tests were run
A new flag, `--allow-running-no-tests`, was added to override this behaviour when an exit code of 0 is desired. This change also makes the `-w NoTests` warning obsolete, so it has been removed.
parent 602e484f02
commit 9f2dca5384
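For orientation before the diff itself, here is a minimal sketch (not part of the commit) of a custom Catch2 v3 `main`, showing where the new exit code surfaces. It assumes only the standard `catch2/catch_session.hpp` header and the `Catch::Session` API.

```cpp
#include <catch2/catch_session.hpp>

int main( int argc, char* argv[] ) {
    // Session::run() returns the value that becomes the process exit code.
    // After this commit, a run that executed zero test cases (empty binary,
    // or a test spec that matched nothing) yields a non-zero value (2 in the
    // change below) unless --allow-running-no-tests was passed.
    return Catch::Session().run( argc, argv );
}
```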
@@ -30,6 +30,7 @@
 [Filenames as tags](#filenames-as-tags)<br>
 [Override output colouring](#override-output-colouring)<br>
 [Test Sharding](#test-sharding)<br>
+[Allow running the binary without tests](#no-tests-override)<br>

 Catch works quite nicely without any command line options at all - but for those times when you want greater control the following options are available.
 Click one of the following links to take you straight to that option - or scroll on to browse the available options.
@@ -70,6 +71,7 @@ Click one of the following links to take you straight to that option - or scroll
 <a href="#use-colour"> ` --use-colour`</a><br />
 <a href="#test-sharding"> ` --shard-count`</a><br />
 <a href="#test-sharding"> ` --shard-index`</a><br />
+<a href=#no-tests-override> ` --allow-running-no-tests`</a><br />

 </br>

@@ -210,14 +212,12 @@ This option transforms tabs and newline characters into ```\t``` and ```\n``` respectively
 ## Warnings
 <pre>-w, --warn <warning name></pre>

-Enables reporting of suspicious test states. There are currently two
-available warnings
+Enables reporting of suspicious test runs. There is currently only one
+available warning.

 ```
 NoAssertions   // Fail test case / leaf section if no assertions
                //  (e.g. `REQUIRE`) is encountered.
-NoTests        // Return non-zero exit code when no test cases were run
-               //  Also calls reporter's noMatchingTestCases method
 ```

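As an illustration of the remaining warning (not part of the commit), a test case like the following is reported as failed when the binary is run with `-w NoAssertions`, because it finishes without executing any assertion macro. The test name is made up for the example.

```cpp
#include <catch2/catch_test_macros.hpp>

// With `-w NoAssertions`, this test case fails because it never executes an
// assertion such as REQUIRE or CHECK before it ends.
TEST_CASE( "does work but never asserts" ) {
    int value = 1 + 1;
    (void)value; // intentionally no REQUIRE/CHECK here
}
```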
@@ -441,6 +441,17 @@ the number of shards.

 This is useful when you want to split test execution across multiple processes, as is done with [Bazel test sharding](https://docs.bazel.build/versions/main/test-encyclopedia.html#test-sharding).

+<a id="no-tests-override"></a>
+## Allow running the binary without tests
+<pre>--allow-running-no-tests</pre>
+
+> Introduced in Catch2 X.Y.Z.
+
+By default, Catch2 test binaries return non-0 exit code if no tests were
+run, e.g. if the binary was compiled with no tests, or the provided test
+spec matched no tests. This flag overrides that, so a test run with no
+tests still returns 0.
+
 ---

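To make the new doc section concrete, here is a small sketch (not part of the commit) of how the flag interacts with test specs; the test name, tag, and binary name are invented for illustration.

```cpp
#include <catch2/catch_test_macros.hpp>

// Hypothetical test used only to illustrate the new behaviour:
//   ./tests "[some-other-tag]"                           -> exits non-zero,
//                                                           nothing matched
//   ./tests "[some-other-tag]" --allow-running-no-tests  -> exits 0 even
//                                                           though no test ran
TEST_CASE( "adds numbers", "[math]" ) {
    REQUIRE( 1 + 1 == 2 );
}
```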
@@ -198,6 +198,9 @@ new design.
 * `catch2` is the statically compiled implementation by itself
 * `catch2-with-main` also links in the default main
 * Passing invalid test specifications passed to Catch2 are now reported before tests are run, and are a hard error.
+* Running 0 tests (e.g. due to empty binary, or test spec not matching anything) returns non-0 exit code
+  * Flag `--allow-running-no-tests` overrides this behaviour.
+  * `NoTests` warning has been removed because it is fully subsumed by this change.

@@ -67,8 +67,10 @@ namespace Catch {
     std::ostream& Config::stream() const { return m_stream->stream(); }
     StringRef Config::name() const { return m_data.name.empty() ? m_data.processName : m_data.name; }
     bool Config::includeSuccessfulResults() const { return m_data.showSuccessfulTests; }
-    bool Config::warnAboutMissingAssertions() const { return !!(m_data.warnings & WarnAbout::NoAssertions); }
-    bool Config::warnAboutNoTests() const { return !!(m_data.warnings & WarnAbout::NoTests); }
+    bool Config::warnAboutMissingAssertions() const {
+        return !!( m_data.warnings & WarnAbout::NoAssertions );
+    }
+    bool Config::zeroTestsCountAsSuccess() const { return m_data.allowZeroTests; }
     ShowDurations Config::showDurations() const { return m_data.showDurations; }
     double Config::minDuration() const { return m_data.minDuration; }
     TestRunOrder Config::runOrder() const { return m_data.runOrder; }
@@ -33,6 +33,7 @@ namespace Catch {
         bool showInvisibles = false;
         bool filenamesAsTags = false;
         bool libIdentify = false;
+        bool allowZeroTests = false;

         int abortAfter = -1;
         uint32_t rngSeed = generateRandomSeed(GenerateFrom::Default);
@@ -97,7 +98,7 @@ namespace Catch {
         StringRef name() const override;
         bool includeSuccessfulResults() const override;
         bool warnAboutMissingAssertions() const override;
-        bool warnAboutNoTests() const override;
+        bool zeroTestsCountAsSuccess() const override;
         ShowDurations showDurations() const override;
         double minDuration() const override;
         TestRunOrder runOrder() const override;
@@ -105,12 +105,10 @@ namespace Catch {
         }

     private:
-        using Tests = std::set<TestCaseHandle const*>;
-
         IStreamingReporter* m_reporter;
         Config const* m_config;
         RunContext m_context;
-        Tests m_tests;
+        std::set<TestCaseHandle const*> m_tests;
         TestSpec::Matches m_matches;
     };

@@ -303,8 +301,10 @@ namespace Catch {
             TestGroup tests { CATCH_MOVE(reporter), m_config.get() };
             auto const totals = tests.execute();

-            if( m_config->warnAboutNoTests() && totals.error == -1 )
+            if ( totals.testCases.total() == 0
+                 && !m_config->zeroTestsCountAsSuccess() ) {
                 return 2;
+            }

             // Note that on unices only the lower 8 bits are usually used, clamping
             // the return value to 255 prevents false negative when some multiple
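The surviving context comment above refers to clamping the exit code on POSIX systems. As a rough stand-alone illustration of that idea (not Catch2's verbatim implementation), clamping could look like this:

```cpp
#include <algorithm>
#include <cstdint>

// POSIX shells only see the low 8 bits of an exit status, so e.g. 256 failed
// assertions would otherwise wrap around and look like success (0).
int toExitCode( std::uint64_t failedAssertions ) {
    constexpr std::uint64_t maxExitCode = 255;
    return static_cast<int>( std::min( failedAssertions, maxExitCode ) );
}
```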
@@ -27,7 +27,6 @@ namespace Catch {
     struct WarnAbout { enum What {
         Nothing = 0x00,
         NoAssertions = 0x01,
-        NoTests = 0x02
     }; };

     enum class ShowDurations {
@@ -64,7 +63,7 @@ namespace Catch {
         virtual bool includeSuccessfulResults() const = 0;
         virtual bool shouldDebugBreak() const = 0;
         virtual bool warnAboutMissingAssertions() const = 0;
-        virtual bool warnAboutNoTests() const = 0;
+        virtual bool zeroTestsCountAsSuccess() const = 0;
         virtual int abortAfter() const = 0;
         virtual bool showInvisibles() const = 0;
         virtual ShowDurations showDurations() const = 0;
@@ -24,21 +24,14 @@ namespace Catch {
         using namespace Clara;

         auto const setWarning = [&]( std::string const& warning ) {
-            auto warningSet = [&]() {
-                if( warning == "NoAssertions" )
-                    return WarnAbout::NoAssertions;
-
-                if ( warning == "NoTests" )
-                    return WarnAbout::NoTests;
-
-                return WarnAbout::Nothing;
-            }();
-
-            if (warningSet == WarnAbout::Nothing)
-                return ParserResult::runtimeError( "Unrecognised warning: '" + warning + '\'' );
-            config.warnings = static_cast<WarnAbout::What>( config.warnings | warningSet );
-            return ParserResult::ok( ParseResultType::Matched );
+            if ( warning == "NoAssertions" ) {
+                config.warnings = WarnAbout::NoAssertions;
+                return ParserResult::ok( ParseResultType::Matched );
+            }
+
+            return ParserResult ::runtimeError(
+                "Unrecognised warning option: '" + warning + '\'' );
         };
         auto const loadTestNamesFromFile = [&]( std::string const& filename ) {
             std::ifstream f( filename.c_str() );
             if( !f.is_open() )
@@ -280,7 +273,10 @@ namespace Catch {
                 ( "split the tests to execute into this many groups" )
             | Opt( setShardIndex, "shard index" )
                 ["--shard-index"]
-                ( "index of the group of tests to execute (see --shard-count)" )
+                ( "index of the group of tests to execute (see --shard-count)" ) |
+            Opt( config.allowZeroTests )
+                ["--allow-running-no-tests"]
+                ( "Treat 'No tests run' as a success" )
             | Arg( config.testsOrTags, "test name|pattern|tags" )
                 ( "which test or tests to use" );

@@ -207,15 +207,29 @@ set_tests_properties(List::Reporters::XmlOutput PROPERTIES
 add_test(NAME NoAssertions COMMAND $<TARGET_FILE:SelfTest> -w NoAssertions "An empty test with no assertions")
 set_tests_properties(NoAssertions PROPERTIES PASS_REGULAR_EXPRESSION "No assertions in test case")

-add_test(NAME NoTest COMMAND $<TARGET_FILE:SelfTest> Tracker, "___nonexistent_test___")
-set_tests_properties(NoTest PROPERTIES
+# We cannot combine a regular expression on output with return code check
+# in one test, so we register two instead of making a checking script because
+# the runtime overhead is small enough.
+add_test(NAME TestSpecs::CombiningMatchingAndNonMatchingIsOk-1 COMMAND $<TARGET_FILE:SelfTest> Tracker, "___nonexistent_test___")
+
+add_test(NAME TestSpecs::CombiningMatchingAndNonMatchingIsOk-2 COMMAND $<TARGET_FILE:SelfTest> Tracker, "___nonexistent_test___")
+set_tests_properties(TestSpecs::CombiningMatchingAndNonMatchingIsOk-2 PROPERTIES
   PASS_REGULAR_EXPRESSION "No test cases matched '___nonexistent_test___'"
   FAIL_REGULAR_EXPRESSION "No tests ran"
 )

-add_test(NAME WarnAboutNoTests COMMAND ${CMAKE_COMMAND} -P ${SELF_TEST_DIR}/WarnAboutNoTests.cmake $<TARGET_FILE:SelfTest>)
+add_test(NAME TestSpecs::NoMatchedTestsFail
+  COMMAND $<TARGET_FILE:SelfTest> "___nonexistent_test___"
+)
+set_tests_properties(TestSpecs::NoMatchedTestsFail
+  PROPERTIES
+    WILL_FAIL ON
+)
+add_test(NAME TestSpecs::OverrideFailureWithNoMatchedTests
+  COMMAND $<TARGET_FILE:SelfTest> "___nonexistent_test___" --allow-running-no-tests
+)

-add_test(NAME UnmatchedOutputFilter COMMAND $<TARGET_FILE:SelfTest> [this-tag-does-not-exist] -w NoTests)
+add_test(NAME UnmatchedOutputFilter COMMAND $<TARGET_FILE:SelfTest> [this-tag-does-not-exist])
 set_tests_properties(UnmatchedOutputFilter
   PROPERTIES
     PASS_REGULAR_EXPRESSION "No test cases matched '\\[this-tag-does-not-exist\\]'"
@@ -299,6 +299,22 @@ set_tests_properties(
 # PASS_REGULAR_EXPRESSION "Pretty please, break into debugger"
 #)

+add_executable(NoTests ${TESTS_DIR}/X92-NoTests.cpp)
+target_link_libraries(NoTests PRIVATE Catch2::Catch2WithMain)
+
+add_test(
+  NAME TestSpecs::EmptySpecWithNoTestsFails
+  COMMAND $<TARGET_FILE:NoTests>
+)
+set_tests_properties(TestSpecs::EmptySpecWithNoTestsFails
+  PROPERTIES
+    WILL_FAIL ON
+)
+add_test(
+  NAME TestSpecs::OverrideFailureWithEmptySpec
+  COMMAND $<TARGET_FILE:NoTests> --allow-running-no-tests
+)
+
 set( EXTRA_TEST_BINARIES
   PrefixedMacros
   DisabledMacros
@@ -310,6 +326,7 @@ set( EXTRA_TEST_BINARIES
   DuplicatedTestCases-SameNameAndTags
   DuplicatedTestCases-SameNameDifferentTags
   DuplicatedTestCases-DuplicatedTestCaseMethods
+  NoTests
   # DebugBreakMacros
 )

tests/ExtraTests/X92-NoTests.cpp (new file)
@@ -0,0 +1,11 @@
+
+// Copyright Catch2 Authors
+// Distributed under the Boost Software License, Version 1.0.
+// (See accompanying file LICENSE_1_0.txt or copy at
+// https://www.boost.org/LICENSE_1_0.txt)
+
+// SPDX-License-Identifier: BSL-1.0
+
+/**\file
+ * Links into executable with no tests which should fail when run
+ */
WarnAboutNoTests.cmake (deleted)
@@ -1,19 +0,0 @@
-# Workaround for a peculiarity where CTest disregards the return code from a
-# test command if a PASS_REGULAR_EXPRESSION is also set
-execute_process(
-  COMMAND ${CMAKE_ARGV3} -w NoTests "___nonexistent_test___"
-  RESULT_VARIABLE ret
-  OUTPUT_VARIABLE out
-)
-
-message("${out}")
-
-if(NOT ${ret} MATCHES "^[0-9]+$")
-  message(FATAL_ERROR "${ret}")
-endif()
-
-if(${ret} EQUAL 0)
-  message(FATAL_ERROR "Expected nonzero return code")
-elseif(${out} MATCHES "Helper failed with")
-  message(FATAL_ERROR "Helper failed")
-endif()