	Implement warning for unmatched test specs
@@ -212,14 +212,22 @@ This option transforms tabs and newline characters into ```\t``` and ```\n``` re
 ## Warnings
 <pre>-w, --warn <warning name></pre>
 
-Enables reporting of suspicious test runs. There is currently only one
-available warning.
+You can think of Catch2's warnings as the equivalent of the `-Werror`
+(`/WX`) flag for C++ compilers. It turns some suspicious occurrences, like
+a section without assertions, into errors. Because these might be intended,
+warnings are not enabled by default, but users can opt in.
 
+There are currently two warnings implemented:
+
 ```
-    NoAssertions   // Fail test case / leaf section if no assertions
-                   // (e.g. `REQUIRE`) is encountered.
+    NoAssertions        // Fail test case / leaf section if no assertions
+                        // (e.g. `REQUIRE`) is encountered.
+    UnmatchedTestSpec   // Fail test run if any of the CLI test specs did
+                        // not match any tests.
 ```
+
+> `UnmatchedTestSpec` was introduced in Catch2 X.Y.Z.
 
 
 <a id="reporting-timings"></a>
 ## Reporting timings
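The documented `--warn UnmatchedTestSpec` flag can also be enabled without the command line. A minimal sketch, assuming Catch2 v3's `Session` API and a custom `main`:

```cpp
// Sketch: opting into the new warning programmatically.
// Assumes Catch2 v3, where configData().warnings holds the WarnAbout bitmask.
#include <catch2/catch_session.hpp>

int main( int argc, char* argv[] ) {
    Catch::Session session;
    // Same effect as passing `--warn UnmatchedTestSpec` on the command line.
    session.configData().warnings = Catch::WarnAbout::UnmatchedTestSpec;
    return session.run( argc, argv );
}
```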
@@ -173,6 +173,7 @@ new design.
   * When deferred to runtime, it behaves like `CHECK`, and not like `REQUIRE`.
 * You can have multiple tests with the same name, as long as other parts of the test identity differ (#1915, #1999, #2175)
   * Test identity includes the test's name, the test's tags and the test's class name if applicable.
+* Added new warning, `UnmatchedTestSpec`, to error on test specs with no matching tests
 
 
 ### Fixes
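The "same name, different identity" rule mentioned in the release notes above is easiest to see in a short example; a sketch assuming the Catch2 v3 headers:

```cpp
// Sketch: two tests may share a name as long as their tags differ.
#include <catch2/catch_test_macros.hpp>
#include <string>

TEST_CASE( "addition", "[int]" ) {
    REQUIRE( 1 + 1 == 2 );
}

TEST_CASE( "addition", "[string]" ) {
    REQUIRE( std::string( "a" ) + "b" == "ab" );
}
```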
@@ -70,6 +70,9 @@ namespace Catch {
     bool Config::warnAboutMissingAssertions() const {
         return !!( m_data.warnings & WarnAbout::NoAssertions );
     }
+    bool Config::warnAboutUnmatchedTestSpecs() const {
+        return !!( m_data.warnings & WarnAbout::UnmatchedTestSpec );
+    }
     bool Config::zeroTestsCountAsSuccess() const       { return m_data.allowZeroTests; }
     ShowDurations Config::showDurations() const        { return m_data.showDurations; }
     double Config::minDuration() const                 { return m_data.minDuration; }
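The `!!( mask & flag )` pattern above is the usual way to collapse one bit of a flags value into a `bool`. A standalone sketch of the idiom (names are illustrative, not Catch2's):

```cpp
// Sketch: testing a single bit of a flags value.
enum Warning { Nothing = 0x00, NoAssertions = 0x01, UnmatchedTestSpec = 0x02 };

bool hasWarning( int enabled, Warning flag ) {
    // `enabled & flag` yields 0 or the flag bit; `!!` normalises it to bool.
    return !!( enabled & flag );
}
```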
@@ -98,6 +98,7 @@ namespace Catch {
         StringRef name() const override;
         bool includeSuccessfulResults() const override;
         bool warnAboutMissingAssertions() const override;
+        bool warnAboutUnmatchedTestSpecs() const override;
         bool zeroTestsCountAsSuccess() const override;
         ShowDurations showDurations() const override;
         double minDuration() const override;
@@ -96,20 +96,26 @@ namespace Catch {
 
                 for (auto const& match : m_matches) {
                     if (match.tests.empty()) {
-                        m_reporter->noMatchingTestCases(match.name);
-                        totals.error = -1;
+                        m_unmatchedTestSpecs = true;
+                        m_reporter->noMatchingTestCases( match.name );
                     }
                 }
 
                 return totals;
             }
 
+            bool hadUnmatchedTestSpecs() const {
+                return m_unmatchedTestSpecs;
+            }
+
+
         private:
            IStreamingReporter* m_reporter;
             Config const* m_config;
             RunContext m_context;
             std::set<TestCaseHandle const*> m_tests;
             TestSpec::Matches m_matches;
+            bool m_unmatchedTestSpecs = false;
         };
 
         void applyFilenamesAsTags() {
@@ -301,6 +307,11 @@ namespace Catch {
             TestGroup tests { CATCH_MOVE(reporter), m_config.get() };
             auto const totals = tests.execute();
 
+            if ( tests.hadUnmatchedTestSpecs()
+                && m_config->warnAboutUnmatchedTestSpecs() ) {
+                return 3;
+            }
+
             if ( totals.testCases.total() == 0
                 && !m_config->zeroTestsCountAsSuccess() ) {
                 return 2;
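Taken together, the two hunks above give the runner roughly the following exit-code policy; a sketch with illustrative names, not Catch2's actual internals:

```cpp
#include <cstdint>

// Sketch: unmatched test specs (with the warning enabled) are checked
// before the zero-tests case, and both before the ordinary failure count.
int resolveExitCode( bool hadUnmatchedSpecs, bool warnOnUnmatched,
                     std::uint64_t totalTestCases, bool allowZeroTests,
                     std::uint64_t failedTestCases ) {
    if ( hadUnmatchedSpecs && warnOnUnmatched ) { return 3; }
    if ( totalTestCases == 0 && !allowZeroTests ) { return 2; }
    return failedTestCases == 0 ? 0 : 1; // simplified; the real runner
                                         // derives this from Totals
}
```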
@@ -26,7 +26,10 @@ namespace Catch {
 
     struct WarnAbout { enum What {
         Nothing = 0x00,
+        //! A test case or leaf section did not run any assertions
         NoAssertions = 0x01,
+        //! A command line test spec matched no test cases
+        UnmatchedTestSpec = 0x02,
     }; };
 
     enum class ShowDurations {
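Since `What` is a plain (unscoped) enum used as a bitmask, its values can in principle be combined with bitwise OR, at the cost of a cast back to the enum type. A sketch, assuming the header path of the interface file above:

```cpp
#include <catch2/interfaces/catch_interfaces_config.hpp>

// Sketch: combining the two flag values; the cast is needed because
// operator| on an unscoped enum yields int.
Catch::WarnAbout::What const bothWarnings =
    static_cast<Catch::WarnAbout::What>( Catch::WarnAbout::NoAssertions |
                                         Catch::WarnAbout::UnmatchedTestSpec );
```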
@@ -63,6 +66,7 @@ namespace Catch {
         virtual bool includeSuccessfulResults() const = 0;
         virtual bool shouldDebugBreak() const = 0;
         virtual bool warnAboutMissingAssertions() const = 0;
+        virtual bool warnAboutUnmatchedTestSpecs() const = 0;
         virtual bool zeroTestsCountAsSuccess() const = 0;
         virtual int abortAfter() const = 0;
         virtual bool showInvisibles() const = 0;
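With the accessor on the abstract interface, any component holding an `IConfig` can query it. A sketch of a hypothetical consumer (the function is illustrative, not an actual Catch2 call site):

```cpp
#include <catch2/interfaces/catch_interfaces_config.hpp>

// Sketch: querying the new accessor through the abstract interface.
bool shouldFailOnUnmatchedSpecs( Catch::IConfig const& config ) {
    return config.warnAboutUnmatchedTestSpecs();
}
```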
@@ -27,6 +27,9 @@ namespace Catch {
             if ( warning == "NoAssertions" ) {
                 config.warnings = WarnAbout::NoAssertions;
                 return ParserResult::ok( ParseResultType::Matched );
+            } else if ( warning == "UnmatchedTestSpec" ) {
+                config.warnings = WarnAbout::UnmatchedTestSpec;
+                return ParserResult::ok( ParseResultType::Matched );
             }
 
             return ParserResult::runtimeError(
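The parser above maps warning names to flag values and rejects anything else. The same shape as a free function, sketched with illustrative names:

```cpp
#include <optional>
#include <string>

enum class Warn { NoAssertions, UnmatchedTestSpec };

// Sketch: name-to-flag lookup; an empty optional corresponds to the
// "Unrecognised warning option" error path shown above.
std::optional<Warn> parseWarning( std::string const& name ) {
    if ( name == "NoAssertions" ) { return Warn::NoAssertions; }
    if ( name == "UnmatchedTestSpec" ) { return Warn::UnmatchedTestSpec; }
    return std::nullopt;
}
```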
@@ -229,6 +229,23 @@ add_test(NAME TestSpecs::OverrideFailureWithNoMatchedTests
   COMMAND $<TARGET_FILE:SelfTest> "___nonexistent_test___" --allow-running-no-tests
 )
 
+add_test(NAME Warnings::UnmatchedTestSpecIsAccepted
+  COMMAND $<TARGET_FILE:SelfTest> Tracker --warn UnmatchedTestSpec
+)
+set_tests_properties(Warnings::UnmatchedTestSpecIsAccepted
+  PROPERTIES
+    FAIL_REGULAR_EXPRESSION "Unrecognised warning option: "
+)
+
+add_test(NAME TestSpecs::WarnUnmatchedTestSpecFailsWithUnmatchedTestSpec
+  COMMAND
+    $<TARGET_FILE:SelfTest> Tracker, "___nonexistent_test___" --warn UnmatchedTestSpec
+)
+set_tests_properties(TestSpecs::WarnUnmatchedTestSpecFailsWithUnmatchedTestSpec
+  PROPERTIES
+    WILL_FAIL ON
+)
+
 add_test(NAME UnmatchedOutputFilter COMMAND $<TARGET_FILE:SelfTest> [this-tag-does-not-exist])
 set_tests_properties(UnmatchedOutputFilter
   PROPERTIES
Author: Martin Hořeňovský