Add --min-duration option
The test runner already has a --durations option to print durations. However, this isn't entirely satisfactory. When there are many tests, it produces so much output that the test failure output becomes hard to find. Nevertheless, it is helpful to be informed of tests which are unusually slow. Therefore, introduce a new option, --min-duration, that causes the durations of all tests above a given threshold to be printed. This keeps slow tests visible without mentioning every test.
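For illustration, a hypothetical invocation (the binary name, threshold, and timing below are made up; the output format matches the reporters' `<duration> s: <test name>` lines changed below):

    ./SelfTest --min-duration 0.5
    0.733 s: some unusually slow test case

Tests that finish faster than the threshold print no duration line, while --durations yes continues to print one for every test.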
commit 80b0d6975c (parent 36131f7ffa)
@@ -222,6 +222,10 @@ available warnings
 
 When set to ```yes``` Catch will report the duration of each test case, in milliseconds. Note that it does this regardless of whether a test case passes or fails. Note, also, that certain reporters (e.g. Junit) always report test case durations regardless of this option being set or not.
 
+<pre>-D, --min-duration <value></pre>
+
+When set, Catch will report the duration of each test case that took more than <value> seconds, in milliseconds.
+
 <a id="input-file"></a>
 ## Load test names to run from a file
 <pre>-f, --input-file <filename></pre>
@@ -69,6 +69,7 @@ namespace Catch {
     bool Config::warnAboutMissingAssertions() const { return !!(m_data.warnings & WarnAbout::NoAssertions); }
     bool Config::warnAboutNoTests() const { return !!(m_data.warnings & WarnAbout::NoTests); }
     ShowDurations::OrNot Config::showDurations() const { return m_data.showDurations; }
+    double Config::minDuration() const { return m_data.minDuration; }
     RunTests::InWhatOrder Config::runOrder() const { return m_data.runOrder; }
     unsigned int Config::rngSeed() const { return m_data.rngSeed; }
     UseColour::YesOrNo Config::useColour() const { return m_data.useColour; }
@@ -48,6 +48,7 @@ namespace Catch {
         Verbosity verbosity = Verbosity::Normal;
         WarnAbout::What warnings = WarnAbout::Nothing;
         ShowDurations::OrNot showDurations = ShowDurations::DefaultForReporter;
+        double minDuration = -1;
         RunTests::InWhatOrder runOrder = RunTests::InDeclarationOrder;
         UseColour::YesOrNo useColour = UseColour::Auto;
         WaitForKeypress::When waitForKeypress = WaitForKeypress::Never;
@@ -98,6 +99,7 @@ namespace Catch {
         bool warnAboutMissingAssertions() const override;
         bool warnAboutNoTests() const override;
         ShowDurations::OrNot showDurations() const override;
+        double minDuration() const override;
         RunTests::InWhatOrder runOrder() const override;
         unsigned int rngSeed() const override;
         UseColour::YesOrNo useColour() const override;
@@ -67,6 +67,7 @@ namespace Catch {
         virtual int abortAfter() const = 0;
         virtual bool showInvisibles() const = 0;
         virtual ShowDurations::OrNot showDurations() const = 0;
+        virtual double minDuration() const = 0;
         virtual TestSpec const& testSpec() const = 0;
         virtual bool hasTestFilters() const = 0;
         virtual std::vector<std::string> const& getTestsOrTags() const = 0;
@@ -170,6 +170,9 @@ namespace Catch {
             | Opt( [&]( bool flag ) { config.showDurations = flag ? ShowDurations::Always : ShowDurations::Never; }, "yes|no" )
                 ["-d"]["--durations"]
                 ( "show test durations" )
+            | Opt( config.minDuration, "seconds" )
+                ["-D"]["--min-duration"]
+                ( "show test durations for tests taking at least the given number of seconds" )
             | Opt( loadTestNamesFromFile, "filename" )
                 ["-f"]["--input-file"]
                 ( "load test names to run from a file" )
@@ -43,6 +43,13 @@ namespace Catch {
         return std::string(buffer);
     }
 
+    bool shouldShowDuration( IConfig const& config, double duration ) {
+        if( config.showDurations() == ShowDurations::Always )
+            return true;
+        double min = config.minDuration();
+        return min >= 0 && duration >= min;
+    }
+
     std::string serializeFilters( std::vector<std::string> const& container ) {
         ReusableStringStream oss;
         bool first = true;
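A standalone restatement of the rule this helper encodes may be useful; the function and values below are illustrative only and are not part of the change:

    // Show a duration if --durations yes was given, or if a non-negative
    // --min-duration threshold was given and the test took at least that long.
    bool shouldShow( bool alwaysShow, double minDuration, double duration ) {
        if ( alwaysShow )
            return true;
        return minDuration >= 0 && duration >= minDuration;
    }
    // shouldShow( false, 0.15, 0.20 ) == true
    // shouldShow( false, 0.15, 0.10 ) == false
    // shouldShow( false, -1.0, 0.20 ) == false  // option not given; -1 is the default
    // shouldShow( true,  -1.0, 0.01 ) == true   // --durations yes always wins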
@@ -24,6 +24,8 @@ namespace Catch {
     // Returns double formatted as %.3f (format expected on output)
     std::string getFormattedDuration( double duration );
 
+    bool shouldShowDuration( IConfig const &, double duration );
+
     std::string serializeFilters( std::vector<std::string> const& container );
 
     struct StreamingReporterBase : IStreamingReporter {
@@ -286,8 +286,9 @@ private:
         }
 
         void CompactReporter::sectionEnded(SectionStats const& _sectionStats) {
-            if (m_config->showDurations() == ShowDurations::Always) {
-                stream << getFormattedDuration(_sectionStats.durationInSeconds) << " s: " << _sectionStats.sectionInfo.name << std::endl;
+            double dur = _sectionStats.durationInSeconds;
+            if ( shouldShowDuration( *m_config, dur ) ) {
+                stream << getFormattedDuration( dur ) << " s: " << _sectionStats.sectionInfo.name << std::endl;
             }
         }
 
@@ -420,8 +420,9 @@ void ConsoleReporter::sectionEnded(SectionStats const& _sectionStats) {
         stream << "\nNo assertions in test case";
         stream << " '" << _sectionStats.sectionInfo.name << "'\n" << std::endl;
     }
-    if (m_config->showDurations() == ShowDurations::Always) {
-        stream << getFormattedDuration(_sectionStats.durationInSeconds) << " s: " << _sectionStats.sectionInfo.name << std::endl;
+    double dur = _sectionStats.durationInSeconds;
+    if (shouldShowDuration(*m_config, dur)) {
+        stream << getFormattedDuration(dur) << " s: " << _sectionStats.sectionInfo.name << std::endl;
     }
     if (m_headerPrinted) {
         m_headerPrinted = false;
@@ -28,6 +28,7 @@ set(TEST_SOURCES
     ${SELF_TEST_DIR}/IntrospectiveTests/Xml.tests.cpp
     ${SELF_TEST_DIR}/IntrospectiveTests/ToString.tests.cpp
     ${SELF_TEST_DIR}/IntrospectiveTests/UniquePtr.tests.cpp
+    ${SELF_TEST_DIR}/TimingTests/Sleep.tests.cpp
     ${SELF_TEST_DIR}/UsageTests/Approx.tests.cpp
     ${SELF_TEST_DIR}/UsageTests/BDD.tests.cpp
     ${SELF_TEST_DIR}/UsageTests/Benchmark.tests.cpp
@@ -201,6 +202,9 @@ set_tests_properties(TagAlias PROPERTIES
 add_test(NAME RandomTestOrdering COMMAND ${PYTHON_EXECUTABLE}
   ${CATCH_DIR}/tests/TestScripts/testRandomOrder.py $<TARGET_FILE:SelfTest>)
 
+add_test(NAME TestTimeThreshold COMMAND ${PYTHON_EXECUTABLE}
+  ${CATCH_DIR}/tests/TestScripts/testTimeThreshold.py $<TARGET_FILE:SelfTest>)
+
 add_test(NAME CheckConvenienceHeaders
   COMMAND
     ${PYTHON_EXECUTABLE} ${CATCH_DIR}/tools/scripts/checkConvenienceHeaders.py
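This registers the new script with CTest alongside the existing Python-driven checks. Assuming a configured build directory, it could be run in isolation with standard CTest filtering (the exact invocation is an assumption, not part of this change):

    ctest -R TestTimeThreshold --output-on-failure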
tests/SelfTest/TimingTests/Sleep.tests.cpp (new file, 23 lines)
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2011 Two Blue Cubes Ltd. All rights reserved.
+ *
+ * Distributed under the Boost Software License, Version 1.0. (See accompanying
+ * file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#include <catch2/catch_test_macros.hpp>
+
+#include <chrono>
+#include <thread>
+
+TEST_CASE( "sleep_for_100ms", "[.min_duration_test][approvals]" )
+{
+    std::this_thread::sleep_for( std::chrono::milliseconds( 100 ) );
+    CHECK( true );
+}
+
+TEST_CASE( "sleep_for_200ms", "[.min_duration_test][approvals]" )
+{
+    std::this_thread::sleep_for( std::chrono::milliseconds( 200 ) );
+    CHECK( true );
+}
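Both test cases are hidden (the leading `.` in the tag keeps them out of a default run) and exist purely to be timed. They can also be exercised by hand against the new option, e.g. (binary path and exact timing are illustrative):

    ./SelfTest --min-duration 0.15 "[min_duration_test]"
    0.205 s: sleep_for_200ms

Only the 200 ms sleeper should cross a 0.15 s threshold; the reported duration will vary from run to run.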
tests/TestScripts/testTimeThreshold.py (new file, 41 lines)
@@ -0,0 +1,41 @@
+#!/usr/bin/env python3
+
+import subprocess
+import sys
+
+def run_tests_with_threshold(self_test_exe, threshold):
+    cmd = [self_test_exe, '--min-duration', str(threshold),
+           '[min_duration_test]']
+    process = subprocess.Popen(
+        cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    stdout, stderr = process.communicate()
+    if stderr:
+        raise RuntimeError("Unexpected error output:\n" +
+                           stderr.decode())
+    if process.returncode != 0:
+        raise RuntimeError("Unexpected failure to run tests\n")
+    result = stdout.split(b'\n')
+    report_lines = [s.split() for s in result if b' s: ' in s]
+    tests_reported = [l[2] for l in report_lines]
+    times_reported = [float(l[0]) for l in report_lines]
+    return tests_reported, times_reported
+
+def check_times_at_least(times_reported, minimum):
+    for time in times_reported:
+        assert time >= minimum, (
+            'Time {} was less than requested minimum {}'.format(
+                time, minimum))
+
+def main():
+    self_test_exe, = sys.argv[1:]
+    tests, times = run_tests_with_threshold(self_test_exe, '0.15')
+    assert tests == [b'sleep_for_200ms'], (
+        "Unexpected tests reported %s" % tests)
+    check_times_at_least(times, 0.15)
+    tests, times = run_tests_with_threshold(self_test_exe, '0')
+    assert tests == [b'sleep_for_100ms', b'sleep_for_200ms'], (
+        "Unexpected tests reported %s" % tests)
+    check_times_at_least(times, 0)
+
+if __name__ == '__main__':
+    sys.exit(main())
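This script is what the TestTimeThreshold CTest entry above drives; it can also be run directly against a built SelfTest binary (the path below is illustrative):

    python3 tests/TestScripts/testTimeThreshold.py path/to/SelfTest

It asserts that a 0.15 s threshold reports only sleep_for_200ms, that a threshold of 0 reports both sleep tests, and that every reported time is at least the requested minimum.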