diff --git a/projects/CMakeLists.txt b/projects/CMakeLists.txt
index cde25cbd..914af8d6 100644
--- a/projects/CMakeLists.txt
+++ b/projects/CMakeLists.txt
@@ -455,9 +455,6 @@ set_tests_properties(TestsInFile::InvalidTestNames-2 PROPERTIES PASS_REGULAR_EXP
 add_test(NAME RandomTestOrdering COMMAND ${PYTHON_EXECUTABLE}
   ${CATCH_DIR}/projects/TestScripts/testRandomOrder.py $<TARGET_FILE:SelfTest>)
 
-add_test(NAME TestTimeThreshold COMMAND ${PYTHON_EXECUTABLE}
-  ${CATCH_DIR}/projects/TestScripts/testTimeThreshold.py $<TARGET_FILE:SelfTest>)
-
 if (CATCH_USE_VALGRIND)
     add_test(NAME ValgrindRunTests COMMAND valgrind --leak-check=full --error-exitcode=1 $<TARGET_FILE:SelfTest>)
     add_test(NAME ValgrindListTests COMMAND valgrind --leak-check=full --error-exitcode=1 $<TARGET_FILE:SelfTest> --list-tests --verbosity high)
diff --git a/projects/ExtraTests/CMakeLists.txt b/projects/ExtraTests/CMakeLists.txt
index c8c975c0..63aa678b 100644
--- a/projects/ExtraTests/CMakeLists.txt
+++ b/projects/ExtraTests/CMakeLists.txt
@@ -10,6 +10,31 @@ project( Catch2ExtraTests LANGUAGES CXX )
 
 message( STATUS "Extra tests included" )
 
+# The MinDuration reporting tests do not need separate compilation, but
+# they have non-trivial execution time, so they are categorized as
+# extra tests and thus run less often.
+add_test(NAME MinDuration::SimpleThreshold COMMAND $<TARGET_FILE:SelfTest> --min-duration 0.15 [min_duration_test])
+set_tests_properties(
+  MinDuration::SimpleThreshold
+  PROPERTIES
+    PASS_REGULAR_EXPRESSION "s: sleep_for_200ms"
+    FAIL_REGULAR_EXPRESSION "sleep_for_100ms"
+    RUN_SERIAL ON # The test is timing-sensitive, so we run it serially
+                  # to avoid false positives on oversubscribed machines
+)
+
+# -d yes overrides the threshold, so we should see the faster test even
+# with a ridiculously high min duration threshold
+add_test(NAME MinDuration::DurationOverrideYes COMMAND $<TARGET_FILE:SelfTest> --min-duration 1.0 -d yes [min_duration_test])
+set_tests_properties(
+  MinDuration::DurationOverrideYes
+  PROPERTIES
+    PASS_REGULAR_EXPRESSION "s: sleep_for_100ms"
+)
+
+
+# ------------ end of duration reporting tests
+
 # define folders used:
 set( TESTS_DIR ${CATCH_DIR}/projects/ExtraTests )
 
diff --git a/projects/TestScripts/testTimeThreshold.py b/projects/TestScripts/testTimeThreshold.py
deleted file mode 100644
index 51804aae..00000000
--- a/projects/TestScripts/testTimeThreshold.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/env python3
-
-import subprocess
-import sys
-
-def run_tests_with_threshold(self_test_exe, threshold):
-    cmd = [self_test_exe, '--min-duration', str(threshold),
-           '[min_duration_test]']
-    process = subprocess.Popen(
-        cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    stdout, stderr = process.communicate()
-    if stderr:
-        raise RuntimeError("Unexpected error output:\n" +
-                           stderr.decode())
-    if process.returncode != 0:
-        raise RuntimeError("Unexpected failure to run tests\n")
-    result = stdout.split(b'\n')
-    report_lines = [s.split() for s in result if b' s: ' in s]
-    tests_reported = [l[2] for l in report_lines]
-    times_reported = [float(l[0]) for l in report_lines]
-    return tests_reported, times_reported
-
-def check_times_at_least(times_reported, minimum):
-    for time in times_reported:
-        assert time >= minimum, (
-            'Time {} was less that requested minimum {}' .format(
-                time, minimum))
-
-def main():
-    self_test_exe, = sys.argv[1:]
-    tests, times = run_tests_with_threshold(self_test_exe, '0.15')
-    assert tests == [b'sleep_for_200ms'], (
-        "Unexpected tests reported %s" % tests)
-    check_times_at_least(times, 0.15)
-    tests,times = run_tests_with_threshold(self_test_exe, '0')
-    assert tests == [b'sleep_for_100ms', b'sleep_for_200ms'], (
-        "Unexpected tests reported %s" % tests)
-    check_times_at_least(times, 0)
-
-if __name__ == '__main__':
-    sys.exit(main())
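
Note on context (not part of the patch): the [min_duration_test] tag used above refers to deliberately slow test cases in the SelfTest binary. Those cases are not shown in this diff; below is a minimal standalone sketch of what they presumably look like. The file name, tags, and exact bodies are assumptions for illustration, not taken from this diff.

// min_duration_sketch.cpp -- hypothetical standalone sketch, not part of
// this diff; the real SelfTest cases may be tagged and structured differently.
#define CATCH_CONFIG_MAIN // for a standalone build; the real SelfTest binary provides its own main
#include "catch.hpp"

#include <chrono>
#include <thread>

TEST_CASE( "sleep_for_100ms", "[min_duration_test]" ) {
    std::this_thread::sleep_for( std::chrono::milliseconds( 100 ) );
    CHECK( true );
}

TEST_CASE( "sleep_for_200ms", "[min_duration_test]" ) {
    std::this_thread::sleep_for( std::chrono::milliseconds( 200 ) );
    CHECK( true );
}

With --min-duration 0.15, the console reporter prints a duration line (of the form "0.203 s: sleep_for_200ms") only for tests at or above the threshold, so the PASS_REGULAR_EXPRESSION / FAIL_REGULAR_EXPRESSION properties above match on those lines, just as the deleted script keyed on b' s: '.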