diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt
index 72ece878..3235c36b 100644
--- a/tests/CMakeLists.txt
+++ b/tests/CMakeLists.txt
@@ -202,9 +202,6 @@ set_tests_properties(TagAlias PROPERTIES
 add_test(NAME RandomTestOrdering COMMAND ${PYTHON_EXECUTABLE}
   ${CATCH_DIR}/tests/TestScripts/testRandomOrder.py $<TARGET_FILE:SelfTest>)
 
-add_test(NAME TestTimeThreshold COMMAND ${PYTHON_EXECUTABLE}
-  ${CATCH_DIR}/tests/TestScripts/testTimeThreshold.py $<TARGET_FILE:SelfTest>)
-
 add_test(NAME CheckConvenienceHeaders
   COMMAND
   ${PYTHON_EXECUTABLE} ${CATCH_DIR}/tools/scripts/checkConvenienceHeaders.py
diff --git a/tests/ExtraTests/CMakeLists.txt b/tests/ExtraTests/CMakeLists.txt
index 4946d842..3c6da4d5 100644
--- a/tests/ExtraTests/CMakeLists.txt
+++ b/tests/ExtraTests/CMakeLists.txt
@@ -8,6 +8,31 @@ project( Catch2ExtraTests LANGUAGES CXX )
 
 message( STATUS "Extra tests included" )
 
+# The MinDuration reporting tests do not need separate compilation, but
+# they have non-trivial execution time, so they are categorized as
+# extra tests so that they run less often.
+add_test(NAME MinDuration::SimpleThreshold COMMAND $<TARGET_FILE:SelfTest> --min-duration 0.15 [min_duration_test])
+set_tests_properties(
+    MinDuration::SimpleThreshold
+  PROPERTIES
+    PASS_REGULAR_EXPRESSION "s: sleep_for_200ms"
+    FAIL_REGULAR_EXPRESSION "sleep_for_100ms"
+    RUN_SERIAL ON # The test is timing sensitive, so we want to run it
+                  # serially to avoid false positives on oversubscribed machines
+)
+
+# -d yes overrides the threshold, so we should see the faster test even
+# with a ridiculously high min duration threshold.
+add_test(NAME MinDuration::DurationOverrideYes COMMAND $<TARGET_FILE:SelfTest> --min-duration 1.0 -d yes [min_duration_test])
+set_tests_properties(
+    MinDuration::DurationOverrideYes
+  PROPERTIES
+    PASS_REGULAR_EXPRESSION "s: sleep_for_100ms"
+)
+
+
+# ------------ end of duration reporting tests
+
 # define folders used:
 set( TESTS_DIR ${CATCH_DIR}/tests/ExtraTests )
diff --git a/tests/TestScripts/testTimeThreshold.py b/tests/TestScripts/testTimeThreshold.py
deleted file mode 100644
index 51804aae..00000000
--- a/tests/TestScripts/testTimeThreshold.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/env python3
-
-import subprocess
-import sys
-
-def run_tests_with_threshold(self_test_exe, threshold):
-    cmd = [self_test_exe, '--min-duration', str(threshold),
-           '[min_duration_test]']
-    process = subprocess.Popen(
-        cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    stdout, stderr = process.communicate()
-    if stderr:
-        raise RuntimeError("Unexpected error output:\n" +
-                           stderr.decode())
-    if process.returncode != 0:
-        raise RuntimeError("Unexpected failure to run tests\n")
-    result = stdout.split(b'\n')
-    report_lines = [s.split() for s in result if b' s: ' in s]
-    tests_reported = [l[2] for l in report_lines]
-    times_reported = [float(l[0]) for l in report_lines]
-    return tests_reported, times_reported
-
-def check_times_at_least(times_reported, minimum):
-    for time in times_reported:
-        assert time >= minimum, (
-            'Time {} was less that requested minimum {}' .format(
-                time, minimum))
-
-def main():
-    self_test_exe, = sys.argv[1:]
-    tests, times = run_tests_with_threshold(self_test_exe, '0.15')
-    assert tests == [b'sleep_for_200ms'], (
-        "Unexpected tests reported %s" % tests)
-    check_times_at_least(times, 0.15)
-    tests,times = run_tests_with_threshold(self_test_exe, '0')
-    assert tests == [b'sleep_for_100ms', b'sleep_for_200ms'], (
-        "Unexpected tests reported %s" % tests)
-    check_times_at_least(times, 0)
-
-if __name__ == '__main__':
-    sys.exit(main())
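
For reference, the [min_duration_test] cases that the new CTest checks drive are the SelfTest cases named sleep_for_100ms and sleep_for_200ms; the "s: ..." PASS/FAIL patterns match the console reporter's per-test duration lines (e.g. "0.201 s: sleep_for_200ms", which is also what the deleted Python script parsed). A minimal sketch of what such test cases look like is below; the test names and tag are taken from the diff above, while the include, file location, and use of CHECK( true ) are assumptions rather than the actual SelfTest source:

#include <catch2/catch_test_macros.hpp>

#include <chrono>
#include <thread>

// Sketch only: two tagged cases whose run times straddle the 0.15s threshold
// used by MinDuration::SimpleThreshold, so only the slower one is reported.
TEST_CASE( "sleep_for_100ms", "[min_duration_test]" ) {
    std::this_thread::sleep_for( std::chrono::milliseconds( 100 ) );
    CHECK( true );
}

TEST_CASE( "sleep_for_200ms", "[min_duration_test]" ) {
    std::this_thread::sleep_for( std::chrono::milliseconds( 200 ) );
    CHECK( true );
}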