mirror of
https://github.com/catchorg/Catch2.git
synced 2024-12-27 21:23:30 +01:00
Refactor tests for duration reporting threshold
This commit is contained in:
parent
53d8af8e96
commit
4eb9d37e05
@ -455,9 +455,6 @@ set_tests_properties(TestsInFile::InvalidTestNames-2 PROPERTIES PASS_REGULAR_EXP
|
||||
# Verify that test order is properly randomized by the Python driver script.
add_test(
  NAME RandomTestOrdering
  COMMAND
    ${PYTHON_EXECUTABLE}
    ${CATCH_DIR}/projects/TestScripts/testRandomOrder.py
    $<TARGET_FILE:SelfTest>
)
|
||||
|
||||
# Run the time-threshold behaviour checks via the Python driver script.
add_test(
  NAME TestTimeThreshold
  COMMAND
    ${PYTHON_EXECUTABLE}
    ${CATCH_DIR}/projects/TestScripts/testTimeThreshold.py
    $<TARGET_FILE:SelfTest>
)
|
||||
|
||||
if (CATCH_USE_VALGRIND)
|
||||
add_test(NAME ValgrindRunTests COMMAND valgrind --leak-check=full --error-exitcode=1 $<TARGET_FILE:SelfTest>)
|
||||
add_test(NAME ValgrindListTests COMMAND valgrind --leak-check=full --error-exitcode=1 $<TARGET_FILE:SelfTest> --list-tests --verbosity high)
|
||||
|
@ -10,6 +10,31 @@ project( Catch2ExtraTests LANGUAGES CXX )
|
||||
|
||||
message( STATUS "Extra tests included" )

# The MinDuration reporting tests need no separate compilation, but their
# execution time is non-trivial, so they live with the extra tests and are
# therefore run less often.
add_test(
  NAME MinDuration::SimpleThreshold
  COMMAND $<TARGET_FILE:SelfTest> --min-duration 0.15 [min_duration_test]
)
set_tests_properties(
  MinDuration::SimpleThreshold
  PROPERTIES
    PASS_REGULAR_EXPRESSION "s: sleep_for_200ms"
    FAIL_REGULAR_EXPRESSION "sleep_for_100ms"
    # Timing sensitive: run serially to avoid false positives on
    # oversubscribed machines.
    RUN_SERIAL ON
)
|
||||
|
||||
# -d yes overrides the threshold, so the faster test must be reported even
# with a ridiculously high min-duration threshold.
add_test(
  NAME MinDuration::DurationOverrideYes
  COMMAND $<TARGET_FILE:SelfTest> --min-duration 1.0 -d yes [min_duration_test]
)
set_tests_properties(
  MinDuration::DurationOverrideYes
  PROPERTIES
    PASS_REGULAR_EXPRESSION "s: sleep_for_100ms"
)
|
||||
|
||||
|
||||
# ------------ end of duration reporting tests

# Folders used by the extra tests:
set(TESTS_DIR ${CATCH_DIR}/projects/ExtraTests)
|
||||
|
@ -1,41 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
def run_tests_with_threshold(self_test_exe, threshold):
    """Run the [min_duration_test] tests with the given --min-duration
    threshold.

    Returns a pair (test names reported, durations reported), parsed from
    the lines of stdout that contain a ' s: ' duration report.
    Raises RuntimeError on any stderr output or a non-zero exit code.
    """
    command = [
        self_test_exe,
        '--min-duration', str(threshold),
        '[min_duration_test]',
    ]
    process = subprocess.Popen(
        command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = process.communicate()
    if stderr:
        raise RuntimeError("Unexpected error output:\n" + stderr.decode())
    if process.returncode != 0:
        raise RuntimeError("Unexpected failure to run tests\n")
    # Duration report lines look like: "<seconds> s: <test name>".
    report_fields = [line.split()
                     for line in stdout.split(b'\n')
                     if b' s: ' in line]
    names = [fields[2] for fields in report_fields]
    durations = [float(fields[0]) for fields in report_fields]
    return names, durations
|
||||
|
||||
def check_times_at_least(times_reported, minimum):
    """Assert that every reported duration is at least `minimum` seconds.

    :param times_reported: iterable of float durations (seconds)
    :param minimum: lower bound every duration must satisfy
    :raises AssertionError: on the first duration below `minimum`
    """
    for time in times_reported:
        # Fixed message typo: "less that" -> "less than".
        assert time >= minimum, (
            'Time {} was less than requested minimum {}'.format(
                time, minimum))
|
||||
|
||||
def main():
    """Drive the two min-duration scenarios against the SelfTest binary.

    Expects exactly one command-line argument: the SelfTest executable path.
    """
    self_test_exe, = sys.argv[1:]
    # (threshold argument, expected reported tests, minimum duration)
    scenarios = (
        ('0.15', [b'sleep_for_200ms'], 0.15),
        ('0', [b'sleep_for_100ms', b'sleep_for_200ms'], 0),
    )
    for threshold, expected_tests, minimum in scenarios:
        tests, times = run_tests_with_threshold(self_test_exe, threshold)
        assert tests == expected_tests, (
            "Unexpected tests reported %s" % tests)
        check_times_at_least(times, minimum)
|
||||
|
||||
if __name__ == '__main__':
    # Propagate main()'s return value as the process exit status.
    sys.exit(main())
|
Loading…
Reference in New Issue
Block a user