Refactor tests for duration reporting threshold

Martin Hořeňovský 2020-07-06 20:02:20 +02:00
parent 80b0d6975c
commit e6d947f6d4
No known key found for this signature in database
GPG Key ID: DE48307B8B0D381A
3 changed files with 25 additions and 44 deletions


@@ -202,9 +202,6 @@ set_tests_properties(TagAlias PROPERTIES
add_test(NAME RandomTestOrdering COMMAND ${PYTHON_EXECUTABLE}
${CATCH_DIR}/tests/TestScripts/testRandomOrder.py $<TARGET_FILE:SelfTest>)
add_test(NAME TestTimeThreshold COMMAND ${PYTHON_EXECUTABLE}
${CATCH_DIR}/tests/TestScripts/testTimeThreshold.py $<TARGET_FILE:SelfTest>)
add_test(NAME CheckConvenienceHeaders
COMMAND
${PYTHON_EXECUTABLE} ${CATCH_DIR}/tools/scripts/checkConvenienceHeaders.py


@@ -8,6 +8,31 @@ project( Catch2ExtraTests LANGUAGES CXX )
message( STATUS "Extra tests included" )
# The MinDuration reporting tests do not need separate compilation, but
# they have non-trivial execution time, so they are categorized as
# extra tests so that they are run less often.
add_test(NAME MinDuration::SimpleThreshold COMMAND $<TARGET_FILE:SelfTest> --min-duration 0.15 [min_duration_test])
set_tests_properties(
MinDuration::SimpleThreshold
PROPERTIES
PASS_REGULAR_EXPRESSION "s: sleep_for_200ms"
FAIL_REGULAR_EXPRESSION "sleep_for_100ms"
RUN_SERIAL ON # The test is timing sensitive, so we want to run it
# serially to avoid false positives on oversubscribed machines
)
# -d yes overrides the threshold, so we should see the faster test even
# with a ridiculously high min duration threshold
add_test(NAME MinDuration::DurationOverrideYes COMMAND $<TARGET_FILE:SelfTest> --min-duration 1.0 -d yes [min_duration_test])
set_tests_properties(
MinDuration::DurationOverrideYes
PROPERTIES
PASS_REGULAR_EXPRESSION "s: sleep_for_100ms"
)
# ------------ end of duration reporting tests
# define folders used:
set( TESTS_DIR ${CATCH_DIR}/tests/ExtraTests )
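Note: the PASS/FAIL regular expressions above match Catch2's duration report lines. With duration reporting enabled (via --min-duration or -d yes), each reported test prints a line of the form

    <duration> s: <test name>

for example "0.205 s: sleep_for_200ms" (the timing value here is illustrative, not taken from the commit). The expressions only check which test names appear among those lines, which is the same format the deleted Python script below parsed.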


@@ -1,41 +0,0 @@
#!/usr/bin/env python3

import subprocess
import sys


def run_tests_with_threshold(self_test_exe, threshold):
    cmd = [self_test_exe, '--min-duration', str(threshold),
           '[min_duration_test]']
    process = subprocess.Popen(
        cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = process.communicate()
    if stderr:
        raise RuntimeError("Unexpected error output:\n" +
                           stderr.decode())
    if process.returncode != 0:
        raise RuntimeError("Unexpected failure to run tests\n")
    result = stdout.split(b'\n')
    report_lines = [s.split() for s in result if b' s: ' in s]
    tests_reported = [l[2] for l in report_lines]
    times_reported = [float(l[0]) for l in report_lines]
    return tests_reported, times_reported


def check_times_at_least(times_reported, minimum):
    for time in times_reported:
        assert time >= minimum, (
            'Time {} was less than requested minimum {}'.format(
                time, minimum))


def main():
    self_test_exe, = sys.argv[1:]
    tests, times = run_tests_with_threshold(self_test_exe, '0.15')
    assert tests == [b'sleep_for_200ms'], (
        "Unexpected tests reported %s" % tests)
    check_times_at_least(times, 0.15)

    tests, times = run_tests_with_threshold(self_test_exe, '0')
    assert tests == [b'sleep_for_100ms', b'sleep_for_200ms'], (
        "Unexpected tests reported %s" % tests)
    check_times_at_least(times, 0)


if __name__ == '__main__':
    sys.exit(main())
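Note: the deleted script took the SelfTest binary path as its single command-line argument, mirroring the CTest invocation removed in the first hunk; run by hand it would look roughly like

    python3 tests/TestScripts/testTimeThreshold.py <path-to-SelfTest-binary>

with the actual binary path depending on the local build layout.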