From 931441251e8975a0e67dd94fa369cf310ac54602 Mon Sep 17 00:00:00 2001
From: Martin Hořeňovský
Date: Sun, 1 Apr 2018 22:50:39 +0200
Subject: [PATCH] Add an early bailout to benchmark timer calibration

Some platforms (e.g. TDM-GCC) have terrible timer resolution, and our
resolution-checking code can then loop for an inordinate amount of time.
With this change, the calibration gives up after 3 seconds and uses the
values measured so far.

This leaves one open question: how do we signal that the resolution is
terrible and benchmarking should not happen?

Fixes #1237
---
 include/internal/catch_timer.cpp | 14 ++++++++++++--
 1 file changed, 12 insertions(+), 2 deletions(-)

diff --git a/include/internal/catch_timer.cpp b/include/internal/catch_timer.cpp
index b9ae6887..6786efb0 100644
--- a/include/internal/catch_timer.cpp
+++ b/include/internal/catch_timer.cpp
@@ -10,6 +10,8 @@
 
 #include <chrono>
 
+static const uint64_t nanosecondsInSecond = 1000000000;
+
 namespace Catch {
 
     auto getCurrentNanosecondsSinceEpoch() -> uint64_t {
@@ -20,17 +22,25 @@ namespace Catch {
         uint64_t sum = 0;
         static const uint64_t iterations = 1000000;
 
+        auto startTime = getCurrentNanosecondsSinceEpoch();
+
         for( std::size_t i = 0; i < iterations; ++i ) {
 
             uint64_t ticks;
             uint64_t baseTicks = getCurrentNanosecondsSinceEpoch();
             do {
                 ticks = getCurrentNanosecondsSinceEpoch();
-            }
-            while( ticks == baseTicks );
+            } while( ticks == baseTicks );
 
             auto delta = ticks - baseTicks;
             sum += delta;
+
+            // If we have been calibrating for over 3 seconds, the clock
+            // is terrible and we should move on.
+            // TBD: How to signal that the measured resolution is probably wrong?
+            if (ticks > startTime + 3 * nanosecondsInSecond) {
+                return sum / i;
+            }
         }
 
         // We're just taking the mean, here. To do better we could take the std. dev and exclude outliers
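
Note: as a standalone illustration of the change (not part of the patch),
the sketch below reproduces the calibration loop with the 3-second bailout
using std::chrono directly. The names nowNs() and estimateClockResolutionNs()
are invented for the example; Catch's own helper is
getCurrentNanosecondsSinceEpoch(), shown in the diff above. Unlike the patch,
the sketch also guards the i == 0 edge case, where a single tick exceeding
the whole 3-second budget would make "return sum / i;" divide by zero.

    // Self-contained sketch of the patched calibration loop (illustrative
    // only, not part of the patch).
    #include <chrono>
    #include <cstddef>
    #include <cstdint>
    #include <iostream>

    static uint64_t nowNs() {
        return static_cast<uint64_t>(
            std::chrono::duration_cast<std::chrono::nanoseconds>(
                std::chrono::high_resolution_clock::now().time_since_epoch())
                .count());
    }

    static uint64_t estimateClockResolutionNs() {
        static const uint64_t nanosecondsInSecond = 1000000000;
        static const uint64_t iterations = 1000000;
        uint64_t sum = 0;

        const uint64_t startTime = nowNs();
        for (std::size_t i = 0; i < iterations; ++i) {
            uint64_t ticks;
            const uint64_t baseTicks = nowNs();
            // Spin until the clock visibly advances; the delta is one
            // observable tick of the clock.
            do {
                ticks = nowNs();
            } while (ticks == baseTicks);
            sum += ticks - baseTicks;

            // Early bailout: after ~3 seconds of calibrating, average what
            // we have. Unlike the patch, guard i == 0 so a single tick
            // longer than the whole budget cannot divide by zero.
            if (ticks > startTime + 3 * nanosecondsInSecond) {
                return i > 0 ? sum / i : sum;
            }
        }
        return sum / iterations;
    }

    int main() {
        std::cout << "Estimated clock resolution: "
                  << estimateClockResolutionNs() << " ns\n";
    }

One design point worth noting: the bailout checks the already-measured
"ticks" value instead of calling the clock again, so the check costs nothing
extra; each loop iteration necessarily ends with a fresh timestamp in hand.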