Add an early bail-out to benchmark timer calibration

Some platforms (e.g. TDM-GCC) can have terrible timer resolution, which
makes our resolution-checking code loop for an inordinate amount of time.
This change makes the calibration give up after 3 seconds and use the
values measured so far.

This leaves one open question: how do we signal that the resolution
is terrible and benchmarking should not happen?
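One hypothetical option (not part of this commit) would be to return the
estimate together with a flag saying whether calibration completed, e.g.

    struct ClockResolutionEstimate {
        uint64_t nanoseconds;
        bool calibrationCompleted; // false if we bailed out after 3 seconds
    };

so that callers can warn, or refuse to benchmark, when the flag is false.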

Fixes #1237
Martin Hořeňovský 2018-04-01 22:50:39 +02:00
parent ea1f326261
commit 931441251e
1 changed file with 12 additions and 2 deletions


@@ -10,6 +10,8 @@
 
 #include <chrono>
 
+static const uint64_t nanosecondsInSecond = 1000000000;
+
 namespace Catch {
 
     auto getCurrentNanosecondsSinceEpoch() -> uint64_t {
@@ -20,17 +22,25 @@ namespace Catch {
         uint64_t sum = 0;
         static const uint64_t iterations = 1000000;
 
+        auto startTime = getCurrentNanosecondsSinceEpoch();
+
         for( std::size_t i = 0; i < iterations; ++i ) {
 
             uint64_t ticks;
             uint64_t baseTicks = getCurrentNanosecondsSinceEpoch();
             do {
                 ticks = getCurrentNanosecondsSinceEpoch();
-            }
-            while( ticks == baseTicks );
+            } while( ticks == baseTicks );
 
             auto delta = ticks - baseTicks;
             sum += delta;
+
+            // If we have been calibrating for over 3 seconds -- the clock
+            // is terrible and we should move on.
+            // TBD: How to signal that the measured resolution is probably wrong?
+            if (ticks > startTime + 3 * nanosecondsInSecond) {
+                return sum / i;
+            }
         }
 
         // We're just taking the mean, here. To do better we could take the std. dev and exclude outliers
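For reference, here is a minimal self-contained sketch of the patched
calibration loop; nowNs, the main driver, and the division-by-zero guard
are illustrative additions, not part of the Catch sources:

    #include <chrono>
    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    static const uint64_t nanosecondsInSecond = 1000000000;

    // Stand-in for Catch's getCurrentNanosecondsSinceEpoch().
    static uint64_t nowNs() {
        return std::chrono::duration_cast<std::chrono::nanoseconds>(
            std::chrono::high_resolution_clock::now().time_since_epoch() ).count();
    }

    // Average the smallest observable clock increment; bail out after
    // 3 seconds on platforms with terrible timer resolution.
    static uint64_t estimateClockResolution() {
        uint64_t sum = 0;
        static const uint64_t iterations = 1000000;

        auto startTime = nowNs();

        for( std::size_t i = 0; i < iterations; ++i ) {
            uint64_t ticks;
            uint64_t baseTicks = nowNs();
            // Spin until the clock advances; the delta is one observable tick.
            do {
                ticks = nowNs();
            } while( ticks == baseTicks );
            sum += ticks - baseTicks;

            // Give up after 3 seconds and use what was measured so far
            // (the guard against i == 0 is added in this sketch).
            if( ticks > startTime + 3 * nanosecondsInSecond ) {
                return sum / ( i > 0 ? i : 1 );
            }
        }
        return sum / iterations;
    }

    int main() {
        std::printf( "estimated clock resolution: %llu ns\n",
                     static_cast<unsigned long long>( estimateClockResolution() ) );
    }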