Integrate Nonius benchmark into Catch2

Changes made to Nonius:
* Moved Nonius's code into the `Catch::Benchmark` namespace
* Benchmarks were integrated with the `TEST_CASE`/`SECTION`/`GENERATE` macros
  (see the usage sketch below, just before the diffs)
* Removed Nonius's per-benchmark parameters; Catch's generators should be used instead
* Added the relevant methods to the reporter interface (default-implemented, so that
  existing third-party reporters keep compiling)
* Async processing is compiled in only when the `_REENTRANT` macro is defined on
  GCC/Clang, and is enabled by default on MSVC
* Added a macro, `CATCH_CONFIG_DISABLE_BENCHMARKING`, that removes all traces of
  benchmarking from Catch
Author: Joachim Meyer
Date: 2019-04-23 23:41:13 +02:00
Committed by: Martin Hořeňovský
Parent commit: 00347f1e79
Commit: ce2560ca95
46 changed files with 2614 additions and 190 deletions
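
For orientation, here is a minimal usage sketch of the integrated benchmarking as referenced in the list above. It is not part of the commit; the test name, sizes, tag, and include path are illustrative, and note that later Catch2 2.x releases made benchmarking opt-in via `CATCH_CONFIG_ENABLE_BENCHMARKING`.

    #define CATCH_CONFIG_MAIN
    #include <catch2/catch.hpp> // or "catch.hpp", depending on how the single header is consumed

    #include <string>
    #include <vector>

    TEST_CASE("vector push_back", "[benchmark]") {
        // A generated value takes the role of Nonius's old benchmark parameters:
        // each value produces its own run of the benchmark below.
        auto size = GENERATE(1000, 10000);

        BENCHMARK("push_back " + std::to_string(size)) {
            std::vector<int> v;
            for (int i = 0; i < size; ++i)
                v.push_back(i);
            return v.size(); // return the result so the work is not optimised away
        };
    }

With two generated sizes, the test case runs twice and each run reports its benchmark separately.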

@@ -208,6 +208,10 @@ class Duration {
     Unit m_units;
 public:
+    explicit Duration(double inNanoseconds, Unit units = Unit::Auto)
+        : Duration(static_cast<uint64_t>(inNanoseconds), units) {
+    }
     explicit Duration(uint64_t inNanoseconds, Unit units = Unit::Auto)
         : m_inNanoseconds(inNanoseconds),
         m_units(units) {
@@ -283,9 +287,15 @@ public:
         if (!m_isOpen) {
             m_isOpen = true;
             *this << RowBreak();
-            for (auto const& info : m_columnInfos)
-                *this << info.name << ColumnBreak();
-            *this << RowBreak();
+            Columns headerCols;
+            Spacer spacer(2);
+            for (auto const& info : m_columnInfos) {
+                headerCols += Column(info.name).width(static_cast<std::size_t>(info.width - 2));
+                headerCols += spacer;
+            }
+            m_os << headerCols << "\n";
+            m_os << Catch::getLineOfChars<'-'>() << "\n";
         }
     }
@@ -340,9 +350,9 @@ ConsoleReporter::ConsoleReporter(ReporterConfig const& config)
     m_tablePrinter(new TablePrinter(config.stream(),
     {
         { "benchmark name", CATCH_CONFIG_CONSOLE_WIDTH - 32, ColumnInfo::Left },
-        { "iters", 8, ColumnInfo::Right },
-        { "elapsed ns", 14, ColumnInfo::Right },
-        { "average", 14, ColumnInfo::Right }
+        { "samples mean std dev", 14, ColumnInfo::Right },
+        { "iterations low mean low std dev", 14, ColumnInfo::Right },
+        { "estimated high mean high std dev", 14, ColumnInfo::Right }
     })) {}

 ConsoleReporter::~ConsoleReporter() = default;
@@ -374,6 +384,7 @@ bool ConsoleReporter::assertionEnded(AssertionStats const& _assertionStats) {
 }

 void ConsoleReporter::sectionStarting(SectionInfo const& _sectionInfo) {
     m_tablePrinter->close();
+    m_headerPrinted = false;
     StreamingReporterBase::sectionStarting(_sectionInfo);
 }
@@ -397,29 +408,45 @@ void ConsoleReporter::sectionEnded(SectionStats const& _sectionStats) {
     StreamingReporterBase::sectionEnded(_sectionStats);
 }

+#ifndef CATCH_CONFIG_DISABLE_BENCHMARKING
+void ConsoleReporter::benchmarkPreparing(std::string const& name) {
+    lazyPrintWithoutClosingBenchmarkTable();
+
+    auto nameCol = Column(name).width(static_cast<std::size_t>(m_tablePrinter->columnInfos()[0].width - 2));
+
+    bool firstLine = true;
+    for (auto line : nameCol) {
+        if (!firstLine)
+            (*m_tablePrinter) << ColumnBreak() << ColumnBreak() << ColumnBreak();
+        else
+            firstLine = false;
+
+        (*m_tablePrinter) << line << ColumnBreak();
+    }
+}
+
 void ConsoleReporter::benchmarkStarting(BenchmarkInfo const& info) {
-    lazyPrintWithoutClosingBenchmarkTable();
-
-    auto nameCol = Column( info.name ).width( static_cast<std::size_t>( m_tablePrinter->columnInfos()[0].width - 2 ) );
-
-    bool firstLine = true;
-    for (auto line : nameCol) {
-        if (!firstLine)
-            (*m_tablePrinter) << ColumnBreak() << ColumnBreak() << ColumnBreak();
-        else
-            firstLine = false;
-
-        (*m_tablePrinter) << line << ColumnBreak();
-    }
+    (*m_tablePrinter) << info.samples << ColumnBreak()
+        << info.iterations << ColumnBreak()
+        << Duration(info.estimatedDuration) << ColumnBreak();
 }
-void ConsoleReporter::benchmarkEnded(BenchmarkStats const& stats) {
-    Duration average(stats.elapsedTimeInNanoseconds / stats.iterations);
+void ConsoleReporter::benchmarkEnded(BenchmarkStats<> const& stats) {
+    (*m_tablePrinter) << ColumnBreak()
+        << Duration(stats.mean.point.count()) << ColumnBreak()
+        << Duration(stats.mean.lower_bound.count()) << ColumnBreak()
+        << Duration(stats.mean.upper_bound.count()) << ColumnBreak() << ColumnBreak()
+        << Duration(stats.standardDeviation.point.count()) << ColumnBreak()
+        << Duration(stats.standardDeviation.lower_bound.count()) << ColumnBreak()
+        << Duration(stats.standardDeviation.upper_bound.count()) << ColumnBreak() << ColumnBreak() << ColumnBreak() << ColumnBreak() << ColumnBreak();
+}
+
+void ConsoleReporter::benchmarkFailed(std::string const& error) {
+    Colour colour(Colour::Red);
     (*m_tablePrinter)
-        << stats.iterations << ColumnBreak()
-        << stats.elapsedTimeInNanoseconds << ColumnBreak()
-        << average << ColumnBreak();
+        << "Benchmark failed (" << error << ")"
+        << ColumnBreak() << RowBreak();
 }
+#endif // CATCH_CONFIG_DISABLE_BENCHMARKING

 void ConsoleReporter::testCaseEnded(TestCaseStats const& _testCaseStats) {
     m_tablePrinter->close();
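
The table above is filled from the new `BenchmarkStats<>` payload: `mean` and `standardDeviation` are estimates carrying a point value plus lower/upper confidence bounds, all counted in nanoseconds. A hedged sketch of a user-side listener consuming the same payload through the new hooks (not part of the commit; the class name and output format are made up):

    #define CATCH_CONFIG_MAIN
    #include <catch2/catch.hpp>

    #include <iostream>
    #include <string>

    struct BenchmarkLogger : Catch::TestEventListenerBase {
        using TestEventListenerBase::TestEventListenerBase;

    #ifndef CATCH_CONFIG_DISABLE_BENCHMARKING
        void benchmarkPreparing(std::string const& name) override {
            m_current = name; // remember the name announced before the run
        }
        void benchmarkEnded(Catch::BenchmarkStats<> const& stats) override {
            std::cout << m_current << ": mean "
                      << stats.mean.point.count() << " ns (low "
                      << stats.mean.lower_bound.count() << ", high "
                      << stats.mean.upper_bound.count() << "), std dev "
                      << stats.standardDeviation.point.count() << " ns\n";
        }
    #endif

        std::string m_current;
    };

    CATCH_REGISTER_LISTENER(BenchmarkLogger)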

@@ -39,9 +39,12 @@ namespace Catch {
         void sectionStarting(SectionInfo const& _sectionInfo) override;
         void sectionEnded(SectionStats const& _sectionStats) override;

+#ifndef CATCH_CONFIG_DISABLE_BENCHMARKING
+        void benchmarkPreparing(std::string const& name) override;
         void benchmarkStarting(BenchmarkInfo const& info) override;
-        void benchmarkEnded(BenchmarkStats const& stats) override;
+        void benchmarkEnded(BenchmarkStats<> const& stats) override;
+        void benchmarkFailed(std::string const& error) override;
+#endif // CATCH_CONFIG_DISABLE_BENCHMARKING

         void testCaseEnded(TestCaseStats const& _testCaseStats) override;
         void testGroupEnded(TestGroupStats const& _testGroupStats) override;

@@ -42,19 +42,34 @@ namespace Catch {
         m_reporter->noMatchingTestCases( spec );
     }

+#ifndef CATCH_CONFIG_DISABLE_BENCHMARKING
+    void ListeningReporter::benchmarkPreparing( std::string const& name ) {
+        for (auto const& listener : m_listeners) {
+            listener->benchmarkPreparing(name);
+        }
+        m_reporter->benchmarkPreparing(name);
+    }
     void ListeningReporter::benchmarkStarting( BenchmarkInfo const& benchmarkInfo ) {
         for ( auto const& listener : m_listeners ) {
             listener->benchmarkStarting( benchmarkInfo );
         }
         m_reporter->benchmarkStarting( benchmarkInfo );
     }
-    void ListeningReporter::benchmarkEnded( BenchmarkStats const& benchmarkStats ) {
+    void ListeningReporter::benchmarkEnded( BenchmarkStats<> const& benchmarkStats ) {
         for ( auto const& listener : m_listeners ) {
             listener->benchmarkEnded( benchmarkStats );
         }
         m_reporter->benchmarkEnded( benchmarkStats );
     }
+    void ListeningReporter::benchmarkFailed( std::string const& error ) {
+        for (auto const& listener : m_listeners) {
+            listener->benchmarkFailed(error);
+        }
+        m_reporter->benchmarkFailed(error);
+    }
+#endif // CATCH_CONFIG_DISABLE_BENCHMARKING

     void ListeningReporter::testRunStarting( TestRunInfo const& testRunInfo ) {
         for ( auto const& listener : m_listeners ) {
             listener->testRunStarting( testRunInfo );

@@ -31,8 +31,12 @@ namespace Catch {
         static std::set<Verbosity> getSupportedVerbosities();

+#ifndef CATCH_CONFIG_DISABLE_BENCHMARKING
+        void benchmarkPreparing(std::string const& name) override;
         void benchmarkStarting( BenchmarkInfo const& benchmarkInfo ) override;
-        void benchmarkEnded( BenchmarkStats const& benchmarkStats ) override;
+        void benchmarkEnded( BenchmarkStats<> const& benchmarkStats ) override;
+        void benchmarkFailed(std::string const&) override;
+#endif // CATCH_CONFIG_DISABLE_BENCHMARKING

         void testRunStarting( TestRunInfo const& testRunInfo ) override;
         void testGroupStarting( GroupInfo const& groupInfo ) override;
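
Because the new hooks are default-implemented on the reporter interface (and the `ListeningReporter` above merely fans them out), a listener or reporter written before this change keeps compiling without modification. A minimal sketch, with an illustrative class name:

    #define CATCH_CONFIG_MAIN
    #include <catch2/catch.hpp>

    struct LegacyListener : Catch::TestEventListenerBase {
        using TestEventListenerBase::TestEventListenerBase;

        // Only pre-existing hooks are overridden; no benchmark* overrides are
        // needed, since the interface supplies empty default implementations.
        void testCaseEnded(Catch::TestCaseStats const& /*stats*/) override {}
    };

    CATCH_REGISTER_LISTENER(LegacyListener)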

@@ -219,6 +219,48 @@ namespace Catch {
         m_xml.endElement();
     }

+#ifndef CATCH_CONFIG_DISABLE_BENCHMARKING
+    void XmlReporter::benchmarkStarting(BenchmarkInfo const &info) {
+        m_xml.startElement("BenchmarkResults")
+            .writeAttribute("name", info.name)
+            .writeAttribute("samples", info.samples)
+            .writeAttribute("resamples", info.resamples)
+            .writeAttribute("iterations", info.iterations)
+            .writeAttribute("clockResolution", static_cast<uint64_t>(info.clockResolution))
+            .writeAttribute("estimatedDuration", static_cast<uint64_t>(info.estimatedDuration))
+            .writeComment("All values in nano seconds");
+    }
+
+    void XmlReporter::benchmarkEnded(BenchmarkStats<> const& benchmarkStats) {
+        m_xml.startElement("mean")
+            .writeAttribute("value", static_cast<uint64_t>(benchmarkStats.mean.point.count()))
+            .writeAttribute("lowerBound", static_cast<uint64_t>(benchmarkStats.mean.lower_bound.count()))
+            .writeAttribute("upperBound", static_cast<uint64_t>(benchmarkStats.mean.upper_bound.count()))
+            .writeAttribute("ci", benchmarkStats.mean.confidence_interval);
+        m_xml.endElement();
+        m_xml.startElement("standardDeviation")
+            .writeAttribute("value", benchmarkStats.standardDeviation.point.count())
+            .writeAttribute("lowerBound", benchmarkStats.standardDeviation.lower_bound.count())
+            .writeAttribute("upperBound", benchmarkStats.standardDeviation.upper_bound.count())
+            .writeAttribute("ci", benchmarkStats.standardDeviation.confidence_interval);
+        m_xml.endElement();
+        m_xml.startElement("outliers")
+            .writeAttribute("variance", benchmarkStats.outlierVariance)
+            .writeAttribute("lowMild", benchmarkStats.outliers.low_mild)
+            .writeAttribute("lowSevere", benchmarkStats.outliers.low_severe)
+            .writeAttribute("highMild", benchmarkStats.outliers.high_mild)
+            .writeAttribute("highSevere", benchmarkStats.outliers.high_severe);
+        m_xml.endElement();
+        m_xml.endElement();
+    }
+
+    void XmlReporter::benchmarkFailed(std::string const &error) {
+        m_xml.scopedElement("failed").
+            writeAttribute("message", error);
+        m_xml.endElement();
+    }
+#endif // CATCH_CONFIG_DISABLE_BENCHMARKING
+
     CATCH_REGISTER_REPORTER( "xml", XmlReporter )

 } // end namespace Catch
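
One detail worth spelling out in the XML reporter: the element opened in `benchmarkStarting()` stays open until the final `endElement()` in `benchmarkEnded()` (or `benchmarkFailed()`), so the statistics end up nested inside `<BenchmarkResults>`. A condensed sketch of that pairing, not part of the commit, written as a free function only so the calls can be read in one place (`xml` stands in for the reporter's `m_xml` member; it assumes Catch's `XmlWriter` declaration is visible, as it is inside Catch's own reporter sources):

    void sketchBenchmarkElementNesting(Catch::XmlWriter& xml) {
        xml.startElement("BenchmarkResults");  // opened by benchmarkStarting(), left open
        xml.startElement("mean");
        xml.endElement();                      // closes <mean>
        xml.startElement("standardDeviation");
        xml.endElement();                      // closes <standardDeviation>
        xml.startElement("outliers");
        xml.endElement();                      // closes <outliers>
        xml.endElement();                      // closes <BenchmarkResults> at the end of benchmarkEnded()
    }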

@@ -50,6 +50,12 @@ namespace Catch {
         void testRunEnded(TestRunStats const& testRunStats) override;

+#ifndef CATCH_CONFIG_DISABLE_BENCHMARKING
+        void benchmarkStarting(BenchmarkInfo const&) override;
+        void benchmarkEnded(BenchmarkStats<> const&) override;
+        void benchmarkFailed(std::string const&) override;
+#endif // CATCH_CONFIG_DISABLE_BENCHMARKING
+
     private:
         Timer m_testCaseTimer;
         XmlWriter m_xml;