Mirror of https://github.com/catchorg/Catch2.git, synced 2024-11-26 07:16:10 +01:00
Use StringRef literals where possible in XML-using reporters
This lets us avoid running `strlen` at runtime to convert the plain string literals to `StringRef`s, by guaranteeing that we instead have the size available after compilation. In optimized builds the performance improvement should be even greater, as the `StringRef` UDL and the related constructor are both `constexpr`, and thus can be baked completely during compilation. (A minimal sketch of the mechanism follows the commit metadata below.)
parent 41ad0fda11
commit 8d6a1c27ef
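
To illustrate the mechanism the commit message describes, here is a minimal sketch — not Catch2's actual `StringRef`, which carries more machinery — of a `constexpr` string-literal UDL. For string literals, the compiler supplies the literal's length as the second UDL argument, so the (pointer, size) pair is formed without any runtime `strlen`:

    #include <cstddef>

    // Simplified stand-in for Catch::StringRef: a non-owning
    // (pointer, length) view over a string literal. The real class
    // has more members and operations.
    struct StringRef {
        char const* m_start;
        std::size_t m_size;

        // constexpr constructor: pointer and size are fixed at compile time.
        constexpr StringRef( char const* start, std::size_t size ) noexcept:
            m_start( start ), m_size( size )
        {}
    };

    // The compiler passes the literal's length to the UDL, so no
    // strlen() ever runs at runtime. Catch2 spells the suffix "_sr".
    constexpr StringRef operator "" _sr( char const* start, std::size_t size ) noexcept {
        return StringRef( start, size );
    }

    // The whole conversion is a constant expression: the size 4 is
    // baked in during compilation.
    static_assert( "name"_sr.m_size == 4, "size is known at compile time" );

By contrast, a conversion taking only `char const*` must call `strlen` at runtime to find the size (optimizers can often fold it for literals, but the UDL removes even that reliance on the optimizer).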
@@ -115,29 +115,29 @@ namespace Catch {
         XmlWriter::ScopedElement e = xml.scopedElement( "testsuite" );

         TestGroupStats const& stats = groupNode.value;
-        xml.writeAttribute( "name", stats.groupInfo.name );
-        xml.writeAttribute( "errors", unexpectedExceptions );
-        xml.writeAttribute( "failures", stats.totals.assertions.failed-unexpectedExceptions );
-        xml.writeAttribute( "tests", stats.totals.assertions.total() );
-        xml.writeAttribute( "hostname", "tbd" ); // !TBD
+        xml.writeAttribute( "name"_sr, stats.groupInfo.name );
+        xml.writeAttribute( "errors"_sr, unexpectedExceptions );
+        xml.writeAttribute( "failures"_sr, stats.totals.assertions.failed-unexpectedExceptions );
+        xml.writeAttribute( "tests"_sr, stats.totals.assertions.total() );
+        xml.writeAttribute( "hostname"_sr, "tbd"_sr ); // !TBD
         if( m_config->showDurations() == ShowDurations::Never )
-            xml.writeAttribute( "time", "" );
+            xml.writeAttribute( "time"_sr, ""_sr );
         else
-            xml.writeAttribute( "time", suiteTime );
-        xml.writeAttribute( "timestamp", getCurrentTimestamp() );
+            xml.writeAttribute( "time"_sr, suiteTime );
+        xml.writeAttribute( "timestamp"_sr, getCurrentTimestamp() );

         // Write properties if there are any
         if (m_config->hasTestFilters() || m_config->rngSeed() != 0) {
             auto properties = xml.scopedElement("properties");
             if (m_config->hasTestFilters()) {
                 xml.scopedElement("property")
-                    .writeAttribute("name", "filters")
-                    .writeAttribute("value", serializeFilters(m_config->getTestsOrTags()));
+                    .writeAttribute("name"_sr, "filters"_sr)
+                    .writeAttribute("value"_sr, serializeFilters(m_config->getTestsOrTags()));
             }
             if (m_config->rngSeed() != 0) {
                 xml.scopedElement("property")
-                    .writeAttribute("name", "random-seed")
-                    .writeAttribute("value", m_config->rngSeed());
+                    .writeAttribute("name"_sr, "random-seed"_sr)
+                    .writeAttribute("value"_sr, m_config->rngSeed());
             }
         }

@@ -183,19 +183,19 @@ namespace Catch {
             !sectionNode.stdErr.empty() ) {
         XmlWriter::ScopedElement e = xml.scopedElement( "testcase" );
         if( className.empty() ) {
-            xml.writeAttribute( "classname", name );
-            xml.writeAttribute( "name", "root" );
+            xml.writeAttribute( "classname"_sr, name );
+            xml.writeAttribute( "name"_sr, "root"_sr );
         }
         else {
-            xml.writeAttribute( "classname", className );
-            xml.writeAttribute( "name", name );
+            xml.writeAttribute( "classname"_sr, className );
+            xml.writeAttribute( "name"_sr, name );
         }
-        xml.writeAttribute( "time", ::Catch::Detail::stringify( sectionNode.stats.durationInSeconds ) );
+        xml.writeAttribute( "time"_sr, ::Catch::Detail::stringify( sectionNode.stats.durationInSeconds ) );
         // This is not ideal, but it should be enough to mimic gtest's
         // junit output.
         // Ideally the JUnit reporter would also handle `skipTest`
         // events and write those out appropriately.
-        xml.writeAttribute( "status", "run" );
+        xml.writeAttribute( "status"_sr, "run"_sr );

         writeAssertions( sectionNode );

@@ -244,8 +244,8 @@ namespace Catch {

         XmlWriter::ScopedElement e = xml.scopedElement( elementName );

-        xml.writeAttribute( "message", result.getExpression() );
-        xml.writeAttribute( "type", result.getTestMacroName() );
+        xml.writeAttribute( "message"_sr, result.getExpression() );
+        xml.writeAttribute( "type"_sr, result.getTestMacroName() );

         ReusableStringStream rss;
         if (stats.totals.assertions.total() > 0) {
@@ -19,7 +19,7 @@ namespace Catch {
     void SonarQubeReporter::testRunStarting(TestRunInfo const& testRunInfo) {
         CumulativeReporterBase::testRunStarting(testRunInfo);
         xml.startElement("testExecutions");
-        xml.writeAttribute("version", '1');
+        xml.writeAttribute("version"_sr, '1');
     }

     void SonarQubeReporter::testGroupEnded(TestGroupStats const& testGroupStats) {
@@ -40,7 +40,7 @@ namespace Catch {

     void SonarQubeReporter::writeTestFile(std::string const& filename, std::vector<TestCaseNode const*> const& testCaseNodes) {
         XmlWriter::ScopedElement e = xml.scopedElement("file");
-        xml.writeAttribute("path", filename);
+        xml.writeAttribute("path"_sr, filename);

         for (auto const& child : testCaseNodes)
             writeTestCase(*child);
@@ -61,8 +61,8 @@ namespace Catch {

         if (!sectionNode.assertions.empty() || !sectionNode.stdOut.empty() || !sectionNode.stdErr.empty()) {
             XmlWriter::ScopedElement e = xml.scopedElement("testCase");
-            xml.writeAttribute("name", name);
-            xml.writeAttribute("duration", static_cast<long>(sectionNode.stats.durationInSeconds * 1000));
+            xml.writeAttribute("name"_sr, name);
+            xml.writeAttribute("duration"_sr, static_cast<long>(sectionNode.stats.durationInSeconds * 1000));

             writeAssertions(sectionNode, okToFail);
         }
@@ -114,7 +114,7 @@ namespace Catch {

         ReusableStringStream messageRss;
         messageRss << result.getTestMacroName() << "(" << result.getExpression() << ")";
-        xml.writeAttribute("message", messageRss.str());
+        xml.writeAttribute("message"_sr, messageRss.str());

         ReusableStringStream textRss;
         if (stats.totals.assertions.total() > 0) {
@@ -42,8 +42,8 @@ namespace Catch {

     void XmlReporter::writeSourceInfo( SourceLineInfo const& sourceInfo ) {
         m_xml
-            .writeAttribute( "filename", sourceInfo.file )
-            .writeAttribute( "line", sourceInfo.line );
+            .writeAttribute( "filename"_sr, sourceInfo.file )
+            .writeAttribute( "line"_sr, sourceInfo.line );
     }

     void XmlReporter::noMatchingTestCases( std::string const& s ) {
@@ -57,25 +57,25 @@ namespace Catch {
         m_xml.writeStylesheetRef( stylesheetRef );
         m_xml.startElement( "Catch" );
         if( !m_config->name().empty() )
-            m_xml.writeAttribute( "name", m_config->name() );
+            m_xml.writeAttribute( "name"_sr, m_config->name() );
         if (m_config->testSpec().hasFilters())
-            m_xml.writeAttribute( "filters", serializeFilters( m_config->getTestsOrTags() ) );
+            m_xml.writeAttribute( "filters"_sr, serializeFilters( m_config->getTestsOrTags() ) );
         if( m_config->rngSeed() != 0 )
             m_xml.scopedElement( "Randomness" )
-                .writeAttribute( "seed", m_config->rngSeed() );
+                .writeAttribute( "seed"_sr, m_config->rngSeed() );
     }

     void XmlReporter::testGroupStarting( GroupInfo const& groupInfo ) {
         StreamingReporterBase::testGroupStarting( groupInfo );
         m_xml.startElement( "Group" )
-            .writeAttribute( "name", groupInfo.name );
+            .writeAttribute( "name"_sr, groupInfo.name );
     }

     void XmlReporter::testCaseStarting( TestCaseInfo const& testInfo ) {
         StreamingReporterBase::testCaseStarting(testInfo);
         m_xml.startElement( "TestCase" )
-            .writeAttribute( "name", trim( testInfo.name ) )
-            .writeAttribute( "tags", testInfo.tagsAsString() );
+            .writeAttribute( "name"_sr, trim( testInfo.name ) )
+            .writeAttribute( "tags"_sr, testInfo.tagsAsString() );

         writeSourceInfo( testInfo.lineInfo );

@@ -88,7 +88,7 @@ namespace Catch {
         StreamingReporterBase::sectionStarting( sectionInfo );
         if( m_sectionDepth++ > 0 ) {
             m_xml.startElement( "Section" )
-                .writeAttribute( "name", trim( sectionInfo.name ) );
+                .writeAttribute( "name"_sr, trim( sectionInfo.name ) );
             writeSourceInfo( sectionInfo.lineInfo );
             m_xml.ensureTagClosed();
         }
@@ -123,8 +123,8 @@ namespace Catch {
         // Print the expression if there is one.
         if( result.hasExpression() ) {
             m_xml.startElement( "Expression" )
-                .writeAttribute( "success", result.succeeded() )
-                .writeAttribute( "type", result.getTestMacroName() );
+                .writeAttribute( "success"_sr, result.succeeded() )
+                .writeAttribute( "type"_sr, result.getTestMacroName() );

             writeSourceInfo( result.getSourceInfo() );

@@ -175,12 +175,12 @@ namespace Catch {
         StreamingReporterBase::sectionEnded( sectionStats );
         if( --m_sectionDepth > 0 ) {
             XmlWriter::ScopedElement e = m_xml.scopedElement( "OverallResults" );
-            e.writeAttribute( "successes", sectionStats.assertions.passed );
-            e.writeAttribute( "failures", sectionStats.assertions.failed );
-            e.writeAttribute( "expectedFailures", sectionStats.assertions.failedButOk );
+            e.writeAttribute( "successes"_sr, sectionStats.assertions.passed );
+            e.writeAttribute( "failures"_sr, sectionStats.assertions.failed );
+            e.writeAttribute( "expectedFailures"_sr, sectionStats.assertions.failedButOk );

             if ( m_config->showDurations() == ShowDurations::Always )
-                e.writeAttribute( "durationInSeconds", sectionStats.durationInSeconds );
+                e.writeAttribute( "durationInSeconds"_sr, sectionStats.durationInSeconds );

             m_xml.endElement();
         }
@@ -189,10 +189,10 @@ namespace Catch {
     void XmlReporter::testCaseEnded( TestCaseStats const& testCaseStats ) {
         StreamingReporterBase::testCaseEnded( testCaseStats );
         XmlWriter::ScopedElement e = m_xml.scopedElement( "OverallResult" );
-        e.writeAttribute( "success", testCaseStats.totals.assertions.allOk() );
+        e.writeAttribute( "success"_sr, testCaseStats.totals.assertions.allOk() );

         if ( m_config->showDurations() == ShowDurations::Always )
-            e.writeAttribute( "durationInSeconds", m_testCaseTimer.getElapsedSeconds() );
+            e.writeAttribute( "durationInSeconds"_sr, m_testCaseTimer.getElapsedSeconds() );

         if( !testCaseStats.stdOut.empty() )
             m_xml.scopedElement( "StdOut" ).writeText( trim( testCaseStats.stdOut ), XmlFormatting::Newline );
@@ -206,69 +206,69 @@ namespace Catch {
         StreamingReporterBase::testGroupEnded( testGroupStats );
         // TODO: Check testGroupStats.aborting and act accordingly.
         m_xml.scopedElement( "OverallResults" )
-            .writeAttribute( "successes", testGroupStats.totals.assertions.passed )
-            .writeAttribute( "failures", testGroupStats.totals.assertions.failed )
-            .writeAttribute( "expectedFailures", testGroupStats.totals.assertions.failedButOk );
+            .writeAttribute( "successes"_sr, testGroupStats.totals.assertions.passed )
+            .writeAttribute( "failures"_sr, testGroupStats.totals.assertions.failed )
+            .writeAttribute( "expectedFailures"_sr, testGroupStats.totals.assertions.failedButOk );
         m_xml.scopedElement( "OverallResultsCases")
-            .writeAttribute( "successes", testGroupStats.totals.testCases.passed )
-            .writeAttribute( "failures", testGroupStats.totals.testCases.failed )
-            .writeAttribute( "expectedFailures", testGroupStats.totals.testCases.failedButOk );
+            .writeAttribute( "successes"_sr, testGroupStats.totals.testCases.passed )
+            .writeAttribute( "failures"_sr, testGroupStats.totals.testCases.failed )
+            .writeAttribute( "expectedFailures"_sr, testGroupStats.totals.testCases.failedButOk );
         m_xml.endElement();
     }

     void XmlReporter::testRunEnded( TestRunStats const& testRunStats ) {
         StreamingReporterBase::testRunEnded( testRunStats );
         m_xml.scopedElement( "OverallResults" )
-            .writeAttribute( "successes", testRunStats.totals.assertions.passed )
-            .writeAttribute( "failures", testRunStats.totals.assertions.failed )
-            .writeAttribute( "expectedFailures", testRunStats.totals.assertions.failedButOk );
+            .writeAttribute( "successes"_sr, testRunStats.totals.assertions.passed )
+            .writeAttribute( "failures"_sr, testRunStats.totals.assertions.failed )
+            .writeAttribute( "expectedFailures"_sr, testRunStats.totals.assertions.failedButOk );
         m_xml.scopedElement( "OverallResultsCases")
-            .writeAttribute( "successes", testRunStats.totals.testCases.passed )
-            .writeAttribute( "failures", testRunStats.totals.testCases.failed )
-            .writeAttribute( "expectedFailures", testRunStats.totals.testCases.failedButOk );
+            .writeAttribute( "successes"_sr, testRunStats.totals.testCases.passed )
+            .writeAttribute( "failures"_sr, testRunStats.totals.testCases.failed )
+            .writeAttribute( "expectedFailures"_sr, testRunStats.totals.testCases.failedButOk );
         m_xml.endElement();
     }

     void XmlReporter::benchmarkPreparing(std::string const& name) {
         m_xml.startElement("BenchmarkResults")
-            .writeAttribute("name", name);
+            .writeAttribute("name"_sr, name);
     }

     void XmlReporter::benchmarkStarting(BenchmarkInfo const &info) {
-        m_xml.writeAttribute("samples", info.samples)
-            .writeAttribute("resamples", info.resamples)
-            .writeAttribute("iterations", info.iterations)
-            .writeAttribute("clockResolution", info.clockResolution)
-            .writeAttribute("estimatedDuration", info.estimatedDuration)
-            .writeComment("All values in nano seconds");
+        m_xml.writeAttribute("samples"_sr, info.samples)
+            .writeAttribute("resamples"_sr, info.resamples)
+            .writeAttribute("iterations"_sr, info.iterations)
+            .writeAttribute("clockResolution"_sr, info.clockResolution)
+            .writeAttribute("estimatedDuration"_sr, info.estimatedDuration)
+            .writeComment("All values in nano seconds"_sr);
     }

     void XmlReporter::benchmarkEnded(BenchmarkStats<> const& benchmarkStats) {
         m_xml.startElement("mean")
-            .writeAttribute("value", benchmarkStats.mean.point.count())
-            .writeAttribute("lowerBound", benchmarkStats.mean.lower_bound.count())
-            .writeAttribute("upperBound", benchmarkStats.mean.upper_bound.count())
-            .writeAttribute("ci", benchmarkStats.mean.confidence_interval);
+            .writeAttribute("value"_sr, benchmarkStats.mean.point.count())
+            .writeAttribute("lowerBound"_sr, benchmarkStats.mean.lower_bound.count())
+            .writeAttribute("upperBound"_sr, benchmarkStats.mean.upper_bound.count())
+            .writeAttribute("ci"_sr, benchmarkStats.mean.confidence_interval);
         m_xml.endElement();
         m_xml.startElement("standardDeviation")
-            .writeAttribute("value", benchmarkStats.standardDeviation.point.count())
-            .writeAttribute("lowerBound", benchmarkStats.standardDeviation.lower_bound.count())
-            .writeAttribute("upperBound", benchmarkStats.standardDeviation.upper_bound.count())
-            .writeAttribute("ci", benchmarkStats.standardDeviation.confidence_interval);
+            .writeAttribute("value"_sr, benchmarkStats.standardDeviation.point.count())
+            .writeAttribute("lowerBound"_sr, benchmarkStats.standardDeviation.lower_bound.count())
+            .writeAttribute("upperBound"_sr, benchmarkStats.standardDeviation.upper_bound.count())
+            .writeAttribute("ci"_sr, benchmarkStats.standardDeviation.confidence_interval);
         m_xml.endElement();
         m_xml.startElement("outliers")
-            .writeAttribute("variance", benchmarkStats.outlierVariance)
-            .writeAttribute("lowMild", benchmarkStats.outliers.low_mild)
-            .writeAttribute("lowSevere", benchmarkStats.outliers.low_severe)
-            .writeAttribute("highMild", benchmarkStats.outliers.high_mild)
-            .writeAttribute("highSevere", benchmarkStats.outliers.high_severe);
+            .writeAttribute("variance"_sr, benchmarkStats.outlierVariance)
+            .writeAttribute("lowMild"_sr, benchmarkStats.outliers.low_mild)
+            .writeAttribute("lowSevere"_sr, benchmarkStats.outliers.low_severe)
+            .writeAttribute("highMild"_sr, benchmarkStats.outliers.high_mild)
+            .writeAttribute("highSevere"_sr, benchmarkStats.outliers.high_severe);
         m_xml.endElement();
         m_xml.endElement();
     }

     void XmlReporter::benchmarkFailed(std::string const &error) {
         m_xml.scopedElement("failed").
-            writeAttribute("message", error);
+            writeAttribute("message"_sr, error);
         m_xml.endElement();
     }

@@ -320,7 +320,7 @@ namespace Catch {
             auto aliasTag = m_xml.scopedElement("Aliases");
             for (auto const& alias : tag.spellings) {
                 m_xml.startElement("Alias", XmlFormatting::Indent)
-                    .writeText(static_cast<std::string>(alias), XmlFormatting::None)
+                    .writeText(alias, XmlFormatting::None)
                     .endElement(XmlFormatting::Newline);
             }
         }