Fix cout/cerr output to reporter

This commit is contained in:
Malcolm Noyes 2013-12-10 13:25:31 +00:00
parent e8c32e819c
commit 8774268140
4 changed files with 256 additions and 355 deletions

View File

@ -283,12 +283,14 @@ private:
cd.warnings = (CatchOverrides::Config<Catch::IConfig const*>::instance().warnAboutMissingAssertions(__FILE__, Count ) ? Catch::WarnAbout::NoAssertions : Catch::WarnAbout::Nothing); \ cd.warnings = (CatchOverrides::Config<Catch::IConfig const*>::instance().warnAboutMissingAssertions(__FILE__, Count ) ? Catch::WarnAbout::NoAssertions : Catch::WarnAbout::Nothing); \
cd.abortAfter = CatchOverrides::Config<Catch::IConfig const*>::instance().abortAfter(__FILE__, Count ); \ cd.abortAfter = CatchOverrides::Config<Catch::IConfig const*>::instance().abortAfter(__FILE__, Count ); \
Catch::Ptr<Catch::Config> config(new Catch::Config(cd)); \ Catch::Ptr<Catch::Config> config(new Catch::Config(cd)); \
Catch::MSTestReporter* rep = new Catch::MSTestReporter(config.get()); \ Catch::ReporterRegistrar<Catch::MSTestReporter> reporterReg("vs_reporter"); \
Catch::RunContext tr(config.get(), rep); \ Catch::RunContext context(config.get(), Catch::getRegistryHub().getReporterRegistry().create( "vs_reporter", config.get())); \
std::vector<Catch::TestCase> testCase = Catch::getRegistryHub().getTestCaseRegistry().getMatchingTestCases(name_desc.name); \ std::vector<Catch::TestCase> testCase = Catch::getRegistryHub().getTestCaseRegistry().getMatchingTestCases(name_desc.name); \
if( testCase.empty() ) Assert::Fail(FAIL_STRING("No tests match")); \ if( testCase.empty() ) Assert::Fail(FAIL_STRING("No tests match")); \
if( testCase.size() > 1 ) Assert::Fail(FAIL_STRING("More than one test with the same name")); \ if( testCase.size() > 1 ) Assert::Fail(FAIL_STRING("More than one test with the same name")); \
Catch::Totals totals = tr.runTest(*testCase.begin()); \ context.testGroupStarting( "", 0, 1 ); \
Catch::Totals totals = context.runTest(*testCase.begin()); \
context.testGroupEnded( "", totals, 0, 1 ); \
if( totals.assertions.failed > 0 ) { \ if( totals.assertions.failed > 0 ) { \
INTERNAL_CATCH_TEST_THROW_FAILURE \ INTERNAL_CATCH_TEST_THROW_FAILURE \
} \ } \
@ -371,11 +373,11 @@ private:
cd.showSuccessfulTests = CatchOverrides::Config<Catch::IConfig const*>::instance().includeSuccessfulResults(__FILE__, Count ); \ cd.showSuccessfulTests = CatchOverrides::Config<Catch::IConfig const*>::instance().includeSuccessfulResults(__FILE__, Count ); \
cd.warnings = (CatchOverrides::Config<Catch::IConfig const*>::instance().warnAboutMissingAssertions(__FILE__, Count ) ? Catch::WarnAbout::NoAssertions : Catch::WarnAbout::Nothing); \ cd.warnings = (CatchOverrides::Config<Catch::IConfig const*>::instance().warnAboutMissingAssertions(__FILE__, Count ) ? Catch::WarnAbout::NoAssertions : Catch::WarnAbout::Nothing); \
cd.abortAfter = CatchOverrides::Config<Catch::IConfig const*>::instance().abortAfter(__FILE__, Count ); \ cd.abortAfter = CatchOverrides::Config<Catch::IConfig const*>::instance().abortAfter(__FILE__, Count ); \
cd.reporterName = "vs_reporter"; \ cd.reporterName = "vs_reporterlf"; \
cd.name = "Batch run using tag : " Tag; \ cd.name = "Batch run using tag : " Tag; \
cd.testsOrTags.push_back( Tag ); \ cd.testsOrTags.push_back( Tag ); \
Catch::Ptr<Catch::Config> config(new Catch::Config(cd)); \ Catch::Ptr<Catch::Config> config(new Catch::Config(cd)); \
Catch::ReporterRegistrar<Catch::MSTestReporter> reporterReg("vs_reporter"); \ Catch::ReporterRegistrar<Catch::MSTestReporterLineFeed> reporterReg("vs_reporterlf"); \
Catch::Runner runner(config); \ Catch::Runner runner(config); \
Catch::Totals totals = runner.runTests(); \ Catch::Totals totals = runner.runTests(); \
if( totals.assertions.failed > 0 ) { \ if( totals.assertions.failed > 0 ) { \
@ -398,17 +400,23 @@ private:
cd.showSuccessfulTests = CatchOverrides::Config<Catch::IConfig const*>::instance().includeSuccessfulResults(__FILE__, Count ); \ cd.showSuccessfulTests = CatchOverrides::Config<Catch::IConfig const*>::instance().includeSuccessfulResults(__FILE__, Count ); \
cd.warnings = (CatchOverrides::Config<Catch::IConfig const*>::instance().warnAboutMissingAssertions(__FILE__, Count ) ? Catch::WarnAbout::NoAssertions : Catch::WarnAbout::Nothing); \ cd.warnings = (CatchOverrides::Config<Catch::IConfig const*>::instance().warnAboutMissingAssertions(__FILE__, Count ) ? Catch::WarnAbout::NoAssertions : Catch::WarnAbout::Nothing); \
cd.abortAfter = CatchOverrides::Config<Catch::IConfig const*>::instance().abortAfter(__FILE__, Count ); \ cd.abortAfter = CatchOverrides::Config<Catch::IConfig const*>::instance().abortAfter(__FILE__, Count ); \
cd.reporterName = "vs_reporter"; \ cd.reporterName = "vs_reporterlf"; \
cd.name = "Batch run using category : " Category; \ cd.name = "Batch run using category : " Category; \
std::vector<std::string> stringNames = CatchOverrides::Config<Catch::IConfig const*>::instance().listOfTests(__FILE__, Count ); \ std::vector<std::string> stringNames = CatchOverrides::Config<Catch::IConfig const*>::instance().listOfTests(__FILE__, Count ); \
Catch::Ptr<Catch::Config> config(new Catch::Config(cd)); \ Catch::Ptr<Catch::Config> config(new Catch::Config(cd)); \
Catch::MSTestReporter* rep = new Catch::MSTestReporter(config.get()); \ Catch::ReporterRegistrar<Catch::MSTestReporterLineFeed> reporterReg("vs_reporterlf"); \
Catch::RunContext tr(config.get(), rep); \ Catch::RunContext context(config.get(), Catch::getRegistryHub().getReporterRegistry().create( "vs_reporterlf", config.get())); \
Catch::Totals totals; \
context.testGroupStarting( "", 0, 1 ); \
for( std::vector<std::string>::iterator it = stringNames.begin(); it != stringNames.end(); ++it ) { \ for( std::vector<std::string>::iterator it = stringNames.begin(); it != stringNames.end(); ++it ) { \
std::vector<Catch::TestCase> testCase = Catch::getRegistryHub().getTestCaseRegistry().getMatchingTestCases(*it); \ std::vector<Catch::TestCase> testCase = Catch::getRegistryHub().getTestCaseRegistry().getMatchingTestCases(*it); \
if( testCase.empty() ) Assert::Fail(FAIL_STRING("No tests match")); \ if( testCase.empty() ) Assert::Fail(FAIL_STRING("No tests match")); \
if( testCase.size() > 1 ) Assert::Fail(FAIL_STRING("More than one test with the same name")); \ if( testCase.size() > 1 ) Assert::Fail(FAIL_STRING("More than one test with the same name")); \
tr.runTest(*testCase.begin()); \ totals += context.runTest(*testCase.begin()); \
} \
context.testGroupEnded( "", totals, 0, 1 ); \
if( totals.assertions.failed > 0 ) { \
INTERNAL_CATCH_TEST_REPORT_BATCH_FAILURE(totals.assertions.failed) \
} \ } \
} \ } \
}; \ }; \

View File

@ -11,6 +11,7 @@
#include "../internal/catch_interfaces_reporter.h" #include "../internal/catch_interfaces_reporter.h"
#include "../internal/catch_text.h" #include "../internal/catch_text.h"
#include "../internal/catch_version.h" #include "../internal/catch_version.h"
#include "../internal/catch_console_colour.hpp"
namespace Catch { namespace Catch {
@ -44,42 +45,33 @@ namespace Catch {
#endif // _WINDLL #endif // _WINDLL
#endif // detect CLR #endif // detect CLR
inline void replaceSingleLinefeed(const std::string& s, std::string& result)
struct MSTestReporter : SharedImpl<IStreamingReporter> { {
MSTestReporter( ReporterConfig const& _config ) bool needr(false);
: m_config( _config.fullConfig() ), for(std::string::const_iterator it = s.begin(); it != s.end(); ++it ) {
m_headerPrinted( false ), if( *it == '\r' ) {
m_atLeastOneTestCasePrinted( false ), needr = false;
m_failed(0)
{}
MSTestReporter( Ptr<IConfig> const& _fullConfig )
: m_config( _fullConfig ),
m_headerPrinted( false ),
m_atLeastOneTestCasePrinted( false ),
m_failed(0)
{}
virtual ~MSTestReporter() {
if( m_atLeastOneTestCasePrinted ) {
write_output_message(stream.str());
/*if( m_failed )
{
Assert::IsTrue(false, L"At least one test failed - examine output for failures.");
}*/
} }
else if( *it == '\n' && needr ) {
needr = false;
result += '\r';
result += *it;
}
else {
needr = true;
}
result += *it;
} }
}
static std::string getDescription() { struct VSStreamingReporterBase : SharedImpl<IStreamingReporter> {
return "Reports test results to MSTest";
} VSStreamingReporterBase( ReporterConfig const& _config )
virtual ReporterPreferences getPreferences() const { : m_config( _config.fullConfig() )
ReporterPreferences prefs; {}
prefs.shouldRedirectStdOut = true;
return prefs; virtual ~VSStreamingReporterBase() {}
}
//base
virtual void noMatchingTestCases( std::string const& ) {} virtual void noMatchingTestCases( std::string const& ) {}
virtual void testRunStarting( TestRunInfo const& _testRunInfo ) { virtual void testRunStarting( TestRunInfo const& _testRunInfo ) {
@ -93,69 +85,76 @@ namespace Catch {
currentTestCaseInfo = _testInfo; currentTestCaseInfo = _testInfo;
} }
virtual void sectionStarting( SectionInfo const& _sectionInfo ) { virtual void sectionStarting( SectionInfo const& _sectionInfo ) {
m_headerPrinted = false;
m_sectionStack.push_back( _sectionInfo ); m_sectionStack.push_back( _sectionInfo );
} }
virtual void sectionEnded( SectionStats const& _sectionStats ) { virtual void sectionEnded( SectionStats const& /* _sectionStats */ ) {
if( _sectionStats.missingAssertions ) {
lazyPrint();
if( m_sectionStack.size() > 1 )
stream << "\r\n" << "No assertions in section";
else
stream << "\r\n" << "No assertions in test case";
stream << " '" << _sectionStats.sectionInfo.name << "'" << "\r\n" << "\r\n";
}
if( m_headerPrinted ) {
if( m_config->showDurations() == ShowDurations::Always )
stream << "Completed in " << _sectionStats.durationInSeconds << "s" << "\r\n";
m_headerPrinted = false;
}
else {
if( m_config->showDurations() == ShowDurations::Always )
stream << _sectionStats.sectionInfo.name << " completed in " << _sectionStats.durationInSeconds << "s" << "\r\n";
}
m_sectionStack.pop_back(); m_sectionStack.pop_back();
} }
virtual void testCaseEnded( TestCaseStats const& _testCaseStats ) { virtual void testCaseEnded( TestCaseStats const& /* _testCaseStats */ ) {
if( !_testCaseStats.stdOut.empty() ) {
write_output_message(getDoubleDashes());
write_output_message("Output to std::cout :");
write_output_message(getDashes());
write_output_message(_testCaseStats.stdOut);
write_output_message(getDoubleDashes());
}
if( !_testCaseStats.stdErr.empty() ) {
write_output_message(getDoubleDashes());
write_output_message("Output to std::cerr :");
write_output_message(getDashes());
write_output_message(_testCaseStats.stdErr);
write_output_message(getDoubleDashes());
}
m_headerPrinted = false;
currentTestCaseInfo.reset(); currentTestCaseInfo.reset();
assert( m_sectionStack.empty() ); assert( m_sectionStack.empty() );
} }
virtual void testGroupEnded( TestGroupStats const& _testGroupStats ) { virtual void testGroupEnded( TestGroupStats const& /* _testGroupStats */ ) {
if( currentGroupInfo.used ) {
printSummaryDivider();
stream << "Summary for group '" << _testGroupStats.groupInfo.name << "':" << "\r\n";
printTotals( _testGroupStats.totals );
stream << "\r\n" << "\r\n";
}
currentGroupInfo.reset(); currentGroupInfo.reset();
} }
virtual void testRunEnded( TestRunStats const& _testRunStats ) { virtual void testRunEnded( TestRunStats const& /* _testRunStats */ ) {
if( m_atLeastOneTestCasePrinted )
printTotalsDivider();
printTotals( _testRunStats.totals );
stream << "\r\n" << "\r\n";
m_failed = _testRunStats.totals.testCases.failed;
currentTestCaseInfo.reset(); currentTestCaseInfo.reset();
currentGroupInfo.reset(); currentGroupInfo.reset();
currentTestRunInfo.reset(); currentTestRunInfo.reset();
} }
// base end
Ptr<IConfig> m_config;
LazyStat<TestRunInfo> currentTestRunInfo;
LazyStat<GroupInfo> currentGroupInfo;
LazyStat<TestCaseInfo> currentTestCaseInfo;
std::vector<SectionInfo> m_sectionStack;
};
struct MSTestReporter : VSStreamingReporterBase {
typedef VSStreamingReporterBase StreamingReporterBase;
MSTestReporter( ReporterConfig const& _config )
: VSStreamingReporterBase( _config ),
m_prevCout( std::cout.rdbuf() ),
m_prevCerr( std::cerr.rdbuf() ),
m_addLineFeeds(true),
m_headerPrinted( false ),
m_atLeastOneTestCasePrinted( false )
{
std::cout.rdbuf( stream.rdbuf() );
std::cerr.rdbuf( stream.rdbuf() );
}
virtual ~MSTestReporter() {
std::string output = stream.str();
if( !output.empty() ) {
if( m_addLineFeeds ) {
std::string revised;
replaceSingleLinefeed(output, revised);
write_output_message(revised);
}
else {
write_output_message(output);
}
}
std::cout.rdbuf( m_prevCout );
std::cerr.rdbuf( m_prevCerr );
}
static std::string getDescription() {
return "Reports test results as plain lines of text";
}
virtual ReporterPreferences getPreferences() const {
ReporterPreferences prefs;
prefs.shouldRedirectStdOut = false;
return prefs;
}
virtual void noMatchingTestCases( std::string const& spec ) {
stream << "No test cases matched '" << spec << "'" << std::endl;
}
virtual void assertionStarting( AssertionInfo const& ) { virtual void assertionStarting( AssertionInfo const& ) {
} }
@ -163,32 +162,87 @@ namespace Catch {
virtual bool assertionEnded( AssertionStats const& _assertionStats ) { virtual bool assertionEnded( AssertionStats const& _assertionStats ) {
AssertionResult const& result = _assertionStats.assertionResult; AssertionResult const& result = _assertionStats.assertionResult;
bool printInfoMessages = true;
// Drop out if result was successful and we're not printing those // Drop out if result was successful and we're not printing those
if( !m_config->includeSuccessfulResults() && result.isOk() ) if( !m_config->includeSuccessfulResults() && result.isOk() ) {
return false; if( result.getResultType() != ResultWas::Warning )
return false;
printInfoMessages = false;
}
lazyPrint(); lazyPrint();
AssertionPrinter printer( stream, _assertionStats ); AssertionPrinter printer( stream, _assertionStats, printInfoMessages );
printer.print(); printer.print();
stream << "\r\n"; stream << std::endl;
return true; return true;
} }
virtual void sectionStarting( SectionInfo const& _sectionInfo ) {
m_headerPrinted = false;
StreamingReporterBase::sectionStarting( _sectionInfo );
}
virtual void sectionEnded( SectionStats const& _sectionStats ) {
if( _sectionStats.missingAssertions ) {
lazyPrint();
Colour colour( Colour::ResultError );
if( m_sectionStack.size() > 1 )
stream << "\nNo assertions in section";
else
stream << "\nNo assertions in test case";
stream << " '" << _sectionStats.sectionInfo.name << "'\n" << std::endl;
}
if( m_headerPrinted ) {
if( m_config->showDurations() == ShowDurations::Always )
stream << "Completed in " << _sectionStats.durationInSeconds << "s" << std::endl;
m_headerPrinted = false;
}
else {
if( m_config->showDurations() == ShowDurations::Always )
stream << _sectionStats.sectionInfo.name << " completed in " << _sectionStats.durationInSeconds << "s" << std::endl;
}
StreamingReporterBase::sectionEnded( _sectionStats );
}
virtual void testCaseEnded( TestCaseStats const& _testCaseStats ) {
StreamingReporterBase::testCaseEnded( _testCaseStats );
m_headerPrinted = false;
}
virtual void testGroupEnded( TestGroupStats const& _testGroupStats ) {
if( currentGroupInfo.used ) {
printSummaryDivider();
stream << "Summary for group '" << _testGroupStats.groupInfo.name << "':\n";
printTotals( _testGroupStats.totals );
stream << "\n" << std::endl;
}
StreamingReporterBase::testGroupEnded( _testGroupStats );
}
virtual void testRunEnded( TestRunStats const& _testRunStats ) {
if( m_atLeastOneTestCasePrinted )
printTotalsDivider();
printTotals( _testRunStats.totals );
stream << "\n" << std::endl;
StreamingReporterBase::testRunEnded( _testRunStats );
}
private: private:
class AssertionPrinter { class AssertionPrinter {
void operator= ( AssertionPrinter const& ); void operator= ( AssertionPrinter const& );
public: public:
AssertionPrinter( std::ostream& _stream, AssertionStats const& _stats ) AssertionPrinter( std::ostream& _stream, AssertionStats const& _stats, bool _printInfoMessages )
: stream( _stream ), : stream( _stream ),
stats( _stats ), stats( _stats ),
result( _stats.assertionResult ), result( _stats.assertionResult ),
colour( Colour::None ),
message( result.getMessage() ), message( result.getMessage() ),
messages( _stats.infoMessages ) messages( _stats.infoMessages ),
printInfoMessages( _printInfoMessages )
{ {
switch( result.getResultType() ) { switch( result.getResultType() ) {
case ResultWas::Ok: case ResultWas::Ok:
colour = Colour::Success;
passOrFail = "PASSED"; passOrFail = "PASSED";
//if( result.hasMessage() ) //if( result.hasMessage() )
if( _stats.infoMessages.size() == 1 ) if( _stats.infoMessages.size() == 1 )
@ -198,9 +252,11 @@ namespace Catch {
break; break;
case ResultWas::ExpressionFailed: case ResultWas::ExpressionFailed:
if( result.isOk() ) { if( result.isOk() ) {
colour = Colour::Success;
passOrFail = "FAILED - but was ok"; passOrFail = "FAILED - but was ok";
} }
else { else {
colour = Colour::Error;
passOrFail = "FAILED"; passOrFail = "FAILED";
} }
if( _stats.infoMessages.size() == 1 ) if( _stats.infoMessages.size() == 1 )
@ -209,10 +265,12 @@ namespace Catch {
messageLabel = "with messages"; messageLabel = "with messages";
break; break;
case ResultWas::ThrewException: case ResultWas::ThrewException:
colour = Colour::Error;
passOrFail = "FAILED"; passOrFail = "FAILED";
messageLabel = "due to unexpected exception with message"; messageLabel = "due to unexpected exception with message";
break; break;
case ResultWas::DidntThrowException: case ResultWas::DidntThrowException:
colour = Colour::Error;
passOrFail = "FAILED"; passOrFail = "FAILED";
messageLabel = "because no exception was thrown where one was expected"; messageLabel = "because no exception was thrown where one was expected";
break; break;
@ -224,6 +282,7 @@ namespace Catch {
break; break;
case ResultWas::ExplicitFailure: case ResultWas::ExplicitFailure:
passOrFail = "FAILED"; passOrFail = "FAILED";
colour = Colour::Error;
if( _stats.infoMessages.size() == 1 ) if( _stats.infoMessages.size() == 1 )
messageLabel = "explicitly with message"; messageLabel = "explicitly with message";
if( _stats.infoMessages.size() > 1 ) if( _stats.infoMessages.size() > 1 )
@ -234,6 +293,7 @@ namespace Catch {
case ResultWas::FailureBit: case ResultWas::FailureBit:
case ResultWas::Exception: case ResultWas::Exception:
passOrFail = "** internal error **"; passOrFail = "** internal error **";
colour = Colour::Error;
break; break;
} }
} }
@ -242,13 +302,13 @@ namespace Catch {
printSourceInfo(); printSourceInfo();
if( stats.totals.assertions.total() > 0 ) { if( stats.totals.assertions.total() > 0 ) {
if( result.isOk() ) if( result.isOk() )
stream << "\r\n"; stream << "\n";
printResultType(); printResultType();
printOriginalExpression(); printOriginalExpression();
printReconstructedExpression(); printReconstructedExpression();
} }
else { else {
stream << "\r\n"; stream << "\n";
} }
printMessage(); printMessage();
} }
@ -256,42 +316,50 @@ namespace Catch {
private: private:
void printResultType() const { void printResultType() const {
if( !passOrFail.empty() ) { if( !passOrFail.empty() ) {
stream << passOrFail << ":" << "\r\n"; Colour colourGuard( colour );
stream << passOrFail << ":\n";
} }
} }
void printOriginalExpression() const { void printOriginalExpression() const {
if( result.hasExpression() ) { if( result.hasExpression() ) {
Colour colourGuard( Colour::OriginalExpression );
stream << " "; stream << " ";
stream << result.getExpressionInMacro(); stream << result.getExpressionInMacro();
stream << "\r\n"; stream << "\n";
} }
} }
void printReconstructedExpression() const { void printReconstructedExpression() const {
if( result.hasExpandedExpression() ) { if( result.hasExpandedExpression() ) {
stream << "with expansion:" << "\r\n"; stream << "with expansion:\n";
stream << Text( result.getExpandedExpression(), TextAttributes().setIndent(2) ) << "\r\n"; Colour colourGuard( Colour::ReconstructedExpression );
stream << Text( result.getExpandedExpression(), TextAttributes().setIndent(2) ) << "\n";
} }
} }
void printMessage() const { void printMessage() const {
if( !messageLabel.empty() ) if( !messageLabel.empty() )
stream << messageLabel << ":" << "\r\n"; stream << messageLabel << ":" << "\n";
for( std::vector<MessageInfo>::const_iterator it = messages.begin(), itEnd = messages.end(); for( std::vector<MessageInfo>::const_iterator it = messages.begin(), itEnd = messages.end();
it != itEnd; it != itEnd;
++it ) { ++it ) {
stream << Text( it->message, TextAttributes().setIndent(2) ) << "\r\n"; // If this assertion is a warning ignore any INFO messages
if( printInfoMessages || it->type != ResultWas::Info )
stream << Text( it->message, TextAttributes().setIndent(2) ) << "\n";
} }
} }
void printSourceInfo() const { void printSourceInfo() const {
Colour colourGuard( Colour::FileName );
stream << result.getSourceInfo() << ": "; stream << result.getSourceInfo() << ": ";
} }
std::ostream& stream; std::ostream& stream;
AssertionStats const& stats; AssertionStats const& stats;
AssertionResult const& result; AssertionResult const& result;
Colour::Code colour;
std::string passOrFail; std::string passOrFail;
std::string messageLabel; std::string messageLabel;
std::string message; std::string message;
std::vector<MessageInfo> messages; std::vector<MessageInfo> messages;
bool printInfoMessages;
}; };
void lazyPrint() { void lazyPrint() {
@ -308,29 +376,27 @@ namespace Catch {
m_atLeastOneTestCasePrinted = true; m_atLeastOneTestCasePrinted = true;
} }
void lazyPrintRunInfo() { void lazyPrintRunInfo() {
stream << getTildes() << "\r\n"; stream << "\n" << getTildes() << "\n";
Colour colour( Colour::SecondaryText );
stream << "Using Catch v" << libraryVersion::value.majorVersion << "." stream << "Using Catch v" << libraryVersion::value.majorVersion << "."
<< libraryVersion::value.minorVersion << " b" << libraryVersion::value.minorVersion << " b"
<< libraryVersion::value.buildNumber; << libraryVersion::value.buildNumber;
if( libraryVersion::value.branchName != "master" ) if( libraryVersion::value.branchName != "master" )
stream << " (" << libraryVersion::value.branchName << ")"; stream << " (" << libraryVersion::value.branchName << ")";
#if (_MANAGED == 1) || (_M_CEE == 1) // detect CLR #if (_MANAGED == 1) || (_M_CEE == 1) // detect CLR
stream << " for a managed MSTest project." << "\r\n"; stream << " for a managed MSTest project." << "\n";
#else #else
#ifdef _WINDLL #ifdef _WINDLL
stream << " for a native MSTest project." << "\r\n"; stream << " for a native MSTest project." << "\n";
#endif #endif
#endif #endif
currentTestRunInfo.used = true; currentTestRunInfo.used = true;
} }
void lazyPrintGroupInfo() { void lazyPrintGroupInfo() {
if( currentGroupInfo.some() ) if( !currentGroupInfo->name.empty() && currentGroupInfo->groupsCounts > 1 ) {
{ printClosedHeader( "Group: " + currentGroupInfo->name );
if( !currentGroupInfo->name.empty() && currentGroupInfo->groupsCounts > 1 ) { currentGroupInfo.used = true;
printClosedHeader( "Group: " + currentGroupInfo->name );
currentGroupInfo.used = true;
}
} }
} }
void printTestCaseAndSectionHeader() { void printTestCaseAndSectionHeader() {
@ -338,6 +404,7 @@ namespace Catch {
printOpenHeader( currentTestCaseInfo->name ); printOpenHeader( currentTestCaseInfo->name );
if( m_sectionStack.size() > 1 ) { if( m_sectionStack.size() > 1 ) {
Colour colourGuard( Colour::Headers );
std::vector<SectionInfo>::const_iterator std::vector<SectionInfo>::const_iterator
it = m_sectionStack.begin()+1, // Skip first section (test case) it = m_sectionStack.begin()+1, // Skip first section (test case)
@ -349,19 +416,21 @@ namespace Catch {
SourceLineInfo lineInfo = m_sectionStack.front().lineInfo; SourceLineInfo lineInfo = m_sectionStack.front().lineInfo;
if( !lineInfo.empty() ){ if( !lineInfo.empty() ){
stream << getDashes() << "\r\n"; stream << getDashes() << "\n";
stream << lineInfo << "\r\n"; Colour colourGuard( Colour::FileName );
stream << lineInfo << "\n";
} }
stream << getDots() << "\r\n" << "\r\n"; stream << getDots() << "\n" << std::endl;
} }
void printClosedHeader( std::string const& _name ) { void printClosedHeader( std::string const& _name ) {
printOpenHeader( _name ); printOpenHeader( _name );
stream << getDots() << "\r\n"; stream << getDots() << "\n";
} }
void printOpenHeader( std::string const& _name ) { void printOpenHeader( std::string const& _name ) {
stream << getDashes() << "\r\n"; stream << getDashes() << "\n";
{ {
Colour colourGuard( Colour::Headers );
printHeaderString( _name ); printHeaderString( _name );
} }
} }
@ -376,14 +445,20 @@ namespace Catch {
i = 0; i = 0;
stream << Text( _string, TextAttributes() stream << Text( _string, TextAttributes()
.setIndent( indent+i) .setIndent( indent+i)
.setInitialIndent( indent ) ) << "\r\n"; .setInitialIndent( indent ) ) << "\n";
} }
void printTotals( const Totals& totals ) { void printTotals( const Totals& totals ) {
if( totals.assertions.total() == 0 ) { if( totals.testCases.total() == 0 ) {
stream << "No tests ran"; stream << "No tests ran";
} }
else if( totals.assertions.total() == 0 ) {
Colour colour( Colour::Yellow );
printCounts( "test case", totals.testCases );
stream << " (no assertions)";
}
else if( totals.assertions.failed ) { else if( totals.assertions.failed ) {
Colour colour( Colour::ResultError );
printCounts( "test case", totals.testCases ); printCounts( "test case", totals.testCases );
if( totals.testCases.failed > 0 ) { if( totals.testCases.failed > 0 ) {
stream << " ("; stream << " (";
@ -392,6 +467,7 @@ namespace Catch {
} }
} }
else { else {
Colour colour( Colour::ResultSuccess );
stream << "All tests passed (" stream << "All tests passed ("
<< pluralise( totals.assertions.passed, "assertion" ) << " in " << pluralise( totals.assertions.passed, "assertion" ) << " in "
<< pluralise( totals.testCases.passed, "test case" ) << ")"; << pluralise( totals.testCases.passed, "test case" ) << ")";
@ -425,42 +501,47 @@ namespace Catch {
} }
void printTotalsDivider() { void printTotalsDivider() {
stream << getDoubleDashes() << "\r\n"; stream << getDoubleDashes() << "\n";
} }
void printSummaryDivider() { void printSummaryDivider() {
stream << getDashes() << "\r\n"; stream << getDashes() << "\n";
} }
static std::string getDashes() { static std::string getDashes() {
std::string dashes( CATCH_CONFIG_CONSOLE_WIDTH-1, '-' ); const std::string dashes( CATCH_CONFIG_CONSOLE_WIDTH-1, '-' );
return dashes; return dashes;
} }
static std::string getDots() { static std::string getDots() {
std::string dots( CATCH_CONFIG_CONSOLE_WIDTH-1, '.' ); const std::string dots( CATCH_CONFIG_CONSOLE_WIDTH-1, '.' );
return dots; return dots;
} }
static std::string getDoubleDashes() { static std::string getDoubleDashes() {
std::string doubleDashes( CATCH_CONFIG_CONSOLE_WIDTH-1, '=' ); const std::string doubleDashes( CATCH_CONFIG_CONSOLE_WIDTH-1, '=' );
return doubleDashes; return doubleDashes;
} }
static std::string getTildes() { static std::string getTildes() {
std::string dots( CATCH_CONFIG_CONSOLE_WIDTH-1, '~' ); const std::string dots( CATCH_CONFIG_CONSOLE_WIDTH-1, '~' );
return dots; return dots;
} }
private: private:
Ptr<IConfig> m_config;
std::ostringstream stream; std::ostringstream stream;
std::streambuf* m_prevCout;
std::streambuf* m_prevCerr;
protected:
bool m_addLineFeeds;
LazyStat<TestRunInfo> currentTestRunInfo; private:
LazyStat<GroupInfo> currentGroupInfo;
LazyStat<TestCaseInfo> currentTestCaseInfo;
std::vector<SectionInfo> m_sectionStack;
bool m_headerPrinted; bool m_headerPrinted;
bool m_atLeastOneTestCasePrinted; bool m_atLeastOneTestCasePrinted;
size_t m_failed;
}; };
struct MSTestReporterLineFeed : MSTestReporter {
MSTestReporterLineFeed( ReporterConfig const& _config )
: MSTestReporter( _config )
{
m_addLineFeeds = false;
}
};
} // end namespace Catch } // end namespace Catch
#endif // TWOBLUECUBES_CATCH_REPORTER_MSTEST_HPP_INCLUDED #endif // TWOBLUECUBES_CATCH_REPORTER_MSTEST_HPP_INCLUDED

View File

@ -12,211 +12,6 @@
namespace AllTestsRunner { namespace AllTestsRunner {
#ifdef OLD_RUNNER
class NullStreamingReporter : public Catch::SharedImpl<Catch::IStreamingReporter> {
public:
virtual ~NullStreamingReporter();
static std::string getDescription() {
return "null reporter";
}
private: // IStreamingReporter
virtual Catch::ReporterPreferences getPreferences() const {
return Catch::ReporterPreferences();
}
virtual void noMatchingTestCases( std::string const& ) {}
virtual void testRunStarting( Catch::TestRunInfo const& ) {}
virtual void testGroupStarting( Catch::GroupInfo const& ) {}
virtual void testCaseStarting( Catch::TestCaseInfo const& ) {}
virtual void sectionStarting( Catch::SectionInfo const& ) {}
virtual void assertionStarting( Catch::AssertionInfo const& ) {}
virtual bool assertionEnded( Catch::AssertionStats const& ) { return false; }
virtual void sectionEnded( Catch::SectionStats const& ) {}
virtual void testCaseEnded( Catch::TestCaseStats const& ) {}
virtual void testGroupEnded( Catch::TestGroupStats const& ) {}
virtual void testRunEnded( Catch::TestRunStats const& ) {}
};
class EmbeddedRunner {
public:
EmbeddedRunner() : m_reporter( new NullStreamingReporter() ) {}
Catch::Totals runMatching( const std::string& rawTestSpec,
std::size_t groupIndex,
std::size_t groupsCount,
const std::string& reporter = "console" );
private:
Catch::Ptr<Catch::IStreamingReporter> m_reporter;
};
NullStreamingReporter::~NullStreamingReporter() {}
Catch::Totals EmbeddedRunner::runMatching( const std::string& rawTestSpec, std::size_t groupIndex, std::size_t groupsCount, const std::string& ) {
std::ostringstream oss;
Catch::Ptr<Catch::Config> config = new Catch::Config();
config->setStreamBuf( oss.rdbuf() );
Catch::Totals totals;
// Scoped because RunContext doesn't report EndTesting until its destructor
{
Catch::RunContext runner( config.get(), m_reporter.get() );
totals = runner.runMatching( rawTestSpec, groupIndex, groupsCount );
}
return totals;
}
class MetaTestRunner {
public:
struct Expected { enum Result {
ToSucceed,
ToFail
}; };
MetaTestRunner( Expected::Result expectedResult, std::size_t groupIndex, std::size_t groupsCount )
: m_expectedResult( expectedResult ),
m_groupIndex( groupIndex ),
m_groupsCount( groupsCount )
{}
static void runMatching( const std::string& testSpec,
Expected::Result expectedResult,
std::size_t groupIndex,
std::size_t groupsCount ) {
forEach( Catch::getRegistryHub().getTestCaseRegistry().getMatchingTestCases( testSpec ),
MetaTestRunner( expectedResult, groupIndex, groupsCount ) );
}
void operator()( const Catch::TestCase& testCase ) {
std::string name;
Catch::Totals totals;
{
EmbeddedRunner runner;
name = testCase.getTestCaseInfo().name;
totals = runner.runMatching( name, m_groupIndex, m_groupsCount );
}
switch( m_expectedResult ) {
case Expected::ToSucceed:
if( totals.assertions.failed > 0 ) {
FAIL( "Expected test case '"
<< name
<< "' to succeed but there was/ were "
<< totals.assertions.failed << " failure(s)" );
}
else {
SUCCEED( "Tests passed, as expected" );
}
break;
case Expected::ToFail:
if( totals.assertions.failed == 0 ) {
FAIL( "Expected test case '"
<< name
<< "' to fail but there was/ were "
<< totals.assertions.passed << " success(es)" );
}
else {
SUCCEED( "Tests failed, as expected" );
}
break;
}
}
private:
Expected::Result m_expectedResult;
std::size_t m_groupIndex;
std::size_t m_groupsCount;
};
// Meta-test over the whole self-test suite: runs the "failing" and
// "succeeding" branches via MetaTestRunner/EmbeddedRunner and verifies both
// the expected outcome of each branch and the exact assertion totals, so any
// change to the self-tests themselves is detected here.
TEST_CASE( "Run all failing and succeeding tests", "[vsall]" ) {
    ///////////////////////////////////////////////////////////////////////////
    SECTION( "selftest/expected result",
             "Tests do what they claim" ) {
        // Report which character set this build uses (MSTest builds differ).
#ifdef _UNICODE
        std::cout << "using Unicode..." << std::endl;
#else
        std::cout << "using Mbcs..." << std::endl;
#endif
        SECTION( "selftest/expected result/failing tests",
                 "Tests in the 'failing' branch fail" ) {
            std::cout << "Tests in the 'failing' branch fail" << std::endl;
            // Group 0 of 2: every test matching ./failing/* is expected to fail.
            MetaTestRunner::runMatching( "./failing/*", MetaTestRunner::Expected::ToFail, 0, 2 );
        }
        SECTION( "selftest/expected result/succeeding tests",
                 "Tests in the 'succeeding' branch succeed" ) {
            std::cout << "Tests in the 'succeeding' branch succeed" << std::endl;
            // Group 1 of 2: every test matching ./succeeding/* is expected to pass.
            MetaTestRunner::runMatching( "./succeeding/*", MetaTestRunner::Expected::ToSucceed, 1, 2 );
        }
    }
    ///////////////////////////////////////////////////////////////////////////
    SECTION( "selftest/test counts",
             "Number of test cases that run is fixed" ) {
        EmbeddedRunner runner;
        SECTION( "selftest/test counts/succeeding tests",
                 "Number of 'succeeding' tests is fixed" ) {
            std::cout << "Number of 'succeeding' tests is fixed" << std::endl;
            Catch::Totals totals = runner.runMatching( "./succeeding/*", 0, 2 );
            // Hard-coded expected totals; must be updated when self-tests change.
            CHECK( totals.assertions.passed == 298 );
            CHECK( totals.assertions.failed == 0 );
        }
        SECTION( "selftest/test counts/failing tests",
                 "Number of 'failing' tests is fixed" ) {
            std::cout << "Number of 'failing' tests is fixed" << std::endl;
            Catch::Totals totals = runner.runMatching( "./failing/*", 1, 2 );
            // Hard-coded expected totals; must be updated when self-tests change.
            CHECK( totals.assertions.passed == 2 );
            CHECK( totals.assertions.failed == 77 );
        }
    }
}
// Runs one named test case through the MSTest reporter. Mirrors the pattern
// this commit introduced in the INTERNAL_CATCH_VS test macro: register the
// reporter under "vs_reporter" and let the reporter registry construct it, so
// the RunContext owns the reporter and captured cout/cerr output is routed to
// it, and bracket the run with testGroupStarting/testGroupEnded so the
// reporter sees a well-formed event sequence.
TEST_CASE( "Run all failing and succeeding tests", "[sw4][vs]" ) {
    CatchOverrides::ConfigGuard cg;
    Catch::ConfigData cd(cg.value().get());
    cd.name = "Test sw4";
    cd.showSuccessfulTests = true;
    cd.warnings = Catch::WarnAbout::NoAssertions;
    cd.abortAfter = -1;  // no abort limit (a prior "abortAfter = 1" was a dead store)
    Catch::Ptr<Catch::Config> config(new Catch::Config(cd));
    // Create the reporter via the registry instead of a raw new + hand-off,
    // matching the updated test macro; RunContext takes ownership.
    Catch::ReporterRegistrar<Catch::MSTestReporter> reporterReg("vs_reporter");
    Catch::RunContext context(config.get(), Catch::getRegistryHub().getReporterRegistry().create( "vs_reporter", config.get() ));
    std::vector<Catch::TestCase> testCase = Catch::getRegistryHub().getTestCaseRegistry().getMatchingTestCases( "Some simple comparisons between doubles" );
    if( testCase.empty() ) Assert::Fail(FAIL_STRING("No tests match"));
    if( testCase.size() > 1 ) Assert::Fail(FAIL_STRING("More than one test with the same name"));
    // Single implicit group around the one test, as the macro does.
    context.testGroupStarting( "", 0, 1 );
    Catch::Totals totals = context.runTest(*testCase.begin());
    context.testGroupEnded( "", totals, 0, 1 );
    if( totals.assertions.failed > 0 ) {
        INTERNAL_CATCH_TEST_THROW_FAILURE
    }
}
#endif
// mstest /TestContainer:Debug\ManagedTestCatch.dll /category:"all" // mstest /TestContainer:Debug\ManagedTestCatch.dll /category:"all"
#if defined(INTERNAL_CATCH_VS_MANAGED) || defined(INTERNAL_CATCH_VS_NATIVE) #if defined(INTERNAL_CATCH_VS_MANAGED) || defined(INTERNAL_CATCH_VS_NATIVE)
CATCH_MAP_CATEGORY_TO_TAG(all, "~[vs]"); CATCH_MAP_CATEGORY_TO_TAG(all, "~[vs]");
@ -242,6 +37,10 @@ namespace AllTestsRunner {
CATCH_INTERNAL_CONFIG_ADD_TEST("Equality checks that should succeed") CATCH_INTERNAL_CONFIG_ADD_TEST("Equality checks that should succeed")
CATCH_INTERNAL_CONFIG_ADD_TEST("Equality checks that should fail]") CATCH_INTERNAL_CONFIG_ADD_TEST("Equality checks that should fail]")
INTERNAL_CATCH_MAP_CATEGORY_TO_LIST(allSucceedingAborting); INTERNAL_CATCH_MAP_CATEGORY_TO_LIST(allSucceedingAborting);
#endif
CATCH_INTERNAL_CONFIG_ADD_TEST("Output from all sections is reported")
CATCH_INTERNAL_CONFIG_ADD_TEST("Standard output from all sections is reported")
INTERNAL_CATCH_MAP_CATEGORY_TO_LIST(OutputFromAllSectionsIsReported);
#endif
} }

View File

@ -8,13 +8,12 @@ import re
import xml.etree.cElementTree as etree import xml.etree.cElementTree as etree
from scriptCommon import catchPath from scriptCommon import catchPath
#from rawfile import writeRawFile
#from rawfile import parseRawFileIntoTree
from catch_test_run import TestRunApprovedHandler from catch_test_run import TestRunApprovedHandler
from catch_test_run import TestRunData from catch_test_run import TestRunData
from catch_test_run import TestRunResultHandler from catch_test_run import TestRunResultHandler
from catch_test_case import TestCaseResultParser from catch_test_case import TestCaseResultParser
from catch_test_case import TestCaseData from catch_test_case import TestCaseData
from catch_conditions import RandomOutput
rootPath = os.path.join(os.path.join(os.path.join( catchPath, 'projects'), 'SelfTest'), 'Baselines' ) rootPath = os.path.join(os.path.join(os.path.join( catchPath, 'projects'), 'SelfTest'), 'Baselines' )
@ -638,6 +637,8 @@ def parseTrxFile(baseName, trxFile):
#print ids #print ids
ids = dict(ids) ids = dict(ids)
#print ids["87ec526a-e414-1a3f-ba0f-e210b204bb42"] #print ids["87ec526a-e414-1a3f-ba0f-e210b204bb42"]
lineNumber = 0
resultParser = TestCaseResultParser() resultParser = TestCaseResultParser()
for tc in ts: for tc in ts:
m = qname.match(tc.tag) m = qname.match(tc.tag)
@ -668,15 +669,27 @@ def parseTrxFile(baseName, trxFile):
found = True found = True
break break
index += 1 index += 1
lines = lines[index + 2:-1] lines = lines[index + 2:]
#print "*******",desc #print "*******",desc
#print lines #print lines
if found: if found:
endOfRun = False
for line in lines: for line in lines:
testcase = resultParser.parseResultLine(line) if endOfRun:
if isinstance(testcase, TestCaseData): testRun.results = line.strip()
testRun.testcases.append(testcase) else:
lines = testRun.generateSortedUnapprovedLines(0) try:
testcase = resultParser.parseResultLine(line)
except RandomOutput as e:
#print "E:", self.lineNumber, ", ",e.output
testRun.output = e.output
testRun.outputLine = lineNumber - len(e.output)
if isinstance(testcase, TestCaseData):
testRun.testcases.append(testcase)
if line.startswith("==============================================================================="):
endOfRun = True
lineNumber += 1
lines = testRun.generateSortedUnapprovedLines(testRun.outputLine)
rawSortedPath = os.path.join( rootPath, '{0}.sorted.unapproved.txt'.format( baseName ) ) rawSortedPath = os.path.join( rootPath, '{0}.sorted.unapproved.txt'.format( baseName ) )
rawWriteFile = open( rawSortedPath, 'wb' ) rawWriteFile = open( rawSortedPath, 'wb' )
@ -709,18 +722,18 @@ def approveMsTest( baseName, filter ):
parseTrxFile(baseName, trxFile) parseTrxFile(baseName, trxFile)
# Standard console reporter # Standard console reporter
#approve( "console.std", ["~_"] ) approve( "console.std", ["~_"] )
# console reporter, include passes, warn about No Assertions # console reporter, include passes, warn about No Assertions
#approve( "console.sw", ["~_", "-s", "-w", "NoAssertions"] ) approve( "console.sw", ["~_", "-s", "-w", "NoAssertions"] )
# console reporter, include passes, warn about No Assertions, limit failures to first 4 # console reporter, include passes, warn about No Assertions, limit failures to first 4
#approve( "console.swa4", ["~_", "-s", "-w", "NoAssertions", "-x", "4"] ) approve( "console.swa4", ["~_", "-s", "-w", "NoAssertions", "-x", "4"] )
# junit reporter, include passes, warn about No Assertions # junit reporter, include passes, warn about No Assertions
#approveJunit( "junit.sw", ["~_", "-s", "-w", "NoAssertions", "-r", "junit"] ) approveJunit( "junit.sw", ["~_", "-s", "-w", "NoAssertions", "-r", "junit"] )
# xml reporter, include passes, warn about No Assertions # xml reporter, include passes, warn about No Assertions
#approveXml( "xml.sw", ["~_", "-s", "-w", "NoAssertions", "-r", "xml"] ) approveXml( "xml.sw", ["~_", "-s", "-w", "NoAssertions", "-r", "xml"] )
#mstest runner, xml output # mstest runner, xml output
#approveMsTest( "mstest.std", "all") approveMsTest( "mstest.std", "all")
#approveMsTest( "mstest.sw", "allSucceeding") approveMsTest( "mstest.sw", "allSucceeding")
approveMsTest( "mstest.swa4", "allSucceedingAborting") approveMsTest( "mstest.swa4", "allSucceedingAborting")
if overallResult <> 0: if overallResult <> 0: