mirror of
https://github.com/catchorg/Catch2.git
synced 2024-11-17 03:02:24 +01:00
Add ability to stop after n tests. Add ordered list of tests to check sw4
This commit is contained in:
parent
15cc1fc185
commit
e8c32e819c
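The gist of the change, pieced together from the hunks below: the Visual Studio runner glue gains two new per-file overrides. CATCH_CONFIG_ABORT_AFTER( v ) records a stop-after threshold that the generated batch runner copies into Catch::ConfigData::abortAfter, and CATCH_INTERNAL_CONFIG_ADD_TEST( name ) appends a test name to an ordered list that INTERNAL_CATCH_MAP_CATEGORY_TO_LIST replays in declaration order. A minimal usage sketch, assembled from the sw4 approval test near the bottom of this diff:

    // Run the listed tests in this order, show successes, warn on missing
    // assertions, and stop once the abort-after threshold of 4 is reached.
    CATCH_CONFIG_SHOW_SUCCESS(true)
    CATCH_CONFIG_WARN_MISSING_ASSERTIONS(true)
    CATCH_CONFIG_ABORT_AFTER(4)
    CATCH_INTERNAL_CONFIG_ADD_TEST("Some simple comparisons between doubles")
    CATCH_INTERNAL_CONFIG_ADD_TEST("Approximate comparisons with floats")
    INTERNAL_CATCH_MAP_CATEGORY_TO_LIST(allSucceedingAborting);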
@@ -208,10 +208,12 @@
 #define CATCH_MAP_CATEGORY_TO_TAG( Category, Tag ) INTERNAL_CATCH_MAP_CATEGORY_TO_TAG( Category, Tag )
 #define CATCH_CONFIG_SHOW_SUCCESS( v ) CATCH_INTERNAL_CONFIG_SHOW_SUCCESS( v )
 #define CATCH_CONFIG_WARN_MISSING_ASSERTIONS( v ) CATCH_INTERNAL_CONFIG_WARN_MISSING_ASSERTIONS( v )
+#define CATCH_CONFIG_ABORT_AFTER( v ) CATCH_INTERNAL_CONFIG_ABORT_AFTER( v )
 #else
 #define CATCH_MAP_CATEGORY_TO_TAG( Category, Tag )
 #define CATCH_CONFIG_SHOW_SUCCESS( v )
 #define CATCH_CONFIG_WARN_MISSING_ASSERTIONS( v )
+#define CATCH_CONFIG_ABORT_AFTER( v )
 #endif

 using Catch::Detail::Approx;

@@ -45,8 +45,10 @@ namespace CatchOverrides {
     template <typename T>
     class Config
     {
-        typedef std::map<int, bool> LineData;
+        typedef std::map<int, int> LineData;
         typedef std::map<std::string, LineData> FileLineData;
+        typedef std::map<int, std::string> StringLineData;
+        typedef std::map<std::string, StringLineData> FileStringLineData;
     public:
         bool includeSuccessfulResults(const std::string& file, int c) const
         {
@@ -58,7 +60,7 @@
             {
                 if( c <= lineIt->first )
                     break;
-                result = lineIt->second;
+                result = lineIt->second ? true : false;
             }
         }
         return result;
@@ -69,12 +71,12 @@
             if( it == showSuccessfulTestsData.end() )
             {
                 LineData tmp;
-                tmp.insert(std::make_pair(c,v));
+                tmp.insert(std::make_pair(c,(v ? 1 : 0)));
                 showSuccessfulTestsData.insert(std::make_pair(file, tmp));
             }
             else
             {
-                it->second.insert(std::make_pair(c,v));
+                it->second.insert(std::make_pair(c,(v ? 1 : 0)));
             }
         }
         bool warnAboutMissingAssertions(const std::string& file, int c) const
@@ -87,7 +89,7 @@
             {
                 if( c <= lineIt->first )
                     break;
-                result = lineIt->second;
+                result = lineIt->second ? true : false;
             }
         }
         return result;
@@ -98,10 +100,72 @@
             if( it == missingAssertionData.end() )
             {
                 LineData tmp;
-                tmp.insert(std::make_pair(c,v));
+                tmp.insert(std::make_pair(c,(v ? 1 : 0)));
                 missingAssertionData.insert(std::make_pair(file, tmp));
             }
             else
             {
-                it->second.insert(std::make_pair(c,v));
+                it->second.insert(std::make_pair(c,(v ? 1 : 0)));
             }
         }
+        int abortAfter(const std::string& file, int c) const
+        {
+            int result(-1);
+            FileLineData::const_iterator it = abortAfterData.find(file);
+            if( it != abortAfterData.end() )
+            {
+                for( LineData::const_iterator lineIt = it->second.begin(); lineIt != it->second.end(); ++lineIt )
+                {
+                    if( c <= lineIt->first )
+                        break;
+                    result = lineIt->second;
+                }
+            }
+            return result;
+        }
+        void insertAbortAfter(const std::string& file, int c, int v)
+        {
+            FileLineData::iterator it = abortAfterData.find(file);
+            if( it == abortAfterData.end() )
+            {
+                LineData tmp;
+                tmp.insert(std::make_pair(c,v));
+                abortAfterData.insert(std::make_pair(file, tmp));
+            }
+            else
+            {
+                it->second.insert(std::make_pair(c,v));
+            }
+        }
+        std::vector<std::string> listOfTests(const std::string& file, int c) const
+        {
+            std::vector<std::string> result;
+            FileStringLineData::const_iterator it = testData.find(file);
+            if( it != testData.end() )
+            {
+                for( StringLineData::const_iterator lineIt = it->second.begin(); lineIt != it->second.end(); ++lineIt )
+                {
+                    if( lineIt->second.empty() && c > lineIt->first)
+                        result.clear();
+                    else {
+                        if( c <= lineIt->first )
+                            break;
+                        result.push_back(lineIt->second);
+                    }
+                }
+            }
+            return result;
+        }
+        void insertTest(const std::string& file, int c, const std::string& v)
+        {
+            FileStringLineData::iterator it = testData.find(file);
+            if( it == testData.end() )
+            {
+                StringLineData tmp;
+                tmp.insert(std::make_pair(c,v));
+                testData.insert(std::make_pair(file, tmp));
+            }
+            else
+            {
+                it->second.insert(std::make_pair(c,v));
+            }
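A note on the lookup rules used by abortAfter() and listOfTests() above: every override is keyed by the __COUNTER__ value captured where it was declared, so walking the std::map replays the declarations in source order, and the walk stops at the first entry at or past the requesting counter c. For listOfTests(), an empty name (the marker ConfigReset inserts) clears whatever was accumulated before it. A standalone sketch of that logic, not part of the commit:

    #include <iostream>
    #include <map>
    #include <string>
    #include <vector>

    int main() {
        std::map<int, std::string> entries; // counter value -> test name
        entries.insert(std::make_pair(10, std::string()));        // reset marker
        entries.insert(std::make_pair(11, std::string("first")));
        entries.insert(std::make_pair(12, std::string("second")));

        int c = 20; // counter of the batch runner asking for its list
        std::vector<std::string> result;
        for( std::map<int, std::string>::const_iterator it = entries.begin();
             it != entries.end(); ++it ) {
            if( it->second.empty() && c > it->first )
                result.clear();               // reset: drop earlier entries
            else {
                if( c <= it->first )
                    break;                    // declared after the runner: stop
                result.push_back(it->second);
            }
        }
        for( std::vector<std::string>::const_iterator it = result.begin();
             it != result.end(); ++it )
            std::cout << *it << "\n";         // prints "first", then "second"
        return 0;
    }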
@@ -117,6 +181,8 @@ namespace CatchOverrides {
     private:
         FileLineData showSuccessfulTestsData;
         FileLineData missingAssertionData;
+        FileLineData abortAfterData;
+        FileStringLineData testData;

         static Config<T>* s_instance;
     };
@@ -130,6 +196,8 @@ namespace CatchOverrides {
         {
             Config<T>::instance().insertSuccessfulResults(file, c, false);
             Config<T>::instance().insertMissingAssertions(file, c, false);
+            Config<T>::instance().insertAbortAfter(file, c, -1);
+            Config<T>::instance().insertTest(file, c, "");
         }
     };

@@ -152,6 +220,26 @@ namespace CatchOverrides {
             Config<T>::instance().insertMissingAssertions(file, c, v ? true : false);
         }
     };

+    template <typename T>
+    struct ConfigAbortAfter
+    {
+        template <typename U>
+        ConfigAbortAfter( const std::string& file, int c, U v )
+        {
+            Config<T>::instance().insertAbortAfter(file, c, v);
+        }
+    };
+
+    template <typename T>
+    struct ConfigAddTest
+    {
+        template <typename U>
+        ConfigAddTest( const std::string& file, int c, U v )
+        {
+            Config<T>::instance().insertTest(file, c, v);
+        }
+    };
 }

 namespace Catch {

@@ -192,6 +192,9 @@ private:
 #define INTERNAL_CATCH_MAP_CATEGORY_TO_TAG( Category, Tag ) \
     INTERNAL_CATCH_MAP_CATEGORY_TO_TAG2( #Category, Tag, __COUNTER__ )

+#define INTERNAL_CATCH_MAP_CATEGORY_TO_LIST( Category ) \
+    INTERNAL_CATCH_MAP_CATEGORY_TO_LIST2( #Category, __COUNTER__ )
+
 #define FAIL_STRING( str ) _T( str )

 #else // detect CLR
@@ -238,6 +241,9 @@ private:
 #define INTERNAL_CATCH_MAP_CATEGORY_TO_TAG( Category, Tag ) \
     INTERNAL_CATCH_MAP_CATEGORY_TO_TAG2( Category, Tag, __COUNTER__ )

+#define INTERNAL_CATCH_MAP_CATEGORY_TO_LIST( Category, List ) \
+    INTERNAL_CATCH_MAP_CATEGORY_TO_LIST2( Category, List, __COUNTER__ )
+
 #define FAIL_STRING( str ) WIDEN( str )

 #endif // detect CLR
@@ -250,12 +256,24 @@ private:
 #define CATCH_INTERNAL_CONFIG_WARN_MISSING_ASSERTIONS2( v, Count ) \
     namespace { CatchOverrides::ConfigWarnMissingAssertions<Catch::IConfig const*> INTERNAL_CATCH_UNIQUE_NAME_LINE( C_A_T_C_H_____O_V_E_R_R_I_D_E____, INTERNAL_CATCH_CONCAT_LINE_COUNTER( Count ) )(__FILE__, Count, v); }

+#define CATCH_INTERNAL_CONFIG_ABORT_AFTER2( v, Count ) \
+    namespace { CatchOverrides::ConfigAbortAfter<Catch::IConfig const*> INTERNAL_CATCH_UNIQUE_NAME_LINE( C_A_T_C_H_____O_V_E_R_R_I_D_E____, INTERNAL_CATCH_CONCAT_LINE_COUNTER( Count ) )(__FILE__, Count, v); }
+
+#define CATCH_INTERNAL_CONFIG_ADD_TEST2( v, Count ) \
+    namespace { CatchOverrides::ConfigAddTest<Catch::IConfig const*> INTERNAL_CATCH_UNIQUE_NAME_LINE( C_A_T_C_H_____O_V_E_R_R_I_D_E____, INTERNAL_CATCH_CONCAT_LINE_COUNTER( Count ) )(__FILE__, Count, v); }
+
 #define CATCH_INTERNAL_CONFIG_SHOW_SUCCESS( v ) \
     CATCH_INTERNAL_CONFIG_SHOW_SUCCESS2( v, __COUNTER__)

 #define CATCH_INTERNAL_CONFIG_WARN_MISSING_ASSERTIONS( v ) \
     CATCH_INTERNAL_CONFIG_WARN_MISSING_ASSERTIONS2( v, __COUNTER__)

+#define CATCH_INTERNAL_CONFIG_ABORT_AFTER( v ) \
+    CATCH_INTERNAL_CONFIG_ABORT_AFTER2( v, __COUNTER__)
+
+#define CATCH_INTERNAL_CONFIG_ADD_TEST( v ) \
+    CATCH_INTERNAL_CONFIG_ADD_TEST2( v, __COUNTER__)
+
 #define CATCH_INTERNAL_RUN_SINGLE_TEST( Count ) \
     { CatchOverrides::ConfigGuard cg; \
       Catch::ConfigData cd(cg.value().get()); \
@@ -263,6 +281,7 @@ private:
       cd.abortAfter = 1; \
       cd.showSuccessfulTests = CatchOverrides::Config<Catch::IConfig const*>::instance().includeSuccessfulResults(__FILE__, Count ); \
       cd.warnings = (CatchOverrides::Config<Catch::IConfig const*>::instance().warnAboutMissingAssertions(__FILE__, Count ) ? Catch::WarnAbout::NoAssertions : Catch::WarnAbout::Nothing); \
+      cd.abortAfter = CatchOverrides::Config<Catch::IConfig const*>::instance().abortAfter(__FILE__, Count ); \
       Catch::Ptr<Catch::Config> config(new Catch::Config(cd)); \
       Catch::MSTestReporter* rep = new Catch::MSTestReporter(config.get()); \
       Catch::RunContext tr(config.get(), rep); \
@@ -351,6 +370,7 @@ private:
       Catch::ConfigData cd; \
       cd.showSuccessfulTests = CatchOverrides::Config<Catch::IConfig const*>::instance().includeSuccessfulResults(__FILE__, Count ); \
       cd.warnings = (CatchOverrides::Config<Catch::IConfig const*>::instance().warnAboutMissingAssertions(__FILE__, Count ) ? Catch::WarnAbout::NoAssertions : Catch::WarnAbout::Nothing); \
+      cd.abortAfter = CatchOverrides::Config<Catch::IConfig const*>::instance().abortAfter(__FILE__, Count ); \
       cd.reporterName = "vs_reporter"; \
       cd.name = "Batch run using tag : " Tag; \
       cd.testsOrTags.push_back( Tag ); \
@@ -365,6 +385,35 @@ private:
     }; \
 }

+#define INTERNAL_CATCH_MAP_CATEGORY_TO_LIST2( Category, Count ) \
+    CHECK_FOR_TEST_CASE_CLASH \
+    namespace CATCH_INTERNAL_NAMESPACE( INTERNAL_CATCH_CONCAT_LINE_COUNTER( Count ) ) { \
+        CatchOverrides::ConfigReset<Catch::IConfig const*> INTERNAL_CATCH_UNIQUE_NAME_LINE( C_A_T_C_H____T_E_S_T____C_O_N_F_I_G___, INTERNAL_CATCH_CONCAT_LINE_COUNTER( Count ) )(__FILE__, Count); \
+        INTERNAL_CATCH_CLASS_DEFINITION( INTERNAL_CATCH_UNIQUE_NAME_LINE( C_A_T_C_H____T_E_S_T____C_L_A_S_S___, INTERNAL_CATCH_CONCAT_LINE_COUNTER( Count ) ) ) \
+        { \
+            INTERNAL_CATCH_CLASS_CONTEXT \
+            BEGIN_INTERNAL_CATCH_BATCH_METHOD( Category, INTERNAL_CATCH_CONCAT_LINE_COUNTER( Count ) ) \
+            { \
+                Catch::ConfigData cd; \
+                cd.showSuccessfulTests = CatchOverrides::Config<Catch::IConfig const*>::instance().includeSuccessfulResults(__FILE__, Count ); \
+                cd.warnings = (CatchOverrides::Config<Catch::IConfig const*>::instance().warnAboutMissingAssertions(__FILE__, Count ) ? Catch::WarnAbout::NoAssertions : Catch::WarnAbout::Nothing); \
+                cd.abortAfter = CatchOverrides::Config<Catch::IConfig const*>::instance().abortAfter(__FILE__, Count ); \
+                cd.reporterName = "vs_reporter"; \
+                cd.name = "Batch run using category : " Category; \
+                std::vector<std::string> stringNames = CatchOverrides::Config<Catch::IConfig const*>::instance().listOfTests(__FILE__, Count ); \
+                Catch::Ptr<Catch::Config> config(new Catch::Config(cd)); \
+                Catch::MSTestReporter* rep = new Catch::MSTestReporter(config.get()); \
+                Catch::RunContext tr(config.get(), rep); \
+                for( std::vector<std::string>::iterator it = stringNames.begin(); it != stringNames.end(); ++it ) { \
+                    std::vector<Catch::TestCase> testCase = Catch::getRegistryHub().getTestCaseRegistry().getMatchingTestCases(*it); \
+                    if( testCase.empty() ) Assert::Fail(FAIL_STRING("No tests match")); \
+                    if( testCase.size() > 1 ) Assert::Fail(FAIL_STRING("More than one test with the same name")); \
+                    tr.runTest(*testCase.begin()); \
+                } \
+            } \
+        }; \
+    }
+
 //#undef CATCH_CONFIG_VARIADIC_MACROS

 #ifdef CATCH_CONFIG_VARIADIC_MACROS

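The macros above lean on __COUNTER__ (an MSVC/GCC/Clang extension) to mint a unique global per expansion; that global's constructor runs before main() and records the override together with the counter value that later keys the lookups. A standalone sketch of the naming trick, with hypothetical names rather than the library's actual helpers:

    #include <iostream>

    #define SKETCH_CONCAT2( a, b ) a##b
    #define SKETCH_CONCAT( a, b ) SKETCH_CONCAT2( a, b )

    struct Recorder {
        Recorder( const char* file, int counter, int value ) {
            std::cout << file << ":" << counter << " = " << value << "\n";
        }
    };

    // Each expansion pastes a fresh __COUNTER__ value into the identifier,
    // so repeated uses in one translation unit never collide.
    #define RECORD2( v, Count ) \
        namespace { Recorder SKETCH_CONCAT( recorder, Count )( __FILE__, Count, v ); }
    #define RECORD( v ) RECORD2( v, __COUNTER__ )

    RECORD( 1 ) // defines e.g. recorder0; its constructor runs at static init
    RECORD( 2 ) // defines e.g. recorder1

    int main() { return 0; }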
@@ -23,7 +23,7 @@ namespace MI1
 // greedy instantiation (or whatever process it uses) eliminate all other
 // references to the globalCount

-TEST_CASE("message counting1","")
+TEST_CASE("message counting1","[vs]")
 {
     if( Counter::g_haveCountedMessages > 0 ) {
         REQUIRE( Catch::MessageInfoCounter<unsigned int>::globalCount > 0 );
@@ -42,7 +42,7 @@ int g_haveCountedMessagesLong = 0;

 namespace MI1
 {
-TEST_CASE("long message counting1","")
+TEST_CASE("long message counting1","[vs]")
 {
     if( LongCounter::g_haveCountedMessagesLong > 0 ) {
         REQUIRE( Catch::MessageInfoCounter<long>::globalCount > 0 );

@@ -22,7 +22,7 @@ namespace MI2
 // greedy instantiation (or whatever process it uses) eliminate all other
 // references to the globalCount

-TEST_CASE("message counting2","")
+TEST_CASE("message counting2","[vs]")
 {
     if( Counter::g_haveCountedMessages > 0 ) {
         REQUIRE( Catch::MessageInfoCounter<unsigned int>::globalCount > 0 );
@@ -41,7 +41,7 @@ extern int g_haveCountedMessagesLong;

 namespace MI2
 {
-TEST_CASE("long message counting2","")
+TEST_CASE("long message counting2","[vs]")
 {
     if( LongCounter::g_haveCountedMessagesLong > 0 ) {
         REQUIRE( Catch::MessageInfoCounter<long>::globalCount > 0 );

@@ -12,6 +12,7 @@

 namespace AllTestsRunner {

+#ifdef OLD_RUNNER
 class NullStreamingReporter : public Catch::SharedImpl<Catch::IStreamingReporter> {
 public:

@@ -54,6 +55,23 @@ namespace AllTestsRunner {
     Catch::Ptr<Catch::IStreamingReporter> m_reporter;
 };

+NullStreamingReporter::~NullStreamingReporter() {}
+
+Catch::Totals EmbeddedRunner::runMatching( const std::string& rawTestSpec, std::size_t groupIndex, std::size_t groupsCount, const std::string& ) {
+    std::ostringstream oss;
+    Catch::Ptr<Catch::Config> config = new Catch::Config();
+    config->setStreamBuf( oss.rdbuf() );
+
+    Catch::Totals totals;
+
+    // Scoped because RunContext doesn't report EndTesting until its destructor
+    {
+        Catch::RunContext runner( config.get(), m_reporter.get() );
+        totals = runner.runMatching( rawTestSpec, groupIndex, groupsCount );
+    }
+    return totals;
+}
+
 class MetaTestRunner {

 public:
@@ -116,23 +134,6 @@ namespace AllTestsRunner {
     std::size_t m_groupsCount;
 };

-NullStreamingReporter::~NullStreamingReporter() {}
-
-Catch::Totals EmbeddedRunner::runMatching( const std::string& rawTestSpec, std::size_t groupIndex, std::size_t groupsCount, const std::string& ) {
-    std::ostringstream oss;
-    Catch::Ptr<Catch::Config> config = new Catch::Config();
-    config->setStreamBuf( oss.rdbuf() );
-
-    Catch::Totals totals;
-
-    // Scoped because RunContext doesn't report EndTesting until its destructor
-    {
-        Catch::RunContext runner( config.get(), m_reporter.get() );
-        totals = runner.runMatching( rawTestSpec, groupIndex, groupsCount );
-    }
-    return totals;
-}
-
 TEST_CASE( "Run all failing and succeeding tests", "[vsall]" ) {

 ///////////////////////////////////////////////////////////////////////////
@@ -181,8 +182,66 @@ namespace AllTestsRunner {
     }
 }

 // mstest /TestContainer:Debug\ManagedTestCatch.dll /category:"all"
 #if defined(INTERNAL_CATCH_VS_MANAGED) || defined(INTERNAL_CATCH_VS_NATIVE)
-CATCH_MAP_CATEGORY_TO_TAG(all, "[vsall]");
+TEST_CASE( "Run all failing and succeeding tests", "[sw4][vs]" ) {
+    CatchOverrides::ConfigGuard cg;
+    Catch::ConfigData cd(cg.value().get());
+    cd.name = "Test sw4";
+    cd.abortAfter = 1;
+    cd.showSuccessfulTests = true;
+    cd.warnings = Catch::WarnAbout::NoAssertions;
+    cd.abortAfter = -1;
+    Catch::Ptr<Catch::Config> config(new Catch::Config(cd));
+    Catch::MSTestReporter* rep = new Catch::MSTestReporter(config.get());
+    Catch::RunContext tr(config.get(), rep);
+    std::string names[] = {"one","two","three"};
+    std::vector<std::string> stringNames(names, names + (sizeof(names)/sizeof(std::string)));
+    std::vector<Catch::TestCase> testCase = Catch::getRegistryHub().getTestCaseRegistry().getMatchingTestCases( "Some simple comparisons between doubles" );
+    //std::vector<Catch::TestCase> testCase = Catch::getRegistryHub().getTestCaseRegistry().getMatchingTestCases(name_desc.name);
+    if( testCase.empty() ) Assert::Fail(FAIL_STRING("No tests match"));
+    if( testCase.size() > 1 ) Assert::Fail(FAIL_STRING("More than one test with the same name"));
+    Catch::Totals totals = tr.runTest(*testCase.begin());
+    if( totals.assertions.failed > 0 ) {
+        INTERNAL_CATCH_TEST_THROW_FAILURE
+    }
+    /*for(std::vector<Catch::TestCase>::iterator it = tests.begin(); it != tests.end(); ++it )
+    {
+        Catch::Totals totals;
+        std::size_t groupIndex(0);
+        std::size_t groupsCount(0);
+        {
+            EmbeddedRunner runner;
+            std::string name = it->getTestCaseInfo().name;
+            totals = runner.runMatching( name, groupIndex, groupsCount );
+        }
+    }*/
+}
+#endif
+
+// mstest /TestContainer:Debug\ManagedTestCatch.dll /category:"all"
+#if defined(INTERNAL_CATCH_VS_MANAGED) || defined(INTERNAL_CATCH_VS_NATIVE)
+CATCH_MAP_CATEGORY_TO_TAG(all, "~[vs]");
+
+CATCH_CONFIG_SHOW_SUCCESS(true)
+CATCH_CONFIG_WARN_MISSING_ASSERTIONS(true)
+CATCH_MAP_CATEGORY_TO_TAG(allSucceeding, "~[vs]");
+
+CATCH_CONFIG_SHOW_SUCCESS(true)
+CATCH_CONFIG_WARN_MISSING_ASSERTIONS(true)
+CATCH_CONFIG_ABORT_AFTER(4)
+CATCH_INTERNAL_CONFIG_ADD_TEST("Some simple comparisons between doubles")
+CATCH_INTERNAL_CONFIG_ADD_TEST("Approximate comparisons with different epsilons")
+CATCH_INTERNAL_CONFIG_ADD_TEST("Approximate comparisons with floats")
+CATCH_INTERNAL_CONFIG_ADD_TEST("Approximate comparisons with ints")
+CATCH_INTERNAL_CONFIG_ADD_TEST("Approximate comparisons with mixed numeric types")
+CATCH_INTERNAL_CONFIG_ADD_TEST("Use a custom approx")
+CATCH_INTERNAL_CONFIG_ADD_TEST("Approximate PI")
+CATCH_INTERNAL_CONFIG_ADD_TEST("A METHOD_AS_TEST_CASE based test run that succeeds")
+CATCH_INTERNAL_CONFIG_ADD_TEST("A METHOD_AS_TEST_CASE based test run that fails")
+CATCH_INTERNAL_CONFIG_ADD_TEST("A TEST_CASE_METHOD based test run that succeeds")
+CATCH_INTERNAL_CONFIG_ADD_TEST("A TEST_CASE_METHOD based test run that fails")
+CATCH_INTERNAL_CONFIG_ADD_TEST("Equality checks that should succeed")
+CATCH_INTERNAL_CONFIG_ADD_TEST("Equality checks that should fail]")
+INTERNAL_CATCH_MAP_CATEGORY_TO_LIST(allSucceedingAborting);
 #endif

 }

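Condensed from the "[sw4][vs]" test above, this is the pattern the commit uses to drive one named Catch test programmatically; a sketch that assumes the VS runner headers from this branch (MSTestReporter, FAIL_STRING, Assert), not a standalone program:

    Catch::ConfigData cd;
    cd.showSuccessfulTests = true;
    cd.warnings = Catch::WarnAbout::NoAssertions;
    cd.abortAfter = -1; // -1 is the "no abort-after override" default in this commit
    Catch::Ptr<Catch::Config> config( new Catch::Config( cd ) );
    Catch::MSTestReporter* rep = new Catch::MSTestReporter( config.get() );
    Catch::RunContext tr( config.get(), rep );
    std::vector<Catch::TestCase> matches = Catch::getRegistryHub()
        .getTestCaseRegistry().getMatchingTestCases( "Some simple comparisons between doubles" );
    if( matches.size() == 1 ) {
        Catch::Totals totals = tr.runTest( *matches.begin() );
        // totals.assertions.failed tells the MSTest wrapper whether to report failure
    }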
@@ -24,26 +24,26 @@ namespace VisualStudioTests
     };

     int UniqueTestsFixture::uniqueID = 0;
-    TEST_CASE("M00", "[m_off]")
+    TEST_CASE("M00", "[m_off][vs]")
     {
         bool show = Catch::getCurrentContext().getConfig()->includeSuccessfulResults();
         REQUIRE(!show);
     }

     CATCH_CONFIG_SHOW_SUCCESS(true)
-    TEST_CASE("M01", "[m_on]")
+    TEST_CASE("M01", "[m_on][vs]")
     {
         bool show = Catch::getCurrentContext().getConfig()->includeSuccessfulResults();
         REQUIRE(show);
     }

-    TEST_CASE("M02", "[m_off]")
+    TEST_CASE("M02", "[m_off][vs]")
     {
         bool show = Catch::getCurrentContext().getConfig()->includeSuccessfulResults();
         REQUIRE(!show);
     }

-    TEST_CASE_METHOD(UniqueTestsFixture, "M10", "[m_off]")
+    TEST_CASE_METHOD(UniqueTestsFixture, "M10", "[m_off][vs]")
     {
         bool show = Catch::getCurrentContext().getConfig()->includeSuccessfulResults();
         REQUIRE(!show);
@@ -52,7 +52,7 @@ namespace VisualStudioTests

     CATCH_CONFIG_WARN_MISSING_ASSERTIONS(true)
     CATCH_CONFIG_SHOW_SUCCESS(true)
-    TEST_CASE_METHOD(UniqueTestsFixture, "M11", "[m_on]")
+    TEST_CASE_METHOD(UniqueTestsFixture, "M11", "[m_on][vs]")
     {
         bool show = Catch::getCurrentContext().getConfig()->includeSuccessfulResults();
         REQUIRE(show);
@@ -61,7 +61,7 @@ namespace VisualStudioTests

     CATCH_CONFIG_WARN_MISSING_ASSERTIONS(true)
     CATCH_CONFIG_SHOW_SUCCESS(true)
-    TEST_CASE_METHOD(UniqueTestsFixture, "M99", "[m_on]")
+    TEST_CASE_METHOD(UniqueTestsFixture, "M99", "[m_on][vs]")
     {
         bool show = Catch::getCurrentContext().getConfig()->includeSuccessfulResults();
         REQUIRE(show);
@@ -69,7 +69,7 @@ namespace VisualStudioTests
         getID();
     }

-    TEST_CASE_METHOD(UniqueTestsFixture, "M12", "[m_off]")
+    TEST_CASE_METHOD(UniqueTestsFixture, "M12", "[m_off][vs]")
     {
         bool show = Catch::getCurrentContext().getConfig()->includeSuccessfulResults();
         REQUIRE(!show);
@@ -95,16 +95,16 @@ namespace VisualStudioTests
             REQUIRE(!show);
         }
     };
-    METHOD_AS_TEST_CASE(ConfigTest::run1,"M20", "[m_off]");
+    METHOD_AS_TEST_CASE(ConfigTest::run1,"M20", "[m_off][vs]");

     CATCH_CONFIG_SHOW_SUCCESS(true)
-    METHOD_AS_TEST_CASE(ConfigTest::run2,"M21", "[m_on]");
+    METHOD_AS_TEST_CASE(ConfigTest::run2,"M21", "[m_on][vs]");

-    METHOD_AS_TEST_CASE(ConfigTest::run3,"M22", "[m_off]");
+    METHOD_AS_TEST_CASE(ConfigTest::run3,"M22", "[m_off][vs]");

-    CATCH_MAP_CATEGORY_TO_TAG(vstestsCheckOutputOff, "[m_off]");
+    CATCH_MAP_CATEGORY_TO_TAG(vstestsCheckOutputOff, "[m_off][vs]");
     CATCH_CONFIG_SHOW_SUCCESS(true)
-    CATCH_MAP_CATEGORY_TO_TAG(vstestsCheckOutputOn, "[m_on]");
-    CATCH_MAP_CATEGORY_TO_TAG(vstestsCheckOutputOff2, "[m_off]");
+    CATCH_MAP_CATEGORY_TO_TAG(vstestsCheckOutputOn, "[m_on][vs]");
+    CATCH_MAP_CATEGORY_TO_TAG(vstestsCheckOutputOff2, "[m_off][vs]");
 }
 #endif

@@ -23,7 +23,7 @@ if len(sys.argv) == 2:
 else:
     if sys.platform == 'win32':
         cmdPath = os.path.join( catchPath, 'projects\\VS2010\\TestCatch\\Release\\TestCatch.exe' )
-        dllPath = os.path.join( catchPath, 'projects\\VS2010\\ManagedTestCatch\\Release\\ManagedTestCatch.dll' )
+        #dllPath = os.path.join( catchPath, 'projects\\VS2010\\ManagedTestCatch\\Release\\ManagedTestCatch.dll' )
+        dllPath = os.path.join( catchPath, 'projects\\VS2010\\ManagedTestCatch\\Debug\\ManagedTestCatch.dll' )
     else:
         cmdPath = os.path.join( catchPath, 'projects/XCode4/CatchSelfTest/DerivedData/CatchSelfTest/Build/Products/Debug/CatchSelfTest' )

@@ -62,6 +63,7 @@ def approve( baseName, args ):
     else:
         raise Exception("Results file does not exist: '" + rawResultsPath + "'")

+    def callDiff():
         #os.remove( rawResultsPath )
         print
         print baseName + ":"
@@ -593,7 +595,7 @@ def approveXml( baseName, args ):
         rawWriteFile.write(line + "\n")
     rawWriteFile.close()

-def parseTrxFile(trxFile):
+def parseTrxFile(baseName, trxFile):
     print "TRX file:" ,trxFile
     if os.path.exists( trxFile ):
         xml = ""
@@ -657,19 +659,26 @@ def parseTrxFile(trxFile):
                 if tag != None and tag == "StdOut":
                     desc = sub.text
                     lines = desc.splitlines()
-                    if (len(lines) > 2 and
-                        lines[0].startswith("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") and
-                        lines[1].startswith("Using Catch v") ):
-                        lines = lines[2:-1]
-                        #print "*******",desc
-                        #print lines
+                    found = False
+                    index = 0
+                    for tmp in lines:
+                        if (len(lines) >= (index + 2) and
+                            lines[index].startswith("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") and
+                            lines[index + 1].startswith("Using Catch v") ):
+                            found = True
+                            break
+                        index += 1
+                    lines = lines[index + 2:-1]
+                    #print "*******",desc
+                    #print lines
+                    if found:
                         for line in lines:
                             testcase = resultParser.parseResultLine(line)
                             if isinstance(testcase, TestCaseData):
                                 testRun.testcases.append(testcase)
     lines = testRun.generateSortedUnapprovedLines(0)

-    rawSortedPath = os.path.join( rootPath, 'mstest.trx.sorted.unapproved.txt' )
+    rawSortedPath = os.path.join( rootPath, '{0}.sorted.unapproved.txt'.format( baseName ) )
     rawWriteFile = open( rawSortedPath, 'wb' )
     for line in lines:
         #print "L:",line
@@ -677,7 +686,7 @@ def parseTrxFile(trxFile):
     rawWriteFile.close()


-def approveMsTest( baseName ):
+def approveMsTest( baseName, filter ):
     rawResultsPath = os.path.join( rootPath, '_{0}.tmp'.format( baseName ) )
     if not(os.path.exists( dllPath )):
         raise Exception("Managed DLL does not exist: '" + dllPath + "'")
@@ -685,18 +694,19 @@ def approveMsTest( baseName ):
     args = []
     args.append("MSTest.exe")
     args.append("/testcontainer:" + dllPath)
-    #f = open( rawResultsPath, 'w' )
-    #subprocess.call( args, stdout=f, stderr=f )
-    #f.close()
+    args.append("/category:\"" + filter + "\"")
+    f = open( rawResultsPath, 'w' )
+    subprocess.call( args, stdout=f, stderr=f )
+    f.close()

-    #if os.path.exists( rawResultsPath ):
-    #    f = open( rawResultsPath, 'r' )
-    #    for line in f:
-    line = "Results file: c:\Projects\Catch\TestResults\NoyesMa_SACHDEW7 2013-12-09 11_43_57.trx"
+    if os.path.exists( rawResultsPath ):
+        f = open( rawResultsPath, 'r' )
+        for line in f:
+            #line = "Results file: c:\Projects\Catch\TestResults\NoyesMa_SACHDEW7 2013-12-09 11_43_57.trx"

-    if line.startswith("Results file:"):
-        trxFile = line[13:].strip()
-        parseTrxFile(trxFile)
+            if line.startswith("Results file:"):
+                trxFile = line[13:].strip()
+                parseTrxFile(baseName, trxFile)

 # Standard console reporter
 #approve( "console.std", ["~_"] )
@@ -709,7 +719,9 @@ def approveMsTest( baseName ):
 # xml reporter, include passes, warn about No Assertions
 #approveXml( "xml.sw", ["~_", "-s", "-w", "NoAssertions", "-r", "xml"] )
 #mstest runner, xml output
-approveMsTest( "mstest.sw")
+#approveMsTest( "mstest.std", "all")
+#approveMsTest( "mstest.sw", "allSucceeding")
+approveMsTest( "mstest.swa4", "allSucceedingAborting")

 if overallResult <> 0:
     print "run approve.py to approve new baselines"