Mirror of https://github.com/catchorg/Catch2.git, synced 2025-10-30 19:57:10 +01:00.
			
		
		
		
	Merge branch 'dev-performance-round3'
This commit is contained in:
		| @@ -19,6 +19,14 @@ | ||||
| #include "catch_type_traits.hpp" | ||||
|  | ||||
|  | ||||
| #if defined(CATCH_CONFIG_FAST_COMPILE) | ||||
| /////////////////////////////////////////////////////////////////////////////// | ||||
| // We can speedup compilation significantly by breaking into debugger lower in | ||||
| // the callstack, because then we don't have to expand CATCH_BREAK_INTO_DEBUGGER | ||||
| // macro in each assertion | ||||
| #define INTERNAL_CATCH_REACT( resultBuilder ) \ | ||||
|     resultBuilder.react(); | ||||
| #else | ||||
| /////////////////////////////////////////////////////////////////////////////// | ||||
| // In the event of a failure works out if the debugger needs to be invoked | ||||
| // and/or an exception thrown and takes appropriate action. | ||||
| @@ -26,7 +34,8 @@ | ||||
| // source code rather than in Catch library code | ||||
| #define INTERNAL_CATCH_REACT( resultBuilder ) \ | ||||
|     if( resultBuilder.shouldDebugBreak() ) CATCH_BREAK_INTO_DEBUGGER(); \ | ||||
|     resultBuilder.react(); | ||||
|     resultBuilder.react();  | ||||
| #endif | ||||
|  | ||||
|  | ||||
| /////////////////////////////////////////////////////////////////////////////// | ||||
|   | ||||
| @@ -100,6 +100,15 @@ namespace Catch { | ||||
|     } | ||||
|  | ||||
|     void ResultBuilder::react() { | ||||
| #if defined(CATCH_CONFIG_FAST_COMPILE) | ||||
|         if (m_shouldDebugBreak) { | ||||
|             /////////////////////////////////////////////////////////////////// | ||||
|             // To inspect the state during test, you need to go one level up the callstack | ||||
|             // To go back to the test and change execution, jump over the throw statement | ||||
|             /////////////////////////////////////////////////////////////////// | ||||
|             CATCH_BREAK_INTO_DEBUGGER(); | ||||
|         } | ||||
| #endif | ||||
|         if( m_shouldThrow ) | ||||
|             throw Catch::TestFailureException(); | ||||
|     } | ||||
|   | ||||
							
								
								
									
										146
									
								
								scripts/benchmarkCompile.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										146
									
								
								scripts/benchmarkCompile.py
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,146 @@ | ||||
| #!/usr/bin/env python | ||||
|  | ||||
| from __future__ import print_function | ||||
|  | ||||
| import time, subprocess, sys, os, shutil, glob, random | ||||
| import argparse | ||||
|  | ||||
def median(lst):
    """Return the median of *lst*: the middle element for odd length,
    the average of the two middle elements for even length."""
    ordered = sorted(lst)
    half = len(ordered) // 2
    if len(ordered) % 2:
        return ordered[half]
    return (ordered[half - 1] + ordered[half]) / 2.0
|  | ||||
def mean(lst):
    """Return the arithmetic mean of *lst* as a float (0.0 for an empty list)."""
    return sum(lst) / float(max(1, len(lst)))
|  | ||||
# Compiler binary and extra flags; filled in from the command line below.
compiler_path = ''
flags = []

# Translation unit providing Catch's main() via CATCH_CONFIG_MAIN.
main_file = r'''
#define CATCH_CONFIG_MAIN
#include "catch.hpp"
'''
main_name = 'catch-main.cpp'

# Scratch directory all generated sources are written into.
dir_name = 'benchmark-dir'

# Size of the synthetic suite: 20 files x 20 TEST_CASEs,
# each containing 4 SECTIONs of 5 assertions.
files = 20
test_cases_in_file = 20
sections_in_file = 4
assertions_per_section = 5

# Pool of assertions over the generated locals a..e (values 0..4)
# that each SECTION randomly samples from.
checks = [
    'a != b', 'a != c', 'a != d', 'a != e', 'b != c', 'b != d', 'b != e', 'c != d', 'c != e', 'd != e', 'a + a == a',
    'a + b == b', 'a + c == c', 'a + d == d', 'a + e == e', 'b + a == b', 'b + b == c', 'b + c == d',
    'b + d == e', 'c + a == c', 'c + b == d', 'c + c == e', 'd + a == d', 'd + b == e', 'e + a == e',
    'a + a + a == a', 'b + c == a + d', 'c + a + a == a + b + b + a',
    'a < b', 'b < c', 'c < d', 'd < e', 'a >= a', 'd >= b',
]
|  | ||||
def create_temp_dir():
    """Ensure *dir_name* exists and is empty: wipe any stale copy, then recreate it."""
    stale = os.path.exists(dir_name)
    if stale:
        shutil.rmtree(dir_name)
    os.mkdir(dir_name)
|  | ||||
def copy_catch(path_to_catch):
    """Place a copy of the single-header catch.hpp inside the benchmark directory."""
    shutil.copy(path_to_catch, dir_name)
|  | ||||
def create_catch_main():
    """Write the translation unit that supplies Catch's main() to *main_name*."""
    with open(main_name, 'w') as out:
        out.write(main_file)
|  | ||||
def compile_main():
    """Compile only the Catch main TU and return the wall-clock seconds taken."""
    started = time.time()
    subprocess.check_call([compiler_path, main_name, '-c'] + flags)
    return time.time() - started
|  | ||||
def compile_files():
    """Compile every .cpp in the cwd in a single compiler invocation; return seconds taken."""
    sources = glob.glob('*.cpp')
    started = time.time()
    subprocess.check_call([compiler_path, '-c'] + flags + sources)
    return time.time() - started
|  | ||||
def link_files():
    """Link all object files in the cwd into an executable; return seconds taken."""
    objects = glob.glob('*.o')
    started = time.time()
    subprocess.check_call([compiler_path] + flags + objects)
    return time.time() - started
|  | ||||
def benchmark(func):
    """Run *func* ten times and return (mean, median) of the values it yields."""
    timings = [func() for _ in range(10)]
    return mean(timings), median(timings)
|  | ||||
def char_range(start, end):
    """Yield consecutive characters from *start* up to, but not including, *end*."""
    for code in range(ord(start), ord(end)):
        yield chr(code)
|  | ||||
def generate_sections(fd):
    """Write `sections_in_file` SECTION blocks to *fd*, each containing a random
    sample of `assertions_per_section` CHECKs drawn from `checks`.

    Fix: the original wrote no newline after the last CHECK line, so the
    closing brace was glued onto it ('CHECK(...);    }') — still valid C++,
    but the generated files were malformed/unreadable.
    """
    for section_no in range(sections_in_file):
        fd.write('    SECTION("Section {}") {{\n'.format(section_no))
        fd.write('\n'.join('        CHECK({});'.format(check) for check in random.sample(checks, assertions_per_section)))
        # Terminate the final CHECK line before closing the section.
        fd.write('\n    }\n')
|  | ||||
|  | ||||
def generate_file(file_no):
    """Write tests<file_no>.cpp containing `test_cases_in_file` TEST_CASEs,
    each tagged '[.compile]' so it is hidden from normal runs.

    Fix: the inner enumerate loop reused `i`, shadowing the outer test-case
    index — harmless today only because `i` is not used after the inner loop,
    but a latent bug; the loop variables now have distinct names.
    """
    with open('tests{}.cpp'.format(file_no), 'w') as f:
        f.write('#include "catch.hpp"\n\n')
        for test_no in range(test_cases_in_file):
            f.write('TEST_CASE("File {} test {}", "[.compile]"){{\n'.format(file_no, test_no))
            # Locals a..e initialised to 0..4, referenced by the CHECK pool.
            for value, name in enumerate(char_range('a', 'f')):
                f.write('    int {} = {};\n'.format(name, value))
            generate_sections(f)
            f.write('}\n\n')
|  | ||||
|  | ||||
def generate_files():
    """Create the Catch main TU plus every synthetic test file in the cwd."""
    create_catch_main()
    for file_no in range(files):
        generate_file(file_no)
|  | ||||
|  | ||||
# ---------------------------------------------------------------------------
# Command-line interface and driver.
#
# Fixes over the original:
#  * `os.chdir(dir_name)` ran before the directory was ever created, so a
#    fresh `-g` run crashed (and `create_temp_dir` after the chdir would have
#    nested benchmark-dir/benchmark-dir). The generate branch now creates the
#    directory and copies catch.hpp (while relative paths still resolve
#    against the invocation directory) *before* changing into it.
#  * Successful file generation exited with status 1; it now exits 0 via
#    `sys.exit` as is conventional for scripts.
# ---------------------------------------------------------------------------
options = ['all', 'main', 'files', 'link']

parser = argparse.ArgumentParser(description='Benchmarks Catch\'s compile times against some synthetic tests')
# Add first arg -- benchmark type
parser.add_argument('benchmark_kind', nargs='?', default='all', choices=options, help='What kind of benchmark to run, default: all')

# Args to allow changing header/compiler
parser.add_argument('-I', '--catch-header', default='catch.hpp', help = 'Path to catch.hpp, default: catch.hpp')
parser.add_argument('-c', '--compiler', default='g++', help = 'Compiler to use, default: g++')

parser.add_argument('-f', '--flags', nargs='*', help = 'Flags to be passed to the compiler')

# Allow creating files only, without running the whole thing
parser.add_argument('-g', '--generate-files', action='store_true', help='Generate test files and quit')

args = parser.parse_args()

compiler_path = args.compiler
catch_path = args.catch_header

if args.generate_files:
    # Create the benchmark dir and copy the header before chdir-ing, so the
    # (possibly relative) catch.hpp path is resolved correctly.
    create_temp_dir()
    copy_catch(catch_path)
    os.chdir(dir_name)
    # now create the fake test files
    generate_files()
    # Early exit
    print('Finished generating files')
    sys.exit(0)

# Benchmarking assumes a previous `-g` run has populated dir_name.
os.chdir(dir_name)

if args.flags:
    flags = args.flags

print('Time needed for ...')
if args.benchmark_kind in ('all', 'main'):
    print('        ... compiling main, mean: {:.2f}, median: {:.2f} s'.format(*benchmark(compile_main)))
if args.benchmark_kind in ('all', 'files'):
    print('        ... compiling test files, mean: {:.2f}, median: {:.2f} s'.format(*benchmark(compile_files)))
if args.benchmark_kind in ('all', 'link'):
    print('        ... linking everything, mean: {:.2f}, median: {:.2f} s'.format(*benchmark(link_files)))
		Reference in New Issue
	
	Block a user
	 Martin Hořeňovský
					Martin Hořeňovský