mirror of
https://github.com/catchorg/Catch2.git
synced 2024-11-24 22:36:10 +01:00
Remove no longer used Python scripts
This commit is contained in:
parent
153965a655
commit
9d6fffb922
@ -1,148 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
from __future__ import print_function
|
|
||||||
|
|
||||||
import time, subprocess, sys, os, shutil, glob, random
|
|
||||||
import argparse
|
|
||||||
|
|
||||||
def median(lst):
    """Return the median of *lst*: the middle value, or the mean of the two
    middle values when the list has even length."""
    ordered = sorted(lst)
    half = len(ordered) // 2
    if len(ordered) % 2 == 1:
        return ordered[half]
    return (ordered[half - 1] + ordered[half]) / 2.0
|
|
||||||
|
|
||||||
def mean(lst):
    """Arithmetic mean of *lst*; an empty list yields 0.0 instead of raising."""
    total = float(sum(lst))
    return total / max(len(lst), 1)
|
|
||||||
|
|
||||||
# Compiler binary and extra flags -- overwritten from command-line args below.
compiler_path = ''
flags = []

# Translation unit that provides Catch's main(); compiled on its own.
main_file = r'''
#define CATCH_CONFIG_MAIN
#include "catch.hpp"
'''
main_name = 'catch-main.cpp'

# Scratch directory all generated sources are placed in.
dir_name = 'benchmark-dir'

# Size of the synthetic test suite that gets generated.
files = 20
test_cases_in_file = 20
sections_in_file = 4
assertions_per_section = 5

# Pool of assertion expressions; each SECTION samples a random subset.
# The variables a..e are declared as ints 0..4 in every generated test case.
checks = [
    'a != b', 'a != c', 'a != d', 'a != e', 'b != c', 'b != d', 'b != e', 'c != d', 'c != e', 'd != e', 'a + a == a',
    'a + b == b', 'a + c == c', 'a + d == d', 'a + e == e', 'b + a == b', 'b + b == c', 'b + c == d',
    'b + d == e', 'c + a == c', 'c + b == d', 'c + c == e', 'd + a == d', 'd + b == e', 'e + a == e',
    'a + a + a == a', 'b + c == a + d', 'c + a + a == a + b + b + a',
    'a < b', 'b < c', 'c < d', 'd < e', 'a >= a', 'd >= b',
]
|
|
||||||
|
|
||||||
def create_temp_dir():
    """Start from an empty scratch directory, discarding any previous run's."""
    if os.path.exists(dir_name):
        shutil.rmtree(dir_name)
    os.mkdir(dir_name)
|
|
||||||
|
|
||||||
def copy_catch(path_to_catch):
    """Copy the single-header catch.hpp into the scratch directory."""
    shutil.copy(path_to_catch, dir_name)
|
|
||||||
|
|
||||||
def create_catch_main():
    """Write the main()-providing translation unit into the current directory."""
    with open(main_name, 'w') as out:
        out.write(main_file)
|
|
||||||
|
|
||||||
def compile_main():
    """Compile the Catch main TU alone; return elapsed wall-clock seconds."""
    started = time.time()
    subprocess.check_call([compiler_path, main_name, '-c'] + flags)
    return time.time() - started
|
|
||||||
|
|
||||||
def compile_files():
    """Compile every generated tests*.cpp in a single compiler invocation;
    return elapsed wall-clock seconds."""
    sources = glob.glob('tests*.cpp')
    started = time.time()
    subprocess.check_call([compiler_path, '-c'] + flags + sources)
    return time.time() - started
|
|
||||||
|
|
||||||
def link_files():
    """Link all object files into the default executable; return elapsed
    wall-clock seconds."""
    objects = glob.glob('*.o')
    started = time.time()
    subprocess.check_call([compiler_path] + flags + objects)
    return time.time() - started
|
|
||||||
|
|
||||||
def benchmark(func, runs=10):
    """Run *func* repeatedly and summarise its timings.

    func -- nullary callable returning a number (a compile/link timing here).
    runs -- how many repetitions to average over; defaults to the previously
            hard-coded 10, so existing callers are unaffected.

    Returns a (mean, median) tuple of the collected results.
    """
    results = [func() for _ in range(runs)]
    return mean(results), median(results)
|
|
||||||
|
|
||||||
def char_range(start, end):
    """Yield single characters from *start* up to, but not including, *end*."""
    for code in range(ord(start), ord(end)):
        yield chr(code)
|
|
||||||
|
|
||||||
def generate_sections(fd):
    """Write `sections_in_file` SECTION blocks to *fd*, each containing a
    random sample of `assertions_per_section` expressions from `checks`.

    The sampled CHECK lines are '\n'-joined, so the closing brace shares a
    line with the last assertion -- still valid C++.
    """
    for section in range(sections_in_file):
        fd.write(' SECTION("Section {}") {{\n'.format(section))
        sampled = random.sample(checks, assertions_per_section)
        fd.write('\n'.join(' CHECK({});'.format(expr) for expr in sampled))
        fd.write(' }\n')
|
|
||||||
|
|
||||||
|
|
||||||
def generate_file(file_no):
    """Create tests<file_no>.cpp holding `test_cases_in_file` generated
    TEST_CASEs, each declaring ints a..e (0..4) and a set of SECTIONs."""
    with open('tests{}.cpp'.format(file_no), 'w') as out:
        out.write('#include "catch.hpp"\n\n')
        for case_no in range(test_cases_in_file):
            out.write('TEST_CASE("File {} test {}", "[.compile]"){{\n'.format(file_no, case_no))
            for value, name in enumerate(char_range('a', 'f')):
                out.write(' int {} = {};\n'.format(name, value))
            generate_sections(out)
            out.write('}\n\n')
|
|
||||||
|
|
||||||
|
|
||||||
def generate_files():
    """Emit the Catch main TU plus every synthetic test file into the cwd."""
    create_catch_main()
    for file_no in range(files):
        generate_file(file_no)
|
|
||||||
|
|
||||||
|
|
||||||
# Valid values for the positional benchmark-kind argument.
options = ['all', 'main', 'files', 'link']

parser = argparse.ArgumentParser(description='Benchmarks Catch\'s compile times against some synthetic tests')
# Add first arg -- benchmark type
parser.add_argument('benchmark_kind', nargs='?', default='all', choices=options, help='What kind of benchmark to run, default: all')

# Args to allow changing header/compiler
parser.add_argument('-I', '--catch-header', default='catch.hpp', help = 'Path to catch.hpp, default: catch.hpp')
parser.add_argument('-c', '--compiler', default='g++', help = 'Compiler to use, default: g++')

parser.add_argument('-f', '--flags', help = 'Flags to be passed to the compiler. Pass as "," separated list')

# Allow creating files only, without running the whole thing
parser.add_argument('-g', '--generate-files', action='store_true', help='Generate test files and quit')

args = parser.parse_args()

compiler_path = args.compiler
catch_path = args.catch_header

# With -g: (re)build the scratch directory, generate the sources, and stop.
# A normal benchmark run expects a previous -g run to have populated dir_name.
if args.generate_files:
    create_temp_dir()
    copy_catch(catch_path)
    os.chdir(dir_name)
    # now create the fake test files
    generate_files()
    # Early exit
    # NOTE(review): exit(1) reports failure to the shell even though
    # generation succeeded -- this probably should be exit(0); confirm.
    print('Finished generating files')
    exit(1)

os.chdir(dir_name)

if args.flags:
    flags = args.flags.split(',')

# Run the requested benchmark(s); each prints mean/median of 10 repetitions.
print('Time needed for ...')
if args.benchmark_kind in ('all', 'main'):
    print(' ... compiling main, mean: {:.2f}, median: {:.2f} s'.format(*benchmark(compile_main)))
if args.benchmark_kind in ('all', 'files'):
    print(' ... compiling test files, mean: {:.2f}, median: {:.2f} s'.format(*benchmark(compile_files)))
if args.benchmark_kind in ('all', 'link'):
    print(' ... linking everything, mean: {:.2f}, median: {:.2f} s'.format(*benchmark(link_files)))
|
|
@ -1,56 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
import subprocess, os, sys
|
|
||||||
import xml.etree.ElementTree as ET
|
|
||||||
from collections import defaultdict
|
|
||||||
from statistics import median, stdev
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
def get_commit_hash():
    """Return the current HEAD commit hash (via `git rev-parse HEAD`)."""
    completed = subprocess.run(
        ['git', 'rev-parse', 'HEAD'],
        check=True,
        stdout=subprocess.PIPE,
        universal_newlines=True,
    )
    return completed.stdout.strip()
|
|
||||||
|
|
||||||
# The benchmark binary to run is the sole required positional argument.
if len(sys.argv) < 2:
    print('Usage: {} benchmark-binary'.format(sys.argv[0]))
    exit(1)


# How many times the whole benchmark binary is executed.
num_runs = 10
# Maps test-case name -> list of durations in seconds, one entry per run.
data = defaultdict(list)
|
|
||||||
|
|
||||||
|
|
||||||
def parse_file(file):
    """Accumulate per-test-case durations from a Catch XML report into `data`.

    Walks the report tree; for every <TestCase> node, the enclosed
    <OverallResult>'s durationInSeconds attribute is appended (as float)
    under the test case's name.
    """
    def walk(node):
        if node.tag == 'TestCase':
            overall = node.find('OverallResult')
            duration = overall.get('durationInSeconds')
            data[node.get('name')].append(float(duration))
        elif node.tag in ('Group', 'Catch'):
            for child in node:
                walk(child)

    walk(ET.parse(file).getroot())
|
|
||||||
|
|
||||||
def run_benchmarks(binary):
    """Execute *binary* num_runs times, harvesting timings from its XML output.

    Each run writes an XML report (-d yes -r xml -o tempN.xml) which is parsed
    into `data` and then deleted.
    """
    base_cmd = [binary, '-d', 'yes', '-r', 'xml', '-o']
    for run in range(num_runs):
        report = 'temp{}.xml'.format(run)
        print('Run number {}'.format(run))
        subprocess.run(base_cmd + [report])
        parse_file(report)
        # Remove file right after parsing, because benchmark output can be big
        os.remove(report)
|
|
||||||
|
|
||||||
|
|
||||||
# Run benchmarks
run_benchmarks(sys.argv[1])

# Result files are tagged with the wall-clock time and the commit under test.
result_file = '{:%Y-%m-%dT%H-%M-%S}-{}.result'.format(datetime.now(), get_commit_hash())


# Write one summary line (median + stddev across runs) per test case.
print('Writing results to {}'.format(result_file))
with open(result_file, 'w') as file:
    for k in sorted(data):
        file.write('{}: median: {} (s), stddev: {} (s)\n'.format(k, median(data[k]), stdev(data[k])))
|
|
@ -1,63 +0,0 @@
|
|||||||
import re
|
|
||||||
|
|
||||||
# Any preprocessor directive line.
preprocessorRe = re.compile( r'\s*#.*' )

fdefineRe = re.compile( r'\s*#\s*define\s*(\S*)\s*\(' ) # #defines that take arguments
defineRe = re.compile( r'\s*#\s*define\s*(\S*)(\s+)(.*)' ) # all #defines
undefRe = re.compile( r'\s*#\s*undef\s*(\S*)' ) # all #undefs

ifdefCommonRe = re.compile( r'\s*#\s*if' ) # all #ifdefs
ifdefRe = re.compile( r'\s*#\s*ifdef\s*(\S*)' )
ifndefRe = re.compile( r'\s*#\s*ifndef\s*(\S*)' )
endifRe = re.compile( r'\s*#\s*endif\s*//\s*(.*)' )
elseRe = re.compile( r'\s*#\s*else' )
ifRe = re.compile( r'\s*#\s*if\s+(.*)' )

# Namespace opening: captures (prefix)(namespace name)(optional '{')(rest).
nsRe = re.compile( r'(.*?\s*\s*namespace\s+)(\w+)(\s*{?)(.*)' )
# Namespace closing '} // namespace X' comment line, in five groups.
nsCloseRe = re.compile( r'(.*\s*})(\s*\/\/\s*namespace\s+)(\w+)(\s*)(.*)' )
# NOTE(review): only nsRe and nsCloseRe are referenced by LineMapper below;
# the preprocessor-directive patterns appear unused within this file.
|
|
||||||
|
|
||||||
|
|
||||||
class LineMapper:
    """Rewrites a header line by line so it can be embedded into Catch:

    * replaces identifiers according to `idMap` (e.g. CLARA_* -> CATCH_CLARA_*),
    * wraps namespaces listed in `outerNamespace` inside an outer one
      (e.g. `namespace clara` becomes `namespace Catch { namespace clara`),
      and rewrites the matching `} // namespace clara` closer.
    """
    def __init__( self, idMap, outerNamespace ):
        # idMap: {identifier to replace: replacement}
        self.idMap = idMap
        # outerNamespace: {original ns name: (outer ns, inner ns) pair}
        self.outerNamespace = outerNamespace

    # TBD:
    #   #if, #ifdef, comments after #else
    def mapLine( self, lineNo, line ):
        """Return *line* with identifier and namespace rewrites applied;
        unmatched lines pass through unchanged."""
        # Identifier substitution (at most one occurrence per id per line).
        # NOTE(review): idFrom is interpolated into the pattern unescaped,
        # so map keys must not contain regex metacharacters.
        for idFrom, idTo in self.idMap.items():
            r = re.compile("(.*)" + idFrom + "(.*)")

            m = r.match( line )
            if m:
                line = m.group(1) + idTo + m.group(2) + "\n"

        # Closing '} // namespace X' -> close both inner and outer namespaces.
        m = nsCloseRe.match( line )
        if m:
            originalNs = m.group(3)
            # print("[{0}] originalNs: '{1}' - closing".format(lineNo, originalNs))
            # print( "  " + line )
            # print( "  1:[{0}]\n  2:[{1}]\n  3:[{2}]\n  4:[{3}]\n  5:[{4}]".format( m.group(1), m.group(2), m.group(3), m.group(4), m.group(5) ) )
            if originalNs in self.outerNamespace:
                outerNs, innerNs = self.outerNamespace[originalNs]
                return "{0}}}{1}{2}::{3}{4}{5}\n".format( m.group(1), m.group(2), outerNs, innerNs, m.group(4), m.group(5))
        # Opening 'namespace X' -> 'namespace Outer { namespace inner'.
        m = nsRe.match( line )
        if m:
            originalNs = m.group(2)
            # print("[{0}] originalNs: '{1}'".format(lineNo, originalNs))
            # print( "  " + line )
            # print( "  1:[{0}]\n  2:[{1}]\n  3:[{2}]\n  4:[{3}]".format( m.group(1), m.group(2), m.group(3), m.group(4) ) )
            if originalNs in self.outerNamespace:
                outerNs, innerNs = self.outerNamespace[originalNs]
                return "{0}{1} {{ namespace {2}{3}{4}\n".format( m.group(1), outerNs, innerNs, m.group(3), m.group(4) )

        return line

    def mapFile(self, filenameIn, filenameOut ):
        """Copy *filenameIn* to *filenameOut*, mapping every line."""
        print( "Embedding:\n {0}\nas:\n {1}".format( filenameIn, filenameOut ) )
        with open( filenameIn, 'r' ) as f, open( filenameOut, 'w' ) as outf:
            lineNo = 1
            for line in f:
                outf.write( self.mapLine( lineNo, line ) )
                lineNo = lineNo + 1
        # NOTE(review): lineNo ends at line-count + 1, so this over-reports
        # the number of written lines by one.
        print( "Written {0} lines".format( lineNo ) )
|
|
@ -1,27 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
# Execute this script any time you import a new copy of Clara into the third_party area
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
import embed
|
|
||||||
|
|
||||||
# Repository root: this script lives one directory level below it.
rootPath = os.path.dirname(os.path.realpath( os.path.dirname(sys.argv[0])))

# Source: the imported Clara copy; destination: Catch's embedded copy.
filename = os.path.join( rootPath, "third_party", "clara.hpp" )
outfilename = os.path.join( rootPath, "include", "external", "clara.hpp" )


# Mapping of pre-processor identifiers
idMap = {
    "CLARA_HPP_INCLUDED": "CATCH_CLARA_HPP_INCLUDED",
    "CLARA_CONFIG_CONSOLE_WIDTH": "CATCH_CLARA_CONFIG_CONSOLE_WIDTH",
    "CLARA_TEXTFLOW_HPP_INCLUDED": "CATCH_CLARA_TEXTFLOW_HPP_INCLUDED",
    "CLARA_TEXTFLOW_CONFIG_CONSOLE_WIDTH": "CATCH_CLARA_TEXTFLOW_CONFIG_CONSOLE_WIDTH",
    "CLARA_PLATFORM_WINDOWS": "CATCH_PLATFORM_WINDOWS"
}

# outer namespace to add: wrap 'clara' as 'Catch { namespace clara'.
outerNamespace = { "clara": ("Catch", "clara") }

# Rewrite third_party/clara.hpp into the embedded header.
mapper = embed.LineMapper( idMap, outerNamespace )
mapper.mapFile( filename, outfilename )
|
|
@ -1,65 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
from __future__ import print_function
|
|
||||||
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
import urllib2
|
|
||||||
import json
|
|
||||||
|
|
||||||
from scriptCommon import catchPath
|
|
||||||
from scriptCommon import runAndCapture
|
|
||||||
|
|
||||||
# Matches a '#123' issue reference: (text before)(digits)(remainder).
issueNumberRe = re.compile( r'(.*?)#([0-9]*)([^0-9]?.*)' )

rootPath = os.path.join( catchPath, 'include/' )
versionPath = os.path.join( rootPath, "internal/catch_version.hpp" )


# The two most recent commits that touched the version header delimit the
# release range: hashes[0] is the newest, hashes[1] the one before it.
hashes = runAndCapture( ['git', 'log', '-2', '--format="%H"', versionPath] )
# Full `git log` output (line by line) for everything in between.
lines = runAndCapture( ['git', 'log', hashes[1] + ".." + hashes[0], catchPath] )

# Parser state: previous message line, collected messages/dates, issue cache.
prevLine = ""
messages = []
dates = []
issues = {}
|
|
||||||
|
|
||||||
def getIssueTitle( issueNumber ):
    """Fetch the title of GitHub issue *issueNumber* (a string of digits).

    Returns a placeholder string instead of raising when the HTTP request or
    the JSON decoding fails, so changelog generation keeps going.
    """
    try:
        s = urllib2.urlopen("https://api.github.com/repos/philsquared/catch/issues/" + issueNumber ).read()
    except Exception:
        # Was a bare `except:` -- narrowed so SystemExit/KeyboardInterrupt
        # are no longer swallowed. Covers HTTP errors, rate limiting, DNS.
        return "#HTTP Error#"

    try:
        j = json.loads( s )
        return j["title"]
    except (ValueError, KeyError, TypeError):
        # Malformed JSON payload or a response without a "title" field.
        return "#JSON Error#"
|
|
||||||
|
|
||||||
# Walk the git log output, collecting commit dates and message lines while
# expanding '#123' references into '#123 ("issue title")'.
for line in lines:
    if line.startswith( "commit"):
        pass
    elif line.startswith( "Author:"):
        pass
    elif line.startswith( "Date:"):
        dates.append( line[5:].lstrip() )
    elif line == "" and prevLine == "":
        # Collapse runs of blank lines between commit messages.
        pass
    else:
        prevLine = line
        match = issueNumberRe.match( line )
        line2 = ""
        # Expand every '#<digits>' reference found in the line.
        # NOTE(review): when the loop ends, text after the last reference
        # (the unmatched remainder of match.group(3)) is not appended to
        # line2, so trailing text can be dropped -- confirm before reuse.
        while match:
            issueNumber = match.group(2)
            issue = '#{0} ("{1}")'.format( issueNumber, getIssueTitle( issueNumber ) )
            line2 = line2 + match.group(1) + issue
            match = issueNumberRe.match( match.group(3) )
        if line2 == "":
            messages.append( line )
        else:
            messages.append( line2 )

# dates is newest-first, so [-1] is the range start and [0] the range end.
print("All changes between {0} and {1}:\n".format( dates[-1], dates[0] ))

for line in messages:
    print(line)
|
|
@ -1,53 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
import json
|
|
||||||
import os
|
|
||||||
import urllib.request
|
|
||||||
import urllib.parse
|
|
||||||
|
|
||||||
|
|
||||||
from scriptCommon import catchPath
|
|
||||||
|
|
||||||
def upload(options):
    """POST *options* as JSON to the Wandbox compile API; return the decoded
    JSON response as a dict."""
    payload = json.dumps(options).encode('utf-8')
    request = urllib.request.Request('https://melpon.org/wandbox/api/compile.json', method='POST')
    request.add_header('Content-Type', 'application/json; charset=utf-8')
    request.add_header('Content-Length', len(payload))
    response = urllib.request.urlopen(request, payload)
    return json.loads(response.read().decode('utf-8'))
|
|
||||||
|
|
||||||
main_file = '''
|
|
||||||
#define CATCH_CONFIG_MAIN // This tells Catch to provide a main() - only do this in one cpp file
|
|
||||||
#include "catch.hpp"
|
|
||||||
|
|
||||||
unsigned int Factorial( unsigned int number ) {
|
|
||||||
return number <= 1 ? number : Factorial(number-1)*number;
|
|
||||||
}
|
|
||||||
|
|
||||||
TEST_CASE( "Factorials are computed", "[factorial]" ) {
|
|
||||||
REQUIRE( Factorial(1) == 1 );
|
|
||||||
REQUIRE( Factorial(2) == 2 );
|
|
||||||
REQUIRE( Factorial(3) == 6 );
|
|
||||||
REQUIRE( Factorial(10) == 3628800 );
|
|
||||||
}
|
|
||||||
'''
|
|
||||||
|
|
||||||
def uploadFiles():
    """Upload the example program plus the single-header catch.hpp to Wandbox.

    Returns (True, permalink-url) on success, or (False, raw-response-dict)
    when the service reports a compiler error or returns no url.
    """
    # Read the single header via a context manager so the handle is closed
    # deterministically (the original leaked an open file object).
    header_path = os.path.join(catchPath, 'single_include', 'catch2', 'catch.hpp')
    with open(header_path) as header:
        catch_code = header.read()

    response = upload({
        'compiler': 'gcc-head',
        'code': main_file,
        'codes': [{
            'file': 'catch.hpp',
            'code': catch_code
        }],
        'options': 'c++11,cpp-no-pedantic,boost-nothing',
        'compiler-option-raw': '-DCATCH_CONFIG_FAST_COMPILE',
        'save': True
    })

    if 'url' in response and 'compiler_error' not in response:
        return True, response['url']
    else:
        return False, response
|
|
Loading…
Reference in New Issue
Block a user