// (removed: stray line-number residue from source extraction — not valid Rust)
/*!
| Usage:
|
| static void NameOfYourBenchmarkFunction(Bench& bench)
| {
| ...do any setup needed...
|
| bench.run([&] {
| ...do stuff you want to time; refer to src/bench/nanobench.h
| for more information and the options that can be passed here...
| });
|
| ...do any cleanup needed...
| }
|
| BENCHMARK(NameOfYourBenchmarkFunction);
|
*/
crate::ix!();
//-------------------------------------------[.cpp/bitcoin/src/bench/bench.h]
/// Signature shared by every registered benchmark body: the bencher is
/// passed by mutable reference and nothing is returned. (Parameter names
/// and the unit return are cosmetic in a `fn` pointer type, so this is the
/// same type as `fn(_0: &mut Bencher) -> ()`.)
pub type BenchFunction = fn(&mut Bencher);
/// Command-line options controlling a benchmark run (see `BenchRunner::run_all`,
/// whose preserved C++ shows how each field is consumed).
pub struct Args {
    /// When true, matching benchmark names are printed instead of run.
    is_list_only: bool,
    /// Minimum measuring time; the original note says the value is in
    /// millis. Per the preserved C++ it is divided across epochs when > 0.
    min_time: Duration,
    /// Values of N for asymptotic-complexity runs; empty means a single
    /// plain run per benchmark (per the preserved C++ in `run_all`).
    asymptote: Vec<f64>,
    /// Output path for CSV results; an empty string disables CSV output
    /// (per the empty-filename bail-out in `generate_template_results`).
    output_csv: String,
    /// Output path for JSON results; empty string disables JSON output.
    output_json: String,
    /// Regex used to select which registered benchmarks are run.
    regex_filter: String,
}
/// Registration handle for a single benchmark; per the preserved C++ in
/// `BenchRunner::new`, constructing one inserts the benchmark into the
/// global map. The type carries no data, so it is declared as a unit
/// struct (the `BenchRunner {}` literal form remains valid for callers).
pub struct BenchRunner;
/// Registry mapping a benchmark's name to its function, as returned by
/// `BenchRunner::benchmarks` (the preserved C++ uses a `std::map`;
/// NOTE(review): `HashMap` loses the sorted iteration order the C++ map
/// had — confirm whether run/listing order matters before porting).
pub type BenchmarkMap = HashMap<String,BenchFunction>;
/**
| BENCHMARK(foo) expands to: BenchRunner
| bench_11foo("foo", foo);
|
*/
// Registers a benchmark function under its own name (see the BENCHMARK
// expansion example above). NOTE(review): the expansion body is still the
// preserved C++ macro text inside a comment, so `benchmark!(foo)` currently
// expands to nothing and registers no benchmark — port the expansion
// (a `BenchRunner::new(stringify!($n).to_string(), $n)` static, or an
// inventory/ctor-style registration) before relying on it.
macro_rules! benchmark {
    ($n:ident) => {
        /*
        BenchRunner PASTE2(bench_, PASTE2(__LINE__, n))(STRINGIZE(n), n);
        */
    }
}
//-------------------------------------------[.cpp/bitcoin/src/bench/bench.cpp]
// Preserved C++ global: the (empty) test-logging callback G_TEST_LOG_FUN.
// NOTE(review): the lazy_static! block contains only the commented-out C++,
// so no Rust static is actually defined here yet.
lazy_static!{
    /*
    const std::function<c_void(const std::string&)> G_TEST_LOG_FUN{};
    */
}
/// One benchmark measurement. NOTE(review): `Broken` is an un-ported
/// placeholder — judging by the preserved C++ (`std::vector<
/// ankerl::nanobench::Result>`), this should become a Rust equivalent of
/// `ankerl::nanobench::Result`; replace the alias once that type exists.
pub type NanoBenchResult = Broken;
/// Render `benchmark_results` into `filename` using the mustache-style
/// template `tpl` (per the preserved C++: bail out when there are no
/// results or the filename is empty, otherwise write the rendered output
/// to the file).
///
/// NOTE(review): `tpl` is a raw C-string pointer carried over from the
/// C++ signature; the body is still a `todo!()` stub.
/// NOTE(review): two defects preserved from the C++ below — the failure
/// message reads "Could write to file" (presumably missing "not"), and
/// "Created '<file>'" is printed even when opening the file failed. Fix
/// both when porting the body.
pub fn generate_template_results(
    benchmark_results: &Vec<NanoBenchResult>,
    filename: &String,
    tpl: *const u8) {
    todo!();
    /*
    if (benchmarkResults.empty() || filename.empty()) {
    // nothing to write, bail out
    return;
    }
    std::ofstream fout(filename);
    if (fout.is_open()) {
    ankerl::nanobench::render(tpl, benchmarkResults, fout);
    } else {
    std::cout << "Could write to file '" << filename << "'" << std::endl;
    }
    std::cout << "Created '" << filename << "'" << std::endl;
    */
}
impl BenchRunner {

    /// Accessor for the global name -> function registry. Per the preserved
    /// C++ this is a function-local static map, so registration works even
    /// before main runs. NOTE(review): the Rust signature takes `&mut self`
    /// and returns `&mut BenchmarkMap`, but the C++ original is static and
    /// touches no instance state — decide where the global map will live
    /// (e.g. a `Mutex`-guarded static) when porting.
    pub fn benchmarks(&mut self) -> &mut BenchmarkMap {
        todo!();
        /*
        static std::map<std::string, BenchFunction> benchmarks_map;
        return benchmarks_map;
        */
    }

    /// Registers `func` under `name` in the benchmark map; this is what the
    /// BENCHMARK macro ultimately invokes. NOTE(review): returns `Self`,
    /// but the preserved C++ constructor only performs the insertion — the
    /// port must decide what the constructed value holds (likely nothing).
    pub fn new(
        name: String,
        func: BenchFunction) -> Self {
        todo!();
        /*
        benchmarks().insert(std::make_pair(name, func));
        */
    }

    /// Runs every registered benchmark whose name matches
    /// `args.regex_filter` (per the preserved C++): in list-only mode just
    /// print the names; a positive `min_time` is converted to nanos and
    /// divided across epochs to set the per-epoch minimum; a non-empty
    /// `asymptote` re-runs the benchmark at each N and prints the fitted
    /// big-O; finally the collected results are rendered to CSV and JSON
    /// via GenerateTemplateResults.
    pub fn run_all(&mut self, args: &Args) {
        todo!();
        /*
        std::regex reFilter(args.regex_filter);
        std::smatch baseMatch;
        std::vector<ankerl::nanobench::Result> benchmarkResults;
        for (const auto& p : benchmarks()) {
        if (!std::regex_match(p.first, baseMatch, reFilter)) {
        continue;
        }
        if (args.is_list_only) {
        std::cout << p.first << std::endl;
        continue;
        }
        Bench bench;
        bench.name(p.first);
        if (args.min_time > 0ms) {
        // convert to nanos before dividing to reduce rounding errors
        std::chrono::nanoseconds min_time_ns = args.min_time;
        bench.minEpochTime(min_time_ns / bench.epochs());
        }
        if (args.asymptote.empty()) {
        p.second(bench);
        } else {
        for (auto n : args.asymptote) {
        bench.complexityN(n);
        p.second(bench);
        }
        std::cout << bench.complexityBigO() << std::endl;
        }
        if (!bench.results().empty()) {
        benchmarkResults.push_back(bench.results().back());
        }
        }
        GenerateTemplateResults(benchmarkResults, args.output_csv, "# Benchmark, evals, iterations, total, min, max, median\n"
        "{{#result}}{{name}}, {{epochs}}, {{average(iterations)}}, {{sumProduct(iterations, elapsed)}}, {{minimum(elapsed)}}, {{maximum(elapsed)}}, {{median(elapsed)}}\n"
        "{{/result}}");
        GenerateTemplateResults(benchmarkResults, args.output_json, ankerl::nanobench::templates::json());
        */
    }
}