|
4 | 4 |
|
5 | 5 | #include <bench/bench.h> |
6 | 6 |
|
| 7 | +#include <clientversion.h> |
7 | 8 | #include <crypto/sha256.h> |
8 | 9 | #include <fs.h> |
9 | 10 | #include <stacktraces.h> |
10 | 11 | #include <util/strencodings.h> |
11 | 12 | #include <util/system.h> |
12 | 13 |
|
13 | 14 | #include <bls/bls.h> |
| 15 | +#include <chrono> |
| 16 | +#include <cstdint> |
| 17 | +#include <iostream> |
| 18 | +#include <sstream> |
| 19 | +#include <vector> |
14 | 20 |
|
// Default -filter regex: matches (and therefore runs) every registered benchmark.
static const char* DEFAULT_BENCH_FILTER = ".*";
// Default lower bound on each benchmark's runtime (-min_time), in milliseconds.
static constexpr int64_t DEFAULT_MIN_TIME_MS{10};
16 | 23 |
|
17 | 24 | static void SetupBenchArgs(ArgsManager& argsman) |
18 | 25 | { |
19 | 26 | SetupHelpOptions(argsman); |
20 | 27 |
|
21 | | - argsman.AddArg("-asymptote=n1,n2,n3,...", "Test asymptotic growth of the runtime of an algorithm, if supported by the benchmark", ArgsManager::ALLOW_ANY, OptionsCategory::OPTIONS); |
| 28 | + argsman.AddArg("-asymptote=<n1,n2,n3,...>", "Test asymptotic growth of the runtime of an algorithm, if supported by the benchmark", ArgsManager::ALLOW_ANY, OptionsCategory::OPTIONS); |
22 | 29 | argsman.AddArg("-filter=<regex>", strprintf("Regular expression filter to select benchmark by name (default: %s)", DEFAULT_BENCH_FILTER), ArgsManager::ALLOW_ANY, OptionsCategory::OPTIONS); |
23 | | - argsman.AddArg("-list", "List benchmarks without executing them", ArgsManager::ALLOW_ANY, OptionsCategory::OPTIONS); |
| 30 | + argsman.AddArg("-list", "List benchmarks without executing them", ArgsManager::ALLOW_BOOL, OptionsCategory::OPTIONS); |
| 31 | + argsman.AddArg("-min_time=<milliseconds>", strprintf("Minimum runtime per benchmark, in milliseconds (default: %d)", DEFAULT_MIN_TIME_MS), ArgsManager::ALLOW_INT, OptionsCategory::OPTIONS); |
24 | 32 | argsman.AddArg("-output_csv=<output.csv>", "Generate CSV file with the most important benchmark results", ArgsManager::ALLOW_ANY, OptionsCategory::OPTIONS); |
25 | 33 | argsman.AddArg("-output_json=<output.json>", "Generate JSON file with all benchmark results", ArgsManager::ALLOW_ANY, OptionsCategory::OPTIONS); |
26 | 34 | } |
@@ -50,16 +58,62 @@ int main(int argc, char** argv) |
50 | 58 | } |
51 | 59 |
|
52 | 60 | if (HelpRequested(argsman)) { |
53 | | - std::cout << argsman.GetHelpMessage(); |
| 61 | + std::cout << "Usage: bench_dash [options]\n" |
| 62 | + "\n" |
| 63 | + << argsman.GetHelpMessage() |
| 64 | + << "Description:\n" |
| 65 | + "\n" |
| 66 | + " bench_dash executes microbenchmarks. The quality of the benchmark results\n" |
| 67 | + " highly depend on the stability of the machine. It can sometimes be difficult\n" |
| 68 | + " to get stable, repeatable results, so here are a few tips:\n" |
| 69 | + "\n" |
| 70 | + " * Use pyperf [1] to disable frequency scaling, turbo boost etc. For best\n" |
| 71 | + " results, use CPU pinning and CPU isolation (see [2]).\n" |
| 72 | + "\n" |
| 73 | + " * Each call of run() should do exactly the same work. E.g. inserting into\n" |
| 74 | + " a std::vector doesn't do that as it will reallocate on certain calls. Make\n" |
| 75 | + " sure each run has exactly the same preconditions.\n" |
| 76 | + "\n" |
| 77 | + " * If results are still not reliable, increase runtime with e.g.\n" |
| 78 | + " -min_time=5000 to let a benchmark run for at least 5 seconds.\n" |
| 79 | + "\n" |
| 80 | + " * bench_dash uses nanobench [3] for which there is extensive\n" |
| 81 | + " documentation available online.\n" |
| 82 | + "\n" |
| 83 | + "Environment Variables:\n" |
| 84 | + "\n" |
| 85 | + " To attach a profiler you can run a benchmark in endless mode. This can be\n" |
| 86 | + " done with the environment variable NANOBENCH_ENDLESS. E.g. like so:\n" |
| 87 | + "\n" |
| 88 | + " NANOBENCH_ENDLESS=MuHash ./bench_dash -filter=MuHash\n" |
| 89 | + "\n" |
| 90 | + " In rare cases it can be useful to suppress stability warnings. This can be\n" |
| 91 | + " done with the environment variable NANOBENCH_SUPPRESS_WARNINGS, e.g:\n" |
| 92 | + "\n" |
| 93 | + " NANOBENCH_SUPPRESS_WARNINGS=1 ./bench_dash\n" |
| 94 | + "\n" |
| 95 | + "Notes:\n" |
| 96 | + "\n" |
| 97 | + " 1. pyperf\n" |
| 98 | + " https://github.com/psf/pyperf\n" |
| 99 | + "\n" |
| 100 | + " 2. CPU pinning & isolation\n" |
| 101 | + " https://pyperf.readthedocs.io/en/latest/system.html\n" |
| 102 | + "\n" |
| 103 | + " 3. nanobench\n" |
| 104 | + " https://github.com/martinus/nanobench\n" |
| 105 | + "\n"; |
| 106 | + |
54 | 107 | return EXIT_SUCCESS; |
55 | 108 | } |
56 | 109 |
|
57 | 110 | benchmark::Args args; |
58 | | - args.regex_filter = argsman.GetArg("-filter", DEFAULT_BENCH_FILTER); |
59 | | - args.is_list_only = argsman.GetBoolArg("-list", false); |
60 | 111 | args.asymptote = parseAsymptote(argsman.GetArg("-asymptote", "")); |
| 112 | + args.is_list_only = argsman.GetBoolArg("-list", false); |
| 113 | + args.min_time = std::chrono::milliseconds(argsman.GetArg("-min_time", DEFAULT_MIN_TIME_MS)); |
61 | 114 | args.output_csv = fs::PathFromString(argsman.GetArg("-output_csv", "")); |
62 | 115 | args.output_json = fs::PathFromString(argsman.GetArg("-output_json", "")); |
| 116 | + args.regex_filter = argsman.GetArg("-filter", DEFAULT_BENCH_FILTER); |
63 | 117 |
|
64 | 118 | benchmark::BenchRunner::RunAll(args); |
65 | 119 |
|
|
0 commit comments