Skip to content

Commit

Permalink
remove CSV Reporter
Browse files Browse the repository at this point in the history
  • Loading branch information
EricWF committed Dec 14, 2017
1 parent 3d8dbc6 commit b01b035
Show file tree
Hide file tree
Showing 10 changed files with 15 additions and 670 deletions.
4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -800,7 +800,7 @@ BM_memcpy/32k 1834 ns 1837 ns 357143

## Output Formats
The library supports multiple output formats. Use the
`--benchmark_format=<console|json|csv>` flag to set the format type. `console`
`--benchmark_format=<console|json>` flag to set the format type. `console`
is the default format.

The Console format is intended to be a human readable format. By default
Expand Down Expand Up @@ -869,7 +869,7 @@ name,iterations,real_time,cpu_time,bytes_per_second,items_per_second,label
## Output Files
The library supports writing the output of the benchmark to a file specified
by `--benchmark_out=<filename>`. The format of the output can be specified
using `--benchmark_out_format={json|console|csv}`. Specifying
using `--benchmark_out_format={json|console}`. Specifying
`--benchmark_out` does not suppress the console output.

## Debug vs Release
Expand Down
12 changes: 0 additions & 12 deletions include/benchmark/benchmark.h
Original file line number Diff line number Diff line change
Expand Up @@ -1347,18 +1347,6 @@ class JSONReporter : public BenchmarkReporter {
bool first_report_;
};

// Reporter that emits benchmark results as comma-separated values.
// A header row is written once before the first run row (tracked via
// printed_header_).
class CSVReporter : public BenchmarkReporter {
public:
CSVReporter() : printed_header_(false) {}
// Reports the execution context (overrides BenchmarkReporter).
virtual bool ReportContext(const Context& context);
// Reports one batch of benchmark runs (overrides BenchmarkReporter).
virtual void ReportRuns(const std::vector<Run>& reports);

private:
// Prints a single run as one CSV row.
void PrintRunData(const Run& report);

// True once the CSV header row has been emitted.
bool printed_header_;
// Names of user counters seen so far; presumably used to keep counter
// columns stable across rows — TODO confirm against the .cc definition.
std::set< std::string > user_counter_names_;
};

inline const char* GetTimeUnitString(TimeUnit unit) {
switch (unit) {
Expand Down
12 changes: 5 additions & 7 deletions src/benchmark.cc
Original file line number Diff line number Diff line change
Expand Up @@ -76,11 +76,11 @@ DEFINE_bool(benchmark_report_aggregates_only, false,

DEFINE_string(benchmark_format, "console",
"The format to use for console output. Valid values are "
"'console', 'json', or 'csv'.");
"'console', or 'json'.");

DEFINE_string(benchmark_out_format, "json",
"The format to use for file output. Valid values are "
"'console', 'json', or 'csv'.");
"'console', or 'json'.");

DEFINE_string(benchmark_out, "", "The file to write additonal output to");

Expand Down Expand Up @@ -532,8 +532,6 @@ std::unique_ptr<BenchmarkReporter> CreateReporter(
return PtrType(new ConsoleReporter(output_opts));
} else if (name == "json") {
return PtrType(new JSONReporter);
} else if (name == "csv") {
return PtrType(new CSVReporter);
} else {
std::cerr << "Unexpected format: '" << name << "'\n";
std::exit(1);
Expand Down Expand Up @@ -642,9 +640,9 @@ void PrintUsageAndExit() {
" [--benchmark_min_time=<min_time>]\n"
" [--benchmark_repetitions=<num_repetitions>]\n"
" [--benchmark_report_aggregates_only={true|false}\n"
" [--benchmark_format=<console|json|csv>]\n"
" [--benchmark_format=<console|json>]\n"
" [--benchmark_out=<filename>]\n"
" [--benchmark_out_format=<json|console|csv>]\n"
" [--benchmark_out_format=<json|console>]\n"
" [--benchmark_color={auto|true|false}]\n"
" [--benchmark_counters_tabular={true|false}]\n"
" [--v=<verbosity>]\n");
Expand Down Expand Up @@ -684,7 +682,7 @@ void ParseCommandLineFlags(int* argc, char** argv) {
}
for (auto const* flag :
{&FLAGS_benchmark_format, &FLAGS_benchmark_out_format})
if (*flag != "console" && *flag != "json" && *flag != "csv") {
if (*flag != "console" && *flag != "json") {
PrintUsageAndExit();
}
if (FLAGS_benchmark_color.empty()) {
Expand Down
149 changes: 0 additions & 149 deletions src/csv_reporter.cc

This file was deleted.

3 changes: 0 additions & 3 deletions test/complexity_test.cc
Original file line number Diff line number Diff line change
Expand Up @@ -33,9 +33,6 @@ int AddComplexityTest(std::string big_o_test_name, std::string rms_test_name,
{"\"name\": \"%rms_name\",$"},
{"\"rms\": %float$", MR_Next},
{"}", MR_Next}});
AddCases(TC_CSVOut, {{"^\"%bigo_name\",,%float,%float,%bigo,,,,,$"},
{"^\"%bigo_name\"", MR_Not},
{"^\"%rms_name\",,%float,%float,,,,,,$", MR_Next}});
return 0;
}

Expand Down
130 changes: 0 additions & 130 deletions test/output_test.h
Original file line number Diff line number Diff line change
Expand Up @@ -42,8 +42,6 @@ enum TestCaseID {
TC_ConsoleErr,
TC_JSONOut,
TC_JSONErr,
TC_CSVOut,
TC_CSVErr,

TC_NumID // PRIVATE
};
Expand All @@ -60,134 +58,6 @@ int SetSubstitutions(
// Run all output tests.
void RunOutputTests(int argc, char* argv[]);

// ========================================================================= //
// ------------------------- Results checking ------------------------------ //
// ========================================================================= //

// Call this macro to register a benchmark for checking its results. This
// should be all that's needed. It subscribes a function to check the (CSV)
// results of a benchmark. This is done only after verifying that the output
// strings are really as expected.
// bm_name_pattern: a name or a regex pattern which will be matched against
// all the benchmark names. Matching benchmarks
// will be the subject of a call to checker_function
// checker_function: should be of type ResultsCheckFn (see below)
#define CHECK_BENCHMARK_RESULTS(bm_name_pattern, checker_function) \
size_t CONCAT(dummy, __LINE__) = AddChecker(bm_name_pattern, checker_function)

// Forward declaration; full definition below.
struct Results;
// Signature of a checker callback: receives the parsed results of one
// matched benchmark.
typedef std::function< void(Results const&) > ResultsCheckFn;

// Registers `fn` to be invoked for every benchmark whose name matches
// `bm_name_pattern`. The returned size_t exists only so the macro above
// can register checkers via a dummy static initializer.
size_t AddChecker(const char* bm_name_pattern, ResultsCheckFn fn);

// Class holding the results of a benchmark.
// It is passed in calls to checker functions registered via
// CHECK_BENCHMARK_RESULTS / AddChecker.
struct Results {

// the benchmark name
std::string name;
// the benchmark fields, keyed by field name, with values kept as the
// raw strings from the report (parse with GetAs/GetCounterAs)
std::map< std::string, std::string > values;

Results(const std::string& n) : name(n) {}

// number of threads the benchmark ran with (defined out of line)
int NumThreads() const;

typedef enum { kCpuTime, kRealTime } BenchmarkTime;

// get cpu_time or real_time in seconds
double GetTime(BenchmarkTime which) const;

// get the real_time duration of the benchmark in seconds.
// it is better to use fuzzy float checks for this, as the float
// ASCII formatting is lossy.
double DurationRealTime() const {
return GetAs< double >("iterations") * GetTime(kRealTime);
}
// get the cpu_time duration of the benchmark in seconds
double DurationCPUTime() const {
return GetAs< double >("iterations") * GetTime(kCpuTime);
}

// get the string for a result by name, or nullptr if the name
// is not found
const std::string* Get(const char* entry_name) const {
auto it = values.find(entry_name);
if(it == values.end()) return nullptr;
return &it->second;
}

// get a result by name, parsed as a specific type.
// CHECK-fails if the entry is missing (see definition below).
// NOTE: for counters, use GetCounterAs instead.
template <class T>
T GetAs(const char* entry_name) const;

// counters are written as doubles, so they have to be read first
// as a double, and only then converted to the asked type.
template <class T>
T GetCounterAs(const char* entry_name) const {
double dval = GetAs< double >(entry_name);
T tval = static_cast< T >(dval);
return tval;
}
};

// Looks up `entry_name` in the results and parses its string value as T
// using stream extraction. CHECK-fails if the entry is absent, empty, or
// cannot be parsed as T.
template <class T>
T Results::GetAs(const char* entry_name) const {
const std::string* raw = Get(entry_name);
CHECK(raw != nullptr && !raw->empty());
std::stringstream parser(*raw);
T parsed;
parser >> parsed;
CHECK(!parser.fail());
return parsed;
}

//----------------------------------
// Macros to help in result checking. Do not use them with arguments causing
// side-effects.

// Implementation detail shared by CHECK_RESULT_VALUE / CHECK_COUNTER_VALUE.
// Expands to CONCAT(CHECK_, relationship) — e.g. relationship=EQ yields
// CHECK_EQ — comparing entry.getfn<var_type>(var_name) against value, and
// streams a detailed failure message (file:line, benchmark name, expected
// relation). `getfn` is either GetAs or GetCounterAs.
#define _CHECK_RESULT_VALUE(entry, getfn, var_type, var_name, relationship, value) \
CONCAT(CHECK_, relationship) \
(entry.getfn< var_type >(var_name), (value)) << "\n" \
<< __FILE__ << ":" << __LINE__ << ": " << (entry).name << ":\n" \
<< __FILE__ << ":" << __LINE__ << ": " \
<< "expected (" << #var_type << ")" << (var_name) \
<< "=" << (entry).getfn< var_type >(var_name) \
<< " to be " #relationship " to " << (value) << "\n"

// Implementation detail shared by CHECK_FLOAT_RESULT_VALUE /
// CHECK_FLOAT_COUNTER_VALUE: check with tolerance. eps_factor is the
// tolerance window, which is interpreted relative to value (eg, 0.1 means
// 10% of value). Expands to CONCAT(CHECK_FLOAT_, relationship) and, on
// failure, streams the expected relation, the absolute tolerance, and the
// observed delta both absolutely and as a percentage (guarding against
// division by a near-zero value).
#define _CHECK_FLOAT_RESULT_VALUE(entry, getfn, var_type, var_name, relationship, value, eps_factor) \
CONCAT(CHECK_FLOAT_, relationship) \
(entry.getfn< var_type >(var_name), (value), (eps_factor) * (value)) << "\n" \
<< __FILE__ << ":" << __LINE__ << ": " << (entry).name << ":\n" \
<< __FILE__ << ":" << __LINE__ << ": " \
<< "expected (" << #var_type << ")" << (var_name) \
<< "=" << (entry).getfn< var_type >(var_name) \
<< " to be " #relationship " to " << (value) << "\n" \
<< __FILE__ << ":" << __LINE__ << ": " \
<< "with tolerance of " << (eps_factor) * (value) \
<< " (" << (eps_factor)*100. << "%), " \
<< "but delta was " << ((entry).getfn< var_type >(var_name) - (value)) \
<< " (" << (((entry).getfn< var_type >(var_name) - (value)) \
/ \
((value) > 1.e-5 || value < -1.e-5 ? value : 1.e-5)*100.) \
<< "%)"

// Checks an exact result field parsed via GetAs, e.g.
// CHECK_RESULT_VALUE(e, int, "threads", EQ, 1).
#define CHECK_RESULT_VALUE(entry, var_type, var_name, relationship, value) \
_CHECK_RESULT_VALUE(entry, GetAs, var_type, var_name, relationship, value)

// Same as CHECK_RESULT_VALUE, but reads the field as a user counter
// (parsed as double via GetCounterAs, then cast to var_type).
#define CHECK_COUNTER_VALUE(entry, var_type, var_name, relationship, value) \
_CHECK_RESULT_VALUE(entry, GetCounterAs, var_type, var_name, relationship, value)

// Fuzzy float check of a result field; eps_factor is the tolerance
// relative to value (0.1 == 10%).
#define CHECK_FLOAT_RESULT_VALUE(entry, var_name, relationship, value, eps_factor) \
_CHECK_FLOAT_RESULT_VALUE(entry, GetAs, double, var_name, relationship, value, eps_factor)

// Fuzzy float check of a user-counter field, with the same tolerance
// semantics as CHECK_FLOAT_RESULT_VALUE.
#define CHECK_FLOAT_COUNTER_VALUE(entry, var_name, relationship, value, eps_factor) \
_CHECK_FLOAT_RESULT_VALUE(entry, GetCounterAs, double, var_name, relationship, value, eps_factor)

// ========================================================================= //
// --------------------------- Misc Utilities ------------------------------ //
// ========================================================================= //
Expand Down
Loading

0 comments on commit b01b035

Please sign in to comment.