diff --git a/README.md b/README.md
index 45e27ed580..f2de00b11d 100644
--- a/README.md
+++ b/README.md
@@ -800,7 +800,7 @@ BM_memcpy/32k            1834 ns       1837 ns     357143
 ## Output Formats
 The library supports multiple output formats. Use the
-`--benchmark_format=<console|json|csv>` flag to set the format type. `console`
+`--benchmark_format=<console|json>` flag to set the format type. `console`
 is the default format.
 
 The Console format is intended to be a human readable format. By default
@@ -869,7 +869,7 @@ name,iterations,real_time,cpu_time,bytes_per_second,items_per_second,label
 ## Output Files
 The library supports writing the output of the benchmark to a file specified
 by `--benchmark_out=<filename>`. The format of the output can be specified
-using `--benchmark_out_format={json|console|csv}`. Specifying
+using `--benchmark_out_format={json|console}`. Specifying
 `--benchmark_out` does not suppress the console output.
 
 ## Debug vs Release
diff --git a/include/benchmark/benchmark.h b/include/benchmark/benchmark.h
index 340cbc1ecf..8689aa56f4 100644
--- a/include/benchmark/benchmark.h
+++ b/include/benchmark/benchmark.h
@@ -1347,18 +1347,6 @@ class JSONReporter : public BenchmarkReporter {
   bool first_report_;
 };
 
-class CSVReporter : public BenchmarkReporter {
- public:
-  CSVReporter() : printed_header_(false) {}
-  virtual bool ReportContext(const Context& context);
-  virtual void ReportRuns(const std::vector<Run>& reports);
-
- private:
-  void PrintRunData(const Run& report);
-
-  bool printed_header_;
-  std::set< std::string > user_counter_names_;
-};
-
 inline const char* GetTimeUnitString(TimeUnit unit) {
   switch (unit) {
diff --git a/src/benchmark.cc b/src/benchmark.cc
index 1a7d218283..172dc81514 100644
--- a/src/benchmark.cc
+++ b/src/benchmark.cc
@@ -76,11 +76,11 @@ DEFINE_bool(benchmark_report_aggregates_only, false,
 
 DEFINE_string(benchmark_format, "console",
               "The format to use for console output. Valid values are "
-              "'console', 'json', or 'csv'.");
+              "'console' or 'json'.");
 
 DEFINE_string(benchmark_out_format, "json",
               "The format to use for file output. Valid values are "
-              "'console', 'json', or 'csv'.");
+              "'console' or 'json'.");
 
 DEFINE_string(benchmark_out, "", "The file to write additonal output to");
@@ -532,8 +532,6 @@ std::unique_ptr<BenchmarkReporter> CreateReporter(
     return PtrType(new ConsoleReporter(output_opts));
   } else if (name == "json") {
     return PtrType(new JSONReporter);
-  } else if (name == "csv") {
-    return PtrType(new CSVReporter);
   } else {
     std::cerr << "Unexpected format: '" << name << "'\n";
     std::exit(1);
@@ -642,9 +640,9 @@ void PrintUsageAndExit() {
           " [--benchmark_min_time=<min_time>]\n"
           " [--benchmark_repetitions=<num_repetitions>]\n"
          " [--benchmark_report_aggregates_only={true|false}\n"
-          " [--benchmark_format=<console|json|csv>]\n"
+          " [--benchmark_format=<console|json>]\n"
           " [--benchmark_out=<filename>]\n"
-          " [--benchmark_out_format=<json|console|csv>]\n"
+          " [--benchmark_out_format=<json|console>]\n"
           " [--benchmark_color={auto|true|false}]\n"
           " [--benchmark_counters_tabular={true|false}]\n"
           " [--v=<verbosity>]\n");
@@ -684,7 +682,7 @@ void ParseCommandLineFlags(int* argc, char** argv) {
   }
   for (auto const* flag :
        {&FLAGS_benchmark_format, &FLAGS_benchmark_out_format})
-    if (*flag != "console" && *flag != "json" && *flag != "csv") {
+    if (*flag != "console" && *flag != "json") {
       PrintUsageAndExit();
     }
   if (FLAGS_benchmark_color.empty()) {
diff --git a/src/csv_reporter.cc b/src/csv_reporter.cc
deleted file mode 100644
index 35510645b0..0000000000
--- a/src/csv_reporter.cc
+++ /dev/null
@@ -1,149 +0,0 @@
-// Copyright 2015 Google Inc. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#include "benchmark/benchmark.h"
-#include "complexity.h"
-
-#include <algorithm>
-#include <cstdint>
-#include <iostream>
-#include <string>
-#include <tuple>
-#include <vector>
-
-#include "string_util.h"
-#include "timers.h"
-#include "check.h"
-
-// File format reference: http://edoceo.com/utilitas/csv-file-format.
-
-namespace benchmark {
-
-namespace {
-std::vector<std::string> elements = {
-    "name",           "iterations",       "real_time",        "cpu_time",
-    "time_unit",      "bytes_per_second", "items_per_second", "label",
-    "error_occurred", "error_message"};
-}  // namespace
-
-bool CSVReporter::ReportContext(const Context& context) {
-  PrintBasicContext(&GetErrorStream(), context);
-  return true;
-}
-
-void CSVReporter::ReportRuns(const std::vector<Run> & reports) {
-  std::ostream& Out = GetOutputStream();
-
-  if (!printed_header_) {
-    // save the names of all the user counters
-    for (const auto& run : reports) {
-      for (const auto& cnt : run.counters) {
-        user_counter_names_.insert(cnt.first);
-      }
-    }
-
-    // print the header
-    for (auto B = elements.begin(); B != elements.end();) {
-      Out << *B++;
-      if (B != elements.end()) Out << ",";
-    }
-    for (auto B = user_counter_names_.begin(); B != user_counter_names_.end();) {
-      Out << ",\"" << *B++ << "\"";
-    }
-    Out << "\n";
-
-    printed_header_ = true;
-  } else {
-    // check that all the current counters are saved in the name set
-    for (const auto& run : reports) {
-      for (const auto& cnt : run.counters) {
-        CHECK(user_counter_names_.find(cnt.first) != user_counter_names_.end())
-            << "All counters must be present in each run. "
-            << "Counter named \"" << cnt.first
-            << "\" was not in a run after being added to the header";
-      }
-    }
-  }
-
-  // print results for each run
-  for (const auto& run : reports) {
-    PrintRunData(run);
-  }
-
-}
-
-void CSVReporter::PrintRunData(const Run & run) {
-  std::ostream& Out = GetOutputStream();
-
-  // Field with embedded double-quote characters must be doubled and the field
-  // delimited with double-quotes.
-  std::string name = run.benchmark_name;
-  ReplaceAll(&name, "\"", "\"\"");
-  Out << '"' << name << "\",";
-  if (run.error_occurred) {
-    Out << std::string(elements.size() - 3, ',');
-    Out << "true,";
-    std::string msg = run.error_message;
-    ReplaceAll(&msg, "\"", "\"\"");
-    Out << '"' << msg << "\"\n";
-    return;
-  }
-
-  // Do not print iteration on bigO and RMS report
-  if (!run.report_big_o && !run.report_rms) {
-    Out << run.iterations;
-  }
-  Out << ",";
-
-  Out << run.GetAdjustedRealTime() << ",";
-  Out << run.GetAdjustedCPUTime() << ",";
-
-  // Do not print timeLabel on bigO and RMS report
-  if (run.report_big_o) {
-    Out << GetBigOString(run.complexity);
-  } else if (!run.report_rms) {
-    Out << GetTimeUnitString(run.time_unit);
-  }
-  Out << ",";
-
-  if (run.bytes_per_second > 0.0) {
-    Out << run.bytes_per_second;
-  }
-  Out << ",";
-  if (run.items_per_second > 0.0) {
-    Out << run.items_per_second;
-  }
-  Out << ",";
-  if (!run.report_label.empty()) {
-    // Field with embedded double-quote characters must be doubled and the field
-    // delimited with double-quotes.
-    std::string label = run.report_label;
-    ReplaceAll(&label, "\"", "\"\"");
-    Out << "\"" << label << "\"";
-  }
-  Out << ",,";  // for error_occurred and error_message
-
-  // Print user counters
-  for (const auto &ucn : user_counter_names_) {
-    auto it = run.counters.find(ucn);
-    if(it == run.counters.end()) {
-      Out << ",";
-    } else {
-      Out << "," << it->second;
-    }
-  }
-  Out << '\n';
-}
-
-}  // end namespace benchmark
diff --git a/src/json_reporter.cc b/src/json_reporter.cc
index b5ae302ad4..63a1deac12 100644
--- a/src/json_reporter.cc
+++ b/src/json_reporter.cc
@@ -64,6 +64,7 @@ int64_t RoundDouble(double v) { return static_cast<int64_t>(v + 0.5); }
 
 }  // end namespace
 
+
 bool JSONReporter::ReportContext(const Context& context) {
   std::ostream& out = GetOutputStream();
 
diff --git a/test/complexity_test.cc b/test/complexity_test.cc
index 89dfa580e6..d3a79fa63e 100644
--- a/test/complexity_test.cc
+++ b/test/complexity_test.cc
@@ -33,9 +33,6 @@ int AddComplexityTest(std::string big_o_test_name, std::string rms_test_name,
                         {"\"name\": \"%rms_name\",$"},
                         {"\"rms\": %float$", MR_Next},
                         {"}", MR_Next}});
-  AddCases(TC_CSVOut, {{"^\"%bigo_name\",,%float,%float,%bigo,,,,,$"},
-                       {"^\"%bigo_name\"", MR_Not},
-                       {"^\"%rms_name\",,%float,%float,,,,,,$", MR_Next}});
   return 0;
 }
 
diff --git a/test/output_test.h b/test/output_test.h
index 897a13866b..83daf5a495 100644
--- a/test/output_test.h
+++ b/test/output_test.h
@@ -42,8 +42,6 @@ enum TestCaseID {
   TC_ConsoleErr,
   TC_JSONOut,
   TC_JSONErr,
-  TC_CSVOut,
-  TC_CSVErr,
 
   TC_NumID  // PRIVATE
 };
@@ -60,134 +58,6 @@ int SetSubstitutions(
 // Run all output tests.
 void RunOutputTests(int argc, char* argv[]);
 
-// ========================================================================= //
-// ------------------------- Results checking ------------------------------ //
-// ========================================================================= //
-
-// Call this macro to register a benchmark for checking its results. This
-// should be all that's needed. It subscribes a function to check the (CSV)
-// results of a benchmark. This is done only after verifying that the output
-// strings are really as expected.
-// bm_name_pattern: a name or a regex pattern which will be matched against
-//                  all the benchmark names. Matching benchmarks
-//                  will be the subject of a call to checker_function
-// checker_function: should be of type ResultsCheckFn (see below)
-#define CHECK_BENCHMARK_RESULTS(bm_name_pattern, checker_function) \
-  size_t CONCAT(dummy, __LINE__) = AddChecker(bm_name_pattern, checker_function)
-
-struct Results;
-typedef std::function< void(Results const&) > ResultsCheckFn;
-
-size_t AddChecker(const char* bm_name_pattern, ResultsCheckFn fn);
-
-// Class holding the results of a benchmark.
-// It is passed in calls to checker functions.
-struct Results {
-
-  // the benchmark name
-  std::string name;
-  // the benchmark fields
-  std::map< std::string, std::string > values;
-
-  Results(const std::string& n) : name(n) {}
-
-  int NumThreads() const;
-
-  typedef enum { kCpuTime, kRealTime } BenchmarkTime;
-
-  // get cpu_time or real_time in seconds
-  double GetTime(BenchmarkTime which) const;
-
-  // get the real_time duration of the benchmark in seconds.
-  // it is better to use fuzzy float checks for this, as the float
-  // ASCII formatting is lossy.
-  double DurationRealTime() const {
-    return GetAs< double >("iterations") * GetTime(kRealTime);
-  }
-  // get the cpu_time duration of the benchmark in seconds
-  double DurationCPUTime() const {
-    return GetAs< double >("iterations") * GetTime(kCpuTime);
-  }
-
-  // get the string for a result by name, or nullptr if the name
-  // is not found
-  const std::string* Get(const char* entry_name) const {
-    auto it = values.find(entry_name);
-    if(it == values.end()) return nullptr;
-    return &it->second;
-  }
-
-  // get a result by name, parsed as a specific type.
-  // NOTE: for counters, use GetCounterAs instead.
-  template <class T>
-  T GetAs(const char* entry_name) const;
-
-  // counters are written as doubles, so they have to be read first
-  // as a double, and only then converted to the asked type.
-  template <class T>
-  T GetCounterAs(const char* entry_name) const {
-    double dval = GetAs< double >(entry_name);
-    T tval = static_cast< T >(dval);
-    return tval;
-  }
-};
-
-template <class T>
-T Results::GetAs(const char* entry_name) const {
-  auto *sv = Get(entry_name);
-  CHECK(sv != nullptr && !sv->empty());
-  std::stringstream ss;
-  ss << *sv;
-  T out;
-  ss >> out;
-  CHECK(!ss.fail());
-  return out;
-}
-
-//----------------------------------
-// Macros to help in result checking. Do not use them with arguments causing
-// side-effects.
-
-#define _CHECK_RESULT_VALUE(entry, getfn, var_type, var_name, relationship, value) \
-  CONCAT(CHECK_, relationship) \
-  (entry.getfn< var_type >(var_name), (value)) << "\n" \
-  << __FILE__ << ":" << __LINE__ << ": " << (entry).name << ":\n" \
-  << __FILE__ << ":" << __LINE__ << ": " \
-  << "expected (" << #var_type << ")" << (var_name) \
-  << "=" << (entry).getfn< var_type >(var_name) \
-  << " to be " #relationship " to " << (value) << "\n"
-
-// check with tolerance. eps_factor is the tolerance window, which is
-// interpreted relative to value (eg, 0.1 means 10% of value).
-#define _CHECK_FLOAT_RESULT_VALUE(entry, getfn, var_type, var_name, relationship, value, eps_factor) \
-  CONCAT(CHECK_FLOAT_, relationship) \
-  (entry.getfn< var_type >(var_name), (value), (eps_factor) * (value)) << "\n" \
-  << __FILE__ << ":" << __LINE__ << ": " << (entry).name << ":\n" \
-  << __FILE__ << ":" << __LINE__ << ": " \
-  << "expected (" << #var_type << ")" << (var_name) \
-  << "=" << (entry).getfn< var_type >(var_name) \
-  << " to be " #relationship " to " << (value) << "\n" \
-  << __FILE__ << ":" << __LINE__ << ": " \
-  << "with tolerance of " << (eps_factor) * (value) \
-  << " (" << (eps_factor)*100. << "%), " \
-  << "but delta was " << ((entry).getfn< var_type >(var_name) - (value)) \
-  << " (" << (((entry).getfn< var_type >(var_name) - (value)) \
-                 / \
-                 ((value) > 1.e-5 || value < -1.e-5 ? value : 1.e-5)*100.) \
-  << "%)"
-
-#define CHECK_RESULT_VALUE(entry, var_type, var_name, relationship, value) \
-  _CHECK_RESULT_VALUE(entry, GetAs, var_type, var_name, relationship, value)
-
-#define CHECK_COUNTER_VALUE(entry, var_type, var_name, relationship, value) \
-  _CHECK_RESULT_VALUE(entry, GetCounterAs, var_type, var_name, relationship, value)
-
-#define CHECK_FLOAT_RESULT_VALUE(entry, var_name, relationship, value, eps_factor) \
-  _CHECK_FLOAT_RESULT_VALUE(entry, GetAs, double, var_name, relationship, value, eps_factor)
-
-#define CHECK_FLOAT_COUNTER_VALUE(entry, var_name, relationship, value, eps_factor) \
-  _CHECK_FLOAT_RESULT_VALUE(entry, GetCounterAs, double, var_name, relationship, value, eps_factor)
-
 // ========================================================================= //
 // --------------------------- Misc Utilities ------------------------------ //
 // ========================================================================= //
diff --git a/test/output_test_helper.cc b/test/output_test_helper.cc
index 24746f6d27..2ea5224a26 100644
--- a/test/output_test_helper.cc
+++ b/test/output_test_helper.cc
@@ -42,21 +42,7 @@ SubMap& GetSubstitutions() {
       {" %s ", "[ ]+"},
       {"%time", "[ ]*[0-9]{1,5} ns"},
       {"%console_report", "[ ]*[0-9]{1,5} ns [ ]*[0-9]{1,5} ns [ ]*[0-9]+"},
-      {"%console_us_report", "[ ]*[0-9] us [ ]*[0-9] us [ ]*[0-9]+"},
-      {"%csv_header",
-       "name,iterations,real_time,cpu_time,time_unit,bytes_per_second,"
-       "items_per_second,label,error_occurred,error_message"},
-      {"%csv_report", "[0-9]+," + safe_dec_re + "," + safe_dec_re + ",ns,,,,,"},
-      {"%csv_us_report", "[0-9]+," + safe_dec_re + "," + safe_dec_re + ",us,,,,,"},
-      {"%csv_bytes_report",
-       "[0-9]+," + safe_dec_re + "," + safe_dec_re + ",ns," + safe_dec_re + ",,,,"},
-      {"%csv_items_report",
-       "[0-9]+," + safe_dec_re + "," + safe_dec_re + ",ns,," + safe_dec_re + ",,,"},
-      {"%csv_bytes_items_report",
-       "[0-9]+," + safe_dec_re + "," + safe_dec_re + ",ns," + safe_dec_re +
-           "," + safe_dec_re + ",,,"},
-      {"%csv_label_report_begin", "[0-9]+," + safe_dec_re + "," + safe_dec_re + ",ns,,,"},
-      {"%csv_label_report_end", ",,"}};
+      {"%console_us_report", "[ ]*[0-9] us [ ]*[0-9] us [ ]*[0-9]+"}};
   return map;
 }
 
@@ -153,176 +139,6 @@ class TestReporter : public benchmark::BenchmarkReporter {
 
 }  // end namespace internal
 
-// ========================================================================= //
-// -------------------------- Results checking ----------------------------- //
-// ========================================================================= //
-
-namespace internal {
-
-// Utility class to manage subscribers for checking benchmark results.
-// It works by parsing the CSV output to read the results.
-class ResultsChecker {
- public:
-
-  struct PatternAndFn : public TestCase { // reusing TestCase for its regexes
-    PatternAndFn(const std::string& rx, ResultsCheckFn fn_)
-        : TestCase(rx), fn(fn_) {}
-    ResultsCheckFn fn;
-  };
-
-  std::vector< PatternAndFn > check_patterns;
-  std::vector< Results > results;
-  std::vector< std::string > field_names;
-
-  void Add(const std::string& entry_pattern, ResultsCheckFn fn);
-
-  void CheckResults(std::stringstream& output);
-
- private:
-
-  void SetHeader_(const std::string& csv_header);
-  void SetValues_(const std::string& entry_csv_line);
-
-  std::vector< std::string > SplitCsv_(const std::string& line);
-
-};
-
-// store the static ResultsChecker in a function to prevent initialization
-// order problems
-ResultsChecker& GetResultsChecker() {
-  static ResultsChecker rc;
-  return rc;
-}
-
-// add a results checker for a benchmark
-void ResultsChecker::Add(const std::string& entry_pattern, ResultsCheckFn fn) {
-  check_patterns.emplace_back(entry_pattern, fn);
-}
-
-// check the results of all subscribed benchmarks
-void ResultsChecker::CheckResults(std::stringstream& output) {
-  // first reset the stream to the start
-  {
-    auto start = std::ios::streampos(0);
-    // clear before calling tellg()
-    output.clear();
-    // seek to zero only when needed
-    if(output.tellg() > start) output.seekg(start);
-    // and just in case
-    output.clear();
-  }
-  // now go over every line and publish it to the ResultsChecker
-  std::string line;
-  bool on_first = true;
-  while (output.eof() == false) {
-    CHECK(output.good());
-    std::getline(output, line);
-    if (on_first) {
-      SetHeader_(line); // this is important
-      on_first = false;
-      continue;
-    }
-    SetValues_(line);
-  }
-  // finally we can call the subscribed check functions
-  for(const auto& p : check_patterns) {
-    VLOG(2) << "--------------------------------\n";
-    VLOG(2) << "checking for benchmarks matching " << p.regex_str << "...\n";
-    for(const auto& r : results) {
-      if(!p.regex->Match(r.name)) {
-        VLOG(2) << p.regex_str << " is not matched by " << r.name << "\n";
-        continue;
-      } else {
-        VLOG(2) << p.regex_str << " is matched by " << r.name << "\n";
-      }
-      VLOG(1) << "Checking results of " << r.name << ": ... \n";
-      p.fn(r);
-      VLOG(1) << "Checking results of " << r.name << ": OK.\n";
-    }
-  }
-}
-
-// prepare for the names in this header
-void ResultsChecker::SetHeader_(const std::string& csv_header) {
-  field_names = SplitCsv_(csv_header);
-}
-
-// set the values for a benchmark
-void ResultsChecker::SetValues_(const std::string& entry_csv_line) {
-  if(entry_csv_line.empty()) return; // some lines are empty
-  CHECK(!field_names.empty());
-  auto vals = SplitCsv_(entry_csv_line);
-  CHECK_EQ(vals.size(), field_names.size());
-  results.emplace_back(vals[0]); // vals[0] is the benchmark name
-  auto &entry = results.back();
-  for (size_t i = 1, e = vals.size(); i < e; ++i) {
-    entry.values[field_names[i]] = vals[i];
-  }
-}
-
-// a quick'n'dirty csv splitter (eliminating quotes)
-std::vector< std::string > ResultsChecker::SplitCsv_(const std::string& line) {
-  std::vector< std::string > out;
-  if(line.empty()) return out;
-  if(!field_names.empty()) out.reserve(field_names.size());
-  size_t prev = 0, pos = line.find_first_of(','), curr = pos;
-  while(pos != line.npos) {
-    CHECK(curr > 0);
-    if(line[prev] == '"') ++prev;
-    if(line[curr-1] == '"') --curr;
-    out.push_back(line.substr(prev, curr-prev));
-    prev = pos + 1;
-    pos = line.find_first_of(',', pos + 1);
-    curr = pos;
-  }
-  curr = line.size();
-  if(line[prev] == '"') ++prev;
-  if(line[curr-1] == '"') --curr;
-  out.push_back(line.substr(prev, curr-prev));
-  return out;
-}
-
-}  // end namespace internal
-
-size_t AddChecker(const char* bm_name, ResultsCheckFn fn)
-{
-  auto &rc = internal::GetResultsChecker();
-  rc.Add(bm_name, fn);
-  return rc.results.size();
-}
-
-int Results::NumThreads() const {
-  auto pos = name.find("/threads:");
-  if(pos == name.npos) return 1;
-  auto end = name.find('/', pos + 9);
-  std::stringstream ss;
-  ss << name.substr(pos + 9, end);
-  int num = 1;
-  ss >> num;
-  CHECK(!ss.fail());
-  return num;
-}
-
-double Results::GetTime(BenchmarkTime which) const {
-  CHECK(which == kCpuTime || which == kRealTime);
-  const char *which_str = which == kCpuTime ? "cpu_time" : "real_time";
-  double val = GetAs< double >(which_str);
-  auto unit = Get("time_unit");
-  CHECK(unit);
-  if(*unit == "ns") {
-    return val * 1.e-9;
-  } else if(*unit == "us") {
-    return val * 1.e-6;
-  } else if(*unit == "ms") {
-    return val * 1.e-3;
-  } else if(*unit == "s") {
-    return val;
-  } else {
-    CHECK(1 == 0) << "unknown time unit: " << *unit;
-    return 0;
-  }
-}
-
 // ========================================================================= //
 // -------------------------- Public API Definitions------------------------ //
 // ========================================================================= //
@@ -370,7 +186,6 @@ void RunOutputTests(int argc, char* argv[]) {
   auto options = benchmark::internal::GetOutputOptions(/*force_no_color*/true);
   benchmark::ConsoleReporter CR(options);
   benchmark::JSONReporter JR;
-  benchmark::CSVReporter CSVR;
 
   struct ReporterTest {
     const char* name;
     std::vector<TestCase>& output_cases;
@@ -386,18 +201,14 @@ void RunOutputTests(int argc, char* argv[]) {
       reporter.SetOutputStream(&out_stream);
       reporter.SetErrorStream(&err_stream);
     }
-  } TestCases[] = {
-      {"ConsoleReporter", GetTestCaseList(TC_ConsoleOut),
-       GetTestCaseList(TC_ConsoleErr), CR},
-      {"JSONReporter", GetTestCaseList(TC_JSONOut), GetTestCaseList(TC_JSONErr),
-       JR},
-      {"CSVReporter", GetTestCaseList(TC_CSVOut), GetTestCaseList(TC_CSVErr),
-       CSVR},
-  };
+  } TestCases[] = {{"ConsoleReporter", GetTestCaseList(TC_ConsoleOut),
+                    GetTestCaseList(TC_ConsoleErr), CR},
+                   {"JSONReporter", GetTestCaseList(TC_JSONOut),
+                    GetTestCaseList(TC_JSONErr), JR}};
 
   // Create the test reporter and run the benchmarks.
   std::cout << "Running benchmarks...\n";
-  internal::TestReporter test_rep({&CR, &JR, &CSVR});
+  internal::TestReporter test_rep({&CR, &JR});
   benchmark::RunSpecifiedBenchmarks(&test_rep);
 
   for (auto& rep_test : TestCases) {
@@ -414,10 +225,4 @@ void RunOutputTests(int argc, char* argv[]) {
     std::cout << "\n";
   }
-
-  // now that we know the output is as expected, we can dispatch
-  // the checks to subscribees.
-  auto &csv = TestCases[2];
-  // would use == but gcc spits a warning
-  CHECK(std::strcmp(csv.name, "CSVReporter") == 0);
-  internal::GetResultsChecker().CheckResults(csv.out_stream);
 }
diff --git a/test/reporter_output_test.cc b/test/reporter_output_test.cc
index 1620b31396..7343deeb83 100644
--- a/test/reporter_output_test.cc
+++ b/test/reporter_output_test.cc
@@ -48,7 +48,6 @@ static int AddContextCases() {
   return 0;
 }
 int dummy_register = AddContextCases();
-ADD_CASES(TC_CSVOut, {{"%csv_header"}});
 
 // ========================================================================= //
 // ------------------------ Testing Basic Output --------------------------- //
@@ -67,7 +66,6 @@ ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_basic\",$"},
                        {"\"cpu_time\": %float,$", MR_Next},
                        {"\"time_unit\": \"ns\"$", MR_Next},
                        {"}", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_basic\",%csv_report$"}});
 
 // ========================================================================= //
 // ------------------------ Testing Bytes per Second Output ---------------- //
@@ -89,7 +87,6 @@ ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_bytes_per_second\",$"},
                        {"\"time_unit\": \"ns\",$", MR_Next},
                        {"\"bytes_per_second\": %float$", MR_Next},
                        {"}", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_bytes_per_second\",%csv_bytes_report$"}});
 
 // ========================================================================= //
 // ------------------------ Testing Items per Second Output ---------------- //
@@ -111,7 +108,6 @@ ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_items_per_second\",$"},
                        {"\"time_unit\": \"ns\",$", MR_Next},
                        {"\"items_per_second\": %float$", MR_Next},
                        {"}", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_items_per_second\",%csv_items_report$"}});
 
 // ========================================================================= //
 // ------------------------ Testing Label Output --------------------------- //
@@ -132,8 +128,6 @@ ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_label\",$"},
                        {"\"time_unit\": \"ns\",$", MR_Next},
                        {"\"label\": \"some label\"$", MR_Next},
                        {"}", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_label\",%csv_label_report_begin\"some "
-                      "label\"%csv_label_report_end$"}});
 
 // ========================================================================= //
 // ------------------------ Testing Error Output --------------------------- //
@@ -150,7 +144,6 @@ ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_error\",$"},
                        {"\"error_occurred\": true,$", MR_Next},
                        {"\"error_message\": \"message\",$", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_error\",,,,,,,,true,\"message\"$"}});
 
 // ========================================================================= //
 // ------------------------ Testing No Arg Name Output -----------------------
@@ -164,7 +157,6 @@ void BM_no_arg_name(benchmark::State& state) {
 BENCHMARK(BM_no_arg_name)->Arg(3);
 ADD_CASES(TC_ConsoleOut, {{"^BM_no_arg_name/3 %console_report$"}});
 ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_no_arg_name/3\",$"}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_no_arg_name/3\",%csv_report$"}});
 
 // ========================================================================= //
 // ------------------------ Testing Arg Name Output ----------------------- //
@@ -177,7 +169,6 @@ void BM_arg_name(benchmark::State& state) {
 BENCHMARK(BM_arg_name)->ArgName("first")->Arg(3);
 ADD_CASES(TC_ConsoleOut, {{"^BM_arg_name/first:3 %console_report$"}});
 ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_arg_name/first:3\",$"}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_arg_name/first:3\",%csv_report$"}});
 
 // ========================================================================= //
 // ------------------------ Testing Arg Names Output ----------------------- //
@@ -191,7 +182,6 @@ BENCHMARK(BM_arg_names)->Args({2, 5, 4})->ArgNames({"first", "", "third"});
 ADD_CASES(TC_ConsoleOut,
           {{"^BM_arg_names/first:2/5/third:4 %console_report$"}});
 ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_arg_names/first:2/5/third:4\",$"}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_arg_names/first:2/5/third:4\",%csv_report$"}});
 
 // ========================================================================= //
 // ----------------------- Testing Complexity Output ----------------------- //
@@ -229,11 +219,7 @@ ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_Repeat/repeats:2\",$"},
                        {"\"name\": \"BM_Repeat/repeats:2_mean\",$"},
                        {"\"name\": \"BM_Repeat/repeats:2_median\",$"},
                        {"\"name\": \"BM_Repeat/repeats:2_stddev\",$"}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_Repeat/repeats:2\",%csv_report$"},
-                      {"^\"BM_Repeat/repeats:2\",%csv_report$"},
-                      {"^\"BM_Repeat/repeats:2_mean\",%csv_report$"},
-                      {"^\"BM_Repeat/repeats:2_median\",%csv_report$"},
-                      {"^\"BM_Repeat/repeats:2_stddev\",%csv_report$"}});
+
 // but for two repetitions, mean and median is the same, so let's repeat..
 BENCHMARK(BM_Repeat)->Repetitions(3);
 ADD_CASES(TC_ConsoleOut, {{"^BM_Repeat/repeats:3 %console_report$"},
@@ -248,12 +234,7 @@ ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_Repeat/repeats:3\",$"},
                        {"\"name\": \"BM_Repeat/repeats:3_mean\",$"},
                        {"\"name\": \"BM_Repeat/repeats:3_median\",$"},
                        {"\"name\": \"BM_Repeat/repeats:3_stddev\",$"}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_Repeat/repeats:3\",%csv_report$"},
-                      {"^\"BM_Repeat/repeats:3\",%csv_report$"},
-                      {"^\"BM_Repeat/repeats:3\",%csv_report$"},
-                      {"^\"BM_Repeat/repeats:3_mean\",%csv_report$"},
-                      {"^\"BM_Repeat/repeats:3_median\",%csv_report$"},
-                      {"^\"BM_Repeat/repeats:3_stddev\",%csv_report$"}});
+
 // median differs between even/odd number of repetitions, so just to be sure
 BENCHMARK(BM_Repeat)->Repetitions(4);
 ADD_CASES(TC_ConsoleOut, {{"^BM_Repeat/repeats:4 %console_report$"},
@@ -270,13 +251,6 @@ ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_Repeat/repeats:4\",$"},
                        {"\"name\": \"BM_Repeat/repeats:4_mean\",$"},
                        {"\"name\": \"BM_Repeat/repeats:4_median\",$"},
                        {"\"name\": \"BM_Repeat/repeats:4_stddev\",$"}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_Repeat/repeats:4\",%csv_report$"},
-                      {"^\"BM_Repeat/repeats:4\",%csv_report$"},
-                      {"^\"BM_Repeat/repeats:4\",%csv_report$"},
-                      {"^\"BM_Repeat/repeats:4\",%csv_report$"},
-                      {"^\"BM_Repeat/repeats:4_mean\",%csv_report$"},
-                      {"^\"BM_Repeat/repeats:4_median\",%csv_report$"},
-                      {"^\"BM_Repeat/repeats:4_stddev\",%csv_report$"}});
 
 // Test that a non-repeated test still prints non-aggregate results even when
 // only-aggregate reports have been requested
@@ -287,7 +261,6 @@ void BM_RepeatOnce(benchmark::State& state) {
 BENCHMARK(BM_RepeatOnce)->Repetitions(1)->ReportAggregatesOnly();
 ADD_CASES(TC_ConsoleOut, {{"^BM_RepeatOnce/repeats:1 %console_report$"}});
 ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_RepeatOnce/repeats:1\",$"}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_RepeatOnce/repeats:1\",%csv_report$"}});
 
 // Test that non-aggregate data is not reported
 void BM_SummaryRepeat(benchmark::State& state) {
@@ -304,10 +277,6 @@ ADD_CASES(TC_JSONOut, {{".*BM_SummaryRepeat/repeats:3 ", MR_Not},
                        {"\"name\": \"BM_SummaryRepeat/repeats:3_mean\",$"},
                        {"\"name\": \"BM_SummaryRepeat/repeats:3_median\",$"},
                        {"\"name\": \"BM_SummaryRepeat/repeats:3_stddev\",$"}});
-ADD_CASES(TC_CSVOut, {{".*BM_SummaryRepeat/repeats:3 ", MR_Not},
-                      {"^\"BM_SummaryRepeat/repeats:3_mean\",%csv_report$"},
-                      {"^\"BM_SummaryRepeat/repeats:3_median\",%csv_report$"},
-                      {"^\"BM_SummaryRepeat/repeats:3_stddev\",%csv_report$"}});
 
 void BM_RepeatTimeUnit(benchmark::State& state) {
   for (auto _ : state) {
@@ -329,11 +298,6 @@ ADD_CASES(TC_JSONOut, {{".*BM_RepeatTimeUnit/repeats:3 ", MR_Not},
                        {"\"time_unit\": \"us\",?$"},
                        {"\"name\": \"BM_RepeatTimeUnit/repeats:3_stddev\",$"},
                        {"\"time_unit\": \"us\",?$"}});
-ADD_CASES(TC_CSVOut,
-          {{".*BM_RepeatTimeUnit/repeats:3 ", MR_Not},
-           {"^\"BM_RepeatTimeUnit/repeats:3_mean\",%csv_us_report$"},
-           {"^\"BM_RepeatTimeUnit/repeats:3_median\",%csv_us_report$"},
-           {"^\"BM_RepeatTimeUnit/repeats:3_stddev\",%csv_us_report$"}});
 
 // ========================================================================= //
 // -------------------- Testing user-provided statistics ------------------- //
@@ -365,13 +329,6 @@ ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_UserStats/repeats:3\",$"},
                        {"\"name\": \"BM_UserStats/repeats:3_median\",$"},
                        {"\"name\": \"BM_UserStats/repeats:3_stddev\",$"},
                        {"\"name\": \"BM_UserStats/repeats:3_\",$"}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_UserStats/repeats:3\",%csv_report$"},
-                      {"^\"BM_UserStats/repeats:3\",%csv_report$"},
-                      {"^\"BM_UserStats/repeats:3\",%csv_report$"},
-                      {"^\"BM_UserStats/repeats:3_mean\",%csv_report$"},
-                      {"^\"BM_UserStats/repeats:3_median\",%csv_report$"},
-                      {"^\"BM_UserStats/repeats:3_stddev\",%csv_report$"},
-                      {"^\"BM_UserStats/repeats:3_\",%csv_report$"}});
 
 // ========================================================================= //
 // --------------------------- TEST CASES END ------------------------------ //
diff --git a/test/user_counters_tabular_test.cc b/test/user_counters_tabular_test.cc
index 9b8a6132e6..3f58c7a01f 100644
--- a/test/user_counters_tabular_test.cc
+++ b/test/user_counters_tabular_test.cc
@@ -46,8 +46,6 @@ ADD_CASES(TC_ConsoleOut, {
     {"^BM_CounterSet2_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$"},
     // clang-format on
 });
-ADD_CASES(TC_CSVOut, {{"%csv_header,"
-                      "\"Bar\",\"Bat\",\"Baz\",\"Foo\",\"Frob\",\"Lob\""}});
 
 // ========================================================================= //
 // ------------------------- Tabular Counters Output ----------------------- //
@@ -79,19 +77,6 @@ ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_Counters_Tabular/threads:%int\",$"},
                        {"\"Frob\": %float,$", MR_Next},
                        {"\"Lob\": %float$", MR_Next},
                        {"}", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_Counters_Tabular/threads:%int\",%csv_report,"
-                      "%float,%float,%float,%float,%float,%float$"}});
-// VS2013 does not allow this function to be passed as a lambda argument
-// to CHECK_BENCHMARK_RESULTS()
-void CheckTabular(Results const& e) {
-  CHECK_COUNTER_VALUE(e, int, "Foo", EQ, 1);
-  CHECK_COUNTER_VALUE(e, int, "Bar", EQ, 2);
-  CHECK_COUNTER_VALUE(e, int, "Baz", EQ, 4);
-  CHECK_COUNTER_VALUE(e, int, "Bat", EQ, 8);
-  CHECK_COUNTER_VALUE(e, int, "Frob", EQ, 16);
-  CHECK_COUNTER_VALUE(e, int, "Lob", EQ, 32);
-}
-CHECK_BENCHMARK_RESULTS("BM_Counters_Tabular/threads:%int", &CheckTabular);
 
 // ========================================================================= //
 // -------------------- Tabular+Rate Counters Output ----------------------- //
@@ -123,21 +108,6 @@ ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_CounterRates_Tabular/threads:%int\",$"},
                        {"\"Frob\": %float,$", MR_Next},
                        {"\"Lob\": %float$", MR_Next},
                        {"}", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_CounterRates_Tabular/threads:%int\",%csv_report,"
-                      "%float,%float,%float,%float,%float,%float$"}});
-// VS2013 does not allow this function to be passed as a lambda argument
-// to CHECK_BENCHMARK_RESULTS()
-void CheckTabularRate(Results const& e) {
-  double t = e.DurationCPUTime();
-  CHECK_FLOAT_COUNTER_VALUE(e, "Foo", EQ, 1./t, 0.001);
-  CHECK_FLOAT_COUNTER_VALUE(e, "Bar", EQ, 2./t, 0.001);
-  CHECK_FLOAT_COUNTER_VALUE(e, "Baz", EQ, 4./t, 0.001);
-  CHECK_FLOAT_COUNTER_VALUE(e, "Bat", EQ, 8./t, 0.001);
-  CHECK_FLOAT_COUNTER_VALUE(e, "Frob", EQ, 16./t, 0.001);
-  CHECK_FLOAT_COUNTER_VALUE(e, "Lob", EQ, 32./t, 0.001);
-}
-CHECK_BENCHMARK_RESULTS("BM_CounterRates_Tabular/threads:%int",
-                        &CheckTabularRate);
 
 // ========================================================================= //
 // ------------------------- Tabular Counters Output ----------------------- //
@@ -164,16 +134,6 @@ ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_CounterSet0_Tabular/threads:%int\",$"},
                        {"\"Baz\": %float,$", MR_Next},
                        {"\"Foo\": %float$", MR_Next},
                        {"}", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_CounterSet0_Tabular/threads:%int\",%csv_report,"
-                      "%float,,%float,%float,,"}});
-// VS2013 does not allow this function to be passed as a lambda argument
-// to CHECK_BENCHMARK_RESULTS()
-void CheckSet0(Results const& e) {
-  CHECK_COUNTER_VALUE(e, int, "Foo", EQ, 10);
-  CHECK_COUNTER_VALUE(e, int, "Bar", EQ, 20);
-  CHECK_COUNTER_VALUE(e, int, "Baz", EQ, 40);
-}
-CHECK_BENCHMARK_RESULTS("BM_CounterSet0_Tabular", &CheckSet0);
 
 // again.
 void BM_CounterSet1_Tabular(benchmark::State& state) {
@@ -196,16 +156,6 @@ ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_CounterSet1_Tabular/threads:%int\",$"},
                        {"\"Baz\": %float,$", MR_Next},
                        {"\"Foo\": %float$", MR_Next},
                        {"}", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_CounterSet1_Tabular/threads:%int\",%csv_report,"
-                      "%float,,%float,%float,,"}});
-// VS2013 does not allow this function to be passed as a lambda argument
-// to CHECK_BENCHMARK_RESULTS()
-void CheckSet1(Results const& e) {
-  CHECK_COUNTER_VALUE(e, int, "Foo", EQ, 15);
-  CHECK_COUNTER_VALUE(e, int, "Bar", EQ, 25);
-  CHECK_COUNTER_VALUE(e, int, "Baz", EQ, 45);
-}
-CHECK_BENCHMARK_RESULTS("BM_CounterSet1_Tabular/threads:%int", &CheckSet1);
 
 // ========================================================================= //
 // ------------------------- Tabular Counters Output ----------------------- //
@@ -232,16 +182,6 @@ ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_CounterSet2_Tabular/threads:%int\",$"},
                        {"\"Baz\": %float,$", MR_Next},
                        {"\"Foo\": %float$", MR_Next},
                        {"}", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_CounterSet2_Tabular/threads:%int\",%csv_report,"
-                      ",%float,%float,%float,,"}});
-// VS2013 does not allow this function to be passed as a lambda argument
-// to CHECK_BENCHMARK_RESULTS()
-void CheckSet2(Results const& e) {
-  CHECK_COUNTER_VALUE(e, int, "Foo", EQ, 10);
-  CHECK_COUNTER_VALUE(e, int, "Bat", EQ, 30);
-  CHECK_COUNTER_VALUE(e, int, "Baz", EQ, 40);
-}
-CHECK_BENCHMARK_RESULTS("BM_CounterSet2_Tabular", &CheckSet2);
 
 // ========================================================================= //
 // --------------------------- TEST CASES END ------------------------------ //
diff --git a/test/user_counters_test.cc b/test/user_counters_test.cc
index 06aafb1fa1..c3e1e5b628 100644
--- a/test/user_counters_test.cc
+++ b/test/user_counters_test.cc
@@ -12,7 +12,6 @@ ADD_CASES(TC_ConsoleOut,
           {{"^[-]+$", MR_Next},
            {"^Benchmark %s Time %s CPU %s Iterations UserCounters...$", MR_Next},
            {"^[-]+$", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"%csv_header,\"bar\",\"foo\""}});
 
 // ========================================================================= //
 // ------------------------- Simple Counters Output ------------------------ //
@@ -34,16 +33,6 @@ ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_Counters_Simple\",$"},
                        {"\"bar\": %float,$", MR_Next},
                        {"\"foo\": %float$", MR_Next},
                        {"}", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_Counters_Simple\",%csv_report,%float,%float$"}});
-// VS2013 does not allow this function to be passed as a lambda argument
-// to CHECK_BENCHMARK_RESULTS()
-void CheckSimple(Results const& e) {
-  double its = e.GetAs< double >("iterations");
-  CHECK_COUNTER_VALUE(e, int, "foo", EQ, 1);
-  // check that the value of bar is within 0.1% of the expected value
-  CHECK_FLOAT_COUNTER_VALUE(e, "bar", EQ, 2.*its, 0.001);
-}
-CHECK_BENCHMARK_RESULTS("BM_Counters_Simple", &CheckSimple);
 
 // ========================================================================= //
 // --------------------- Counters+Items+Bytes/s Output --------------------- //
@@ -72,20 +61,6 @@ ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_Counters_WithBytesAndItemsPSec\",$"},
                        {"\"bar\": %float,$", MR_Next},
                        {"\"foo\": %float$", MR_Next},
                        {"}", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_Counters_WithBytesAndItemsPSec\","
-                      "%csv_bytes_items_report,%float,%float$"}});
-// VS2013 does not allow this function to be passed as a lambda argument
-// to CHECK_BENCHMARK_RESULTS()
-void CheckBytesAndItemsPSec(Results const& e) {
-  double t = e.DurationCPUTime(); // this (and not real time) is the time used
-  CHECK_COUNTER_VALUE(e, int, "foo", EQ, 1);
-  CHECK_COUNTER_VALUE(e, int, "bar", EQ, num_calls1);
-  // check that the values are within 0.1% of the expected values
-  CHECK_FLOAT_RESULT_VALUE(e, "bytes_per_second", EQ, 364./t, 0.001);
-  CHECK_FLOAT_RESULT_VALUE(e, "items_per_second", EQ, 150./t, 0.001);
-}
-CHECK_BENCHMARK_RESULTS("BM_Counters_WithBytesAndItemsPSec",
-                        &CheckBytesAndItemsPSec);
 
 // ========================================================================= //
 // ------------------------- Rate Counters Output -------------------------- //
@@ -108,16 +83,6 @@ ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_Counters_Rate\",$"},
                        {"\"bar\": %float,$", MR_Next},
                        {"\"foo\": %float$", MR_Next},
                        {"}", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_Counters_Rate\",%csv_report,%float,%float$"}});
-// VS2013 does not allow this function to be passed as a lambda argument
-// to CHECK_BENCHMARK_RESULTS()
-void CheckRate(Results const& e) {
-  double t = e.DurationCPUTime(); // this (and not real time) is the time used
-  // check that the values are within 0.1% of the expected values
-  CHECK_FLOAT_COUNTER_VALUE(e, "foo", EQ, 1./t, 0.001);
-  CHECK_FLOAT_COUNTER_VALUE(e, "bar", EQ, 2./t, 0.001);
-}
-CHECK_BENCHMARK_RESULTS("BM_Counters_Rate", &CheckRate);
 
 // ========================================================================= //
 // ------------------------- Thread Counters Output ------------------------ //
@@ -139,14 +104,6 @@ ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_Counters_Threads/threads:%int\",$"},
                        {"\"bar\": %float,$", MR_Next},
                        {"\"foo\": %float$", MR_Next},
                        {"}", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_Counters_Threads/threads:%int\",%csv_report,%float,%float$"}});
-// VS2013 does not allow this function to be passed as a lambda argument
-// to CHECK_BENCHMARK_RESULTS()
-void CheckThreads(Results const& e) {
-  CHECK_COUNTER_VALUE(e, int, "foo", EQ, e.NumThreads());
-  CHECK_COUNTER_VALUE(e, int, "bar", EQ, 2 * e.NumThreads());
-}
-CHECK_BENCHMARK_RESULTS("BM_Counters_Threads/threads:%int",
-                        &CheckThreads);
 
 // ========================================================================= //
 // ---------------------- ThreadAvg Counters Output ------------------------ //
@@ -169,15 +126,6 @@ ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_Counters_AvgThreads/threads:%int\",$"},
                        {"\"bar\": %float,$", MR_Next},
                        {"\"foo\": %float$", MR_Next},
                        {"}", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_Counters_AvgThreads/threads:%int\",%csv_report,%float,%float$"}});
-// VS2013 does not allow this function to be passed as a lambda argument
-// to CHECK_BENCHMARK_RESULTS()
-void CheckAvgThreads(Results const& e) {
-  CHECK_COUNTER_VALUE(e, int, "foo", EQ, 1);
-  CHECK_COUNTER_VALUE(e, int, "bar", EQ, 2);
-}
-CHECK_BENCHMARK_RESULTS("BM_Counters_AvgThreads/threads:%int",
-                        &CheckAvgThreads);
 
 // ========================================================================= //
 // ---------------------- ThreadAvg Counters Output ------------------------ //
@@ -200,15 +148,6 @@ ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_Counters_AvgThreadsRate/threads:%int\",$
                        {"\"bar\": %float,$", MR_Next},
                        {"\"foo\": %float$", MR_Next},
                        {"}", MR_Next}});
-ADD_CASES(TC_CSVOut, {{"^\"BM_Counters_AvgThreadsRate/threads:%int\",%csv_report,%float,%float$"}});
-// VS2013 does not allow this function to be passed as a lambda argument
-// to CHECK_BENCHMARK_RESULTS()
-void CheckAvgThreadsRate(Results const& e) {
-  CHECK_FLOAT_COUNTER_VALUE(e, "foo", EQ, 1./e.DurationCPUTime(), 0.001);
-  CHECK_FLOAT_COUNTER_VALUE(e, "bar", EQ, 2./e.DurationCPUTime(), 0.001);
-}
-CHECK_BENCHMARK_RESULTS("BM_Counters_AvgThreadsRate/threads:%int",
-                        &CheckAvgThreadsRate);
 
 // ========================================================================= //
 // --------------------------- TEST CASES END ------------------------------ //