void BenchmarkSuite::run(
    const IFilter&      filter,
    BenchmarkResult&    suite_result) const
{
    BenchmarkingThreadContext benchmarking_context;
    bool has_begun_suite = false;

    for (size_t i = 0; i < impl->m_factories.size(); ++i)
    {
        IBenchmarkCaseFactory* factory = impl->m_factories[i];

        // Skip benchmark cases that aren't let through by the filter.
        if (!filter.accepts(factory->get_name()))
            continue;

        if (!has_begun_suite)
        {
            // Tell the listeners that a benchmark suite is about to be executed.
            suite_result.begin_suite(*this);
            suite_result.signal_suite_execution();
            has_begun_suite = true;
        }

        // Instantiate the benchmark case.
        unique_ptr<IBenchmarkCase> benchmark(factory->create());

        // Recreate the stopwatch (and the underlying timer) for every benchmark
        // case, since the CPU frequency will fluctuate quite a bit depending on
        // the CPU load. We need an up-to-date frequency estimation in order to
        // compute accurate call rates.
        Impl::StopwatchType stopwatch(100000);

        // Tell the listeners that a benchmark case is about to be executed.
        suite_result.begin_case(*this, *benchmark.get());

#ifdef NDEBUG
        try
#endif
        {
            suite_result.signal_case_execution();

            // Estimate benchmarking parameters.
            const size_t measurement_count =
                Impl::compute_measurement_count(benchmark.get(), stopwatch);

            // Measure the overhead of calling IBenchmarkCase::run().
            const double overhead_ticks =
                Impl::measure_call_overhead_ticks(stopwatch, measurement_count);

            // Run the benchmark case.
            const double runtime_ticks =
                Impl::measure_runtime(
                    benchmark.get(),
                    stopwatch,
                    BenchmarkSuite::Impl::measure_runtime_ticks,
                    measurement_count);

#ifdef GENERATE_BENCHMARK_PLOTS
            vector<Vector2d> points;

            for (size_t j = 0; j < 100; ++j)
            {
                const double ticks =
                    Impl::measure_runtime(
                        benchmark.get(),
                        stopwatch,
                        BenchmarkSuite::Impl::measure_runtime_ticks,
                        max<size_t>(1, measurement_count / 100));
                points.emplace_back(
                    static_cast<double>(j),
                    ticks > overhead_ticks ? ticks - overhead_ticks : 0.0);
            }

            stringstream sstr;
            sstr << "unit benchmarks/plots/";
            sstr << get_name() << "_" << benchmark->get_name();
            sstr << ".gnuplot";

            GnuplotFile plotfile;
            plotfile.new_plot().set_points(points);
            plotfile.write(sstr.str());
#endif

            // Gather the timing results.
            TimingResult timing_result;
            timing_result.m_iteration_count = 1;
            timing_result.m_measurement_count = measurement_count;
            timing_result.m_frequency = static_cast<double>(stopwatch.get_timer().frequency());
            timing_result.m_ticks = runtime_ticks > overhead_ticks ? runtime_ticks - overhead_ticks : 0.0;

            // Post the timing result.
            suite_result.write(
                *this,
                *benchmark.get(),
                __FILE__,
                __LINE__,
                timing_result);
        }
#ifdef NDEBUG
        catch (const exception& e)
        {
            if (e.what()[0] != '\0')
            {
                suite_result.write(
                    *this,
                    *benchmark.get(),
                    __FILE__,
                    __LINE__,
                    "an unexpected exception was caught: %s",
                    e.what());
            }
            else
            {
                suite_result.write(
                    *this,
                    *benchmark.get(),
                    __FILE__,
                    __LINE__,
                    "an unexpected exception was caught (no details available).");
            }

            suite_result.signal_case_failure();
        }
        catch (...)
        {
            suite_result.write(
                *this,
                *benchmark.get(),
                __FILE__,
                __LINE__,
                "an unexpected exception was caught (no details available).");

            suite_result.signal_case_failure();
        }
#endif

        // Tell the listeners that the benchmark case execution has ended.
        suite_result.end_case(*this, *benchmark.get());
    }

    if (has_begun_suite)
    {
        // Report a benchmark suite failure if one or more benchmark cases failed.
        if (suite_result.get_case_failure_count() > 0)
            suite_result.signal_suite_failure();

        // Tell the listeners that the benchmark suite execution has ended.
        suite_result.end_suite(*this);
    }
}
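
// Illustrative usage sketch, not part of the implementation above. It assumes
// that IFilter exposes a virtual `bool accepts(const char* name) const` method
// (matching the filter.accepts() call in BenchmarkSuite::run()) and that
// BenchmarkResult is default-constructible, with listeners registered on it
// separately. AcceptAllFilter and run_benchmark_suite are hypothetical names
// introduced only for this example.

struct AcceptAllFilter
  : public IFilter
{
    // Let every benchmark case through.
    bool accepts(const char*) const override
    {
        return true;
    }
};

void run_benchmark_suite(const BenchmarkSuite& suite)
{
    AcceptAllFilter filter;
    BenchmarkResult result;

    // Run every case of the suite; listeners attached to `result` receive the
    // begin/end notifications, timing results and failure signals emitted above.
    suite.run(filter, result);

    // Since m_iteration_count is set to 1 above, a call rate in runs per second
    // can be derived from a posted TimingResult as m_frequency / m_ticks
    // (assuming m_ticks > 0).
}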