Commit 00d9c53b authored by Brian Sheedy, committed by Commit Bot

Switch performance_browser_tests to histograms

Switches performance_browser_tests from the PrintResult functions to
PerfResultReporter and whitelists the suite so its results are converted
to histograms before being uploaded to the perf dashboard.

Bug: 923564
Change-Id: Ica64da69d6fe0a0de680c673b63fe54ff235bc1b
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1877151
Commit-Queue: Brian Sheedy <bsheedy@chromium.org>
Reviewed-by: Yuri Wiitala <miu@chromium.org>
Cr-Commit-Position: refs/heads/master@{#710009}
parent 42b97a68
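For orientation, here is a minimal sketch of the reporting pattern the diff below adopts: register each metric once on a perf_test::PerfResultReporter, then emit values through its AddResult* methods instead of the free PrintResult functions. The suite prefix, story, and metric names in the sketch are made up and are not part of this change.

```cpp
// Hypothetical example, not taken from this change: emitting one value
// through PerfResultReporter rather than perf_test::PrintResult.
#include "testing/perf/perf_result_reporter.h"

void ReportExampleFrameTime(double frame_time_ms) {
  // Prefix and story names are illustrative; real tests pick their own.
  perf_test::PerfResultReporter reporter("ExampleSuite.", "example_story");
  reporter.RegisterImportantMetric("FrameTime", "ms");
  // Emits the legacy text result, which the whitelist entry added below lets
  // the bots convert to a histogram before uploading to the perf dashboard.
  reporter.AddResult("FrameTime", frame_time_ms);
}
```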
@@ -21,7 +21,7 @@
#include "extensions/common/switches.h"
#include "extensions/test/extension_test_message_listener.h"
#include "testing/gtest/include/gtest/gtest.h"
-#include "testing/perf/perf_test.h"
+#include "testing/perf/perf_result_reporter.h"
#include "ui/compositor/compositor_switches.h"
#include "ui/gl/gl_switches.h"
@@ -39,6 +39,25 @@ constexpr int kMinDataPointsForFullRun = 100; // ~5 sec at 24fps.
// Minimum number of events required for data analysis in a non-performance run.
constexpr int kMinDataPointsForQuickRun = 3;

+// Metric names are camelcase instead of using underscores since they need to
+// be used to get trace events.
+constexpr char kMetricPrefixTabCapture[] = "TabCapture.";
+constexpr char kMetricCaptureMs[] = "Capture";
+constexpr char kMetricCaptureFailRatePercent[] = "CaptureFailRate";
+constexpr char kMetricCaptureLatencyMs[] = "CaptureLatency";
+constexpr char kMetricCommitAndDrawCompositorFrameMs[] =
+    "RenderWidget::DidCommitAndDrawCompositorFrame";
+
+perf_test::PerfResultReporter SetUpTabCaptureReporter(
+    const std::string& story) {
+  perf_test::PerfResultReporter reporter(kMetricPrefixTabCapture, story);
+  reporter.RegisterImportantMetric(kMetricCaptureMs, "ms");
+  reporter.RegisterImportantMetric(kMetricCaptureFailRatePercent, "percent");
+  reporter.RegisterImportantMetric(kMetricCaptureLatencyMs, "ms");
+  reporter.RegisterImportantMetric(kMetricCommitAndDrawCompositorFrameMs, "ms");
+  return reporter;
+}
// A convenience macro to run a gtest expectation in the "full performance run"
// setting, or else a warning that something is not being entirely tested in the
// "CQ run" setting. This is required because the test runs in the CQ may not be

@@ -79,6 +98,12 @@ class TabCapturePerformanceTest : public TabCapturePerformanceTestBase,
      suffix += "_webrtc";
    if (HasFlag(kSmallWindow))
      suffix += "_small";
+    // Make sure we always have a story.
+    if (suffix.size() == 0) {
+      suffix = "_baseline_story";
+    }
+    // Strip off the leading _.
+    suffix.erase(0, 1);
    return suffix;
  }
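As a hedged usage sketch (the flag combination and the value are invented), the SetUpTabCaptureReporter helper and the story suffix above combine roughly like this:

```cpp
// Illustrative fragment, not from this change: a run with the WebRTC and
// small-window flags reports under the "webrtc_small" story, while a run
// with no flags falls back to "baseline_story".
void ReportOneCaptureInterval() {
  perf_test::PerfResultReporter reporter =
      SetUpTabCaptureReporter("webrtc_small");
  // About 41.7 ms between capture events corresponds to roughly 24 fps.
  reporter.AddResult(kMetricCaptureMs, 41.7);
}
```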
@@ -128,8 +153,8 @@ class TabCapturePerformanceTest : public TabCapturePerformanceTestBase,
    double std_dev_ms = stats.standard_deviation_us / 1000.0;
    std::string mean_and_error = base::StringPrintf("%f,%f", mean_ms,
                                                    std_dev_ms);
-    perf_test::PrintResultMeanAndError(kTestName, GetSuffixForTestFlags(),
-                                       event_name, mean_and_error, "ms", true);
+    auto reporter = SetUpTabCaptureReporter(GetSuffixForTestFlags());
+    reporter.AddResultMeanAndError(event_name, mean_and_error);
    return have_rate_stats;
  }
@@ -168,10 +193,10 @@ class TabCapturePerformanceTest : public TabCapturePerformanceTestBase,
        (count == 0)
            ? NAN
            : (sqrt(std::max(0.0, count * sqr_sum - sum * sum)) / count);
-    perf_test::PrintResultMeanAndError(
-        kTestName, GetSuffixForTestFlags(), event_name + "Latency",
-        base::StringPrintf("%f,%f", mean_us / 1000.0, std_dev_us / 1000.0),
-        "ms", true);
+    auto reporter = SetUpTabCaptureReporter(GetSuffixForTestFlags());
+    reporter.AddResultMeanAndError(
+        event_name + "Latency",
+        base::StringPrintf("%f,%f", mean_us / 1000.0, std_dev_us / 1000.0));
    return count > 0;
  }
@@ -217,9 +242,8 @@ class TabCapturePerformanceTest : public TabCapturePerformanceTestBase,
      }
      fail_percent *= fail_count / events_to_analyze.size();
    }
-    perf_test::PrintResult(
-        kTestName, GetSuffixForTestFlags(), event_name + "FailRate",
-        base::StringPrintf("%f", fail_percent), "percent", true);
+    auto reporter = SetUpTabCaptureReporter(GetSuffixForTestFlags());
+    reporter.AddResult(event_name + "FailRate", fail_percent);
    return !events_to_analyze.empty();
  }
@@ -229,12 +253,8 @@ class TabCapturePerformanceTest : public TabCapturePerformanceTestBase,
  std::string test_page_html_;

  // Naming of performance measurement written to stdout.
-  static const char kTestName[];
};

-// static
-const char TabCapturePerformanceTest::kTestName[] = "TabCapturePerformance";
-
}  // namespace

IN_PROC_BROWSER_TEST_P(TabCapturePerformanceTest, Performance) {
@@ -269,22 +289,25 @@ IN_PROC_BROWSER_TEST_P(TabCapturePerformanceTest, Performance) {
  // Note that any changes to drawing or compositing in the renderer,
  // including changes to Blink (e.g., Canvas drawing), layout, etc.; will
  // have an impact on this result.
-  EXPECT_FOR_PERFORMANCE_RUN(PrintRateResults(
-      analyzer.get(), "RenderWidget::DidCommitAndDrawCompositorFrame"));
+  EXPECT_FOR_PERFORMANCE_RUN(
+      PrintRateResults(analyzer.get(), kMetricCommitAndDrawCompositorFrameMs));

  // This prints out the average time between capture events in the browser
  // process. This should roughly match the renderer's draw+composite rate.
-  EXPECT_FOR_PERFORMANCE_RUN(PrintRateResults(analyzer.get(), "Capture"));
+  EXPECT_FOR_PERFORMANCE_RUN(
+      PrintRateResults(analyzer.get(), kMetricCaptureMs));

  // Analyze mean/stddev of the capture latency. This is a measure of how long
  // each capture took, from initiation until read-back from the GPU into a
  // media::VideoFrame was complete. Lower is better.
-  EXPECT_FOR_PERFORMANCE_RUN(PrintLatencyResults(analyzer.get(), "Capture"));
+  EXPECT_FOR_PERFORMANCE_RUN(
+      PrintLatencyResults(analyzer.get(), kMetricCaptureMs));

  // Analyze percentage of failed captures. This measures how often captures
  // were initiated, but not completed successfully. Lower is better, and zero
  // is ideal.
-  EXPECT_FOR_PERFORMANCE_RUN(PrintFailRateResults(analyzer.get(), "Capture"));
+  EXPECT_FOR_PERFORMANCE_RUN(
+      PrintFailRateResults(analyzer.get(), kMetricCaptureMs));
}

#if defined(OS_CHROMEOS)
...
@@ -26,71 +26,74 @@ void PerfResultReporter::RegisterImportantMetric(
}

void PerfResultReporter::AddResult(const std::string& metric_suffix,
-                                   size_t value) {
-  auto iter = metric_map_.find(metric_suffix);
-  CHECK(iter != metric_map_.end());
-  PrintResult(metric_basename_, metric_suffix, story_name_, value,
-              iter->second.units, iter->second.important);
+                                   size_t value) const {
+  auto info = GetMetricInfoOrFail(metric_suffix);
+  PrintResult(metric_basename_, metric_suffix, story_name_, value, info.units,
+              info.important);
}
void PerfResultReporter::AddResult(const std::string& metric_suffix,
-                                   double value) {
-  auto iter = metric_map_.find(metric_suffix);
-  CHECK(iter != metric_map_.end());
-  PrintResult(metric_basename_, metric_suffix, story_name_, value,
-              iter->second.units, iter->second.important);
+                                   double value) const {
+  auto info = GetMetricInfoOrFail(metric_suffix);
+  PrintResult(metric_basename_, metric_suffix, story_name_, value, info.units,
+              info.important);
}
void PerfResultReporter::AddResult(const std::string& metric_suffix,
-                                   const std::string& value) {
-  auto iter = metric_map_.find(metric_suffix);
-  CHECK(iter != metric_map_.end());
-  PrintResult(metric_basename_, metric_suffix, story_name_, value,
-              iter->second.units, iter->second.important);
+                                   const std::string& value) const {
+  auto info = GetMetricInfoOrFail(metric_suffix);
+  PrintResult(metric_basename_, metric_suffix, story_name_, value, info.units,
+              info.important);
}
void PerfResultReporter::AddResult(const std::string& metric_suffix,
-                                   const base::TimeDelta& value) {
-  auto iter = metric_map_.find(metric_suffix);
-  CHECK(iter != metric_map_.end());
+                                   base::TimeDelta value) const {
+  auto info = GetMetricInfoOrFail(metric_suffix);

  // Decide what time unit to convert the TimeDelta into. Units are based on
  // the legacy units in
  // https://cs.chromium.org/chromium/src/third_party/catapult/tracing/tracing/value/legacy_unit_info.py?q=legacy_unit_info
  double time = 0;
-  if (iter->second.units == "seconds") {
+  if (info.units == "seconds") {
    time = value.InSecondsF();
-  } else if (iter->second.units == "ms" ||
-             iter->second.units == "milliseconds") {
+  } else if (info.units == "ms" || info.units == "milliseconds") {
    time = value.InMillisecondsF();
-  } else if (iter->second.units == "us") {
+  } else if (info.units == "us") {
    time = value.InMicrosecondsF();
-  } else if (iter->second.units == "ns") {
+  } else if (info.units == "ns") {
    time = value.InNanoseconds();
  } else {
    NOTREACHED() << "Attempted to use AddResult with a TimeDelta when "
                 << "registered unit for metric " << metric_suffix << " is "
-                 << iter->second.units;
+                 << info.units;
  }
-  PrintResult(metric_basename_, metric_suffix, story_name_, time,
-              iter->second.units, iter->second.important);
+  PrintResult(metric_basename_, metric_suffix, story_name_, time, info.units,
+              info.important);
}
void PerfResultReporter::AddResultList(const std::string& metric_suffix,
-                                       const std::string& values) {
-  auto iter = metric_map_.find(metric_suffix);
-  CHECK(iter != metric_map_.end());
+                                       const std::string& values) const {
+  auto info = GetMetricInfoOrFail(metric_suffix);
  PrintResultList(metric_basename_, metric_suffix, story_name_, values,
-                  iter->second.units, iter->second.important);
+                  info.units, info.important);
}

+void PerfResultReporter::AddResultMeanAndError(
+    const std::string& metric_suffix,
+    const std::string& mean_and_error) {
+  auto info = GetMetricInfoOrFail(metric_suffix);
+  PrintResultMeanAndError(metric_basename_, metric_suffix, story_name_,
+                          mean_and_error, info.units, info.important);
+}
bool PerfResultReporter::GetMetricInfo(const std::string& metric_suffix,
-                                       MetricInfo* out) {
+                                       MetricInfo* out) const {
  auto iter = metric_map_.find(metric_suffix);
  if (iter == metric_map_.end()) {
    return false;

@@ -107,4 +110,12 @@ void PerfResultReporter::RegisterMetric(const std::string& metric_suffix,
  metric_map_.insert({metric_suffix, {units, important}});
}
+MetricInfo PerfResultReporter::GetMetricInfoOrFail(
+    const std::string& metric_suffix) const {
+  MetricInfo info;
+  CHECK(GetMetricInfo(metric_suffix, &info))
+      << "Attempted to use unregistered metric " << metric_suffix;
+  return info;
+}

}  // namespace perf_test
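A hedged example of the new AddResultMeanAndError entry point (suite, story, metric name, and numbers invented): the value is a single comma-separated "mean,error" string, matching how the tab capture tests format it above.

```cpp
// Illustrative only: the metric must be registered first, otherwise the
// GetMetricInfoOrFail() CHECK added above fires.
void ReportExampleLatency() {
  perf_test::PerfResultReporter reporter("ExampleSuite.", "example_story");
  reporter.RegisterImportantMetric("ExampleLatency", "ms");
  // A 2.4 ms mean with a 0.5 ms standard deviation.
  reporter.AddResultMeanAndError("ExampleLatency", "2.4,0.5");
}
```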
@@ -44,26 +44,35 @@ class PerfResultReporter {
                      const std::string& units);
  void RegisterImportantMetric(const std::string& metric_suffix,
                               const std::string& units);
-  void AddResult(const std::string& metric_suffix, size_t value);
-  void AddResult(const std::string& metric_suffix, double value);
-  void AddResult(const std::string& metric_suffix, const std::string& value);
+  void AddResult(const std::string& metric_suffix, size_t value) const;
+  void AddResult(const std::string& metric_suffix, double value) const;
+  void AddResult(const std::string& metric_suffix,
+                 const std::string& value) const;

  // A special version of AddResult that will automatically convert the given
  // TimeDelta into a double with the correct units for the registered metric.
-  void AddResult(const std::string& metric_suffix,
-                 const base::TimeDelta& value);
+  void AddResult(const std::string& metric_suffix, base::TimeDelta value) const;

  void AddResultList(const std::string& metric_suffix,
-                     const std::string& values);
+                     const std::string& values) const;
+  // Users should prefer AddResultList if possible, as otherwise the min/max
+  // values reported on the perf dashboard aren't useful.
+  // |mean_and_error| should be a comma-separated string of mean then
+  // error/stddev, e.g. "2.4,0.5".
+  void AddResultMeanAndError(const std::string& metric_suffix,
+                             const std::string& mean_and_error);

  // Returns true and fills the pointer if the metric is registered, otherwise
  // returns false.
-  bool GetMetricInfo(const std::string& metric_suffix, MetricInfo* out);
+  bool GetMetricInfo(const std::string& metric_suffix, MetricInfo* out) const;
 private:
  void RegisterMetric(const std::string& metric_suffix,
                      const std::string& units,
                      bool important);
+  MetricInfo GetMetricInfoOrFail(const std::string& metric_suffix) const;

  std::string metric_basename_;
  std::string story_name_;
  std::unordered_map<std::string, MetricInfo> metric_map_;
...
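For completeness, a hedged sketch of the non-fatal lookup path: GetMetricInfo() fills a MetricInfo and returns false for unregistered metrics, whereas the AddResult* overloads go through GetMetricInfoOrFail() and CHECK-fail. The helper function below and its use of LOG are illustrative, and MetricInfo is assumed to live in the perf_test namespace.

```cpp
#include <string>

#include "base/logging.h"
#include "testing/perf/perf_result_reporter.h"

// Illustrative only: query a metric's registered units without risking the
// CHECK used by the AddResult* paths.
void LogUnitsIfRegistered(const perf_test::PerfResultReporter& reporter,
                          const std::string& metric) {
  perf_test::MetricInfo info;
  if (reporter.GetMetricInfo(metric, &info)) {
    LOG(INFO) << metric << " is reported in " << info.units;
  }
}
```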
@@ -64,6 +64,7 @@ GTEST_CONVERSION_WHITELIST = [
    'media_perftests',
    'net_perftests',
    'passthrough_command_buffer_perftests',
+    'performance_browser_tests',
    'services_perftests',
    'tracing_perftests',
    'validating_command_buffer_perftests',
...