Commit 00d9c53b authored by Brian Sheedy, committed by Commit Bot

Switch performance_browser_tests to histograms

Switches performance_browser_tests from PrintResult to PerfResultReporter and
whitelists the suite so its results are converted to histograms before being
uploaded to the perf dashboard.

Bug: 923564
Change-Id: Ica64da69d6fe0a0de680c673b63fe54ff235bc1b
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1877151
Commit-Queue: Brian Sheedy <bsheedy@chromium.org>
Reviewed-by: Yuri Wiitala <miu@chromium.org>
Cr-Commit-Position: refs/heads/master@{#710009}
parent 42b97a68
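
For context, here is a minimal sketch of the reporting pattern this change adopts, modeled on the SetUpTabCaptureReporter() helper added in the diff below. The "Example." basename, the metric name, and the values are illustrative only and are not part of this change:

#include <string>

#include "testing/perf/perf_result_reporter.h"

// Build a reporter once, registering each metric's units and importance, then
// report values against it. Names here are placeholders; real callers use
// their own prefixes and metric constants (cf. SetUpTabCaptureReporter below).
perf_test::PerfResultReporter SetUpExampleReporter(const std::string& story) {
  perf_test::PerfResultReporter reporter("Example.", story);
  reporter.RegisterImportantMetric("Capture", "ms");
  return reporter;
}

void ReportExampleResults() {
  auto reporter = SetUpExampleReporter("baseline_story");
  // Single value: replaces perf_test::PrintResult(). Units and the
  // "important" flag come from the registration, not from every call site.
  reporter.AddResult("Capture", 16.7);
  // Mean and error are still passed as a comma-separated "mean,stddev"
  // string, matching the old PrintResultMeanAndError() convention.
  reporter.AddResultMeanAndError("Capture", "16.7,1.2");
}

On the dashboard side, performance_browser_tests is also added to GTEST_CONVERSION_WHITELIST (last hunk below) so the printed results are converted to histograms before upload.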
@@ -21,7 +21,7 @@
 #include "extensions/common/switches.h"
 #include "extensions/test/extension_test_message_listener.h"
 #include "testing/gtest/include/gtest/gtest.h"
-#include "testing/perf/perf_test.h"
+#include "testing/perf/perf_result_reporter.h"
 #include "ui/compositor/compositor_switches.h"
 #include "ui/gl/gl_switches.h"
@@ -39,6 +39,25 @@ constexpr int kMinDataPointsForFullRun = 100; // ~5 sec at 24fps.
 // Minimum number of events required for data analysis in a non-performance run.
 constexpr int kMinDataPointsForQuickRun = 3;
 
+// Metric names are camelcase instead of using underscores since they need to
+// be used to get trace events.
+constexpr char kMetricPrefixTabCapture[] = "TabCapture.";
+constexpr char kMetricCaptureMs[] = "Capture";
+constexpr char kMetricCaptureFailRatePercent[] = "CaptureFailRate";
+constexpr char kMetricCaptureLatencyMs[] = "CaptureLatency";
+constexpr char kMetricCommitAndDrawCompositorFrameMs[] =
+    "RenderWidget::DidCommitAndDrawCompositorFrame";
+
+perf_test::PerfResultReporter SetUpTabCaptureReporter(
+    const std::string& story) {
+  perf_test::PerfResultReporter reporter(kMetricPrefixTabCapture, story);
+  reporter.RegisterImportantMetric(kMetricCaptureMs, "ms");
+  reporter.RegisterImportantMetric(kMetricCaptureFailRatePercent, "percent");
+  reporter.RegisterImportantMetric(kMetricCaptureLatencyMs, "ms");
+  reporter.RegisterImportantMetric(kMetricCommitAndDrawCompositorFrameMs,
+                                   "ms");
+  return reporter;
+}
+
 // A convenience macro to run a gtest expectation in the "full performance run"
 // setting, or else a warning that something is not being entirely tested in the
 // "CQ run" setting. This is required because the test runs in the CQ may not be
@@ -79,6 +98,12 @@ class TabCapturePerformanceTest : public TabCapturePerformanceTestBase,
       suffix += "_webrtc";
     if (HasFlag(kSmallWindow))
       suffix += "_small";
+    // Make sure we always have a story.
+    if (suffix.size() == 0) {
+      suffix = "_baseline_story";
+    }
+    // Strip off the leading _.
+    suffix.erase(0, 1);
     return suffix;
   }
@@ -128,8 +153,8 @@ class TabCapturePerformanceTest : public TabCapturePerformanceTestBase,
     double std_dev_ms = stats.standard_deviation_us / 1000.0;
     std::string mean_and_error = base::StringPrintf("%f,%f", mean_ms,
                                                     std_dev_ms);
-    perf_test::PrintResultMeanAndError(kTestName, GetSuffixForTestFlags(),
-                                       event_name, mean_and_error, "ms", true);
+    auto reporter = SetUpTabCaptureReporter(GetSuffixForTestFlags());
+    reporter.AddResultMeanAndError(event_name, mean_and_error);
     return have_rate_stats;
   }
@@ -168,10 +193,10 @@ class TabCapturePerformanceTest : public TabCapturePerformanceTestBase,
        (count == 0)
            ? NAN
            : (sqrt(std::max(0.0, count * sqr_sum - sum * sum)) / count);
-    perf_test::PrintResultMeanAndError(
-        kTestName, GetSuffixForTestFlags(), event_name + "Latency",
-        base::StringPrintf("%f,%f", mean_us / 1000.0, std_dev_us / 1000.0),
-        "ms", true);
+    auto reporter = SetUpTabCaptureReporter(GetSuffixForTestFlags());
+    reporter.AddResultMeanAndError(
+        event_name + "Latency",
+        base::StringPrintf("%f,%f", mean_us / 1000.0, std_dev_us / 1000.0));
     return count > 0;
   }
@@ -217,9 +242,8 @@ class TabCapturePerformanceTest : public TabCapturePerformanceTestBase,
      }
      fail_percent *= fail_count / events_to_analyze.size();
    }
-    perf_test::PrintResult(
-        kTestName, GetSuffixForTestFlags(), event_name + "FailRate",
-        base::StringPrintf("%f", fail_percent), "percent", true);
+    auto reporter = SetUpTabCaptureReporter(GetSuffixForTestFlags());
+    reporter.AddResult(event_name + "FailRate", fail_percent);
    return !events_to_analyze.empty();
  }
@@ -229,12 +253,8 @@ class TabCapturePerformanceTest : public TabCapturePerformanceTestBase,
   std::string test_page_html_;
 
-  // Naming of performance measurement written to stdout.
-  static const char kTestName[];
 };
 
-// static
-const char TabCapturePerformanceTest::kTestName[] = "TabCapturePerformance";
 
 }  // namespace
 
 IN_PROC_BROWSER_TEST_P(TabCapturePerformanceTest, Performance) {
@@ -269,22 +289,25 @@ IN_PROC_BROWSER_TEST_P(TabCapturePerformanceTest, Performance) {
   // Note that any changes to drawing or compositing in the renderer,
   // including changes to Blink (e.g., Canvas drawing), layout, etc.; will
   // have an impact on this result.
-  EXPECT_FOR_PERFORMANCE_RUN(PrintRateResults(
-      analyzer.get(), "RenderWidget::DidCommitAndDrawCompositorFrame"));
+  EXPECT_FOR_PERFORMANCE_RUN(
+      PrintRateResults(analyzer.get(), kMetricCommitAndDrawCompositorFrameMs));
 
   // This prints out the average time between capture events in the browser
   // process. This should roughly match the renderer's draw+composite rate.
-  EXPECT_FOR_PERFORMANCE_RUN(PrintRateResults(analyzer.get(), "Capture"));
+  EXPECT_FOR_PERFORMANCE_RUN(
+      PrintRateResults(analyzer.get(), kMetricCaptureMs));
 
   // Analyze mean/stddev of the capture latency. This is a measure of how long
   // each capture took, from initiation until read-back from the GPU into a
   // media::VideoFrame was complete. Lower is better.
-  EXPECT_FOR_PERFORMANCE_RUN(PrintLatencyResults(analyzer.get(), "Capture"));
+  EXPECT_FOR_PERFORMANCE_RUN(
+      PrintLatencyResults(analyzer.get(), kMetricCaptureMs));
 
   // Analyze percentage of failed captures. This measures how often captures
   // were initiated, but not completed successfully. Lower is better, and zero
   // is ideal.
-  EXPECT_FOR_PERFORMANCE_RUN(PrintFailRateResults(analyzer.get(), "Capture"));
+  EXPECT_FOR_PERFORMANCE_RUN(
+      PrintFailRateResults(analyzer.get(), kMetricCaptureMs));
 }
 
 #if defined(OS_CHROMEOS)
@@ -26,71 +26,74 @@ void PerfResultReporter::RegisterImportantMetric(
 }
 
 void PerfResultReporter::AddResult(const std::string& metric_suffix,
-                                   size_t value) {
-  auto iter = metric_map_.find(metric_suffix);
-  CHECK(iter != metric_map_.end());
+                                   size_t value) const {
+  auto info = GetMetricInfoOrFail(metric_suffix);
 
-  PrintResult(metric_basename_, metric_suffix, story_name_, value,
-              iter->second.units, iter->second.important);
+  PrintResult(metric_basename_, metric_suffix, story_name_, value, info.units,
+              info.important);
 }
 
 void PerfResultReporter::AddResult(const std::string& metric_suffix,
-                                   double value) {
-  auto iter = metric_map_.find(metric_suffix);
-  CHECK(iter != metric_map_.end());
+                                   double value) const {
+  auto info = GetMetricInfoOrFail(metric_suffix);
 
-  PrintResult(metric_basename_, metric_suffix, story_name_, value,
-              iter->second.units, iter->second.important);
+  PrintResult(metric_basename_, metric_suffix, story_name_, value, info.units,
+              info.important);
 }
 
 void PerfResultReporter::AddResult(const std::string& metric_suffix,
-                                   const std::string& value) {
-  auto iter = metric_map_.find(metric_suffix);
-  CHECK(iter != metric_map_.end());
+                                   const std::string& value) const {
+  auto info = GetMetricInfoOrFail(metric_suffix);
 
-  PrintResult(metric_basename_, metric_suffix, story_name_, value,
-              iter->second.units, iter->second.important);
+  PrintResult(metric_basename_, metric_suffix, story_name_, value, info.units,
+              info.important);
 }
 
 void PerfResultReporter::AddResult(const std::string& metric_suffix,
-                                   const base::TimeDelta& value) {
-  auto iter = metric_map_.find(metric_suffix);
-  CHECK(iter != metric_map_.end());
+                                   base::TimeDelta value) const {
+  auto info = GetMetricInfoOrFail(metric_suffix);
 
   // Decide what time unit to convert the TimeDelta into. Units are based on
   // the legacy units in
   // https://cs.chromium.org/chromium/src/third_party/catapult/tracing/tracing/value/legacy_unit_info.py?q=legacy_unit_info
   double time = 0;
-  if (iter->second.units == "seconds") {
+  if (info.units == "seconds") {
    time = value.InSecondsF();
-  } else if (iter->second.units == "ms" ||
-             iter->second.units == "milliseconds") {
+  } else if (info.units == "ms" || info.units == "milliseconds") {
    time = value.InMillisecondsF();
-  } else if (iter->second.units == "us") {
+  } else if (info.units == "us") {
    time = value.InMicrosecondsF();
-  } else if (iter->second.units == "ns") {
+  } else if (info.units == "ns") {
    time = value.InNanoseconds();
  } else {
    NOTREACHED() << "Attempted to use AddResult with a TimeDelta when "
                 << "registered unit for metric " << metric_suffix << " is "
-                << iter->second.units;
+                << info.units;
  }
 
-  PrintResult(metric_basename_, metric_suffix, story_name_, time,
-              iter->second.units, iter->second.important);
+  PrintResult(metric_basename_, metric_suffix, story_name_, time, info.units,
+              info.important);
 }
 
 void PerfResultReporter::AddResultList(const std::string& metric_suffix,
-                                       const std::string& values) {
-  auto iter = metric_map_.find(metric_suffix);
-  CHECK(iter != metric_map_.end());
+                                       const std::string& values) const {
+  auto info = GetMetricInfoOrFail(metric_suffix);
 
   PrintResultList(metric_basename_, metric_suffix, story_name_, values,
-                  iter->second.units, iter->second.important);
+                  info.units, info.important);
 }
 
+void PerfResultReporter::AddResultMeanAndError(
+    const std::string& metric_suffix,
+    const std::string& mean_and_error) {
+  auto info = GetMetricInfoOrFail(metric_suffix);
+
+  PrintResultMeanAndError(metric_basename_, metric_suffix, story_name_,
+                          mean_and_error, info.units, info.important);
+}
+
 bool PerfResultReporter::GetMetricInfo(const std::string& metric_suffix,
-                                       MetricInfo* out) {
+                                       MetricInfo* out) const {
   auto iter = metric_map_.find(metric_suffix);
   if (iter == metric_map_.end()) {
     return false;
@@ -107,4 +110,12 @@ void PerfResultReporter::RegisterMetric(const std::string& metric_suffix,
   metric_map_.insert({metric_suffix, {units, important}});
 }
 
+MetricInfo PerfResultReporter::GetMetricInfoOrFail(
+    const std::string& metric_suffix) const {
+  MetricInfo info;
+  CHECK(GetMetricInfo(metric_suffix, &info))
+      << "Attempted to use unregistered metric " << metric_suffix;
+  return info;
+}
+
 }  // namespace perf_test
@@ -44,26 +44,35 @@ class PerfResultReporter {
                      const std::string& units);
   void RegisterImportantMetric(const std::string& metric_suffix,
                                const std::string& units);
-  void AddResult(const std::string& metric_suffix, size_t value);
-  void AddResult(const std::string& metric_suffix, double value);
-  void AddResult(const std::string& metric_suffix, const std::string& value);
+  void AddResult(const std::string& metric_suffix, size_t value) const;
+  void AddResult(const std::string& metric_suffix, double value) const;
+  void AddResult(const std::string& metric_suffix,
+                 const std::string& value) const;
 
   // A special version of AddResult that will automatically convert the given
   // TimeDelta into a double with the correct units for the registered metric.
-  void AddResult(const std::string& metric_suffix,
-                 const base::TimeDelta& value);
+  void AddResult(const std::string& metric_suffix, base::TimeDelta value) const;
 
   void AddResultList(const std::string& metric_suffix,
-                     const std::string& values);
+                     const std::string& values) const;
 
+  // Users should prefer AddResultList if possible, as otherwise the min/max
+  // values reported on the perf dashboard aren't useful.
+  // |mean_and_error| should be a comma-separated string of mean then
+  // error/stddev, e.g. "2.4,0.5".
+  void AddResultMeanAndError(const std::string& metric_suffix,
+                             const std::string& mean_and_error);
+
   // Returns true and fills the pointer if the metric is registered, otherwise
   // returns false.
-  bool GetMetricInfo(const std::string& metric_suffix, MetricInfo* out);
+  bool GetMetricInfo(const std::string& metric_suffix, MetricInfo* out) const;
 
  private:
   void RegisterMetric(const std::string& metric_suffix,
                       const std::string& units,
                       bool important);
+  MetricInfo GetMetricInfoOrFail(const std::string& metric_suffix) const;
 
   std::string metric_basename_;
   std::string story_name_;
   std::unordered_map<std::string, MetricInfo> metric_map_;
@@ -64,6 +64,7 @@ GTEST_CONVERSION_WHITELIST = [
     'media_perftests',
     'net_perftests',
     'passthrough_command_buffer_perftests',
+    'performance_browser_tests',
     'services_perftests',
     'tracing_perftests',
     'validating_command_buffer_perftests',