Commit 350c9a56 authored by Brian Sheedy, committed by Commit Bot

Fix performance_browser_tests metric name

Speculatively fixes RenderWidget::DidCommitAndDrawCompositorFrame in
performance_browser_tests not being parsed as a perf result by changing
the reported metric name to RendererFrameDraw. It is believed that the
colons in the original name were interfering with the parser script's
ability to correctly extract the metric name.

Bug: 923564
Change-Id: I3f5a91a0b11ea8a6bd3363b8ecf6aa044a8425d4
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1888774
Commit-Queue: Brian Sheedy <bsheedy@chromium.org>
Reviewed-by: Yuri Wiitala <miu@chromium.org>
Cr-Commit-Position: refs/heads/master@{#711909}
parent fb110ec7
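
The failure mode described in the commit message can be sketched with a small
standalone example. The exact results format consumed by the perf parser is an
assumption here (a "metric: story= value units" line is used purely for
illustration), but any script that splits the metric name off at the first
colon would truncate a C++-style qualified name such as
RenderWidget::DidCommitAndDrawCompositorFrame:

#include <iostream>
#include <string>

int main() {
  // Hypothetical results line; the real format emitted by the harness may
  // differ, but the failure is the same for any split at the first colon.
  const std::string line =
      "RenderWidget::DidCommitAndDrawCompositorFrame: story= 12.3 ms";
  const std::string metric = line.substr(0, line.find(':'));
  std::cout << metric << "\n";  // Prints "RenderWidget", not the full name.
  return 0;
}

Mapping trace event names to colon-free, snake_case metric names, as the diff
below does, sidesteps this.
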
@@ -3,6 +3,7 @@
// found in the LICENSE file.
#include <cmath>
+#include <unordered_map>
#include "base/command_line.h"
#include "base/strings/stringprintf.h"
@@ -39,14 +40,24 @@ constexpr int kMinDataPointsForFullRun = 100; // ~5 sec at 24fps.
// Minimum number of events required for data analysis in a non-performance run.
constexpr int kMinDataPointsForQuickRun = 3;
-// Metric names are camelcase instead of using underscores since they need to
-// be used to get trace events.
constexpr char kMetricPrefixTabCapture[] = "TabCapture.";
-constexpr char kMetricCaptureMs[] = "Capture";
-constexpr char kMetricCaptureFailRatePercent[] = "CaptureFailRate";
-constexpr char kMetricCaptureLatencyMs[] = "CaptureLatency";
-constexpr char kMetricCommitAndDrawCompositorFrameMs[] =
+constexpr char kMetricCaptureMs[] = "capture";
+constexpr char kMetricCaptureFailRatePercent[] = "capture_fail_rate";
+constexpr char kMetricCaptureLatencyMs[] = "capture_latency";
+constexpr char kMetricRendererFrameDrawMs[] = "renderer_frame_draw";
+constexpr char kEventCapture[] = "Capture";
+constexpr char kEventSuffixFailRate[] = "FailRate";
+constexpr char kEventSuffixLatency[] = "Latency";
+constexpr char kEventCommitAndDrawCompositorFrame[] =
"RenderWidget::DidCommitAndDrawCompositorFrame";
+const std::unordered_map<std::string, std::string> kEventToMetricMap(
+    {{kEventCapture, kMetricCaptureMs},
+     {std::string(kEventCapture) + kEventSuffixFailRate,
+      kMetricCaptureFailRatePercent},
+     {std::string(kEventCapture) + kEventSuffixLatency,
+      kMetricCaptureLatencyMs},
+     {kEventCommitAndDrawCompositorFrame, kMetricRendererFrameDrawMs}});
perf_test::PerfResultReporter SetUpTabCaptureReporter(
const std::string& story) {
@@ -54,10 +65,15 @@ perf_test::PerfResultReporter SetUpTabCaptureReporter(
reporter.RegisterImportantMetric(kMetricCaptureMs, "ms");
reporter.RegisterImportantMetric(kMetricCaptureFailRatePercent, "percent");
reporter.RegisterImportantMetric(kMetricCaptureLatencyMs, "ms");
-  reporter.RegisterImportantMetric(kMetricCommitAndDrawCompositorFrameMs, "ms");
+  reporter.RegisterImportantMetric(kMetricRendererFrameDrawMs, "ms");
return reporter;
}
+std::string GetMetricFromEventName(const std::string& event_name) {
+  auto iter = kEventToMetricMap.find(event_name);
+  return iter == kEventToMetricMap.end() ? event_name : iter->second;
+}
// A convenience macro to run a gtest expectation in the "full performance run"
// setting, or else a warning that something is not being entirely tested in the
// "CQ run" setting. This is required because the test runs in the CQ may not be
@@ -154,7 +170,8 @@ class TabCapturePerformanceTest : public TabCapturePerformanceTestBase,
std::string mean_and_error = base::StringPrintf("%f,%f", mean_ms,
std_dev_ms);
auto reporter = SetUpTabCaptureReporter(GetSuffixForTestFlags());
-    reporter.AddResultMeanAndError(event_name, mean_and_error);
+    reporter.AddResultMeanAndError(GetMetricFromEventName(event_name),
+                                   mean_and_error);
return have_rate_stats;
}
@@ -195,7 +212,7 @@ class TabCapturePerformanceTest : public TabCapturePerformanceTestBase,
: (sqrt(std::max(0.0, count * sqr_sum - sum * sum)) / count);
auto reporter = SetUpTabCaptureReporter(GetSuffixForTestFlags());
reporter.AddResultMeanAndError(
-        event_name + "Latency",
+        GetMetricFromEventName(event_name + kEventSuffixLatency),
base::StringPrintf("%f,%f", mean_us / 1000.0, std_dev_us / 1000.0));
return count > 0;
}
@@ -243,7 +260,9 @@ class TabCapturePerformanceTest : public TabCapturePerformanceTestBase,
fail_percent *= fail_count / events_to_analyze.size();
}
auto reporter = SetUpTabCaptureReporter(GetSuffixForTestFlags());
-    reporter.AddResult(event_name + "FailRate", fail_percent);
+    reporter.AddResult(
+        GetMetricFromEventName(event_name + kEventSuffixFailRate),
+        fail_percent);
return !events_to_analyze.empty();
}
@@ -274,8 +293,8 @@ IN_PROC_BROWSER_TEST_P(TabCapturePerformanceTest, Performance) {
// Observe the running browser for a while, collecting a trace.
std::unique_ptr<trace_analyzer::TraceAnalyzer> analyzer = TraceAndObserve(
"gpu,gpu.capture",
-      std::vector<base::StringPiece>{
-          "RenderWidget::DidCommitAndDrawCompositorFrame", "Capture"},
+      std::vector<base::StringPiece>{kEventCommitAndDrawCompositorFrame,
+                                     kEventCapture},
// In a full performance run, events will be trimmed from both ends of
// trace. Otherwise, just require the bare-minimum to verify the stats
// calculations will work.
@@ -290,24 +309,23 @@ IN_PROC_BROWSER_TEST_P(TabCapturePerformanceTest, Performance) {
// including changes to Blink (e.g., Canvas drawing), layout, etc.; will
// have an impact on this result.
EXPECT_FOR_PERFORMANCE_RUN(
-      PrintRateResults(analyzer.get(), kMetricCommitAndDrawCompositorFrameMs));
+      PrintRateResults(analyzer.get(), kEventCommitAndDrawCompositorFrame));
// This prints out the average time between capture events in the browser
// process. This should roughly match the renderer's draw+composite rate.
-  EXPECT_FOR_PERFORMANCE_RUN(
-      PrintRateResults(analyzer.get(), kMetricCaptureMs));
+  EXPECT_FOR_PERFORMANCE_RUN(PrintRateResults(analyzer.get(), kEventCapture));
// Analyze mean/stddev of the capture latency. This is a measure of how long
// each capture took, from initiation until read-back from the GPU into a
// media::VideoFrame was complete. Lower is better.
EXPECT_FOR_PERFORMANCE_RUN(
-      PrintLatencyResults(analyzer.get(), kMetricCaptureMs));
+      PrintLatencyResults(analyzer.get(), kEventCapture));
// Analyze percentage of failed captures. This measures how often captures
// were initiated, but not completed successfully. Lower is better, and zero
// is ideal.
EXPECT_FOR_PERFORMANCE_RUN(
-      PrintFailRateResults(analyzer.get(), kMetricCaptureMs));
+      PrintFailRateResults(analyzer.get(), kEventCapture));
}
#if defined(OS_CHROMEOS)
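
For reference, a minimal, self-contained restatement of the event-to-metric
mapping introduced by this change; the constants and lookup mirror the diff
above, while main() is illustrative only and not part of the Chromium change.

#include <iostream>
#include <string>
#include <unordered_map>

// Metric names (colon-free, parser-friendly) copied from the diff above.
constexpr char kMetricCaptureMs[] = "capture";
constexpr char kMetricCaptureFailRatePercent[] = "capture_fail_rate";
constexpr char kMetricCaptureLatencyMs[] = "capture_latency";
constexpr char kMetricRendererFrameDrawMs[] = "renderer_frame_draw";

// Trace event names, also copied from the diff above.
constexpr char kEventCapture[] = "Capture";
constexpr char kEventSuffixFailRate[] = "FailRate";
constexpr char kEventSuffixLatency[] = "Latency";
constexpr char kEventCommitAndDrawCompositorFrame[] =
    "RenderWidget::DidCommitAndDrawCompositorFrame";

const std::unordered_map<std::string, std::string> kEventToMetricMap(
    {{kEventCapture, kMetricCaptureMs},
     {std::string(kEventCapture) + kEventSuffixFailRate,
      kMetricCaptureFailRatePercent},
     {std::string(kEventCapture) + kEventSuffixLatency,
      kMetricCaptureLatencyMs},
     {kEventCommitAndDrawCompositorFrame, kMetricRendererFrameDrawMs}});

// Unknown event names fall through unchanged, as in the diff.
std::string GetMetricFromEventName(const std::string& event_name) {
  auto iter = kEventToMetricMap.find(event_name);
  return iter == kEventToMetricMap.end() ? event_name : iter->second;
}

int main() {
  // The colon-containing trace event is now reported under a colon-free name.
  std::cout << GetMetricFromEventName(kEventCommitAndDrawCompositorFrame)
            << "\n";  // renderer_frame_draw
  std::cout << GetMetricFromEventName(std::string(kEventCapture) +
                                      kEventSuffixFailRate)
            << "\n";  // capture_fail_rate
  return 0;
}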