Commit 00d9c53b authored by Brian Sheedy, committed by Commit Bot

Switch performance_browser_tests to histograms

Switches performance_browser_tests from PrintResult to PerfResultReporter
and whitelists the suite so that its results are converted to histograms
before being uploaded to the perf dashboard.

Bug: 923564
Change-Id: Ica64da69d6fe0a0de680c673b63fe54ff235bc1b
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1877151
Commit-Queue: Brian Sheedy <bsheedy@chromium.org>
Reviewed-by: Yuri Wiitala <miu@chromium.org>
Cr-Commit-Position: refs/heads/master@{#710009}
parent 42b97a68
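The conversion follows one pattern throughout the diff below. A minimal before/after sketch, using the CastV2 constants introduced in the first file; the story name and value here are illustrative, not taken from the patch:

  // Before: legacy perf_test::PrintResult, where the units and the
  // "important" flag are repeated at every call site.
  perf_test::PrintResult("CastV2Performance", "_gpu", "total_latency",
                         "123.4", "ms", true);

  // After: metrics are registered once on a PerfResultReporter and then
  // reported by suffix; the registered units and importance are reused.
  perf_test::PerfResultReporter reporter(kMetricPrefixCastV2, "gpu");
  reporter.RegisterImportantMetric(kMetricTotalLatencyMs, "ms");
  reporter.AddResult(kMetricTotalLatencyMs, 123.4);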
@@ -18,6 +18,7 @@
#include "base/files/file_path.h"
#include "base/files/file_util.h"
#include "base/strings/strcat.h"
#include "base/strings/string_number_conversions.h"
#include "base/strings/stringprintf.h"
#include "base/test/trace_event_analyzer.h"
#include "base/time/default_tick_clock.h"
@@ -46,7 +47,7 @@
#include "net/base/rand_callback.h"
#include "net/log/net_log_source.h"
#include "net/socket/udp_server_socket.h"
#include "testing/perf/perf_test.h"
#include "testing/perf/perf_result_reporter.h"
namespace {
@@ -62,6 +63,64 @@ constexpr int kMinDataPointsForFullRun = 100; // 1s of audio, ~5s at 24fps.
// Minimum number of events required for data analysis in a non-performance run.
constexpr int kMinDataPointsForQuickRun = 3;
constexpr char kMetricPrefixCastV2[] = "CastV2.";
constexpr char kMetricTimeBetweenCapturesMs[] = "time_between_captures";
constexpr char kMetricAvSyncMs[] = "av_sync";
constexpr char kMetricAbsAvSyncMs[] = "abs_av_sync";
constexpr char kMetricAudioJitterMs[] = "audio_jitter";
constexpr char kMetricVideoJitterMs[] = "video_jitter";
constexpr char kMetricPlayoutResolutionLines[] = "playout_resolution";
constexpr char kMetricResolutionChangesCount[] = "resolution_changes";
constexpr char kMetricFrameDropRatePercent[] = "frame_drop_rate";
constexpr char kMetricTotalLatencyMs[] = "total_latency";
constexpr char kMetricCaptureDurationMs[] = "capture_duration";
constexpr char kMetricSendToRendererMs[] = "send_to_renderer";
constexpr char kMetricEncodeMs[] = "encode";
constexpr char kMetricTransmitMs[] = "transmit";
constexpr char kMetricDecodeMs[] = "decode";
constexpr char kMetricCastLatencyMs[] = "cast_latency";
perf_test::PerfResultReporter SetUpCastV2Reporter(const std::string& story) {
perf_test::PerfResultReporter reporter(kMetricPrefixCastV2, story);
reporter.RegisterImportantMetric(kMetricTimeBetweenCapturesMs, "ms");
reporter.RegisterImportantMetric(kMetricAvSyncMs, "ms");
reporter.RegisterImportantMetric(kMetricAbsAvSyncMs, "ms");
reporter.RegisterImportantMetric(kMetricAudioJitterMs, "ms");
reporter.RegisterImportantMetric(kMetricVideoJitterMs, "ms");
reporter.RegisterImportantMetric(kMetricPlayoutResolutionLines, "lines");
reporter.RegisterImportantMetric(kMetricResolutionChangesCount, "count");
reporter.RegisterImportantMetric(kMetricFrameDropRatePercent, "percent");
reporter.RegisterImportantMetric(kMetricTotalLatencyMs, "ms");
reporter.RegisterImportantMetric(kMetricCaptureDurationMs, "ms");
reporter.RegisterImportantMetric(kMetricSendToRendererMs, "ms");
reporter.RegisterImportantMetric(kMetricEncodeMs, "ms");
reporter.RegisterImportantMetric(kMetricTransmitMs, "ms");
reporter.RegisterImportantMetric(kMetricDecodeMs, "ms");
reporter.RegisterImportantMetric(kMetricCastLatencyMs, "ms");
return reporter;
}
std::string VectorToString(const std::vector<double>& values) {
CHECK(values.size());
std::string csv;
for (const auto& val : values) {
csv += base::NumberToString(val) + ",";
}
// Strip off trailing comma.
csv.pop_back();
return csv;
}
void MaybeAddResultList(const perf_test::PerfResultReporter& reporter,
const std::string& metric,
const std::vector<double>& values) {
if (values.size() == 0) {
LOG(ERROR) << "No events for " << metric;
return;
}
reporter.AddResultList(metric, VectorToString(values));
}
// A convenience macro to run a gtest expectation in the "full performance run"
// setting, or else a warning that something is not being entirely tested in the
// "CQ run" setting. This is required because the test runs in the CQ may not be
@@ -166,22 +225,6 @@ class MeanAndError {
return base::StringPrintf("%f,%f", mean_, std_dev_);
}
void Print(const std::string& measurement,
const std::string& modifier,
const std::string& trace,
const std::string& unit) {
if (num_values_ > 0) {
perf_test::PrintResultMeanAndError(measurement,
modifier,
trace,
AsString(),
unit,
true);
} else {
LOG(ERROR) << "No events for " << measurement << modifier << " " << trace;
}
}
private:
size_t num_values_;
double mean_;
@@ -192,7 +235,7 @@
// It computes the average error of deltas and the average delta.
// If data[x] == x * A + B, then this function returns zero.
// The unit is milliseconds.
static MeanAndError AnalyzeJitter(const std::vector<TimeData>& data) {
static std::vector<double> AnalyzeJitter(const std::vector<TimeData>& data) {
VLOG(0) << "Jitter analysis on " << data.size() << " values.";
std::vector<double> deltas;
double sum = 0.0;
@@ -212,7 +255,7 @@ static MeanAndError AnalyzeJitter(const std::vector<TimeData>& data) {
}
}
return MeanAndError(deltas);
return deltas;
}
// An in-process Cast receiver that examines the audio/video frames being
@@ -253,7 +296,7 @@ class TestPatternReceiver : public media::cast::InProcessReceiver {
}
}
void Analyze(const std::string& name, const std::string& modifier) {
void Analyze(const std::string& story) {
// First, find the minimum rtp timestamp for each audio and video frame.
// Note that the data encoded in the audio stream contains video frame
// numbers. So in a 30-fps video stream, there will be 1/30s of "1", then
@@ -280,15 +323,20 @@ class TestPatternReceiver : public media::cast::InProcessReceiver {
EXPECT_FOR_PERFORMANCE_RUN(min_data_points <=
static_cast<int>(deltas.size()));
MeanAndError av_sync(deltas);
av_sync.Print(name, modifier, "av_sync", "ms");
auto reporter = SetUpCastV2Reporter(story);
MaybeAddResultList(reporter, kMetricAvSyncMs, deltas);
// Close to zero is better (av_sync can be negative).
if (deltas.size()) {
MeanAndError av_sync(deltas);
av_sync.SetMeanAsAbsoluteValue();
av_sync.Print(name, modifier, "abs_av_sync", "ms");
reporter.AddResultMeanAndError(kMetricAbsAvSyncMs, av_sync.AsString());
}
// lower is better.
AnalyzeJitter(audio_events_).Print(name, modifier, "audio_jitter", "ms");
MaybeAddResultList(reporter, kMetricAudioJitterMs,
AnalyzeJitter(audio_events_));
// lower is better.
AnalyzeJitter(video_events_).Print(name, modifier, "video_jitter", "ms");
MaybeAddResultList(reporter, kMetricVideoJitterMs,
AnalyzeJitter(video_events_));
// Mean resolution of video at receiver. Lower stddev is better, while the
// mean should be something reasonable given the network constraints
@@ -305,8 +353,8 @@ class TestPatternReceiver : public media::cast::InProcessReceiver {
std::back_inserter(slice_for_analysis),
[](int lines) { return static_cast<double>(lines); });
}
MeanAndError(slice_for_analysis)
.Print(name, modifier, "playout_resolution", "lines");
MaybeAddResultList(reporter, kMetricPlayoutResolutionLines,
slice_for_analysis);
// Number of resolution changes. Lower is better (and 1 is ideal). Zero
// indicates a lack of data.
@@ -320,8 +368,8 @@ class TestPatternReceiver : public media::cast::InProcessReceiver {
}
}
EXPECT_FOR_PERFORMANCE_RUN(change_count > 0);
perf_test::PrintResult(name, modifier, "resolution_changes",
base::NumberToString(change_count), "count", true);
reporter.AddResult(kMetricResolutionChangesCount,
static_cast<size_t>(change_count));
}
private:
@@ -395,7 +443,7 @@ class CastV2PerformanceTest : public TabCapturePerformanceTestBase,
std::string suffix;
// Note: Add "_gpu" tag for backwards-compatibility with existing
// Performance Dashboard timeseries data.
suffix += "_gpu";
suffix += "gpu";
if (HasFlag(kSmallWindow))
suffix += "_small";
if (HasFlag(k24fps))
@@ -506,25 +554,21 @@ class CastV2PerformanceTest : public TabCapturePerformanceTestBase,
// Given a vector of vector of data, extract the difference between
// two columns (|col_a| and |col_b|) and output the result as a
// performance metric.
void OutputMeasurement(const std::string& test_name,
const std::vector<std::vector<double>>& data,
const std::string& measurement_name,
void OutputMeasurement(const std::vector<std::vector<double>>& data,
const std::string& metric,
int col_a,
int col_b) {
std::vector<double> tmp;
for (size_t i = 0; i < data.size(); i++) {
tmp.push_back((data[i][col_b] - data[i][col_a]) / 1000.0);
}
return MeanAndError(tmp).Print(test_name,
GetSuffixForTestFlags(),
measurement_name,
"ms");
auto reporter = SetUpCastV2Reporter(GetSuffixForTestFlags());
MaybeAddResultList(reporter, metric, tmp);
}
// Analyze the latency of each frame as it goes from capture to playout. The
// event tracing system is used to track the frames.
void AnalyzeLatency(const std::string& test_name,
trace_analyzer::TraceAnalyzer* analyzer) {
void AnalyzeLatency(trace_analyzer::TraceAnalyzer* analyzer) {
// Retrieve and index all "checkpoint" events related to frames progressing
// from start to finish.
trace_analyzer::TraceEventVector capture_events;
@@ -598,9 +642,8 @@ class CastV2PerformanceTest : public TabCapturePerformanceTestBase,
(capture_event_count == 0)
? NAN
: (100.0 * traced_frames.size() / capture_event_count);
perf_test::PrintResult(
test_name, GetSuffixForTestFlags(), "frame_drop_rate",
base::StringPrintf("%f", 100 - success_percent), "percent", true);
auto reporter = SetUpCastV2Reporter(GetSuffixForTestFlags());
reporter.AddResult(kMetricFrameDropRatePercent, 100 - success_percent);
// Report the latency between various pairs of checkpoints in the pipeline.
// Lower latency is better for all of these measurements.
@@ -615,16 +658,17 @@ class CastV2PerformanceTest : public TabCapturePerformanceTestBase,
// 6 = Receiver: frame fully received from network
// 7 = Receiver: frame decoded
// 8 = Receiver: frame played out
OutputMeasurement(test_name, traced_frames, "total_latency", 0, 8);
OutputMeasurement(test_name, traced_frames, "capture_duration", 0, 1);
OutputMeasurement(test_name, traced_frames, "send_to_renderer", 1, 3);
OutputMeasurement(test_name, traced_frames, "encode", 3, 5);
OutputMeasurement(test_name, traced_frames, "transmit", 5, 6);
OutputMeasurement(test_name, traced_frames, "decode", 6, 7);
OutputMeasurement(test_name, traced_frames, "cast_latency", 3, 8);
OutputMeasurement(traced_frames, kMetricTotalLatencyMs, 0, 8);
OutputMeasurement(traced_frames, kMetricCaptureDurationMs, 0, 1);
OutputMeasurement(traced_frames, kMetricSendToRendererMs, 1, 3);
OutputMeasurement(traced_frames, kMetricEncodeMs, 3, 5);
OutputMeasurement(traced_frames, kMetricTransmitMs, 5, 6);
OutputMeasurement(traced_frames, kMetricDecodeMs, 6, 7);
OutputMeasurement(traced_frames, kMetricCastLatencyMs, 3, 8);
}
MeanAndError AnalyzeTraceDistance(trace_analyzer::TraceAnalyzer* analyzer,
std::vector<double> AnalyzeTraceDistance(
trace_analyzer::TraceAnalyzer* analyzer,
const std::string& event_name) {
trace_analyzer::TraceEventVector events;
QueryTraceEvents(analyzer, event_name, &events);
@@ -636,7 +680,7 @@ class CastV2PerformanceTest : public TabCapturePerformanceTestBase,
double delta_micros = events[i]->timestamp - events[i - 1]->timestamp;
deltas.push_back(delta_micros / 1000.0);
}
return MeanAndError(deltas);
return deltas;
}
protected:
@@ -649,14 +693,8 @@ class CastV2PerformanceTest : public TabCapturePerformanceTestBase,
// capture frame rate is always fixed at 30 FPS. This allows testing of the
// entire system when it is forced to perform a 60→30 frame rate conversion.
static constexpr int kMaxCaptureFrameRate = 30;
// Naming of performance measurement written to stdout.
static const char kTestName[];
};
// static
const char CastV2PerformanceTest::kTestName[] = "CastV2Performance";
} // namespace
// TODO(https://crbug.com/974427) Disabled due to flakiness.
@@ -761,14 +799,14 @@ IN_PROC_BROWSER_TEST_P(CastV2PerformanceTest, DISABLED_Performance) {
// this score cannot get any better than 33.33 ms). However, the measurement
// is important since it provides a valuable check that capture can keep up
// with the content's framerate.
MeanAndError capture_data = AnalyzeTraceDistance(analyzer.get(), "Capture");
// Lower is better.
capture_data.Print(kTestName, GetSuffixForTestFlags(),
"time_between_captures", "ms");
auto reporter = SetUpCastV2Reporter(GetSuffixForTestFlags());
MaybeAddResultList(reporter, kMetricTimeBetweenCapturesMs,
AnalyzeTraceDistance(analyzer.get(), "Capture"));
receiver->Analyze(kTestName, GetSuffixForTestFlags());
receiver->Analyze(GetSuffixForTestFlags());
AnalyzeLatency(kTestName, analyzer.get());
AnalyzeLatency(analyzer.get());
}
#if !defined(OS_CHROMEOS) || !defined(MEMORY_SANITIZER)
@@ -21,7 +21,7 @@
#include "extensions/common/switches.h"
#include "extensions/test/extension_test_message_listener.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "testing/perf/perf_test.h"
#include "testing/perf/perf_result_reporter.h"
#include "ui/compositor/compositor_switches.h"
#include "ui/gl/gl_switches.h"
@@ -39,6 +39,25 @@ constexpr int kMinDataPointsForFullRun = 100; // ~5 sec at 24fps.
// Minimum number of events required for data analysis in a non-performance run.
constexpr int kMinDataPointsForQuickRun = 3;
// Metric names are camelcase instead of using underscores since they need to
// be used to get trace events.
constexpr char kMetricPrefixTabCapture[] = "TabCapture.";
constexpr char kMetricCaptureMs[] = "Capture";
constexpr char kMetricCaptureFailRatePercent[] = "CaptureFailRate";
constexpr char kMetricCaptureLatencyMs[] = "CaptureLatency";
constexpr char kMetricCommitAndDrawCompositorFrameMs[] =
"RenderWidget::DidCommitAndDrawCompositorFrame";
perf_test::PerfResultReporter SetUpTabCaptureReporter(
const std::string& story) {
perf_test::PerfResultReporter reporter(kMetricPrefixTabCapture, story);
reporter.RegisterImportantMetric(kMetricCaptureMs, "ms");
reporter.RegisterImportantMetric(kMetricCaptureFailRatePercent, "percent");
reporter.RegisterImportantMetric(kMetricCaptureLatencyMs, "ms");
reporter.RegisterImportantMetric(kMetricCommitAndDrawCompositorFrameMs, "ms");
return reporter;
}
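As the comment above notes, these metric names double as trace event names, so the same constant is passed both to the trace query and to the reporter. A rough sketch of that dual use with a hypothetical helper (the real PrintRateResults/PrintLatencyResults below gather their statistics differently):

  // Hypothetical helper, not part of the patch: |event_name| (e.g.
  // kMetricCaptureMs, i.e. "Capture") selects the trace events and also
  // names the reported metric, which is why the metrics keep CamelCase.
  void ReportMeanTimeBetweenEvents(trace_analyzer::TraceAnalyzer* analyzer,
                                   const std::string& event_name,
                                   const std::string& story) {
    trace_analyzer::TraceEventVector events;
    analyzer->FindEvents(trace_analyzer::Query::EventNameIs(event_name),
                         &events);
    if (events.size() < 2)
      return;
    // Trace timestamps are in microseconds; report the mean gap in ms.
    const double span_ms =
        (events.back()->timestamp - events.front()->timestamp) / 1000.0;
    auto reporter = SetUpTabCaptureReporter(story);
    reporter.AddResult(event_name, span_ms / (events.size() - 1));
  }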
// A convenience macro to run a gtest expectation in the "full performance run"
// setting, or else a warning that something is not being entirely tested in the
// "CQ run" setting. This is required because the test runs in the CQ may not be
@@ -79,6 +98,12 @@ class TabCapturePerformanceTest : public TabCapturePerformanceTestBase,
suffix += "_webrtc";
if (HasFlag(kSmallWindow))
suffix += "_small";
// Make sure we always have a story.
if (suffix.size() == 0) {
suffix = "_baseline_story";
}
// Strip off the leading _.
suffix.erase(0, 1);
return suffix;
}
@@ -128,8 +153,8 @@ class TabCapturePerformanceTest : public TabCapturePerformanceTestBase,
double std_dev_ms = stats.standard_deviation_us / 1000.0;
std::string mean_and_error = base::StringPrintf("%f,%f", mean_ms,
std_dev_ms);
perf_test::PrintResultMeanAndError(kTestName, GetSuffixForTestFlags(),
event_name, mean_and_error, "ms", true);
auto reporter = SetUpTabCaptureReporter(GetSuffixForTestFlags());
reporter.AddResultMeanAndError(event_name, mean_and_error);
return have_rate_stats;
}
@@ -168,10 +193,10 @@ class TabCapturePerformanceTest : public TabCapturePerformanceTestBase,
(count == 0)
? NAN
: (sqrt(std::max(0.0, count * sqr_sum - sum * sum)) / count);
perf_test::PrintResultMeanAndError(
kTestName, GetSuffixForTestFlags(), event_name + "Latency",
base::StringPrintf("%f,%f", mean_us / 1000.0, std_dev_us / 1000.0),
"ms", true);
auto reporter = SetUpTabCaptureReporter(GetSuffixForTestFlags());
reporter.AddResultMeanAndError(
event_name + "Latency",
base::StringPrintf("%f,%f", mean_us / 1000.0, std_dev_us / 1000.0));
return count > 0;
}
@@ -217,9 +242,8 @@ class TabCapturePerformanceTest : public TabCapturePerformanceTestBase,
}
fail_percent *= fail_count / events_to_analyze.size();
}
perf_test::PrintResult(
kTestName, GetSuffixForTestFlags(), event_name + "FailRate",
base::StringPrintf("%f", fail_percent), "percent", true);
auto reporter = SetUpTabCaptureReporter(GetSuffixForTestFlags());
reporter.AddResult(event_name + "FailRate", fail_percent);
return !events_to_analyze.empty();
}
@@ -229,12 +253,8 @@ class TabCapturePerformanceTest : public TabCapturePerformanceTestBase,
std::string test_page_html_;
// Naming of performance measurement written to stdout.
static const char kTestName[];
};
// static
const char TabCapturePerformanceTest::kTestName[] = "TabCapturePerformance";
} // namespace
IN_PROC_BROWSER_TEST_P(TabCapturePerformanceTest, Performance) {
@@ -269,22 +289,25 @@ IN_PROC_BROWSER_TEST_P(TabCapturePerformanceTest, Performance) {
// Note that any changes to drawing or compositing in the renderer,
// including changes to Blink (e.g., Canvas drawing), layout, etc.; will
// have an impact on this result.
EXPECT_FOR_PERFORMANCE_RUN(PrintRateResults(
analyzer.get(), "RenderWidget::DidCommitAndDrawCompositorFrame"));
EXPECT_FOR_PERFORMANCE_RUN(
PrintRateResults(analyzer.get(), kMetricCommitAndDrawCompositorFrameMs));
// This prints out the average time between capture events in the browser
// process. This should roughly match the renderer's draw+composite rate.
EXPECT_FOR_PERFORMANCE_RUN(PrintRateResults(analyzer.get(), "Capture"));
EXPECT_FOR_PERFORMANCE_RUN(
PrintRateResults(analyzer.get(), kMetricCaptureMs));
// Analyze mean/stddev of the capture latency. This is a measure of how long
// each capture took, from initiation until read-back from the GPU into a
// media::VideoFrame was complete. Lower is better.
EXPECT_FOR_PERFORMANCE_RUN(PrintLatencyResults(analyzer.get(), "Capture"));
EXPECT_FOR_PERFORMANCE_RUN(
PrintLatencyResults(analyzer.get(), kMetricCaptureMs));
// Analyze percentage of failed captures. This measures how often captures
// were initiated, but not completed successfully. Lower is better, and zero
// is ideal.
EXPECT_FOR_PERFORMANCE_RUN(PrintFailRateResults(analyzer.get(), "Capture"));
EXPECT_FOR_PERFORMANCE_RUN(
PrintFailRateResults(analyzer.get(), kMetricCaptureMs));
}
#if defined(OS_CHROMEOS)
@@ -26,71 +26,74 @@ void PerfResultReporter::RegisterImportantMetric(
}
void PerfResultReporter::AddResult(const std::string& metric_suffix,
size_t value) {
auto iter = metric_map_.find(metric_suffix);
CHECK(iter != metric_map_.end());
size_t value) const {
auto info = GetMetricInfoOrFail(metric_suffix);
PrintResult(metric_basename_, metric_suffix, story_name_, value,
iter->second.units, iter->second.important);
PrintResult(metric_basename_, metric_suffix, story_name_, value, info.units,
info.important);
}
void PerfResultReporter::AddResult(const std::string& metric_suffix,
double value) {
auto iter = metric_map_.find(metric_suffix);
CHECK(iter != metric_map_.end());
double value) const {
auto info = GetMetricInfoOrFail(metric_suffix);
PrintResult(metric_basename_, metric_suffix, story_name_, value,
iter->second.units, iter->second.important);
PrintResult(metric_basename_, metric_suffix, story_name_, value, info.units,
info.important);
}
void PerfResultReporter::AddResult(const std::string& metric_suffix,
const std::string& value) {
auto iter = metric_map_.find(metric_suffix);
CHECK(iter != metric_map_.end());
const std::string& value) const {
auto info = GetMetricInfoOrFail(metric_suffix);
PrintResult(metric_basename_, metric_suffix, story_name_, value,
iter->second.units, iter->second.important);
PrintResult(metric_basename_, metric_suffix, story_name_, value, info.units,
info.important);
}
void PerfResultReporter::AddResult(const std::string& metric_suffix,
const base::TimeDelta& value) {
auto iter = metric_map_.find(metric_suffix);
CHECK(iter != metric_map_.end());
base::TimeDelta value) const {
auto info = GetMetricInfoOrFail(metric_suffix);
// Decide what time unit to convert the TimeDelta into. Units are based on
// the legacy units in
// https://cs.chromium.org/chromium/src/third_party/catapult/tracing/tracing/value/legacy_unit_info.py?q=legacy_unit_info
double time = 0;
if (iter->second.units == "seconds") {
if (info.units == "seconds") {
time = value.InSecondsF();
} else if (iter->second.units == "ms" ||
iter->second.units == "milliseconds") {
} else if (info.units == "ms" || info.units == "milliseconds") {
time = value.InMillisecondsF();
} else if (iter->second.units == "us") {
} else if (info.units == "us") {
time = value.InMicrosecondsF();
} else if (iter->second.units == "ns") {
} else if (info.units == "ns") {
time = value.InNanoseconds();
} else {
NOTREACHED() << "Attempted to use AddResult with a TimeDelta when "
<< "registered unit for metric " << metric_suffix << " is "
<< iter->second.units;
<< info.units;
}
PrintResult(metric_basename_, metric_suffix, story_name_, time,
iter->second.units, iter->second.important);
PrintResult(metric_basename_, metric_suffix, story_name_, time, info.units,
info.important);
}
void PerfResultReporter::AddResultList(const std::string& metric_suffix,
const std::string& values) {
auto iter = metric_map_.find(metric_suffix);
CHECK(iter != metric_map_.end());
const std::string& values) const {
auto info = GetMetricInfoOrFail(metric_suffix);
PrintResultList(metric_basename_, metric_suffix, story_name_, values,
iter->second.units, iter->second.important);
info.units, info.important);
}
void PerfResultReporter::AddResultMeanAndError(
const std::string& metric_suffix,
const std::string& mean_and_error) {
auto info = GetMetricInfoOrFail(metric_suffix);
PrintResultMeanAndError(metric_basename_, metric_suffix, story_name_,
mean_and_error, info.units, info.important);
}
bool PerfResultReporter::GetMetricInfo(const std::string& metric_suffix,
MetricInfo* out) {
MetricInfo* out) const {
auto iter = metric_map_.find(metric_suffix);
if (iter == metric_map_.end()) {
return false;
@@ -107,4 +110,12 @@ void PerfResultReporter::RegisterMetric(const std::string& metric_suffix,
metric_map_.insert({metric_suffix, {units, important}});
}
MetricInfo PerfResultReporter::GetMetricInfoOrFail(
const std::string& metric_suffix) const {
MetricInfo info;
CHECK(GetMetricInfo(metric_suffix, &info))
<< "Attempted to use unregistered metric " << metric_suffix;
return info;
}
} // namespace perf_test
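For reference, a minimal usage sketch of the reporter API as extended above; the prefix, story, metric name, and values are invented for illustration:

  perf_test::PerfResultReporter reporter("Example.", "my_story");
  reporter.RegisterImportantMetric("latency", "ms");
  // Preferred: report the raw samples as a comma-separated list.
  reporter.AddResultList("latency", "1.5,2.0,1.8");
  // Fallback when only aggregates are available, as "mean,stddev".
  reporter.AddResultMeanAndError("latency", "1.77,0.25");
  // reporter.AddResult("unregistered", 1.0);  // Would CHECK-fail via
  //                                           // GetMetricInfoOrFail().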
@@ -44,26 +44,35 @@ class PerfResultReporter {
const std::string& units);
void RegisterImportantMetric(const std::string& metric_suffix,
const std::string& units);
void AddResult(const std::string& metric_suffix, size_t value);
void AddResult(const std::string& metric_suffix, double value);
void AddResult(const std::string& metric_suffix, const std::string& value);
void AddResult(const std::string& metric_suffix, size_t value) const;
void AddResult(const std::string& metric_suffix, double value) const;
void AddResult(const std::string& metric_suffix,
const std::string& value) const;
// A special version of AddResult that will automatically convert the given
// TimeDelta into a double with the correct units for the registered metric.
void AddResult(const std::string& metric_suffix,
const base::TimeDelta& value);
void AddResult(const std::string& metric_suffix, base::TimeDelta value) const;
void AddResultList(const std::string& metric_suffix,
const std::string& values);
const std::string& values) const;
// Users should prefer AddResultList if possible, as otherwise the min/max
// values reported on the perf dashboard aren't useful.
// |mean_and_error| should be a comma-separated string of mean then
// error/stddev, e.g. "2.4,0.5".
void AddResultMeanAndError(const std::string& metric_suffix,
const std::string& mean_and_error);
// Returns true and fills the pointer if the metric is registered, otherwise
// returns false.
bool GetMetricInfo(const std::string& metric_suffix, MetricInfo* out);
bool GetMetricInfo(const std::string& metric_suffix, MetricInfo* out) const;
private:
void RegisterMetric(const std::string& metric_suffix,
const std::string& units,
bool important);
MetricInfo GetMetricInfoOrFail(const std::string& metric_suffix) const;
std::string metric_basename_;
std::string story_name_;
std::unordered_map<std::string, MetricInfo> metric_map_;
@@ -64,6 +64,7 @@ GTEST_CONVERSION_WHITELIST = [
'media_perftests',
'net_perftests',
'passthrough_command_buffer_perftests',
'performance_browser_tests',
'services_perftests',
'tracing_perftests',
'validating_command_buffer_perftests',