Commit 2a301a6d authored by Brian Sheedy, committed by Commit Bot

Switch components_perftests to histograms

Switches components_perftests to use PerfResultReporter instead of
PrintResult directly and whitelists it for conversion to histograms
before uploading to the perf dashboard.

This ends up changing most reported stories/metrics, so old data on the
dashboard will need to be migrated and alert patterns updated.

Bug: 923564
Change-Id: If6ed8ebec6c6c15e2ae48a54d2e7e1a7f3a04bb7
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1797495
Reviewed-by: Daniel Rubery <drubery@chromium.org>
Reviewed-by: Peng Huang <penghuang@chromium.org>
Reviewed-by: Scott Violet <sky@chromium.org>
Reviewed-by: Charlie Harrison <csharrison@chromium.org>
Reviewed-by: ssid <ssid@chromium.org>
Commit-Queue: Brian Sheedy <bsheedy@chromium.org>
Cr-Commit-Position: refs/heads/master@{#701219}
parent c06db835
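The change applies one mechanical pattern across every test in the suite. As a minimal before/after sketch of that pattern, using names taken from the first file in this diff:

// Before: each measurement is printed directly, repeating the story, units,
// and importance flag inline on every call.
perf_test::PrintResult("search_free_list", "", "",
                       count / accumulator.InSecondsF(), "runs/s", true);

// After: a PerfResultReporter is constructed once with a metric prefix and a
// story name, each metric is registered with its units, and results are then
// reported by metric name, which maps cleanly onto dashboard histograms.
perf_test::PerfResultReporter reporter("DiscardableSharedMemoryHeap.",
                                       "search_free_list");
reporter.RegisterImportantMetric("throughput", "runs/s");
reporter.AddResult("throughput", count / accumulator.InSecondsF());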
@@ -16,7 +16,7 @@
#include "base/memory/discardable_shared_memory.h"
#include "base/process/process_metrics.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "testing/perf/perf_test.h"
#include "testing/perf/perf_result_reporter.h"
namespace discardable_memory {
namespace {
@@ -92,8 +92,10 @@ TEST(DiscardableSharedMemoryHeapTest, SearchFreeLists) {
spans.clear();
perf_test::PrintResult("search_free_list", "", "",
count / accumulator.InSecondsF(), "runs/s", true);
perf_test::PerfResultReporter reporter("DiscardableSharedMemoryHeap.",
"search_free_list");
reporter.RegisterImportantMetric("throughput", "runs/s");
reporter.AddResult("throughput", count / accumulator.InSecondsF());
}
} // namespace
......
@@ -31,7 +31,7 @@
#include "components/leveldb_proto/testing/proto/test_db.pb.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "testing/perf/perf_test.h"
#include "testing/perf/perf_result_reporter.h"
#include "third_party/leveldatabase/env_chromium.h"
#include "third_party/leveldatabase/leveldb_chrome.h"
@@ -70,6 +70,16 @@ struct PerfStats {
};
static const std::string kSingleDBName = "singledb";
+static constexpr char kMetricNumRunsCount[] = "num_runs";
+static constexpr char kMetricTimeMs[] = "time";
+static constexpr char kMetricMaxMemoryUseBytes[] = "max_memory_use";
+static constexpr char kMetricAverageMemoryUseBytes[] = "average_memory_use";
+static constexpr char kMetricTotalMemoryUseBytes[] = "total_memory_use";
+static constexpr char kMetricMemUseAfterWritesBytes[] =
+"memory_use_after_writes";
+static constexpr char kMetricMemUseAfterLoadBytes[] = "memory_use_after_load";
+static constexpr char kMetricTotalTimeTakenMs[] = "total_time_taken";
+static constexpr char kMetricMaxIndTimeTakenMs[] = "max_individual_time_taken";
static const int kSmallDataSize = 10;
static const int kMediumDataSize = 100;
@@ -237,7 +247,7 @@ class ProtoDBPerfTest : public testing::Test {
// Runs a test to alternately insert elements with different prefixes into
// either a database for each prefix or a single DB.
void RunAlternatingInsertTest(const std::string& test_name,
-const std::string& test_modifier,
+const std::string& story_name,
const std::vector<std::string>& prefixes,
const TestParams& params) {
// Make the entries for each database first.
@@ -266,23 +276,21 @@ class ProtoDBPerfTest : public testing::Test {
remaining -= params.batch_size;
}
perf_test::PrintResult(test_name, test_modifier, "num_runs",
static_cast<size_t>(stats.num_runs), "", true);
perf_test::PrintResult(test_name, test_modifier, "time", stats.time_ms,
"ms", true);
perf_test::PrintResult(test_name, test_modifier, "max memory use",
static_cast<size_t>(stats.max_memory_used_bytes),
"bytes", true);
perf_test::PerfResultReporter reporter =
SetUpReporter(test_name, story_name);
reporter.AddResult(kMetricNumRunsCount,
static_cast<size_t>(stats.num_runs));
reporter.AddResult(kMetricTimeMs, stats.time_ms);
reporter.AddResult(kMetricMaxMemoryUseBytes,
static_cast<size_t>(stats.max_memory_used_bytes));
uint64_t average_memory_use = stats.memory_summed_bytes / stats.num_runs;
perf_test::PrintResult(test_name, test_modifier, "average memory use",
static_cast<size_t>(average_memory_use), "bytes",
true);
reporter.AddResult(kMetricAverageMemoryUseBytes,
static_cast<size_t>(average_memory_use));
}
// Used to measure the impact on memory in the case where the distribution of
// entries isn't equal amongst individual databases.
-void RunDistributionTestAndCleanup(const std::string& test_name,
-const std::string& test_modifier,
+void RunDistributionTestAndCleanup(const std::string& story_name,
const std::vector<TestParams>& test_params,
bool single_db) {
std::vector<std::string> prefixes;
@@ -314,9 +322,10 @@ class ProtoDBPerfTest : public testing::Test {
}
}
perf_test::PrintResult(test_name, test_modifier, "Total memory use",
static_cast<size_t>(stats.max_memory_used_bytes),
"bytes", true);
perf_test::PerfResultReporter reporter =
SetUpReporter("Distribution", story_name);
reporter.AddResult(kMetricTotalMemoryUseBytes,
static_cast<size_t>(stats.max_memory_used_bytes));
ShutdownDBs();
}
@@ -362,20 +371,17 @@ class ProtoDBPerfTest : public testing::Test {
uint64_t memory_use_after;
GetApproximateMemoryUsage(&memory_use_after);
-auto test_modifier_str = base::StringPrintf(
-"%s_%u_%u_%zu", test_modifier.c_str(), num_dbs, num_entries, data_size);
-perf_test::PrintResult(
-"ProtoDBPerfTest", test_modifier_str, "Memory use after writes",
-static_cast<size_t>(memory_use_before), "bytes", true);
-perf_test::PrintResult(
-"ProtoDBPerfTest", test_modifier_str, "Memory use after load",
-static_cast<size_t>(memory_use_after), "bytes", true);
-perf_test::PrintResult("ProtoDBPerfTest", test_modifier_str,
-"Total time taken", static_cast<size_t>(time_ms),
-"ms", true);
-perf_test::PrintResult("ProtoDBPerfTest", test_modifier_str,
-"Max individual time taken",
-static_cast<size_t>(max_time_ms), "ms", true);
+auto story_name =
+base::StringPrintf("%u_%u_%zu", num_dbs, num_entries, data_size);
+perf_test::PerfResultReporter reporter =
+SetUpReporter(test_modifier, story_name);
+reporter.AddResult(kMetricMemUseAfterWritesBytes,
+static_cast<size_t>(memory_use_before));
+reporter.AddResult(kMetricMemUseAfterLoadBytes,
+static_cast<size_t>(memory_use_after));
+reporter.AddResult(kMetricTotalTimeTakenMs, static_cast<size_t>(time_ms));
+reporter.AddResult(kMetricMaxIndTimeTakenMs,
+static_cast<size_t>(max_time_ms));
ShutdownDBs();
}
@@ -421,20 +427,17 @@ class ProtoDBPerfTest : public testing::Test {
uint64_t memory_use_after;
GetApproximateMemoryUsage(&memory_use_after);
-auto test_modifier_str = base::StringPrintf(
-"%s_%u_%u_%zu", test_modifier.c_str(), num_dbs, num_entries, data_size);
-perf_test::PrintResult(
-"ProtoDBPerfTest", test_modifier_str, "Memory use after writes",
-static_cast<size_t>(memory_use_before), "bytes", true);
-perf_test::PrintResult(
-"ProtoDBPerfTest", test_modifier_str, "Memory use after load",
-static_cast<size_t>(memory_use_after), "bytes", true);
-perf_test::PrintResult("ProtoDBPerfTest", test_modifier_str,
-"Total time taken", static_cast<size_t>(time_ms),
-"ms", true);
-perf_test::PrintResult("ProtoDBPerfTest", test_modifier_str,
-"Max individual time taken",
-static_cast<size_t>(max_time_ms), "ms", true);
+auto story_name =
+base::StringPrintf("%u_%u_%zu", num_dbs, num_entries, data_size);
+perf_test::PerfResultReporter reporter =
+SetUpReporter(test_modifier, story_name);
+reporter.AddResult(kMetricMemUseAfterWritesBytes,
+static_cast<size_t>(memory_use_before));
+reporter.AddResult(kMetricMemUseAfterLoadBytes,
+static_cast<size_t>(memory_use_after));
+reporter.AddResult(kMetricTotalTimeTakenMs, static_cast<size_t>(time_ms));
+reporter.AddResult(kMetricMaxIndTimeTakenMs,
+static_cast<size_t>(max_time_ms));
ShutdownDBs();
}
@@ -555,6 +558,22 @@ class ProtoDBPerfTest : public testing::Test {
->GetApproximateMemoryUse(memory_use);
}
+perf_test::PerfResultReporter SetUpReporter(const std::string& test_type,
+const std::string& story_name) {
+perf_test::PerfResultReporter reporter("ProtoDB_" + test_type + ".",
+story_name);
+reporter.RegisterImportantMetric(kMetricNumRunsCount, "count");
+reporter.RegisterImportantMetric(kMetricTimeMs, "ms");
+reporter.RegisterImportantMetric(kMetricMaxMemoryUseBytes, "bytes");
+reporter.RegisterImportantMetric(kMetricAverageMemoryUseBytes, "bytes");
+reporter.RegisterImportantMetric(kMetricTotalMemoryUseBytes, "bytes");
+reporter.RegisterImportantMetric(kMetricMemUseAfterWritesBytes, "bytes");
+reporter.RegisterImportantMetric(kMetricMemUseAfterLoadBytes, "bytes");
+reporter.RegisterImportantMetric(kMetricTotalTimeTakenMs, "ms");
+reporter.RegisterImportantMetric(kMetricMaxIndTimeTakenMs, "ms");
+return reporter;
+}
std::map<std::string, std::unique_ptr<TestDatabase>> dbs_;
std::unique_ptr<MessageLoop> main_loop_;
scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
@@ -649,35 +668,33 @@ TEST_F(ProtoDBPerfTest, InsertSingleDBAlternating_LargeBatch_1000b) {
}
TEST_F(ProtoDBPerfTest, DistributionTestSmall_FewEntries_Single) {
RunDistributionTestAndCleanup("DistributionTestSmall_FewEntries", "Single",
RunDistributionTestAndCleanup("Small_FewEntries_Single",
kFewEntriesDistributionTestParams, true);
}
TEST_F(ProtoDBPerfTest, DistributionTestSmall_FewEntries_Multi) {
RunDistributionTestAndCleanup("DistributionTestSmall_FewEntries", "Multi",
RunDistributionTestAndCleanup("Small_FewEntries_Multi",
kFewEntriesDistributionTestParams, false);
}
TEST_F(ProtoDBPerfTest, DistributionTestSmall_ManyEntries_Single) {
RunDistributionTestAndCleanup("DistributionTestSmall_ManyEntries", "Single",
RunDistributionTestAndCleanup("Small_ManyEntries_Single",
kManyEntriesDistributionTestParams, true);
}
TEST_F(ProtoDBPerfTest, DistributionTestSmall_ManyEntries_Multi) {
RunDistributionTestAndCleanup("DistributionTestSmall_ManyEntries", "Multi",
RunDistributionTestAndCleanup("Small_ManyEntries_Multi",
kManyEntriesDistributionTestParams, false);
}
TEST_F(ProtoDBPerfTest, DistributionTestSmall_ManyEntries_Batch_Single) {
RunDistributionTestAndCleanup("DistributionTestSmall_ManyEntries_Batch",
"Single", kManyEntriesDistributionTestParams,
true);
RunDistributionTestAndCleanup("Small_ManyEntries_Batch_Single",
kManyEntriesDistributionTestParams, true);
}
TEST_F(ProtoDBPerfTest, DistributionTestSmall_ManyEntries_Batch_Multi) {
RunDistributionTestAndCleanup("DistributionTestSmall_ManyEntries_Batch",
"Multi", kManyEntriesDistributionTestParams,
false);
RunDistributionTestAndCleanup("Small_ManyEntries_Batch_Multi",
kManyEntriesDistributionTestParams, false);
}
TEST_F(ProtoDBPerfTest, LoadEntriesSingle_Small) {
......
@@ -21,7 +21,7 @@
#include "components/omnibox/browser/history_test_util.h"
#include "components/omnibox/browser/in_memory_url_index_test_util.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "testing/perf/perf_test.h"
#include "testing/perf/perf_result_reporter.h"
namespace history {
@@ -150,16 +150,21 @@ void HQPPerfTestOnePopularURL::PrepareData() {
}
void HQPPerfTestOnePopularURL::PrintMeasurements(
-const std::string& trace_name,
+const std::string& story_name,
const std::vector<base::TimeDelta>& measurements) {
auto* test_info = ::testing::UnitTest::GetInstance()->current_test_info();
std::string durations;
for (const auto& measurement : measurements)
durations += std::to_string(measurement.InMillisecondsRoundedUp()) + ',';
-perf_test::PrintResultList(test_info->test_case_name(), test_info->name(),
-trace_name, durations, "ms", true);
+// Strip off trailing comma.
+durations.pop_back();
+auto metric_prefix = std::string(test_info->test_case_name()) + "_" +
+std::string(test_info->name());
+perf_test::PerfResultReporter reporter(metric_prefix, story_name);
+reporter.RegisterImportantMetric(".duration", "ms");
+reporter.AddResultList(".duration", durations);
}
base::TimeDelta HQPPerfTestOnePopularURL::RunTest(const base::string16& text) {
......
@@ -17,10 +17,23 @@
#include "components/safe_browsing/db/v4_test_util.h"
#include "crypto/sha2.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "testing/perf/perf_test.h"
#include "testing/perf/perf_result_reporter.h"
namespace safe_browsing {
namespace {
+constexpr char kMetricPrefixV4Store[] = "V4Store.";
+constexpr char kMetricGetMatchingHashPrefixMs[] = "get_matching_hash_prefix";
+perf_test::PerfResultReporter SetUpV4StoreReporter(const std::string& story) {
+perf_test::PerfResultReporter reporter(kMetricPrefixV4Store, story);
+reporter.RegisterImportantMetric(kMetricGetMatchingHashPrefixMs, "ms");
+return reporter;
+}
} // namespace
class V4StorePerftest : public testing::Test {};
TEST_F(V4StorePerftest, StressTest) {
@@ -53,6 +66,7 @@ TEST_F(V4StorePerftest, StressTest) {
store->SetPrefixes(std::move(prefixes), kMinHashPrefixLength);
size_t matches = 0;
+auto reporter = SetUpV4StoreReporter("stress_test");
base::ElapsedTimer timer;
for (size_t i = 0; i < kNumPrefixes; i++) {
size_t index = i * kMaxHashPrefixLength;
@@ -60,8 +74,8 @@
full_hashes_piece.substr(index, kMaxHashPrefixLength);
matches += !store->GetMatchingHashPrefix(full_hash).empty();
}
perf_test::PrintResult("GetMatchingHashPrefix", "", "",
timer.Elapsed().InMillisecondsF(), "ms", true);
reporter.AddResult(kMetricGetMatchingHashPrefixMs,
timer.Elapsed().InMillisecondsF());
EXPECT_EQ(kNumPrefixes, matches);
}
......
@@ -23,10 +23,17 @@
#include "components/subresource_filter/tools/filter_tool.h"
#include "components/subresource_filter/tools/indexing_tool.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "testing/perf/perf_test.h"
#include "testing/perf/perf_result_reporter.h"
namespace subresource_filter {
+namespace {
+static constexpr char kMetricIndexAndWriteTimeUs[] = "index_and_write_time";
+static constexpr char kMetricMedianMatchTimeUs[] = "median_match_time";
+} // namespace
class IndexedRulesetPerftest : public testing::Test {
public:
IndexedRulesetPerftest() {}
@@ -69,6 +76,13 @@ class IndexedRulesetPerftest : public testing::Test {
const base::FilePath& unindexed_path() const { return unindexed_path_; }
+perf_test::PerfResultReporter SetUpReporter(const std::string& story_name) {
+perf_test::PerfResultReporter reporter("IndexedRuleset.", story_name);
+reporter.RegisterImportantMetric(kMetricIndexAndWriteTimeUs, "us");
+reporter.RegisterImportantMetric(kMetricMedianMatchTimeUs, "us");
+return reporter;
+}
private:
base::ScopedTempDir scoped_dir_;
base::FilePath unindexed_path_;
@@ -91,9 +105,9 @@ TEST_F(IndexedRulesetPerftest, IndexRuleset) {
base::ElapsedTimer timer;
ASSERT_TRUE(IndexAndWriteRuleset(unindexed_path(), indexed_path));
perf_test::PrintResult("index_and_write_time", "", "",
static_cast<size_t>(timer.Elapsed().InMicroseconds()),
"microseconds", true /* important */);
perf_test::PerfResultReporter reporter = SetUpReporter("IndexRuleset");
reporter.AddResult(kMetricIndexAndWriteTimeUs,
static_cast<size_t>(timer.Elapsed().InMicroseconds()));
}
TEST_F(IndexedRulesetPerftest, MatchAll) {
@@ -105,9 +119,8 @@ TEST_F(IndexedRulesetPerftest, MatchAll) {
results.push_back(timer.Elapsed().InMicroseconds());
}
std::sort(results.begin(), results.end());
perf_test::PrintResult("median_match_time", "", "",
static_cast<size_t>(results[2]), "microseconds",
true /* important */);
perf_test::PerfResultReporter reporter = SetUpReporter("MatchAll");
reporter.AddResult(kMetricMedianMatchTimeUs, static_cast<size_t>(results[2]));
}
} // namespace subresource_filter
@@ -15,7 +15,7 @@
#include "content/public/test/browser_task_environment.h"
#include "content/public/test/test_utils.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "testing/perf/perf_test.h"
#include "testing/perf/perf_result_reporter.h"
#include "url/gurl.h"
using base::TimeDelta;
@@ -24,24 +24,42 @@ namespace visitedlink {
namespace {
+static constexpr char kMetricAddAndQueryMs[] = "add_and_query";
+static constexpr char kMetricTableInitMs[] = "table_initialization";
+static constexpr char kMetricLinkInitMs[] = "link_init";
+static constexpr char kMetricDatabaseFlushMs[] = "database_flush";
+static constexpr char kMetricColdLoadTimeMs[] = "cold_load_time";
+static constexpr char kMetricHotLoadTimeMs[] = "hot_load_time";
+perf_test::PerfResultReporter SetUpReporter(const std::string& metric_suffix) {
+perf_test::PerfResultReporter reporter("VisitedLink.", metric_suffix);
+reporter.RegisterImportantMetric(kMetricAddAndQueryMs, "ms");
+reporter.RegisterImportantMetric(kMetricTableInitMs, "ms");
+reporter.RegisterImportantMetric(kMetricLinkInitMs, "ms");
+reporter.RegisterImportantMetric(kMetricDatabaseFlushMs, "ms");
+reporter.RegisterImportantMetric(kMetricColdLoadTimeMs, "ms");
+reporter.RegisterImportantMetric(kMetricHotLoadTimeMs, "ms");
+return reporter;
+}
// Designed like base/test/perf_time_logger but uses testing/perf instead of
// base/test/perf* to report timings.
class TimeLogger {
public:
-explicit TimeLogger(std::string test_name);
+explicit TimeLogger(std::string metric_suffix);
~TimeLogger();
void Done();
private:
bool logged_;
-std::string test_name_;
+std::string metric_suffix_;
base::ElapsedTimer timer_;
DISALLOW_COPY_AND_ASSIGN(TimeLogger);
};
-TimeLogger::TimeLogger(std::string test_name)
-: logged_(false), test_name_(std::move(test_name)) {}
+TimeLogger::TimeLogger(std::string metric_suffix)
+: logged_(false), metric_suffix_(std::move(metric_suffix)) {}
TimeLogger::~TimeLogger() {
if (!logged_)
@@ -52,8 +70,8 @@ void TimeLogger::Done() {
// We use a floating-point millisecond value because it is more
// intuitive than microseconds and we want more precision than
// integer milliseconds.
-perf_test::PrintResult(test_name_, std::string(), std::string(),
-timer_.Elapsed().InMillisecondsF(), "ms", true);
+perf_test::PerfResultReporter reporter = SetUpReporter("baseline_story");
+reporter.AddResult(metric_suffix_, timer_.Elapsed().InMillisecondsF());
logged_ = true;
}
@@ -118,7 +136,7 @@ TEST_F(VisitedLink, TestAddAndQuery) {
ASSERT_TRUE(master.Init());
content::RunAllTasksUntilIdle();
TimeLogger timer("Visited_link_add_and_query");
TimeLogger timer(kMetricAddAndQueryMs);
// first check without anything in the table
CheckVisited(master, added_prefix, 0, add_count);
@@ -144,13 +162,13 @@ TEST_F(VisitedLink, TestAddAndQuery) {
TEST_F(VisitedLink, DISABLED_TestLoad) {
// create a big DB
{
TimeLogger table_initialization_timer("Table_initialization");
TimeLogger table_initialization_timer(kMetricTableInitMs);
VisitedLinkMaster master(new DummyVisitedLinkEventListener(), nullptr, true,
true, db_path_, 0);
// time init with empty table
TimeLogger initTimer("Empty_visited_link_init");
TimeLogger initTimer(kMetricLinkInitMs);
bool success = master.Init();
content::RunAllTasksUntilIdle();
initTimer.Done();
@@ -163,7 +181,7 @@ TEST_F(VisitedLink, DISABLED_TestLoad) {
FillTable(master, added_prefix, 0, load_test_add_count);
// time writing the file out
TimeLogger flushTimer("Visited_link_database_flush");
TimeLogger flushTimer(kMetricDatabaseFlushMs);
master.RewriteFile();
// TODO(maruel): Without calling FlushFileBuffers(master.file_); you don't
// know really how much time it took to write the file.
@@ -222,12 +240,9 @@ TEST_F(VisitedLink, DISABLED_TestLoad) {
hot_sum += hot_load_times[i];
}
perf_test::PrintResult("Visited_link_cold_load_time", std::string(),
std::string(), cold_sum / cold_load_times.size(), "ms",
true);
perf_test::PrintResult("Visited_link_hot_load_time", std::string(),
std::string(), hot_sum / hot_load_times.size(), "ms",
true);
perf_test::PerfResultReporter reporter = SetUpReporter("baseline_story");
reporter.AddResult(kMetricColdLoadTimeMs, cold_sum / cold_load_times.size());
reporter.AddResult(kMetricHotLoadTimeMs, hot_sum / hot_load_times.size());
}
} // namespace visitedlink
@@ -52,6 +52,15 @@ void PerfResultReporter::AddResult(const std::string& metric_suffix,
iter->second.units, iter->second.important);
}
+void PerfResultReporter::AddResultList(const std::string& metric_suffix,
+const std::string& values) {
+auto iter = metric_map_.find(metric_suffix);
+CHECK(iter != metric_map_.end());
+PrintResultList(metric_basename_, metric_suffix, story_name_, values,
+iter->second.units, iter->second.important);
+}
bool PerfResultReporter::GetMetricInfo(const std::string& metric_suffix,
MetricInfo* out) {
auto iter = metric_map_.find(metric_suffix);
......
@@ -45,6 +45,8 @@ class PerfResultReporter {
void AddResult(const std::string& metric_suffix, size_t value);
void AddResult(const std::string& metric_suffix, double value);
void AddResult(const std::string& metric_suffix, const std::string& value);
+void AddResultList(const std::string& metric_suffix,
+const std::string& values);
// Returns true and fills the pointer if the metric is registered, otherwise
// returns false.
......
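For context on the AddResultList API added above: it follows the same contract as AddResult (the metric must be registered before results are reported), but takes its samples as a single comma-separated string, as the omnibox test above builds with pop_back() stripping the trailing comma. A minimal hypothetical caller (the prefix, story, and sample values here are invented for illustration):

perf_test::PerfResultReporter reporter("MyPerfTest.", "my_story");  // illustrative names
reporter.RegisterImportantMetric(".duration", "ms");
// One comma-separated string of samples, with no trailing comma.
reporter.AddResultList(".duration", "12,15,11");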
@@ -57,6 +57,7 @@ DATA_FORMAT_UNKNOWN = 'unknown'
GTEST_CONVERSION_WHITELIST = [
'angle_perftests',
'cc_perftests',
+'components_perftests',
'gpu_perftests',
'latency_perftests',
'load_library_perf_tests',
......