Commit 2a301a6d authored by Brian Sheedy, committed by Commit Bot

Switch components_perftests to histograms

Switches components_perftests to use PerfResultReporter instead of
calling PrintResult directly, and whitelists the suite so its results
are converted to histograms before being uploaded to the perf
dashboard.

This ends up changing most reported stories/metrics, so old data on the
dashboard will need to be migrated and alert patterns updated.
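
The conversion follows the same pattern at each call site: the ad-hoc
PrintResult() call is replaced by a PerfResultReporter constructed from
a metric basename and a story name, each metric's units are registered
once up front, and values are then reported through AddResult(). A
representative before/after, taken from the discardable_memory diff
below:

  // Before: metric name, units, and importance passed inline on every call.
  perf_test::PrintResult("search_free_list", "", "",
                         count / accumulator.InSecondsF(), "runs/s", true);

  // After: units are declared once via RegisterImportantMetric();
  // AddResult() looks the metric up (mirroring the AddResultList()
  // implementation added in this CL, which CHECKs that the metric
  // was registered).
  perf_test::PerfResultReporter reporter("DiscardableSharedMemoryHeap.",
                                         "search_free_list");
  reporter.RegisterImportantMetric("throughput", "runs/s");
  reporter.AddResult("throughput", count / accumulator.InSecondsF());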

Bug: 923564
Change-Id: If6ed8ebec6c6c15e2ae48a54d2e7e1a7f3a04bb7
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1797495
Reviewed-by: Daniel Rubery <drubery@chromium.org>
Reviewed-by: Peng Huang <penghuang@chromium.org>
Reviewed-by: Scott Violet <sky@chromium.org>
Reviewed-by: Charlie Harrison <csharrison@chromium.org>
Reviewed-by: ssid <ssid@chromium.org>
Commit-Queue: Brian Sheedy <bsheedy@chromium.org>
Cr-Commit-Position: refs/heads/master@{#701219}
parent c06db835
@@ -16,7 +16,7 @@
 #include "base/memory/discardable_shared_memory.h"
 #include "base/process/process_metrics.h"
 #include "testing/gtest/include/gtest/gtest.h"
-#include "testing/perf/perf_test.h"
+#include "testing/perf/perf_result_reporter.h"
 
 namespace discardable_memory {
 namespace {
@@ -92,8 +92,10 @@ TEST(DiscardableSharedMemoryHeapTest, SearchFreeLists) {
   spans.clear();
 
-  perf_test::PrintResult("search_free_list", "", "",
-                         count / accumulator.InSecondsF(), "runs/s", true);
+  perf_test::PerfResultReporter reporter("DiscardableSharedMemoryHeap.",
+                                         "search_free_list");
+  reporter.RegisterImportantMetric("throughput", "runs/s");
+  reporter.AddResult("throughput", count / accumulator.InSecondsF());
 }
 
 }  // namespace
...
@@ -31,7 +31,7 @@
 #include "components/leveldb_proto/testing/proto/test_db.pb.h"
 #include "testing/gmock/include/gmock/gmock.h"
 #include "testing/gtest/include/gtest/gtest.h"
-#include "testing/perf/perf_test.h"
+#include "testing/perf/perf_result_reporter.h"
 #include "third_party/leveldatabase/env_chromium.h"
 #include "third_party/leveldatabase/leveldb_chrome.h"
@@ -70,6 +70,16 @@ struct PerfStats {
 };
 
 static const std::string kSingleDBName = "singledb";
+static constexpr char kMetricNumRunsCount[] = "num_runs";
+static constexpr char kMetricTimeMs[] = "time";
+static constexpr char kMetricMaxMemoryUseBytes[] = "max_memory_use";
+static constexpr char kMetricAverageMemoryUseBytes[] = "average_memory_use";
+static constexpr char kMetricTotalMemoryUseBytes[] = "total_memory_use";
+static constexpr char kMetricMemUseAfterWritesBytes[] =
+    "memory_use_after_writes";
+static constexpr char kMetricMemUseAfterLoadBytes[] = "memory_use_after_load";
+static constexpr char kMetricTotalTimeTakenMs[] = "total_time_taken";
+static constexpr char kMetricMaxIndTimeTakenMs[] = "max_individual_time_taken";
 
 static const int kSmallDataSize = 10;
 static const int kMediumDataSize = 100;
@@ -237,7 +247,7 @@ class ProtoDBPerfTest : public testing::Test {
   // Runs a test to alternately insert elements with different prefixes into
   // either a database for each prefix or a single DB.
   void RunAlternatingInsertTest(const std::string& test_name,
-                                const std::string& test_modifier,
+                                const std::string& story_name,
                                 const std::vector<std::string>& prefixes,
                                 const TestParams& params) {
     // Make the entries for each database first.
@@ -266,23 +276,21 @@ class ProtoDBPerfTest : public testing::Test {
       remaining -= params.batch_size;
     }
 
-    perf_test::PrintResult(test_name, test_modifier, "num_runs",
-                           static_cast<size_t>(stats.num_runs), "", true);
-    perf_test::PrintResult(test_name, test_modifier, "time", stats.time_ms,
-                           "ms", true);
-    perf_test::PrintResult(test_name, test_modifier, "max memory use",
-                           static_cast<size_t>(stats.max_memory_used_bytes),
-                           "bytes", true);
+    perf_test::PerfResultReporter reporter =
+        SetUpReporter(test_name, story_name);
+    reporter.AddResult(kMetricNumRunsCount,
+                       static_cast<size_t>(stats.num_runs));
+    reporter.AddResult(kMetricTimeMs, stats.time_ms);
+    reporter.AddResult(kMetricMaxMemoryUseBytes,
+                       static_cast<size_t>(stats.max_memory_used_bytes));
     uint64_t average_memory_use = stats.memory_summed_bytes / stats.num_runs;
-    perf_test::PrintResult(test_name, test_modifier, "average memory use",
-                           static_cast<size_t>(average_memory_use), "bytes",
-                           true);
+    reporter.AddResult(kMetricAverageMemoryUseBytes,
+                       static_cast<size_t>(average_memory_use));
   }
 
   // Used to measure the impact on memory in the case where the distribution of
   // entries isn't equal amongst individual databases.
-  void RunDistributionTestAndCleanup(const std::string& test_name,
-                                     const std::string& test_modifier,
+  void RunDistributionTestAndCleanup(const std::string& story_name,
                                      const std::vector<TestParams>& test_params,
                                      bool single_db) {
     std::vector<std::string> prefixes;
@@ -314,9 +322,10 @@ class ProtoDBPerfTest : public testing::Test {
       }
     }
 
-    perf_test::PrintResult(test_name, test_modifier, "Total memory use",
-                           static_cast<size_t>(stats.max_memory_used_bytes),
-                           "bytes", true);
+    perf_test::PerfResultReporter reporter =
+        SetUpReporter("Distribution", story_name);
+    reporter.AddResult(kMetricTotalMemoryUseBytes,
+                       static_cast<size_t>(stats.max_memory_used_bytes));
 
     ShutdownDBs();
   }
@@ -362,20 +371,17 @@ class ProtoDBPerfTest : public testing::Test {
     uint64_t memory_use_after;
     GetApproximateMemoryUsage(&memory_use_after);
 
-    auto test_modifier_str = base::StringPrintf(
-        "%s_%u_%u_%zu", test_modifier.c_str(), num_dbs, num_entries, data_size);
-    perf_test::PrintResult(
-        "ProtoDBPerfTest", test_modifier_str, "Memory use after writes",
-        static_cast<size_t>(memory_use_before), "bytes", true);
-    perf_test::PrintResult(
-        "ProtoDBPerfTest", test_modifier_str, "Memory use after load",
-        static_cast<size_t>(memory_use_after), "bytes", true);
-    perf_test::PrintResult("ProtoDBPerfTest", test_modifier_str,
-                           "Total time taken", static_cast<size_t>(time_ms),
-                           "ms", true);
-    perf_test::PrintResult("ProtoDBPerfTest", test_modifier_str,
-                           "Max individual time taken",
-                           static_cast<size_t>(max_time_ms), "ms", true);
+    auto story_name =
+        base::StringPrintf("%u_%u_%zu", num_dbs, num_entries, data_size);
+    perf_test::PerfResultReporter reporter =
+        SetUpReporter(test_modifier, story_name);
+    reporter.AddResult(kMetricMemUseAfterWritesBytes,
+                       static_cast<size_t>(memory_use_before));
+    reporter.AddResult(kMetricMemUseAfterLoadBytes,
+                       static_cast<size_t>(memory_use_after));
+    reporter.AddResult(kMetricTotalTimeTakenMs, static_cast<size_t>(time_ms));
+    reporter.AddResult(kMetricMaxIndTimeTakenMs,
+                       static_cast<size_t>(max_time_ms));
 
     ShutdownDBs();
   }
@@ -421,20 +427,17 @@ class ProtoDBPerfTest : public testing::Test {
     uint64_t memory_use_after;
     GetApproximateMemoryUsage(&memory_use_after);
 
-    auto test_modifier_str = base::StringPrintf(
-        "%s_%u_%u_%zu", test_modifier.c_str(), num_dbs, num_entries, data_size);
-    perf_test::PrintResult(
-        "ProtoDBPerfTest", test_modifier_str, "Memory use after writes",
-        static_cast<size_t>(memory_use_before), "bytes", true);
-    perf_test::PrintResult(
-        "ProtoDBPerfTest", test_modifier_str, "Memory use after load",
-        static_cast<size_t>(memory_use_after), "bytes", true);
-    perf_test::PrintResult("ProtoDBPerfTest", test_modifier_str,
-                           "Total time taken", static_cast<size_t>(time_ms),
-                           "ms", true);
-    perf_test::PrintResult("ProtoDBPerfTest", test_modifier_str,
-                           "Max individual time taken",
-                           static_cast<size_t>(max_time_ms), "ms", true);
+    auto story_name =
+        base::StringPrintf("%u_%u_%zu", num_dbs, num_entries, data_size);
+    perf_test::PerfResultReporter reporter =
+        SetUpReporter(test_modifier, story_name);
+    reporter.AddResult(kMetricMemUseAfterWritesBytes,
+                       static_cast<size_t>(memory_use_before));
+    reporter.AddResult(kMetricMemUseAfterLoadBytes,
+                       static_cast<size_t>(memory_use_after));
+    reporter.AddResult(kMetricTotalTimeTakenMs, static_cast<size_t>(time_ms));
+    reporter.AddResult(kMetricMaxIndTimeTakenMs,
+                       static_cast<size_t>(max_time_ms));
 
     ShutdownDBs();
   }
@@ -555,6 +558,22 @@ class ProtoDBPerfTest : public testing::Test {
         ->GetApproximateMemoryUse(memory_use);
   }
 
+  perf_test::PerfResultReporter SetUpReporter(const std::string& test_type,
+                                              const std::string& story_name) {
+    perf_test::PerfResultReporter reporter("ProtoDB_" + test_type + ".",
+                                           story_name);
+    reporter.RegisterImportantMetric(kMetricNumRunsCount, "count");
+    reporter.RegisterImportantMetric(kMetricTimeMs, "ms");
+    reporter.RegisterImportantMetric(kMetricMaxMemoryUseBytes, "bytes");
+    reporter.RegisterImportantMetric(kMetricAverageMemoryUseBytes, "bytes");
+    reporter.RegisterImportantMetric(kMetricTotalMemoryUseBytes, "bytes");
+    reporter.RegisterImportantMetric(kMetricMemUseAfterWritesBytes, "bytes");
+    reporter.RegisterImportantMetric(kMetricMemUseAfterLoadBytes, "bytes");
+    reporter.RegisterImportantMetric(kMetricTotalTimeTakenMs, "ms");
+    reporter.RegisterImportantMetric(kMetricMaxIndTimeTakenMs, "ms");
+    return reporter;
+  }
+
   std::map<std::string, std::unique_ptr<TestDatabase>> dbs_;
   std::unique_ptr<MessageLoop> main_loop_;
   scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
@@ -649,35 +668,33 @@ TEST_F(ProtoDBPerfTest, InsertSingleDBAlternating_LargeBatch_1000b) {
 }
 
 TEST_F(ProtoDBPerfTest, DistributionTestSmall_FewEntries_Single) {
-  RunDistributionTestAndCleanup("DistributionTestSmall_FewEntries", "Single",
-                                kFewEntriesDistributionTestParams, true);
+  RunDistributionTestAndCleanup("Small_FewEntries_Single",
+                                kFewEntriesDistributionTestParams, true);
 }
 
 TEST_F(ProtoDBPerfTest, DistributionTestSmall_FewEntries_Multi) {
-  RunDistributionTestAndCleanup("DistributionTestSmall_FewEntries", "Multi",
-                                kFewEntriesDistributionTestParams, false);
+  RunDistributionTestAndCleanup("Small_FewEntries_Multi",
+                                kFewEntriesDistributionTestParams, false);
 }
 
 TEST_F(ProtoDBPerfTest, DistributionTestSmall_ManyEntries_Single) {
-  RunDistributionTestAndCleanup("DistributionTestSmall_ManyEntries", "Single",
-                                kManyEntriesDistributionTestParams, true);
+  RunDistributionTestAndCleanup("Small_ManyEntries_Single",
+                                kManyEntriesDistributionTestParams, true);
 }
 
 TEST_F(ProtoDBPerfTest, DistributionTestSmall_ManyEntries_Multi) {
-  RunDistributionTestAndCleanup("DistributionTestSmall_ManyEntries", "Multi",
-                                kManyEntriesDistributionTestParams, false);
+  RunDistributionTestAndCleanup("Small_ManyEntries_Multi",
+                                kManyEntriesDistributionTestParams, false);
 }
 
 TEST_F(ProtoDBPerfTest, DistributionTestSmall_ManyEntries_Batch_Single) {
-  RunDistributionTestAndCleanup("DistributionTestSmall_ManyEntries_Batch",
-                                "Single", kManyEntriesDistributionTestParams,
-                                true);
+  RunDistributionTestAndCleanup("Small_ManyEntries_Batch_Single",
+                                kManyEntriesDistributionTestParams, true);
 }
 
 TEST_F(ProtoDBPerfTest, DistributionTestSmall_ManyEntries_Batch_Multi) {
-  RunDistributionTestAndCleanup("DistributionTestSmall_ManyEntries_Batch",
-                                "Multi", kManyEntriesDistributionTestParams,
-                                false);
+  RunDistributionTestAndCleanup("Small_ManyEntries_Batch_Multi",
+                                kManyEntriesDistributionTestParams, false);
 }
 
 TEST_F(ProtoDBPerfTest, LoadEntriesSingle_Small) {
...
@@ -21,7 +21,7 @@
 #include "components/omnibox/browser/history_test_util.h"
 #include "components/omnibox/browser/in_memory_url_index_test_util.h"
 #include "testing/gtest/include/gtest/gtest.h"
-#include "testing/perf/perf_test.h"
+#include "testing/perf/perf_result_reporter.h"
 
 namespace history {
@@ -150,16 +150,21 @@ void HQPPerfTestOnePopularURL::PrepareData() {
 }
 
 void HQPPerfTestOnePopularURL::PrintMeasurements(
-    const std::string& trace_name,
+    const std::string& story_name,
    const std::vector<base::TimeDelta>& measurements) {
   auto* test_info = ::testing::UnitTest::GetInstance()->current_test_info();
 
   std::string durations;
   for (const auto& measurement : measurements)
     durations += std::to_string(measurement.InMillisecondsRoundedUp()) + ',';
+  // Strip off trailing comma.
+  durations.pop_back();
 
-  perf_test::PrintResultList(test_info->test_case_name(), test_info->name(),
-                             trace_name, durations, "ms", true);
+  auto metric_prefix = std::string(test_info->test_case_name()) + "_" +
+                       std::string(test_info->name());
+  perf_test::PerfResultReporter reporter(metric_prefix, story_name);
+  reporter.RegisterImportantMetric(".duration", "ms");
+  reporter.AddResultList(".duration", durations);
 }
 
 base::TimeDelta HQPPerfTestOnePopularURL::RunTest(const base::string16& text) {
...
@@ -17,10 +17,23 @@
 #include "components/safe_browsing/db/v4_test_util.h"
 #include "crypto/sha2.h"
 #include "testing/gtest/include/gtest/gtest.h"
-#include "testing/perf/perf_test.h"
+#include "testing/perf/perf_result_reporter.h"
 
 namespace safe_browsing {
 
+namespace {
+
+constexpr char kMetricPrefixV4Store[] = "V4Store.";
+constexpr char kMetricGetMatchingHashPrefixMs[] = "get_matching_hash_prefix";
+
+perf_test::PerfResultReporter SetUpV4StoreReporter(const std::string& story) {
+  perf_test::PerfResultReporter reporter(kMetricPrefixV4Store, story);
+  reporter.RegisterImportantMetric(kMetricGetMatchingHashPrefixMs, "ms");
+  return reporter;
+}
+
+}  // namespace
+
 class V4StorePerftest : public testing::Test {};
 
 TEST_F(V4StorePerftest, StressTest) {
@@ -53,6 +66,7 @@ TEST_F(V4StorePerftest, StressTest) {
   store->SetPrefixes(std::move(prefixes), kMinHashPrefixLength);
 
   size_t matches = 0;
+  auto reporter = SetUpV4StoreReporter("stress_test");
   base::ElapsedTimer timer;
   for (size_t i = 0; i < kNumPrefixes; i++) {
     size_t index = i * kMaxHashPrefixLength;
@@ -60,8 +74,8 @@ TEST_F(V4StorePerftest, StressTest) {
         full_hashes_piece.substr(index, kMaxHashPrefixLength);
     matches += !store->GetMatchingHashPrefix(full_hash).empty();
   }
-  perf_test::PrintResult("GetMatchingHashPrefix", "", "",
-                         timer.Elapsed().InMillisecondsF(), "ms", true);
+  reporter.AddResult(kMetricGetMatchingHashPrefixMs,
+                     timer.Elapsed().InMillisecondsF());
 
   EXPECT_EQ(kNumPrefixes, matches);
 }
...
@@ -23,10 +23,17 @@
 #include "components/subresource_filter/tools/filter_tool.h"
 #include "components/subresource_filter/tools/indexing_tool.h"
 #include "testing/gtest/include/gtest/gtest.h"
-#include "testing/perf/perf_test.h"
+#include "testing/perf/perf_result_reporter.h"
 
 namespace subresource_filter {
 
+namespace {
+
+static constexpr char kMetricIndexAndWriteTimeUs[] = "index_and_write_time";
+static constexpr char kMetricMedianMatchTimeUs[] = "median_match_time";
+
+}  // namespace
+
 class IndexedRulesetPerftest : public testing::Test {
  public:
   IndexedRulesetPerftest() {}
@@ -69,6 +76,13 @@ class IndexedRulesetPerftest : public testing::Test {
   const base::FilePath& unindexed_path() const { return unindexed_path_; }
 
+  perf_test::PerfResultReporter SetUpReporter(const std::string& story_name) {
+    perf_test::PerfResultReporter reporter("IndexedRuleset.", story_name);
+    reporter.RegisterImportantMetric(kMetricIndexAndWriteTimeUs, "us");
+    reporter.RegisterImportantMetric(kMetricMedianMatchTimeUs, "us");
+    return reporter;
+  }
+
  private:
   base::ScopedTempDir scoped_dir_;
   base::FilePath unindexed_path_;
@@ -91,9 +105,9 @@ TEST_F(IndexedRulesetPerftest, IndexRuleset) {
   base::ElapsedTimer timer;
   ASSERT_TRUE(IndexAndWriteRuleset(unindexed_path(), indexed_path));
-  perf_test::PrintResult("index_and_write_time", "", "",
-                         static_cast<size_t>(timer.Elapsed().InMicroseconds()),
-                         "microseconds", true /* important */);
+  perf_test::PerfResultReporter reporter = SetUpReporter("IndexRuleset");
+  reporter.AddResult(kMetricIndexAndWriteTimeUs,
+                     static_cast<size_t>(timer.Elapsed().InMicroseconds()));
 }
 
 TEST_F(IndexedRulesetPerftest, MatchAll) {
@@ -105,9 +119,8 @@ TEST_F(IndexedRulesetPerftest, MatchAll) {
     results.push_back(timer.Elapsed().InMicroseconds());
   }
   std::sort(results.begin(), results.end());
-  perf_test::PrintResult("median_match_time", "", "",
-                         static_cast<size_t>(results[2]), "microseconds",
-                         true /* important */);
+  perf_test::PerfResultReporter reporter = SetUpReporter("MatchAll");
+  reporter.AddResult(kMetricMedianMatchTimeUs, static_cast<size_t>(results[2]));
 }
 
 }  // namespace subresource_filter
@@ -15,7 +15,7 @@
 #include "content/public/test/browser_task_environment.h"
 #include "content/public/test/test_utils.h"
 #include "testing/gtest/include/gtest/gtest.h"
-#include "testing/perf/perf_test.h"
+#include "testing/perf/perf_result_reporter.h"
 #include "url/gurl.h"
 
 using base::TimeDelta;
@@ -24,24 +24,42 @@ namespace visitedlink {
 
 namespace {
 
+static constexpr char kMetricAddAndQueryMs[] = "add_and_query";
+static constexpr char kMetricTableInitMs[] = "table_initialization";
+static constexpr char kMetricLinkInitMs[] = "link_init";
+static constexpr char kMetricDatabaseFlushMs[] = "database_flush";
+static constexpr char kMetricColdLoadTimeMs[] = "cold_load_time";
+static constexpr char kMetricHotLoadTimeMs[] = "hot_load_time";
+
+perf_test::PerfResultReporter SetUpReporter(const std::string& metric_suffix) {
+  perf_test::PerfResultReporter reporter("VisitedLink.", metric_suffix);
+  reporter.RegisterImportantMetric(kMetricAddAndQueryMs, "ms");
+  reporter.RegisterImportantMetric(kMetricTableInitMs, "ms");
+  reporter.RegisterImportantMetric(kMetricLinkInitMs, "ms");
+  reporter.RegisterImportantMetric(kMetricDatabaseFlushMs, "ms");
+  reporter.RegisterImportantMetric(kMetricColdLoadTimeMs, "ms");
+  reporter.RegisterImportantMetric(kMetricHotLoadTimeMs, "ms");
+  return reporter;
+}
+
 // Designed like base/test/perf_time_logger but uses testing/perf instead of
 // base/test/perf* to report timings.
 class TimeLogger {
  public:
-  explicit TimeLogger(std::string test_name);
+  explicit TimeLogger(std::string metric_suffix);
   ~TimeLogger();
   void Done();
 
 private:
   bool logged_;
-  std::string test_name_;
+  std::string metric_suffix_;
   base::ElapsedTimer timer_;
 
   DISALLOW_COPY_AND_ASSIGN(TimeLogger);
 };
 
-TimeLogger::TimeLogger(std::string test_name)
-    : logged_(false), test_name_(std::move(test_name)) {}
+TimeLogger::TimeLogger(std::string metric_suffix)
+    : logged_(false), metric_suffix_(std::move(metric_suffix)) {}
 
 TimeLogger::~TimeLogger() {
   if (!logged_)
@@ -52,8 +70,8 @@ void TimeLogger::Done() {
   // We use a floating-point millisecond value because it is more
   // intuitive than microseconds and we want more precision than
   // integer milliseconds.
-  perf_test::PrintResult(test_name_, std::string(), std::string(),
-                         timer_.Elapsed().InMillisecondsF(), "ms", true);
+  perf_test::PerfResultReporter reporter = SetUpReporter("baseline_story");
+  reporter.AddResult(metric_suffix_, timer_.Elapsed().InMillisecondsF());
   logged_ = true;
 }
@@ -118,7 +136,7 @@ TEST_F(VisitedLink, TestAddAndQuery) {
   ASSERT_TRUE(master.Init());
   content::RunAllTasksUntilIdle();
 
-  TimeLogger timer("Visited_link_add_and_query");
+  TimeLogger timer(kMetricAddAndQueryMs);
 
   // first check without anything in the table
   CheckVisited(master, added_prefix, 0, add_count);
@@ -144,13 +162,13 @@ TEST_F(VisitedLink, TestAddAndQuery) {
 TEST_F(VisitedLink, DISABLED_TestLoad) {
   // create a big DB
   {
-    TimeLogger table_initialization_timer("Table_initialization");
+    TimeLogger table_initialization_timer(kMetricTableInitMs);
 
     VisitedLinkMaster master(new DummyVisitedLinkEventListener(), nullptr, true,
                              true, db_path_, 0);
 
     // time init with empty table
-    TimeLogger initTimer("Empty_visited_link_init");
+    TimeLogger initTimer(kMetricLinkInitMs);
     bool success = master.Init();
     content::RunAllTasksUntilIdle();
     initTimer.Done();
@@ -163,7 +181,7 @@ TEST_F(VisitedLink, DISABLED_TestLoad) {
     FillTable(master, added_prefix, 0, load_test_add_count);
 
     // time writing the file out out
-    TimeLogger flushTimer("Visited_link_database_flush");
+    TimeLogger flushTimer(kMetricDatabaseFlushMs);
    master.RewriteFile();
     // TODO(maruel): Without calling FlushFileBuffers(master.file_); you don't
     // know really how much time it took to write the file.
@@ -222,12 +240,9 @@ TEST_F(VisitedLink, DISABLED_TestLoad) {
     hot_sum += hot_load_times[i];
   }
 
-  perf_test::PrintResult("Visited_link_cold_load_time", std::string(),
-                         std::string(), cold_sum / cold_load_times.size(), "ms",
-                         true);
-  perf_test::PrintResult("Visited_link_hot_load_time", std::string(),
-                         std::string(), hot_sum / hot_load_times.size(), "ms",
-                         true);
+  perf_test::PerfResultReporter reporter = SetUpReporter("baseline_story");
+  reporter.AddResult(kMetricColdLoadTimeMs, cold_sum / cold_load_times.size());
+  reporter.AddResult(kMetricHotLoadTimeMs, hot_sum / hot_load_times.size());
 }
 
 }  // namespace visitedlink
@@ -52,6 +52,15 @@ void PerfResultReporter::AddResult(const std::string& metric_suffix,
                   iter->second.units, iter->second.important);
 }
 
+void PerfResultReporter::AddResultList(const std::string& metric_suffix,
+                                       const std::string& values) {
+  auto iter = metric_map_.find(metric_suffix);
+  CHECK(iter != metric_map_.end());
+
+  PrintResultList(metric_basename_, metric_suffix, story_name_, values,
+                  iter->second.units, iter->second.important);
+}
+
 bool PerfResultReporter::GetMetricInfo(const std::string& metric_suffix,
                                        MetricInfo* out) {
   auto iter = metric_map_.find(metric_suffix);
...
@@ -45,6 +45,8 @@ class PerfResultReporter {
   void AddResult(const std::string& metric_suffix, size_t value);
   void AddResult(const std::string& metric_suffix, double value);
   void AddResult(const std::string& metric_suffix, const std::string& value);
+  void AddResultList(const std::string& metric_suffix,
+                     const std::string& values);
 
   // Returns true and fills the pointer if the metric is registered, otherwise
   // returns false.
...
@@ -57,6 +57,7 @@ DATA_FORMAT_UNKNOWN = 'unknown'
 GTEST_CONVERSION_WHITELIST = [
     'angle_perftests',
     'cc_perftests',
+    'components_perftests',
     'gpu_perftests',
     'latency_perftests',
     'load_library_perf_tests',
...
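
For context, PerfResultReporter is a thin wrapper over the same
PrintResult()/PrintResultList() output path: as the AddResultList()
implementation above shows, the metric basename plus suffix becomes the
measurement name and the story name becomes the trace. Assuming the
standard gtest perf log format, the V4Store change above would emit a
line along these lines (the value shown is a placeholder):

  *RESULT V4Store.get_matching_hash_prefix: stress_test= 123.4 ms

whereas the old code printed the unprefixed, story-less form
"*RESULT GetMatchingHashPrefix: = 123.4 ms". This renaming is why the
commit message notes that existing dashboard data and alert patterns
need to be migrated.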