Commit 95123be7 authored by Alex Turner, committed by Commit Bot

Add memory metrics for ad_tagging.cluster_telemetry benchmark

Enables memoryMetric when the --verbose-memory-metrics benchmark argument is
provided, setting the appropriate browser and tracing options. The metric is
kept behind a flag because it adds over 300 individual metrics, which could
crowd the output unnecessarily. (A rough sketch of the options involved
follows the commit metadata below.)

Bug: 1042404
Change-Id: I05c5e9d2434c5a07a3d2aa632c64f0d077c2443c
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2002761
Commit-Queue: Alex Turner <alexmt@chromium.org>
Reviewed-by: John Delaney <johnidel@chromium.org>
Cr-Commit-Position: refs/heads/master@{#732661}
parent da3d3591
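
For context on what "setting the appropriate options" means: the actual wiring
in this change is delegated to the helpers in benchmarks/memory.py
(SetExtraBrowserOptionsForMemoryMeasurement and
CreateCoreTimelineBasedMemoryMeasurementOptions, both visible in the diff
below). The following is only a minimal sketch of the kind of TBM options
memoryMetric generally needs, assuming the standard memory-infra trace
category; it is not the implementation of those helpers. The function name
CreateMemoryTbmOptionsSketch is made up for illustration.

# Minimal sketch only; not the implementation of benchmarks/memory.py.
# Assumes memoryMetric reads memory dumps from the standard
# 'disabled-by-default-memory-infra' trace category.
from telemetry.timeline import chrome_trace_category_filter
from telemetry.web_perf import timeline_based_measurement


def CreateMemoryTbmOptionsSketch():
  # Enable the trace category that carries the memory dumps.
  category_filter = chrome_trace_category_filter.ChromeTraceCategoryFilter(
      filter_string='disabled-by-default-memory-infra')
  tbm_options = timeline_based_measurement.Options(category_filter)
  # Register the (verbose, 300+ value) memory metric with the TBM options.
  tbm_options.AddTimelineBasedMetric('memoryMetric')
  return tbm_options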
@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
+from benchmarks import memory
 from contrib.cluster_telemetry import ct_benchmarks_util, page_set
 from core import perf_benchmark
 import py_utils
@@ -24,24 +25,41 @@ class AdTaggingClusterTelemetry(perf_benchmark.PerfBenchmark):
   def AddBenchmarkCommandLineArgs(cls, parser):
     ct_benchmarks_util.AddBenchmarkCommandLineArgs(parser)
     parser.add_option(
-        '--verbose-cpu-metrics-for-adtagging',
+        '--verbose-cpu-metrics',
         action='store_true',
-        help='Enables non-UMA CPU metrics for the ad_tagging.cluster_telemetry '
-        'benchmark.')
+        help='Enables non-UMA CPU metrics.')
+    parser.add_option(
+        '--verbose-memory-metrics',
+        action='store_true',
+        help='Enables non-UMA memory metrics.')
 
   @classmethod
   def ProcessCommandLineArgs(cls, parser, args):
     ct_benchmarks_util.ValidateCommandLineArgs(parser, args)
-    cls.enable_limited_cpu_time_metric = args.verbose_cpu_metrics_for_adtagging
+    cls.enable_limited_cpu_time_metric = args.verbose_cpu_metrics
+    cls.enable_memory_metric = args.verbose_memory_metrics
 
   def GetExtraOutDirectories(self):
     # The indexed filter list does not end up in a build directory on cluster
     # telemetry; so we add its location here.
     return ['/b/s/w/ir/out/telemetry_isolates']
 
+  def SetExtraBrowserOptions(self, options):
+    if self.enable_memory_metric:
+      memory.SetExtraBrowserOptionsForMemoryMeasurement(options)
+
   def CreateCoreTimelineBasedMeasurementOptions(self):
     category_filter = chrome_trace_category_filter.CreateLowOverheadFilter()
-    tbm_options = timeline_based_measurement.Options(category_filter)
+    if self.enable_memory_metric:
+      tbm_options = memory.CreateCoreTimelineBasedMemoryMeasurementOptions()
+      # The memory options only include the filters needed for memory
+      # measurement. We reintroduce the filters required for other metrics.
+      tbm_options.ExtendTraceCategoryFilter(
+          category_filter.filter_string.split(','))
+    else:
+      tbm_options = timeline_based_measurement.Options(category_filter)
     uma_histograms = [
         'Ads.ResourceUsage.Size.Network.Mainframe.AdResource',
         'Ads.ResourceUsage.Size.Network.Mainframe.VanillaResource',
@@ -58,14 +76,15 @@ class AdTaggingClusterTelemetry(perf_benchmark.PerfBenchmark):
     for histogram in uma_histograms:
       tbm_options.config.chrome_trace_config.EnableUMAHistograms(histogram)
 
-    timeline_based_metrics = ['umaMetric']
+    tbm_options.AddTimelineBasedMetric('umaMetric')
     if self.enable_limited_cpu_time_metric:
-      timeline_based_metrics.append('limitedCpuTimeMetric')
-    tbm_options.SetTimelineBasedMetrics(timeline_based_metrics)
+      tbm_options.AddTimelineBasedMetric('limitedCpuTimeMetric')
     return tbm_options
 
   def CreateStorySet(self, options):
+    enable_memory_metric = self.enable_memory_metric
+
     def NavigateToPageAndLeavePage(self, action_runner):
       url = self.file_path_url_with_scheme if self.is_file else self.url
       action_runner.Navigate(url,
@@ -77,6 +96,10 @@ class AdTaggingClusterTelemetry(perf_benchmark.PerfBenchmark):
       except py_utils.TimeoutException:
         pass
 
+      # We measure memory after the wait as we want a proxy for page memory.
+      if enable_memory_metric:
+        action_runner.MeasureMemory(deterministic_mode=True)
+
       # Navigate away to an untracked page to trigger recording of page load
       # metrics
       action_runner.Navigate('about:blank',
...