Commit 20d65b28 authored by Juan Antonio Navarro Perez, committed by Commit Bot

[Telemetry] Migrate clients from AddValue to AddMeasurement

AddMeasurement is the new API that will be used to support ad hoc
measurements collected by benchmarks.

This also helps clients remove their dependency on legacy Telemetry
values.

Bug: 999484
Change-Id: I7d03c3380ddc99dce27b2200009d824faca82c47
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1780839
Reviewed-by: Ravi Mistry <rmistry@chromium.org>
Reviewed-by: vmpstr <vmpstr@chromium.org>
Reviewed-by: Paul Jensen <pauljensen@chromium.org>
Reviewed-by: Kentaro Hara <haraken@chromium.org>
Reviewed-by: Caleb Rouleau <crouleau@chromium.org>
Commit-Queue: Juan Antonio Navarro Pérez <perezju@chromium.org>
Cr-Commit-Position: refs/heads/master@{#693642}
parent 301e5d07
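Every hunk below applies the same mechanical pattern: drop the legacy telemetry.value imports and replace page-scoped value objects with a direct AddMeasurement(name, units, value) call on the results object. A minimal sketch of the before/after shape, assuming a LegacyPageTest subclass; the _ExampleTest class, the 'load_time' name, and the sample value are illustrative, while AddValue, ScalarValue, and AddMeasurement come from the change itself:

    from telemetry.page import legacy_page_test
    from telemetry.value import scalar  # legacy import dropped by this change


    class _ExampleTest(legacy_page_test.LegacyPageTest):
      def ValidateAndMeasurePage(self, page, tab, results):
        del tab  # unused
        load_time_ms = 123.0  # illustrative sample value

        # Before: wrap the number in a ScalarValue tied to the current page.
        results.AddValue(scalar.ScalarValue(
            results.current_page, 'load_time', 'ms', load_time_ms))

        # After: pass name, units, and value directly; the current story is implicit.
        results.AddMeasurement('load_time', 'ms', load_time_ms)

Where a list of samples was previously reported via list_of_scalar_values.ListOfScalarValues, the same AddMeasurement call takes the list directly, as in the DumpMetric change below.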
@@ -68,7 +68,6 @@ from pylib import constants
 from telemetry import android
 from telemetry import benchmark
 from telemetry import story
-from telemetry.value import scalar
 from telemetry.web_perf import timeline_based_measurement
 # pylint: enable=wrong-import-position
@@ -140,8 +139,7 @@ class CronetPerfTestMeasurement(
     jsonResults = json.loads(self._device.ReadFile(
         perf_test_utils.GetConfig(self._device)['RESULTS_FILE']))
     for test in jsonResults:
-      results.AddValue(scalar.ScalarValue(results.current_page, test,
-                                          'ms', jsonResults[test]))
+      results.AddMeasurement(test, 'ms', jsonResults[test])

   def DidRunStory(self, platform, results):
     # Skip parent implementation which calls into tracing_controller which this
...
@@ -13,7 +13,6 @@ import random
 from core import perf_benchmark
 from telemetry import benchmark
-from telemetry.value import scalar
 from telemetry.page import legacy_page_test

 from page_sets import dummy_story_set
@@ -27,12 +26,8 @@ class _DummyTest(legacy_page_test.LegacyPageTest):
   def ValidateAndMeasurePage(self, page, tab, results):
     del tab  # unused
-    results.AddValue(scalar.ScalarValue(
-        page=page,
-        name='gaussian-value', units='ms',
-        value=random.gauss(self._avg, self._std),
-        description=('Random number that follows the Gaussian distribution '
-                     'with mean=%s and std=%s' % (self._avg, self._std))))
+    value = random.gauss(self._avg, self._std)
+    results.AddMeasurement('gaussian-value', 'ms', value)


 class _DummyBenchmark(perf_benchmark.PerfBenchmark):
...
@@ -12,7 +12,6 @@ import page_sets
 from telemetry import benchmark
 from telemetry import timeline
 from telemetry.page import legacy_page_test
-from telemetry.value import scalar

 from tracing.trace_data import trace_data as trace_data_module
@@ -47,8 +46,7 @@ class _GenericTraceMeasurement(legacy_page_test.LegacyPageTest):
         if not isinstance(arg_value, int):
           continue
         value_name = '/'.join([event['cat'], event['name'], arg_name])
-        results.AddValue(scalar.ScalarValue(
-            results.current_page, value_name, 'count', arg_value))
+        results.AddMeasurement(value_name, 'count', arg_value)


 class _GenericTraceBenchmark(perf_benchmark.PerfBenchmark):
@@ -101,4 +99,3 @@ class GenericTraceClusterTelemetry(_GenericTraceBenchmark):
   def CreateStorySet(self, options):
     return ct_page_set.CTPageSet(
         options.urls_list, options.user_agent, options.archive_data_file)
@@ -6,7 +6,6 @@ import logging
 import json

 from telemetry.core import exceptions
-from telemetry.value import scalar

 from metrics import Metric
@@ -46,17 +45,15 @@ class MediaRouterCPUMemoryMetric(Metric):
        avg_result = round(avg_result/(1024 * 1024), 2)
       logging.info('metric: %s, process: %s, average value: %s' %
                    (metric, process, str(avg_result)))
-      results.AddValue(scalar.ScalarValue(
-          results.current_page,
-          '%s_%s' % (METRICS.get(metric).get('display_name'), process),
-          METRICS.get(metric).get('units'),
-          avg_result))
+      results.AddMeasurement(
+          '%s_%s' % (METRICS.get(metric).get('display_name'), process),
+          METRICS.get(metric).get('units'),
+          avg_result)

     # Calculate MR extension wakeup time
     if 'mr_extension' in perf_results['cpu']:
       wakeup_percentage = round(
           (len(perf_results['cpu']['mr_extension']) * 100 /
            len(perf_results['cpu']['browser'])), 2)
-      results.AddValue(scalar.ScalarValue(
-          results.current_page, 'mr_extension_wakeup_percentage',
-          '%', wakeup_percentage))
+      results.AddMeasurement(
+          'mr_extension_wakeup_percentage', '%', wakeup_percentage)
@@ -7,8 +7,6 @@ import os
 from telemetry.page import legacy_page_test
 from telemetry.timeline.model import TimelineModel
 from telemetry.timeline import tracing_config
-from telemetry.value import list_of_scalar_values
-from telemetry.value import scalar

 _CR_RENDERER_MAIN = 'CrRendererMain'
@@ -41,14 +39,11 @@ def _AddTracingResults(thread, results):
       return 'idle'
     return None  # Unknown

-  def DumpMetric(page, name, values, unit, results):
+  def DumpMetric(name, values, unit):
     if values[name]:
-      results.AddValue(list_of_scalar_values.ListOfScalarValues(
-          page, name, unit, values[name]))
-      results.AddValue(scalar.ScalarValue(
-          page, name + '_max', unit, max(values[name])))
-      results.AddValue(scalar.ScalarValue(
-          page, name + '_total', unit, sum(values[name])))
+      results.AddMeasurement(name, unit, values[name])
+      results.AddMeasurement(name + '_max', unit, max(values[name]))
+      results.AddMeasurement(name + '_total', unit, sum(values[name]))

   events = thread.all_slices
   async_slices = [s for s in thread.async_slices
@@ -90,21 +85,19 @@ def _AddTracingResults(thread, results):
       values['oilpan_%s_lazy_sweep' % reason].append(lazy_sweep_time)
       values['oilpan_%s_complete_sweep' % reason].append(complete_sweep_time)

-  page = results.current_page
   unit = 'ms'

   # Dump each metric
   for reason in _GC_REASONS:
     for stage in _GC_STAGES:
-      DumpMetric(page, 'oilpan_%s_%s' % (reason, stage), values, unit, results)
+      DumpMetric('oilpan_%s_%s' % (reason, stage), values, unit)

   # Summarize each stage
   for stage in _GC_STAGES:
     total_time = 0
     for reason in _GC_REASONS:
       total_time += sum(values['oilpan_%s_%s' % (reason, stage)])
-    results.AddValue(
-        scalar.ScalarValue(page, 'oilpan_%s' % stage, unit, total_time))
+    results.AddMeasurement('oilpan_%s' % stage, unit, total_time)

   # Summarize sweeping time
   total_sweep_time = 0
@@ -113,15 +106,14 @@ def _AddTracingResults(thread, results):
     for reason in _GC_REASONS:
       sweep_time += sum(values['oilpan_%s_%s' % (reason, stage)])
     key = 'oilpan_%s' % stage
-    results.AddValue(scalar.ScalarValue(page, key, unit, sweep_time))
+    results.AddMeasurement(key, unit, sweep_time)
     total_sweep_time += sweep_time
-  results.AddValue(
-      scalar.ScalarValue(page, 'oilpan_sweep', unit, total_sweep_time))
+  results.AddMeasurement('oilpan_sweep', unit, total_sweep_time)

   gc_time = 0
   for key in values:
     gc_time += sum(values[key])
-  results.AddValue(scalar.ScalarValue(page, 'oilpan_gc', unit, gc_time))
+  results.AddMeasurement('oilpan_gc', unit, gc_time)


 class _OilpanGCTimesBase(legacy_page_test.LegacyPageTest):
...
@@ -5,7 +5,6 @@
 import time

 from telemetry.page import legacy_page_test
-from telemetry.value import scalar

 import py_utils
@@ -72,23 +71,15 @@ class RasterizeAndRecordMicro(legacy_page_test.LegacyPageTest):
     paint_op_memory_usage = data.get('paint_op_memory_usage', 0)
     paint_op_count = data.get('paint_op_count', 0)

-    results.AddValue(scalar.ScalarValue(
-        results.current_page, 'pixels_recorded', 'pixels', pixels_recorded))
-    results.AddValue(scalar.ScalarValue(
-        results.current_page, 'pixels_rasterized', 'pixels', pixels_rasterized))
-    results.AddValue(scalar.ScalarValue(
-        results.current_page, 'rasterize_time', 'ms', rasterize_time))
-    results.AddValue(scalar.ScalarValue(
-        results.current_page, 'record_time', 'ms', record_time))
-    results.AddValue(scalar.ScalarValue(
-        results.current_page, 'painter_memory_usage', 'bytes',
-        painter_memory_usage))
-    results.AddValue(scalar.ScalarValue(
-        results.current_page, 'paint_op_memory_usage', 'bytes',
-        paint_op_memory_usage))
-    results.AddValue(scalar.ScalarValue(
-        results.current_page, 'paint_op_count', 'count',
-        paint_op_count))
+    results.AddMeasurement('pixels_recorded', 'pixels', pixels_recorded)
+    results.AddMeasurement('pixels_rasterized', 'pixels', pixels_rasterized)
+    results.AddMeasurement('rasterize_time', 'ms', rasterize_time)
+    results.AddMeasurement('record_time', 'ms', record_time)
+    results.AddMeasurement('painter_memory_usage', 'bytes',
+                           painter_memory_usage)
+    results.AddMeasurement('paint_op_memory_usage', 'bytes',
+                           paint_op_memory_usage)
+    results.AddMeasurement('paint_op_count', 'count', paint_op_count)

     record_time_painting_disabled = data['record_time_painting_disabled_ms']
     record_time_caching_disabled = data['record_time_caching_disabled_ms']
@@ -98,21 +89,16 @@ class RasterizeAndRecordMicro(legacy_page_test.LegacyPageTest):
         data['record_time_subsequence_caching_disabled_ms']
     record_time_partial_invalidation = \
         data['record_time_partial_invalidation_ms']

-    results.AddValue(scalar.ScalarValue(
-        results.current_page, 'record_time_painting_disabled', 'ms',
-        record_time_painting_disabled))
-    results.AddValue(scalar.ScalarValue(
-        results.current_page, 'record_time_caching_disabled', 'ms',
-        record_time_caching_disabled))
-    results.AddValue(scalar.ScalarValue(
-        results.current_page, 'record_time_construction_disabled', 'ms',
-        record_time_construction_disabled))
-    results.AddValue(scalar.ScalarValue(
-        results.current_page, 'record_time_subsequence_caching_disabled', 'ms',
-        record_time_subsequence_caching_disabled))
-    results.AddValue(scalar.ScalarValue(
-        results.current_page, 'record_time_partial_invalidation_ms', 'ms',
-        record_time_partial_invalidation))
+    results.AddMeasurement('record_time_painting_disabled', 'ms',
+                           record_time_painting_disabled)
+    results.AddMeasurement('record_time_caching_disabled', 'ms',
+                           record_time_caching_disabled)
+    results.AddMeasurement('record_time_construction_disabled', 'ms',
+                           record_time_construction_disabled)
+    results.AddMeasurement('record_time_subsequence_caching_disabled', 'ms',
+                           record_time_subsequence_caching_disabled)
+    results.AddMeasurement('record_time_partial_invalidation_ms', 'ms',
+                           record_time_partial_invalidation)

     if self._report_detailed_results:
       pixels_rasterized_with_non_solid_color = \
@@ -124,20 +110,14 @@ class RasterizeAndRecordMicro(legacy_page_test.LegacyPageTest):
          data['total_picture_layers_with_no_content']
      total_picture_layers_off_screen = data['total_picture_layers_off_screen']

-      results.AddValue(scalar.ScalarValue(
-          results.current_page, 'pixels_rasterized_with_non_solid_color',
-          'pixels', pixels_rasterized_with_non_solid_color))
-      results.AddValue(scalar.ScalarValue(
-          results.current_page, 'pixels_rasterized_as_opaque', 'pixels',
-          pixels_rasterized_as_opaque))
-      results.AddValue(scalar.ScalarValue(
-          results.current_page, 'total_layers', 'count', total_layers))
-      results.AddValue(scalar.ScalarValue(
-          results.current_page, 'total_picture_layers', 'count',
-          total_picture_layers))
-      results.AddValue(scalar.ScalarValue(
-          results.current_page, 'total_picture_layers_with_no_content', 'count',
-          total_picture_layers_with_no_content))
-      results.AddValue(scalar.ScalarValue(
-          results.current_page, 'total_picture_layers_off_screen', 'count',
-          total_picture_layers_off_screen))
+      results.AddMeasurement('pixels_rasterized_with_non_solid_color',
+                             'pixels', pixels_rasterized_with_non_solid_color)
+      results.AddMeasurement('pixels_rasterized_as_opaque', 'pixels',
+                             pixels_rasterized_as_opaque)
+      results.AddMeasurement('total_layers', 'count', total_layers)
+      results.AddMeasurement('total_picture_layers', 'count',
+                             total_picture_layers)
+      results.AddMeasurement('total_picture_layers_with_no_content', 'count',
+                             total_picture_layers_with_no_content)
+      results.AddMeasurement('total_picture_layers_off_screen', 'count',
+                             total_picture_layers_off_screen)
@@ -5,7 +5,6 @@ import glob
 import os

 from telemetry.page import legacy_page_test
-from telemetry.value import scalar


 class SkpicturePrinter(legacy_page_test.LegacyPageTest):
@@ -32,5 +31,4 @@ class SkpicturePrinter(legacy_page_test.LegacyPageTest):
         'chrome.gpuBenchmarking.printToSkPicture({{ outpath }});',
         outpath=outpath)
     pictures = glob.glob(os.path.join(outpath, '*.skp'))
-    results.AddValue(scalar.ScalarValue(
-        results.current_page, 'saved_picture_count', 'count', len(pictures)))
+    results.AddMeasurement('saved_picture_count', 'count', len(pictures))