Commit 20d65b28 authored by Juan Antonio Navarro Perez, committed by Commit Bot

[Telemetry] Migrate clients from AddValue to AddMeasurement

AddMeasurement is the new API that will be used to support ad hoc
measurements collected by benchmarks.

This also helps clients remove their dependency on legacy Telemetry
values.

Bug: 999484
Change-Id: I7d03c3380ddc99dce27b2200009d824faca82c47
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1780839
Reviewed-by: Ravi Mistry <rmistry@chromium.org>
Reviewed-by: vmpstr <vmpstr@chromium.org>
Reviewed-by: Paul Jensen <pauljensen@chromium.org>
Reviewed-by: Kentaro Hara <haraken@chromium.org>
Reviewed-by: Caleb Rouleau <crouleau@chromium.org>
Commit-Queue: Juan Antonio Navarro Pérez <perezju@chromium.org>
Cr-Commit-Position: refs/heads/master@{#693642}
parent 301e5d07
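The pattern applied throughout this change is summarized below as a minimal sketch; the measurement name 'example_metric', the variable elapsed_ms, and the helper names are made up for illustration. The point is only that AddMeasurement drops the ScalarValue wrapper and the explicit page argument; the results object is assumed to attach the measurement to the current story on its own, which is why every call site below stops passing results.current_page.

    # Before: a legacy Telemetry value, explicitly wrapped and bound to the page.
    from telemetry.value import scalar

    def _record_legacy(results, elapsed_ms):
      results.AddValue(scalar.ScalarValue(
          results.current_page, 'example_metric', 'ms', elapsed_ms))

    # After: the same measurement recorded directly on the results object.
    def _record(results, elapsed_ms):
      results.AddMeasurement('example_metric', 'ms', elapsed_ms)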
@@ -68,7 +68,6 @@ from pylib import constants
from telemetry import android
from telemetry import benchmark
from telemetry import story
-from telemetry.value import scalar
from telemetry.web_perf import timeline_based_measurement
# pylint: enable=wrong-import-position
@@ -140,8 +139,7 @@ class CronetPerfTestMeasurement(
jsonResults = json.loads(self._device.ReadFile(
perf_test_utils.GetConfig(self._device)['RESULTS_FILE']))
for test in jsonResults:
-results.AddValue(scalar.ScalarValue(results.current_page, test,
-'ms', jsonResults[test]))
+results.AddMeasurement(test, 'ms', jsonResults[test])
def DidRunStory(self, platform, results):
# Skip parent implementation which calls into tracing_controller which this
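The Cronet hunk above is typical of the "ad hoc measurements" the commit message refers to: every entry of a results dictionary becomes a named measurement on the same results object that ValidateAndMeasurePage receives. A minimal sketch of that pattern, with the JSON contents and helper name invented for illustration:

    import json

    def _report_json_results(results, raw_json):
      # e.g. raw_json = '{"connect": 12.5, "first_byte": 40.2}' (made-up values)
      json_results = json.loads(raw_json)
      for test in json_results:
        # One measurement per entry; the Cronet timings above are all in ms.
        results.AddMeasurement(test, 'ms', json_results[test])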
@@ -13,7 +13,6 @@ import random
from core import perf_benchmark
from telemetry import benchmark
-from telemetry.value import scalar
from telemetry.page import legacy_page_test
from page_sets import dummy_story_set
@@ -27,12 +26,8 @@ class _DummyTest(legacy_page_test.LegacyPageTest):
def ValidateAndMeasurePage(self, page, tab, results):
del tab # unused
-results.AddValue(scalar.ScalarValue(
-page=page,
-name='gaussian-value', units='ms',
-value=random.gauss(self._avg, self._std),
-description=('Random number that follows the Gaussian distribution '
-'with mean=%s and std=%s' % (self._avg, self._std))))
+value = random.gauss(self._avg, self._std)
+results.AddMeasurement('gaussian-value', 'ms', value)
class _DummyBenchmark(perf_benchmark.PerfBenchmark):
@@ -12,7 +12,6 @@ import page_sets
from telemetry import benchmark
from telemetry import timeline
from telemetry.page import legacy_page_test
-from telemetry.value import scalar
from tracing.trace_data import trace_data as trace_data_module
@@ -47,8 +46,7 @@ class _GenericTraceMeasurement(legacy_page_test.LegacyPageTest):
if not isinstance(arg_value, int):
continue
value_name = '/'.join([event['cat'], event['name'], arg_name])
-results.AddValue(scalar.ScalarValue(
-results.current_page, value_name, 'count', arg_value))
+results.AddMeasurement(value_name, 'count', arg_value)
class _GenericTraceBenchmark(perf_benchmark.PerfBenchmark):
@@ -101,4 +99,3 @@ class GenericTraceClusterTelemetry(_GenericTraceBenchmark):
def CreateStorySet(self, options):
return ct_page_set.CTPageSet(
options.urls_list, options.user_agent, options.archive_data_file)
@@ -6,7 +6,6 @@ import logging
import json
from telemetry.core import exceptions
-from telemetry.value import scalar
from metrics import Metric
@@ -46,17 +45,15 @@ class MediaRouterCPUMemoryMetric(Metric):
avg_result = round(avg_result/(1024 * 1024), 2)
logging.info('metric: %s, process: %s, average value: %s' %
(metric, process, str(avg_result)))
-results.AddValue(scalar.ScalarValue(
-results.current_page,
+results.AddMeasurement(
'%s_%s' % (METRICS.get(metric).get('display_name'), process),
METRICS.get(metric).get('units'),
-avg_result))
+avg_result)
# Calculate MR extension wakeup time
if 'mr_extension' in perf_results['cpu']:
wakeup_percentage = round(
(len(perf_results['cpu']['mr_extension']) * 100 /
len(perf_results['cpu']['browser'])), 2)
-results.AddValue(scalar.ScalarValue(
-results.current_page, 'mr_extension_wakeup_percentage',
-'%', wakeup_percentage))
+results.AddMeasurement(
+'mr_extension_wakeup_percentage', '%', wakeup_percentage)
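Since the name and unit arguments are ordinary strings, clients like the MediaRouter metric above can keep assembling them at run time from a lookup table. A minimal sketch of that usage; the table contents and helper name are made up for illustration:

    # Hypothetical metrics table in the style of the hunk above.
    METRICS = {
        'cpu': {'display_name': 'cpu_usage', 'units': '%'},
        'memory': {'display_name': 'memory_usage', 'units': 'MB'},
    }

    def _report_average(results, metric, process, avg_result):
      # Name and unit are composed from the table entry for this metric.
      results.AddMeasurement(
          '%s_%s' % (METRICS[metric]['display_name'], process),
          METRICS[metric]['units'],
          avg_result)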
@@ -7,8 +7,6 @@ import os
from telemetry.page import legacy_page_test
from telemetry.timeline.model import TimelineModel
from telemetry.timeline import tracing_config
-from telemetry.value import list_of_scalar_values
-from telemetry.value import scalar
_CR_RENDERER_MAIN = 'CrRendererMain'
@@ -41,14 +39,11 @@ def _AddTracingResults(thread, results):
return 'idle'
return None # Unknown
-def DumpMetric(page, name, values, unit, results):
+def DumpMetric(name, values, unit):
if values[name]:
-results.AddValue(list_of_scalar_values.ListOfScalarValues(
-page, name, unit, values[name]))
-results.AddValue(scalar.ScalarValue(
-page, name + '_max', unit, max(values[name])))
-results.AddValue(scalar.ScalarValue(
-page, name + '_total', unit, sum(values[name])))
+results.AddMeasurement(name, unit, values[name])
+results.AddMeasurement(name + '_max', unit, max(values[name]))
+results.AddMeasurement(name + '_total', unit, sum(values[name]))
events = thread.all_slices
async_slices = [s for s in thread.async_slices
@@ -90,21 +85,19 @@ def _AddTracingResults(thread, results):
values['oilpan_%s_lazy_sweep' % reason].append(lazy_sweep_time)
values['oilpan_%s_complete_sweep' % reason].append(complete_sweep_time)
-page = results.current_page
unit = 'ms'
# Dump each metric
for reason in _GC_REASONS:
for stage in _GC_STAGES:
-DumpMetric(page, 'oilpan_%s_%s' % (reason, stage), values, unit, results)
+DumpMetric('oilpan_%s_%s' % (reason, stage), values, unit)
# Summarize each stage
for stage in _GC_STAGES:
total_time = 0
for reason in _GC_REASONS:
total_time += sum(values['oilpan_%s_%s' % (reason, stage)])
-results.AddValue(
-scalar.ScalarValue(page, 'oilpan_%s' % stage, unit, total_time))
+results.AddMeasurement('oilpan_%s' % stage, unit, total_time)
# Summarize sweeping time
total_sweep_time = 0
@@ -113,15 +106,14 @@ def _AddTracingResults(thread, results):
for reason in _GC_REASONS:
sweep_time += sum(values['oilpan_%s_%s' % (reason, stage)])
key = 'oilpan_%s' % stage
-results.AddValue(scalar.ScalarValue(page, key, unit, sweep_time))
+results.AddMeasurement(key, unit, sweep_time)
total_sweep_time += sweep_time
-results.AddValue(
-scalar.ScalarValue(page, 'oilpan_sweep', unit, total_sweep_time))
+results.AddMeasurement('oilpan_sweep', unit, total_sweep_time)
gc_time = 0
for key in values:
gc_time += sum(values[key])
-results.AddValue(scalar.ScalarValue(page, 'oilpan_gc', unit, gc_time))
+results.AddMeasurement('oilpan_gc', unit, gc_time)
class _OilpanGCTimesBase(legacy_page_test.LegacyPageTest):
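As the DumpMetric changes above indicate, AddMeasurement also accepts a list of samples where ListOfScalarValues was used before, alongside plain scalars for the derived summaries. A minimal sketch of that shape; the measurement name 'frame_time' and the timings list are invented for illustration:

    def _dump_timings(results, timings_ms):
      if not timings_ms:
        return
      # A list of samples replaces the old ListOfScalarValues...
      results.AddMeasurement('frame_time', 'ms', timings_ms)
      # ...while the derived summaries remain single scalars.
      results.AddMeasurement('frame_time_max', 'ms', max(timings_ms))
      results.AddMeasurement('frame_time_total', 'ms', sum(timings_ms))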
@@ -5,7 +5,6 @@
import time
from telemetry.page import legacy_page_test
-from telemetry.value import scalar
import py_utils
@@ -72,23 +71,15 @@ class RasterizeAndRecordMicro(legacy_page_test.LegacyPageTest):
paint_op_memory_usage = data.get('paint_op_memory_usage', 0)
paint_op_count = data.get('paint_op_count', 0)
-results.AddValue(scalar.ScalarValue(
-results.current_page, 'pixels_recorded', 'pixels', pixels_recorded))
-results.AddValue(scalar.ScalarValue(
-results.current_page, 'pixels_rasterized', 'pixels', pixels_rasterized))
-results.AddValue(scalar.ScalarValue(
-results.current_page, 'rasterize_time', 'ms', rasterize_time))
-results.AddValue(scalar.ScalarValue(
-results.current_page, 'record_time', 'ms', record_time))
-results.AddValue(scalar.ScalarValue(
-results.current_page, 'painter_memory_usage', 'bytes',
-painter_memory_usage))
-results.AddValue(scalar.ScalarValue(
-results.current_page, 'paint_op_memory_usage', 'bytes',
-paint_op_memory_usage))
-results.AddValue(scalar.ScalarValue(
-results.current_page, 'paint_op_count', 'count',
-paint_op_count))
+results.AddMeasurement('pixels_recorded', 'pixels', pixels_recorded)
+results.AddMeasurement('pixels_rasterized', 'pixels', pixels_rasterized)
+results.AddMeasurement('rasterize_time', 'ms', rasterize_time)
+results.AddMeasurement('record_time', 'ms', record_time)
+results.AddMeasurement('painter_memory_usage', 'bytes',
+painter_memory_usage)
+results.AddMeasurement('paint_op_memory_usage', 'bytes',
+paint_op_memory_usage)
+results.AddMeasurement('paint_op_count', 'count', paint_op_count)
record_time_painting_disabled = data['record_time_painting_disabled_ms']
record_time_caching_disabled = data['record_time_caching_disabled_ms']
@@ -98,21 +89,16 @@ class RasterizeAndRecordMicro(legacy_page_test.LegacyPageTest):
data['record_time_subsequence_caching_disabled_ms']
record_time_partial_invalidation = \
data['record_time_partial_invalidation_ms']
-results.AddValue(scalar.ScalarValue(
-results.current_page, 'record_time_painting_disabled', 'ms',
-record_time_painting_disabled))
-results.AddValue(scalar.ScalarValue(
-results.current_page, 'record_time_caching_disabled', 'ms',
-record_time_caching_disabled))
-results.AddValue(scalar.ScalarValue(
-results.current_page, 'record_time_construction_disabled', 'ms',
-record_time_construction_disabled))
-results.AddValue(scalar.ScalarValue(
-results.current_page, 'record_time_subsequence_caching_disabled', 'ms',
-record_time_subsequence_caching_disabled))
-results.AddValue(scalar.ScalarValue(
-results.current_page, 'record_time_partial_invalidation_ms', 'ms',
-record_time_partial_invalidation))
+results.AddMeasurement('record_time_painting_disabled', 'ms',
+record_time_painting_disabled)
+results.AddMeasurement('record_time_caching_disabled', 'ms',
+record_time_caching_disabled)
+results.AddMeasurement('record_time_construction_disabled', 'ms',
+record_time_construction_disabled)
+results.AddMeasurement('record_time_subsequence_caching_disabled', 'ms',
+record_time_subsequence_caching_disabled)
+results.AddMeasurement('record_time_partial_invalidation_ms', 'ms',
+record_time_partial_invalidation)
if self._report_detailed_results:
pixels_rasterized_with_non_solid_color = \
@@ -124,20 +110,14 @@ class RasterizeAndRecordMicro(legacy_page_test.LegacyPageTest):
data['total_picture_layers_with_no_content']
total_picture_layers_off_screen = data['total_picture_layers_off_screen']
-results.AddValue(scalar.ScalarValue(
-results.current_page, 'pixels_rasterized_with_non_solid_color',
-'pixels', pixels_rasterized_with_non_solid_color))
-results.AddValue(scalar.ScalarValue(
-results.current_page, 'pixels_rasterized_as_opaque', 'pixels',
-pixels_rasterized_as_opaque))
-results.AddValue(scalar.ScalarValue(
-results.current_page, 'total_layers', 'count', total_layers))
-results.AddValue(scalar.ScalarValue(
-results.current_page, 'total_picture_layers', 'count',
-total_picture_layers))
-results.AddValue(scalar.ScalarValue(
-results.current_page, 'total_picture_layers_with_no_content', 'count',
-total_picture_layers_with_no_content))
-results.AddValue(scalar.ScalarValue(
-results.current_page, 'total_picture_layers_off_screen', 'count',
-total_picture_layers_off_screen))
+results.AddMeasurement('pixels_rasterized_with_non_solid_color',
+'pixels', pixels_rasterized_with_non_solid_color)
+results.AddMeasurement('pixels_rasterized_as_opaque', 'pixels',
+pixels_rasterized_as_opaque)
+results.AddMeasurement('total_layers', 'count', total_layers)
+results.AddMeasurement('total_picture_layers', 'count',
+total_picture_layers)
+results.AddMeasurement('total_picture_layers_with_no_content', 'count',
+total_picture_layers_with_no_content)
+results.AddMeasurement('total_picture_layers_off_screen', 'count',
+total_picture_layers_off_screen)
@@ -5,7 +5,6 @@ import glob
import os
from telemetry.page import legacy_page_test
-from telemetry.value import scalar
class SkpicturePrinter(legacy_page_test.LegacyPageTest):
@@ -32,5 +31,4 @@ class SkpicturePrinter(legacy_page_test.LegacyPageTest):
'chrome.gpuBenchmarking.printToSkPicture({{ outpath }});',
outpath=outpath)
pictures = glob.glob(os.path.join(outpath, '*.skp'))
-results.AddValue(scalar.ScalarValue(
-results.current_page, 'saved_picture_count', 'count', len(pictures)))
+results.AddMeasurement('saved_picture_count', 'count', len(pictures))