Commit a95601c7 authored by Ehsan Chiniforooshan, committed by Commit Bot

Telemetry: Merging legacy and TBMv2 metrics

This CL adds the new TBMv2 rendering metrics to the set of
legacy smoothness and thread-times metrics that are currently
used in the rendering benchmark. This is a temporary situation
to verify that the new metrics catch regressions correctly
before retiring old thread-times metrics.

Bug: chromium:760553
Change-Id: I3cd37e632f328fa79919d936cbe5714b8b742ed5
Reviewed-on: https://chromium-review.googlesource.com/1054253
Commit-Queue: Ehsan Chiniforooshan <chiniforooshan@chromium.org>
Reviewed-by: Ben Hayden <benjhayden@chromium.org>
Reviewed-by: Ned Nguyen <nednguyen@google.com>
Cr-Commit-Position: refs/heads/master@{#564900}
parent 0f19062a
......@@ -2,6 +2,8 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from tracing.metrics import metric_runner
from telemetry.page import legacy_page_test
from telemetry.timeline import model as model_module
from telemetry.value import trace
......@@ -55,6 +57,8 @@ class Rendering(legacy_page_test.LegacyPageTest):
def ValidateAndMeasurePage(self, _, tab, results):
self._results = results
tab.browser.platform.tracing_controller.telemetry_info = (
results.telemetry_info)
trace_result = tab.browser.platform.tracing_controller.StopTracing()
# TODO(charliea): This is part of a three-sided Chromium/Telemetry patch
......@@ -82,6 +86,18 @@ class Rendering(legacy_page_test.LegacyPageTest):
thread_times_metric = timeline.ThreadTimesTimelineMetric()
thread_times_metric.AddResults(model, renderer_thread, records, results)
# TBMv2 metrics.
mre_result = metric_runner.RunMetric(
trace_value.filename, metrics=['renderingMetric'],
extra_import_options={'trackDetailedModelStats': True},
report_progress=False, canonical_url=results.telemetry_info.trace_url)
for f in mre_result.failures:
results.Fail(f.stack)
results.ImportHistogramDicts(mre_result.pairs.get('histograms', []),
import_immediately=False)
def DidRunPage(self, platform):
if platform.tracing_controller.is_tracing_running:
trace_result = platform.tracing_controller.StopTracing()
......
# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.testing import options_for_unittests
from telemetry.testing import page_test_test_case
from telemetry.util import wpr_modes
from measurements import rendering
# Thread groups reported by the new TBMv2 renderingMetric; each is expected
# to produce a 'cores_per_second_<group>_thread' histogram.
RENDERING_THREAD_GROUPS = ['all', 'browser', 'fast_path', 'gpu', 'io', 'other',
                           'raster', 'renderer_compositor', 'renderer_main']
# Thread groups reported by the legacy thread-times metric; each is expected
# to produce 'thread_<group>_cpu_time_per_second' and
# 'thread_<group>_cpu_time_per_frame' histograms.
THREAD_TIMES_THREAD_GROUPS = ['GPU', 'IO', 'browser', 'display_compositor',
                              'other', 'raster', 'renderer_compositor',
                              'renderer_main', 'total_all', 'total_fast_path']
class RenderingUnitTest(page_test_test_case.PageTestTestCase):
  """Test for rendering measurement.

  Runs rendering measurement on a simple page and verifies the existence of
  all expected metrics in the results. The correctness of metric values is
  not tested here.
  """

  def setUp(self):
    # Run against live content (WPR off) and repeat the story twice, so every
    # metric should report at least two sample values.
    self._options = options_for_unittests.GetCopy()
    self._options.browser_options.wpr_mode = wpr_modes.WPR_OFF
    self._options.pageset_repeat = 2

  def testRendering(self):
    ps = self.CreateStorySetFromFileInUnittestDataDir('scrollable_page.html')
    results = self.RunMeasurement(
        rendering.Rendering(), ps, options=self._options)
    self.assertFalse(results.had_failures)

    # Build a map from histogram name to the total number of sample values,
    # summing counts across histograms that share a name.
    num_samples = {}
    for histogram in results.AsHistogramDicts():
      if 'name' in histogram and 'sampleValues' in histogram:
        histogram_name = histogram['name']
        # dict.get with a default replaces the explicit membership-test
        # branch (idiomatic accumulation).
        num_samples[histogram_name] = (
            num_samples.get(histogram_name, 0) +
            len(histogram['sampleValues']))

    # Check the existence of the TBMv2 cores_per_second metrics.
    for thread_group in RENDERING_THREAD_GROUPS:
      # We should have at least two sample values for each metric, since
      # pageset_repeat is 2.
      self.assertGreater(
          num_samples.get('cores_per_second_%s_thread' % thread_group, 0), 1)

    # Check the existence of some of the legacy smoothness metrics.
    self.assertGreater(num_samples.get('frame_times', 0), 1)
    self.assertGreater(num_samples.get('percentage_smooth', 0), 1)

    # Check the existence of the legacy thread-times metrics.
    for thread_group in THREAD_TIMES_THREAD_GROUPS:
      self.assertGreater(
          num_samples.get('thread_%s_cpu_time_per_second' % thread_group, 0),
          1)
      self.assertGreater(
          num_samples.get('thread_%s_cpu_time_per_frame' % thread_group, 0),
          1)
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment