Commit c5cc2a4f authored by nednguyen, committed by Commit bot

Support tracing metrics for measureTime & measureFrameTime methods in blink_perf (Reland)

* Fixes include:
   1) handle cases where thread_times is None
   2) disable testBlinkPerfTracingMetricsForMeasureTime on Win

This CL adds support for tracing metrics with the measureTime &
measureFrameTime methods in the blink_perf harness.

If a test specifies the "tracingCategories" & "traceEventsToMeasure" fields,
Telemetry will enable the specified tracing categories & compute the CPU time
of each of the specified trace events (one value per test run).

Detailed examples of how to use this new API are in third_party/WebKit/PerformanceTests/TestData/
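
For reference, here is a minimal sketch of a test that uses the new fields
(illustrative only; the two files added under TestData/ below are the
authoritative examples):

<script src="../resources/runner.js"></script>
<script>
PerfTestRunner.measureTime({
    description: "Example test that also reports tracing metrics.",
    run: function() {
        // ... the work to measure goes here ...
    },
    iterationCount: 10,
    // New in this CL: trace these categories during each run and report the
    // CPU time of these trace events, one value per run.
    tracingCategories: 'blink',
    traceEventsToMeasure: ['FrameView::layout', 'UpdateLayoutTree']
});
</script>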

BUG=701059, 715822

TBR=wangxianzhu@chromium.org

Reviewer: you can see this in action by patching in this CL & running:
./tools/perf/run_benchmark --browser=system blink_perf.testing --story-filter=frame --output-format=json
[ RUN      ] append-child-measure-time.html
...
...
CPU times of trace event "UpdateLayoutTree":
values 1.8760000000, 3.2080000000, 3.4260000000, 1.9020000000, 1.6990000000, 2.2940000000, 1.8570000000, 1.7100000000, 1.4170000000, 1.4150000000 ms
avg 2.0804000000 ms

CPU times of trace event "FrameView::layout":
values 4.6150000000, 8.3140000000, 8.6080000000, 4.4360000000, 4.0940000000, 5.9130000000, 4.5970000000, 3.9440000000, 3.4590000000, 3.4600000000 ms
avg 5.1440000000 ms
[ RUN      ] color-changes-measure-frame-time.html
...
...
CPU times of trace event "FrameView::prePaint":
values 5.9890000000, 5.9640000000, 6.2360000000, 6.3870000000, 5.9490000000, 6.3450000000, 6.3340000000, 6.9270000000, 6.8910000000 ms
avg 6.3357777778 ms

CPU times of trace event "FrameView::paintTree":
values 30.0950000000, 30.5020000000, 30.6510000000, 30.3380000000, 30.4160000000, 30.3480000000, 30.5110000000, 30.7570000000, 30.6140000000 ms
avg 30.4702222222 ms

The command above also produces 2 trace files locally, one for each test.

Review-Url: https://codereview.chromium.org/2819343002
Cr-Commit-Position: refs/heads/master@{#467445}
Committed: https://chromium.googlesource.com/chromium/src/+/0d2d43615b4553f00ac711f54dbcc0835f04366b

patch from issue 2819343002 at patchset 270001 (http://crrev.com/2819343002#ps270001)

Review-Url: https://codereview.chromium.org/2844133002
Cr-Commit-Position: refs/heads/master@{#467637}
parent 4840b4c8
<!DOCTYPE html>
<html>
<body>
<div id="holder">
</div>
<script src="../resources/runner.js"></script>
<script>
var holderElement = document.getElementById("holder");
PerfTestRunner.measureTime({
description: "Measures performance of layout when adding many child elements.",
setup: function() {
while (holderElement.firstChild) {
holderElement.removeChild(holderElement.firstChild);
}
},
run: function() {
for (var i = 0; i < 50; ++i) {
var element = document.createElement("div");
element.title = 'dummy';
element.innerText = "FOO";
holderElement.appendChild(element);
PerfTestRunner.forceLayout();
}
},
warmUpCount: 3,
iterationCount: 10,
tracingCategories: 'blink',
traceEventsToMeasure: ['FrameView::layout', 'UpdateLayoutTree']
});
</script>
</body>
</html>
<!DOCTYPE html>
<body>
<script src="../resources/runner.js"></script>
<style>
span {
padding: 1px;
}
.changeColor {
background-color: green;
}
</style>
<script>
// This test measures the lifecycle update performance of changing background
// colors in large trees.
function buildTree(parent, depth, arity, tagNameCallback, createElementCallback) {
for (var child = 0; child < arity; child++) {
var element = document.createElement(tagNameCallback(depth));
parent.appendChild(element);
createElementCallback(element, depth);
if (depth > 1)
buildTree(element, depth - 1, arity, tagNameCallback, createElementCallback);
}
}
// Build a tall tree that is skinny. A middle layer of
// the tree should have the changeColor class.
buildTree(document.body, 15, 2,
function(depth) {
// Use divs at upper levels to avoid too much layout time.
return depth > 9 ? 'div' : 'span';
},
function(element, depth) {
element.style.backgroundColor = 'green';
if (depth == 5)
element.setAttribute('class', 'changeColor');
}
);
// Build a short tree that is fat. A middle layer of
// the tree should have the changeColor class.
buildTree(document.body, 6, 7,
function(depth) {
// Use divs at upper levels to avoid too much layout time.
return depth > 4 ? 'div' : 'span';
},
function(element, depth) {
element.style.backgroundColor = 'orange';
if (depth == 3)
element.setAttribute('class', 'changeColor');
}
);
var runCount = 0;
var elementsToChange = document.getElementsByClassName('changeColor');
var colors = [
"rgb(128, 18, 237)",
"rgb(191, 1, 191)",
"rgb(237, 18, 128)",
"rgb(255, 64, 64)",
"rgb(237, 127, 18)",
"rgb(191, 191, 1)",
"rgb(128, 237, 18)",
"rgb(64, 255, 64)",
"rgb(18, 237, 127)",
"rgb(1, 191, 191)",
"rgb(18, 128, 237)",
"rgb(64, 64, 255)"
];
PerfTestRunner.measureFrameTime({
run: function() {
runCount++;
var newColor = colors[runCount % colors.length];
for (var index = 0; index < elementsToChange.length; index++)
elementsToChange[index].style.backgroundColor = newColor;
},
warmUpCount: 3,
iterationCount: 10,
tracingCategories: 'blink',
traceEventsToMeasure: ['FrameView::prePaint', 'FrameView::paintTree']
});
</script>
</body>
@@ -157,8 +157,16 @@ if (window.testRunner) {
PerfTestRunner.log("Running " + iterationCount + " times");
if (test.doNotIgnoreInitialRun)
completedIterations++;
if (runner)
if (runner && test.tracingCategories && window.testRunner &&
window.testRunner.supportTracing) {
window.testRunner.traceEventsToMeasure = test.traceEventsToMeasure;
window.testRunner.startTracing(test.tracingCategories, function() {
scheduleNextRun(scheduler, runner);
});
} else if (runner) {
scheduleNextRun(scheduler, runner);
}
}
function scheduleNextRun(scheduler, runner) {
@@ -225,8 +233,17 @@ if (window.testRunner) {
logInDocument("Got an exception while finalizing the test with name=" + exception.name + ", message=" + exception.message);
}
if (window.testRunner)
testRunner.notifyDone();
if (window.testRunner) {
if (currentTest.traceEventsToMeasure &&
testRunner.supportTracing) {
testRunner.stopTracingAndMeasure(
currentTest.traceEventsToMeasure, function() {
testRunner.notifyDone();
});
} else {
testRunner.notifyDone();
}
}
}
PerfTestRunner.prepareToMeasureValuesAsync = function (test) {
@@ -248,6 +265,25 @@ if (window.testRunner) {
finish();
}
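// console.time()/console.timeEnd() below emit "blink_perf.runTest" trace
// events in the "blink.console" category; the Telemetry side uses these
// slices to bound each measured run. Warm-up runs use a separate
// "blink_perf.runTest.warmup" marker and are ignored when computing the
// trace event CPU times.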
function addRunTestStartMarker() {
if (!window.testRunner || !window.testRunner.supportTracing)
return;
if (completedIterations < 0)
console.time('blink_perf.runTest.warmup');
else
console.time('blink_perf.runTest');
}
function addRunTestEndMarker() {
if (!window.testRunner || !window.testRunner.supportTracing)
return;
if (completedIterations < 0)
console.timeEnd('blink_perf.runTest.warmup');
else
console.timeEnd('blink_perf.runTest');
}
PerfTestRunner.measureFrameTime = function (test) {
PerfTestRunner.unit = "ms";
PerfTestRunner.bufferedLog = true;
@@ -261,9 +297,12 @@ if (window.testRunner) {
var lastFrameTime = -1;
function measureFrameTimeOnce() {
if (lastFrameTime != -1)
addRunTestEndMarker();
var now = PerfTestRunner.now();
var result = lastFrameTime == -1 ? -1 : now - lastFrameTime;
lastFrameTime = now;
addRunTestStartMarker();
var returnValue = currentTest.run();
if (returnValue - 0 === returnValue) {
@@ -290,7 +329,9 @@ if (window.testRunner) {
PerfTestRunner.gc();
var start = PerfTestRunner.now();
addRunTestStartMarker();
var returnValue = currentTest.run();
addRunTestEndMarker();
var end = PerfTestRunner.now();
if (returnValue - 0 === returnValue) {
...
@@ -12,6 +12,24 @@ testRunner.notifyDone = function() {
this.isDone = true;
};
testRunner.supportTracing = true;
// If this is true, the blink_perf test is paused, waiting for tracing to be
// started. |scheduleTestRun| should be invoked after tracing is started to
// continue the blink_perf test.
testRunner.isWaitingForTracingStart = false;
testRunner.startTracing = function(tracingCategories, scheduleTestRun) {
this.tracingCategories = tracingCategories;
this.scheduleTestRun = scheduleTestRun;
this.isWaitingForTracingStart = true;
}
testRunner.stopTracingAndMeasure = function(traceEventsToMeasure, callback) {
testRunner.traceEventsToMeasure = traceEventsToMeasure;
callback();
}
window.GCController = {};
GCController.collect = function() {
...
@@ -12,8 +12,14 @@ from telemetry import page as page_module
from telemetry.page import legacy_page_test
from telemetry.page import shared_page_state
from telemetry import story
from telemetry.timeline import bounds
from telemetry.timeline import model as model_module
from telemetry.timeline import tracing_config
from telemetry.value import list_of_scalar_values
from telemetry.value import scalar
from telemetry.value import trace
from benchmarks import pywebsocket_server
from measurements import timeline_controller
@@ -73,6 +79,56 @@ def CreateStorySetFromPath(path, skipped_file,
return ps
def _ComputeTraceEventsThreadTimeForBlinkPerf(
renderer_thread, trace_events_to_measure):
""" Compute the CPU duration for each of |trace_events_to_measure| during
blink_perf test.
Args:
renderer_thread: the renderer thread which run blink_perf test.
trace_events_to_measure: a list of string names of trace events to measure
CPU duration for.
Returns:
a dictionary in which each key is a trace event' name (from
|trace_events_to_measure| list), and value is a list of numbers that
represents to total cpu time of that trace events in each blink_perf test.
"""
trace_cpu_time_metrics = {}
# Collect the bounds of "blink_perf.runTest" events.
test_runs_bounds = []
for event in renderer_thread.async_slices:
if event.name == "blink_perf.runTest":
test_runs_bounds.append(bounds.Bounds.CreateFromEvent(event))
test_runs_bounds.sort(key=lambda b: b.min)
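# One accumulator per test run, for every trace event we measure.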
for t in trace_events_to_measure:
trace_cpu_time_metrics[t] = [0.0] * len(test_runs_bounds)
for event_name in trace_events_to_measure:
curr_test_runs_bound_index = 0
for event in renderer_thread.IterAllSlicesOfName(event_name):
while (curr_test_runs_bound_index < len(test_runs_bounds) and
event.start > test_runs_bounds[curr_test_runs_bound_index].max):
curr_test_runs_bound_index += 1
if curr_test_runs_bound_index >= len(test_runs_bounds):
break
curr_test_bound = test_runs_bounds[curr_test_runs_bound_index]
intersect_wall_time = bounds.Bounds.GetOverlapBetweenBounds(
curr_test_bound, bounds.Bounds.CreateFromEvent(event))
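# Scale the overlapping wall time by the event's CPU/wall-time ratio to
# estimate the CPU time spent inside this test run; if thread time is not
# recorded, fall back to the overlapping wall time.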
if event.thread_duration and event.duration:
intersect_cpu_time = (
intersect_wall_time * event.thread_duration / event.duration)
else:
intersect_cpu_time = intersect_wall_time
trace_cpu_time_metrics[event_name][curr_test_runs_bound_index] += (
intersect_cpu_time)
return trace_cpu_time_metrics
class _BlinkPerfMeasurement(legacy_page_test.LegacyPageTest):
"""Runs a blink performance test and reports the results."""
@@ -98,8 +154,53 @@ class _BlinkPerfMeasurement(legacy_page_test.LegacyPageTest):
if 'content-shell' in options.browser_type:
options.AppendExtraBrowserArgs('--expose-internals-for-testing')
def _ContinueTestRunWithTracing(self, tab):
tracing_categories = tab.EvaluateJavaScript(
'testRunner.tracingCategories')
config = tracing_config.TracingConfig()
config.enable_chrome_trace = True
config.chrome_trace_config.category_filter.AddFilterString(
'blink.console')  # This is always required for JS-land trace events
config.chrome_trace_config.category_filter.AddFilterString(
tracing_categories)
tab.browser.platform.tracing_controller.StartTracing(config)
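# Tracing is now active: resume the blink_perf test that paused itself in
# testRunner.startTracing() and wait for it to finish before stopping tracing.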
tab.EvaluateJavaScript('testRunner.scheduleTestRun()')
tab.WaitForJavaScriptCondition('testRunner.isDone')
return tab.browser.platform.tracing_controller.StopTracing()
def PrintAndCollectTraceEventMetrics(self, trace_cpu_time_metrics, results):
unit = 'ms'
print
for trace_event_name, cpu_times in trace_cpu_time_metrics.iteritems():
print 'CPU times of trace event "%s":' % trace_event_name
cpu_times_string = ', '.join(['{0:.10f}'.format(t) for t in cpu_times])
print 'values %s %s' % (cpu_times_string, unit)
avg = 0.0
if cpu_times:
avg = sum(cpu_times)/len(cpu_times)
print 'avg', '{0:.10f}'.format(avg), unit
results.AddValue(list_of_scalar_values.ListOfScalarValues(
results.current_page, name=trace_event_name, units=unit,
values=cpu_times))
print
print '\n'
def ValidateAndMeasurePage(self, page, tab, results):
tab.WaitForJavaScriptCondition('testRunner.isDone', timeout=600)
tab.WaitForJavaScriptCondition(
'testRunner.isDone || testRunner.isWaitingForTracingStart', timeout=600)
trace_cpu_time_metrics = {}
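# If the test paused itself to wait for tracing (testRunner.startTracing in
# the harness), start tracing, resume the test, and compute per-run CPU
# times for the requested trace events from the collected trace.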
if tab.EvaluateJavaScript('testRunner.isWaitingForTracingStart'):
trace_data = self._ContinueTestRunWithTracing(tab)
trace_value = trace.TraceValue(page, trace_data)
results.AddValue(trace_value)
trace_events_to_measure = tab.EvaluateJavaScript(
'window.testRunner.traceEventsToMeasure')
model = model_module.TimelineModel(trace_data)
renderer_thread = model.GetRendererThreadFromTabId(tab.id)
trace_cpu_time_metrics = _ComputeTraceEventsThreadTimeForBlinkPerf(
renderer_thread, trace_events_to_measure)
log = tab.EvaluateJavaScript('document.getElementById("log").innerHTML')
@@ -120,6 +221,8 @@ class _BlinkPerfMeasurement(legacy_page_test.LegacyPageTest):
print log
self.PrintAndCollectTraceEventMetrics(trace_cpu_time_metrics, results)
# TODO(wangxianzhu): Convert the paint benchmarks to use the new blink_perf
# tracing once it's ready.
...
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import unittest
from telemetry import decorators
from telemetry import story
from telemetry.page import page as page_module
from telemetry.testing import options_for_unittests
from telemetry.testing import page_test_test_case
from telemetry.timeline import async_slice
from telemetry.timeline import model as model_module
from benchmarks import blink_perf
class BlinkPerfTest(page_test_test_case.PageTestTestCase):
_BLINK_PERF_TEST_DATA_DIR = os.path.join(os.path.dirname(__file__),
'..', '..', '..', 'third_party', 'WebKit', 'PerformanceTests',
'TestData')
_BLINK_PERF_RESOURCES_DIR = os.path.join(os.path.dirname(__file__),
'..', '..', '..', 'third_party', 'WebKit', 'PerformanceTests',
'resources')
def setUp(self):
self._options = options_for_unittests.GetCopy()
# pylint: disable=protected-access
self._measurement = blink_perf._BlinkPerfMeasurement()
# pylint: enable=protected-access
def _CreateStorySetForTestFile(self, test_file_name):
story_set = story.StorySet(base_dir=self._BLINK_PERF_TEST_DATA_DIR,
serving_dirs={self._BLINK_PERF_TEST_DATA_DIR,
self._BLINK_PERF_RESOURCES_DIR})
page = page_module.Page('file://' + test_file_name, story_set,
base_dir=story_set.base_dir)
story_set.AddStory(page)
return story_set
@decorators.Disabled('win') # crbug.com/715822
def testBlinkPerfTracingMetricsForMeasureTime(self):
results = self.RunMeasurement(measurement=self._measurement,
ps=self._CreateStorySetForTestFile('append-child-measure-time.html'),
options=self._options)
self.assertFalse(results.failures)
self.assertEquals(len(results.FindAllTraceValues()), 1)
frame_view_layouts = results.FindAllPageSpecificValuesNamed(
'FrameView::layout')
self.assertEquals(len(frame_view_layouts), 1)
self.assertGreater(frame_view_layouts[0].GetRepresentativeNumber(), 0.1)
update_layout_trees = results.FindAllPageSpecificValuesNamed(
'UpdateLayoutTree')
self.assertEquals(len(update_layout_trees), 1)
self.assertGreater(update_layout_trees[0].GetRepresentativeNumber(), 0.1)
@decorators.Disabled('android') # crbug.com/715685
def testBlinkPerfTracingMetricsForMeasureFrameTime(self):
results = self.RunMeasurement(measurement=self._measurement,
ps=self._CreateStorySetForTestFile(
'color-changes-measure-frame-time.html'),
options=self._options)
self.assertFalse(results.failures)
self.assertEquals(len(results.FindAllTraceValues()), 1)
frame_view_prepaints = results.FindAllPageSpecificValuesNamed(
'FrameView::prePaint')
self.assertEquals(len(frame_view_prepaints), 1)
self.assertGreater(frame_view_prepaints[0].GetRepresentativeNumber(), 0.1)
frame_view_painttrees = results.FindAllPageSpecificValuesNamed(
'FrameView::paintTree')
self.assertEquals(len(frame_view_painttrees), 1)
self.assertGreater(frame_view_painttrees[0].GetRepresentativeNumber(), 0.1)
# pylint: disable=protected-access
# This is needed for testing _ComputeTraceEventsThreadTimeForBlinkPerf method.
class ComputeTraceEventsMetricsForBlinkPerfTest(unittest.TestCase):
def _AddBlinkTestSlice(self, renderer_thread, start, end):
s = async_slice.AsyncSlice(
'blink', 'blink_perf.runTest',
timestamp=start, duration=end - start, start_thread=renderer_thread,
end_thread=renderer_thread)
renderer_thread.AddAsyncSlice(s)
def testTraceEventMetricsSingleBlinkTest(self):
model = model_module.TimelineModel()
renderer_main = model.GetOrCreateProcess(1).GetOrCreateThread(2)
renderer_main.name = 'CrRendererMain'
# Set up a main thread model that looks like:
# [ blink_perf.run_test ]
# | [ foo ] [ bar ] [ baz ]
# | | | | | | | |
# 100 120 140 400 420 500 550 600
# | | |
# CPU dur: 15 18 70
#
self._AddBlinkTestSlice(renderer_main, 100, 550)
renderer_main.BeginSlice('blink', 'foo', 120, 122)
renderer_main.EndSlice(140, 137)
renderer_main.BeginSlice('blink', 'bar', 400, 402)
renderer_main.EndSlice(420, 420)
# Since this "baz" slice has CPU duration = 70ms, wall-time duration = 100ms
# & its overlapped wall-time with "blink_perf.run_test" is 50 ms, its
# overlapped CPU time with "blink_perf.run_test" is
# 50 * 70 / 100 = 35ms.
renderer_main.BeginSlice('blink', 'baz', 500, 520)
renderer_main.EndSlice(600, 590)
self.assertEquals(
blink_perf._ComputeTraceEventsThreadTimeForBlinkPerf(
renderer_main, ['foo', 'bar', 'baz']),
{'foo': [15], 'bar': [18], 'baz': [35]})
def testTraceEventMetricsMultiBlinkTest(self):
model = model_module.TimelineModel()
renderer_main = model.GetOrCreateProcess(1).GetOrCreateThread(2)
renderer_main.name = 'CrRendererMain'
# Set up a main thread model that looks like:
# [ blink_perf.run_test ] [ blink_perf.run_test ]
# | [ foo ] [ bar ] | [ | foo ] |
# | | | | | | | | | | |
# 100 120 140 400 420 440 500 520 600 640
# | | |
# CPU dur: 15 18 40
#
self._AddBlinkTestSlice(renderer_main, 100, 440)
self._AddBlinkTestSlice(renderer_main, 520, 640)
renderer_main.BeginSlice('blink', 'foo', 120, 122)
renderer_main.EndSlice(140, 137)
renderer_main.BeginSlice('blink', 'bar', 400, 402)
renderer_main.EndSlice(420, 420)
# Since this "foo" slice has CPU duration = 40ms, wall-time duration = 100ms
# & its overlapped wall-time with "blink_perf.run_test" is 80 ms, its
# overlapped CPU time with "blink_perf.run_test" is
# 80 * 40 / 100 = 32ms.
renderer_main.BeginSlice('blink', 'foo', 500, 520)
renderer_main.EndSlice(600, 560)
self.assertEquals(
blink_perf._ComputeTraceEventsThreadTimeForBlinkPerf(
renderer_main, ['foo', 'bar', 'baz']),
{'foo': [15, 32], 'bar': [18, 0], 'baz': [0, 0]})
def testTraceEventMetricsNoThreadTimeAvailable(self):
model = model_module.TimelineModel()
renderer_main = model.GetOrCreateProcess(1).GetOrCreateThread(2)
renderer_main.name = 'CrRendererMain'
# Set up a main thread model that looks like:
# [ blink_perf.run_test ]
# | [ foo ] [ bar ] |
# | | | | | |
# 100 120 140 400 420 550
# | |
# CPU dur: None None
#
self._AddBlinkTestSlice(renderer_main, 100, 550)
renderer_main.BeginSlice('blink', 'foo', 120)
renderer_main.EndSlice(140)
renderer_main.BeginSlice('blink', 'bar', 400)
renderer_main.EndSlice(420)
self.assertEquals(
blink_perf._ComputeTraceEventsThreadTimeForBlinkPerf(
renderer_main, ['foo', 'bar']),
{'foo': [20], 'bar': [20]})