Commit c034b8d8 authored by Ned Nguyen's avatar Ned Nguyen Committed by Commit Bot

Remove measurements that are no longer used by any benchmark

Bug: 891705
Change-Id: Ied9506803dd2dd09a0bcfbff73274cc96d014715
Reviewed-on: https://chromium-review.googlesource.com/c/1258965
Reviewed-by: Caleb Rouleau <crouleau@chromium.org>
Commit-Queue: Ned Nguyen <nednguyen@google.com>
Cr-Commit-Position: refs/heads/master@{#596252}
parent 5e40eec4
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import legacy_page_test
from telemetry.timeline import model
from telemetry.timeline import tracing_config
from telemetry.value import scalar
class DrawProperties(legacy_page_test.LegacyPageTest):
  """Measures the average cost of processing compositor property trees.

  Traces the cc.debug.cdp-perf category during a page visit and reports the
  mean duration of the property-tree visible-rect computation events.
  """

  def __init__(self):
    super(DrawProperties, self).__init__()

  def CustomizeBrowserOptions(self, options):
    # Prefer compositing so the property-tree code paths are exercised.
    options.AppendExtraBrowserArgs(
        ['--enable-prefer-compositing-to-lcd-text'])

  def WillNavigateToPage(self, page, tab):
    del page  # unused
    config = tracing_config.TracingConfig()
    config.chrome_trace_config.category_filter.AddDisabledByDefault(
        'disabled-by-default-cc.debug.cdp-perf')
    config.enable_chrome_trace = True
    tab.browser.platform.tracing_controller.StartTracing(config)

  def ComputeAverageOfDurations(self, timeline_model, name):
    """Returns the mean duration of all trace events named |name|."""
    durations = [e.duration for e in timeline_model.GetAllEventsOfName(name)]
    assert durations, 'Failed to find durations'
    return sum(durations) / len(durations)

  def ValidateAndMeasurePage(self, page, tab, results):
    del page  # unused
    trace_data = tab.browser.platform.tracing_controller.StopTracing()[0]
    timeline = model.TimelineModel(trace_data)
    pt_avg = self.ComputeAverageOfDurations(
        timeline,
        'LayerTreeHostCommon::ComputeVisibleRectsWithPropertyTrees')
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'PT_avg_cost', 'ms', pt_avg,
        description='Average time spent processing property trees'))

  def DidRunPage(self, platform):
    # Shut tracing down even if the page run aborted before measuring.
    controller = platform.tracing_controller
    if controller.is_tracing_running:
      controller.StopTracing()
# Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import legacy_page_test
from telemetry.timeline import model
from telemetry.timeline import tracing_config
from telemetry.value import scalar
from metrics import power
class ImageDecoding(legacy_page_test.LegacyPageTest):
  """Measures image decode/load times alongside power consumption."""

  def __init__(self):
    super(ImageDecoding, self).__init__()
    self._power_metric = None

  def CustomizeBrowserOptions(self, options):
    options.AppendExtraBrowserArgs('--enable-gpu-benchmarking')
    power.PowerMetric.CustomizeBrowserOptions(options)

  def WillStartBrowser(self, platform):
    self._power_metric = power.PowerMetric(platform)

  def WillNavigateToPage(self, page, tab):
    # Start from a cold image cache so every decode is really measured.
    tab.ExecuteJavaScript("""
        if (window.chrome &&
            chrome.gpuBenchmarking &&
            chrome.gpuBenchmarking.clearImageCache) {
          chrome.gpuBenchmarking.clearImageCache();
        }
    """)
    self._power_metric.Start(page, tab)

    config = tracing_config.TracingConfig()
    # FIXME: Remove the timeline category when impl-side painting is on
    # everywhere.
    # FIXME: Remove webkit.console when blink.console lands in chromium and
    # the ref builds are updated. crbug.com/386847
    # FIXME: Remove the devtools.timeline category when impl-side painting is
    # on everywhere.
    config.chrome_trace_config.category_filter.AddDisabledByDefault(
        'disabled-by-default-devtools.timeline')
    for category in ('blink', 'devtools.timeline', 'webkit.console',
                     'blink.console'):
      config.chrome_trace_config.category_filter.AddIncludedCategory(category)
    config.enable_chrome_trace = True
    tab.browser.platform.tracing_controller.StartTracing(config)

  def ValidateAndMeasurePage(self, page, tab, results):
    trace_data = tab.browser.platform.tracing_controller.StopTracing()[0]
    timeline = model.TimelineModel(trace_data)
    self._power_metric.Stop(page, tab)
    self._power_metric.AddResults(tab, results)

    decode_events = timeline.GetAllEventsOfName('ImageFrameGenerator::decode')
    # FIXME: Remove this when impl-side painting is on everywhere.
    if not decode_events:
      decode_events = timeline.GetAllEventsOfName('Decode Image')

    # If it is a real image page, then store only the last-minIterations
    # decode tasks.
    limit_attr = 'image_decoding_measurement_limit_results_to_min_iterations'
    if getattr(page, limit_attr, False):
      assert tab.EvaluateJavaScript('isDone')
      min_iterations = tab.EvaluateJavaScript('minIterations')
      decode_events = decode_events[-min_iterations:]

    durations = [event.duration for event in decode_events]
    assert durations, 'Failed to find image decode trace events.'
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'ImageDecoding_avg', 'ms',
        sum(durations) / len(durations),
        description='Average decode time for images in 4 different '
                    'formats: gif, png, jpg, and webp. The image files are '
                    'located at chrome/test/data/image_decoding.'))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'ImageLoading_avg', 'ms',
        tab.EvaluateJavaScript('averageLoadingTimeMs()')))

  def DidRunPage(self, platform):
    self._power_metric.Close()
    if platform.tracing_controller.is_tracing_running:
      platform.tracing_controller.StopTracing()
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
(function() {
  // The Mozilla DHTML performance tests need to explicitly call a function to
  // trigger the next page visit, rather than directly using the onload handler.
  // To meet needs of the DHTML performance tests without forking this head.js
  // file, use a variable |__install_onload_handler| to indicate whether the
  // |__onload| handler should be added to onload event listener.
  // Install |__onload| by default if there is no pre-configuration.
  // NOTE: the |var| inside the conditional is deliberate — declaration
  // hoisting makes |__install_onload_handler| exist in this scope either way,
  // and the assignment only runs when no page pre-configured it.
  if (typeof(__install_onload_handler) == 'undefined')
    var __install_onload_handler = true;

  // This is the timeout used in setTimeout inside the DHTML tests. Chrome has
  // a much more accurate timer resolution than other browsers do. This results
  // in Chrome running these tests much faster than other browsers. In order to
  // compare Chrome with other browsers on DHTML performance alone, set this
  // value to ~15.
  var __test_timeout = 0;

  // Sets a site-wide cookie (path=/) so state survives across page visits.
  function __set_cookie(name, value) {
    document.cookie = name + "=" + value + "; path=/";
  }

  function __onbeforeunload() {
    // Call GC twice to cleanup JS heap before starting a new test.
    if (window.gc) {
      window.gc();
      window.gc();
    }
  }

  // The function |__onload| is used by the DHTML tests.
  window.__onload = function() {
    if (!__install_onload_handler && !performance.timing.loadEventEnd)
      return;

    var unused = document.body.offsetHeight; // force layout

    // Publish the load-complete timestamp for the harness to read.
    window.__pc_load_time = window.performance.now();
  };

  // The function |__eval_later| now is only used by the DHTML tests.
  window.__eval_later = function(expression) {
    setTimeout(expression, __test_timeout);
  };

  if (window.parent == window) { // Ignore subframes.
    window.__pc_load_time = null;
    addEventListener("load", __onload);
    addEventListener("beforeunload", __onbeforeunload);
  }
})();
\ No newline at end of file
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import legacy_page_test
from telemetry.timeline.model import TimelineModel
from telemetry.timeline import tracing_config
from telemetry.util import statistics
from telemetry.value import scalar
class TaskExecutionTime(legacy_page_test.LegacyPageTest):
  """Reports the slowest tasks observed on key browser/renderer threads.

  Traces a page visit, buckets every task slice on the listed threads into
  sections (idle vs. normal), then reports each section's share of the total
  duration and the top-N slowest tasks (by median self-duration).

  Fixes over the previous revision: the sort lambda no longer shadows the
  builtin |slice|, and the total-duration local is named readably.
  """

  IDLE_SECTION_TRIGGER = 'SingleThreadIdleTaskRunner::RunTask'
  IDLE_SECTION = 'IDLE'
  NORMAL_SECTION = 'NORMAL'

  _TIME_OUT_IN_SECONDS = 60
  _NUMBER_OF_RESULTS_TO_DISPLAY = 10
  _BROWSER_THREADS = ['Chrome_ChildIOThread',
                      'Chrome_IOThread']
  _RENDERER_THREADS = ['Chrome_ChildIOThread',
                       'Chrome_IOThread',
                       'CrRendererMain']
  _CATEGORIES = ['benchmark',
                 'blink',
                 'blink.console',
                 'blink_gc',
                 'cc',
                 'gpu',
                 'ipc',
                 'renderer.scheduler',
                 'toplevel',
                 'v8',
                 'webkit.console']

  def __init__(self):
    super(TaskExecutionTime, self).__init__()
    self._renderer_process = None
    self._browser_process = None
    self._results = None

  def WillNavigateToPage(self, page, tab):
    del page  # unused
    config = tracing_config.TracingConfig()
    for category in self._CATEGORIES:
      config.chrome_trace_config.category_filter.AddIncludedCategory(
          category)
    config.enable_chrome_trace = True
    tab.browser.platform.tracing_controller.StartTracing(
        config, self._TIME_OUT_IN_SECONDS)

  def ValidateAndMeasurePage(self, page, tab, results):
    del page  # unused
    trace_data = tab.browser.platform.tracing_controller.StopTracing()[0]
    timeline_model = TimelineModel(trace_data)
    self._renderer_process = timeline_model.GetFirstRendererProcess(tab.id)
    self._browser_process = timeline_model.browser_process
    self._AddResults(results)

  def _AddResults(self, results):
    self._results = results
    for thread in self._BROWSER_THREADS:
      self._AddTasksFromThreadToResults(self._browser_process, thread)
    for thread in self._RENDERER_THREADS:
      self._AddTasksFromThreadToResults(self._renderer_process, thread)

  def _AddTasksFromThreadToResults(self, process, thread_name):
    """Reports section percentages and slowest tasks for one thread."""
    if process is None:
      return
    sections = TaskExecutionTime._GetSectionsForThread(process, thread_name)
    self._ReportSectionPercentages(sections.values(),
                                   '%s:%s' % (process.name, thread_name))
    # Create list with top |_NUMBER_OF_RESULTS_TO_DISPLAY| for each section.
    for section in sections.itervalues():
      if section.name == TaskExecutionTime.IDLE_SECTION:
        # Skip sections we don't report.
        continue
      self._AddSlowestTasksToResults(section.tasks.values())

  def _AddSlowestTasksToResults(self, tasks):
    """Reports the median self-duration of the slowest |tasks|."""
    # |task| (not |slice|) to avoid shadowing the builtin of that name.
    sorted_tasks = sorted(
        tasks,
        key=lambda task: task.median_self_duration,
        reverse=True)

    for task in sorted_tasks[:self.GetExpectedResultCount()]:
      self._results.AddValue(scalar.ScalarValue(
          self._results.current_page,
          task.name,
          'ms',
          task.median_self_duration,
          description='Slowest tasks'))

  def _ReportSectionPercentages(self, section_values, metric_prefix):
    """Reports each section's percentage of the thread's total duration."""
    total_duration_of_all_sections = sum(
        section.total_duration for section in section_values)
    if not total_duration_of_all_sections:
      # Nothing was recorded, so early out.
      return

    for section in section_values:
      section_name = section.name or TaskExecutionTime.NORMAL_SECTION
      section_percentage_of_total = (
          (section.total_duration * 100.0) / total_duration_of_all_sections)
      self._results.AddValue(scalar.ScalarValue(
          self._results.current_page,
          '%s:Section_%s' % (metric_prefix, section_name),
          '%',
          section_percentage_of_total,
          description='Idle task percentage'))

  @staticmethod
  def _GetSectionsForThread(process, target_thread):
    """Buckets every slice on |target_thread| into sections; returns a dict."""
    sections = {}
    for thread in process.threads.itervalues():
      if thread.name != target_thread:
        continue
      for task_slice in thread.IterAllSlices():
        _ProcessTasksForThread(
            sections,
            '%s:%s' % (process.name, thread.name),
            task_slice)
    return sections

  @staticmethod
  def GetExpectedResultCount():
    return TaskExecutionTime._NUMBER_OF_RESULTS_TO_DISPLAY
def _ProcessTasksForThread(
    sections, thread_name, task_slice, section_name=None):
  """Recursively folds |task_slice| and its children into |sections|.

  Builds a human-readable task name from the slice's arguments, records the
  slice's self thread time under the appropriate Section, and recurses into
  sub-slices, propagating the section downwards.
  """
  # TRACE_EVENT_INSTANT slices have no duration; nothing to record.
  if task_slice.self_thread_time is None:
    return

  # Note: By setting a different section below we split off this task into
  # a different sorting bucket. To add extra granularity (e.g. tasks executed
  # during page loading) add logic to set a different section name here. The
  # section name is set before the slice's data is recorded so the triggering
  # event will be included in its own section (i.e. the idle trigger will be
  # recorded as an idle event).
  if task_slice.name == TaskExecutionTime.IDLE_SECTION_TRIGGER:
    section_name = TaskExecutionTime.IDLE_SECTION

  # Compose a human-readable name: thread, optional section, then the most
  # specific identifier available in the slice's arguments.
  name_parts = [thread_name]
  if section_name:
    name_parts.append(section_name)

  args = task_slice.args
  if 'src_func' in args:
    # Data contains the name of the timed function, use it as the name.
    name_parts.append(args['src_func'])
  elif 'line' in args:
    # Data contains IPC class and line numbers, use these as the name.
    name_parts.append('IPC_Class_%s:Line_%s' % (args['class'], args['line']))
  else:
    # Fall back to use the name of the task slice.
    name_parts.append(task_slice.name.lower())

  # Replace any '.'s with '_'s as V8 uses them and it confuses the dashboard.
  reported_name = ':'.join(name_parts).replace('.', '_')

  # Lazily create the Section this task belongs to, then record the task.
  if section_name not in sections:
    sections[section_name] = Section(section_name)
  sections[section_name].AddTask(reported_name, task_slice.self_thread_time)

  # Process sub slices recursively, passing the current section down.
  for child in task_slice.sub_slices:
    _ProcessTasksForThread(sections, thread_name, child, section_name)
class NameAndDurations(object):
  """Collects every self-duration recorded for one named task."""

  def __init__(self, name, self_duration):
    self.name = name
    # All self-durations seen so far; starts with the first observation.
    self.self_durations = [self_duration]

  def Update(self, self_duration):
    """Records one more self-duration for this task."""
    self.self_durations.append(self_duration)

  @property
  def median_self_duration(self):
    # Median (rather than mean) keeps one-off outliers from dominating.
    return statistics.Median(self.self_durations)
class Section(object):
  """A named bucket of tasks plus the running total of their durations."""

  def __init__(self, name):
    # A section holds a dictionary, keyed on task name, of all the tasks that
    # exist within it and the total duration of those tasks.
    self.name = name
    self.tasks = {}
    self.total_duration = 0

  def AddTask(self, name, duration):
    """Records |duration| for task |name|, merging repeat occurrences."""
    existing = self.tasks.get(name)
    if existing is None:
      # First sighting of this task: start a fresh duration record.
      self.tasks[name] = NameAndDurations(name, duration)
    else:
      # Seen before (e.g. from an earlier slice); extend the record so a
      # median can be computed later.
      existing.Update(duration)
    # Accumulate total duration for all tasks in this section.
    self.total_duration += duration
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import legacy_page_test
from telemetry.timeline.model import TimelineModel
from telemetry.timeline import tracing_config
from telemetry.util import statistics
from telemetry.value import scalar
class V8GCTimes(legacy_page_test.LegacyPageTest):
  """Measures time spent in V8 garbage collection on the renderer main thread.

  Traces a page visit and reports, per GC event type: total, max and average
  thread duration, event count, duration outside idle tasks, idle deadline
  overrun, and the percentage of work done inside idle tasks — plus the same
  aggregates across all GC event types, and overall duration/cpu_time.
  """

  _TIME_OUT_IN_SECONDS = 60
  _CATEGORIES = ['blink.console',
                 'renderer.scheduler',
                 'v8',
                 'webkit.console']
  _RENDERER_MAIN_THREAD = 'CrRendererMain'
  # Slice name that marks the root of an idle task; GC events nested under it
  # are attributed to idle time.
  _IDLE_TASK_PARENT = 'SingleThreadIdleTaskRunner::RunTask'

  def __init__(self):
    super(V8GCTimes, self).__init__()

  def WillNavigateToPage(self, page, tab):
    del page  # unused
    # Trace only the categories needed for the V8 GC metrics.
    config = tracing_config.TracingConfig()
    for category in self._CATEGORIES:
      config.chrome_trace_config.category_filter.AddIncludedCategory(
          category)
    config.enable_chrome_trace = True
    tab.browser.platform.tracing_controller.StartTracing(
        config, self._TIME_OUT_IN_SECONDS)

  def ValidateAndMeasurePage(self, page, tab, results):
    del page  # unused
    trace_data = tab.browser.platform.tracing_controller.StopTracing()[0]
    timeline_model = TimelineModel(trace_data)
    renderer_process = timeline_model.GetFirstRendererProcess(tab.id)
    self._AddV8MetricsToResults(renderer_process, results)

  def DidRunPage(self, platform):
    # Make sure tracing is shut down even if the page run failed part-way.
    if platform.tracing_controller.is_tracing_running:
      platform.tracing_controller.StopTracing()

  def _AddV8MetricsToResults(self, process, results):
    """Adds GC-event and CPU-time metrics for the renderer main thread."""
    if process is None:
      return
    for thread in process.threads.values():
      if thread.name != self._RENDERER_MAIN_THREAD:
        continue
      self._AddV8EventStatsToResults(thread, results)
      self._AddCpuTimeStatsToResults(thread, results)

  def _AddV8EventStatsToResults(self, thread, results):
    """Aggregates V8 GC trace events on |thread| and reports the metrics."""
    # One accumulator per known GC event type.
    v8_event_stats = [
        V8EventStat('V8.GCIncrementalMarking',
                    'v8_gc_incremental_marking',
                    'incremental marking steps'),
        V8EventStat('V8.GCScavenger',
                    'v8_gc_scavenger',
                    'scavenges'),
        V8EventStat('V8.GCCompactor',
                    'v8_gc_mark_compactor',
                    'mark-sweep-compactor'),
        V8EventStat('V8.GCFinalizeMC',
                    'v8_gc_finalize_incremental',
                    'finalization of incremental marking'),
        V8EventStat('V8.GCFinalizeMCReduceMemory',
                    'v8_gc_finalize_incremental_reduce_memory',
                    'finalization of incremental marking with memory reducer')]
    # Find all V8 GC events in the trace.
    for event in thread.IterAllSlices():
      event_stat = _FindV8EventStatForEvent(v8_event_stats, event.name)
      if not event_stat:
        continue
      event_stat.thread_duration += event.thread_duration
      event_stat.max_thread_duration = max(event_stat.max_thread_duration,
                                           event.thread_duration)
      event_stat.count += 1
      parent_idle_task = _ParentIdleTask(event)
      if parent_idle_task:
        allotted_idle_time = parent_idle_task.args['allotted_time_ms']
        idle_task_wall_overrun = 0
        if event.duration > allotted_idle_time:
          idle_task_wall_overrun = event.duration - allotted_idle_time
        # Don't count time over the deadline as being inside idle time.
        # Since the deadline should be relative to wall clock we compare
        # allotted_time_ms with wall duration instead of thread duration, and
        # then assume the thread duration was inside idle for the same
        # percentage of time.
        inside_idle = event.thread_duration * statistics.DivideIfPossibleOrZero(
            event.duration - idle_task_wall_overrun, event.duration)
        event_stat.thread_duration_inside_idle += inside_idle
        event_stat.idle_task_overrun_duration += idle_task_wall_overrun

    # Report per-event-type metrics.
    for v8_event_stat in v8_event_stats:
      results.AddValue(scalar.ScalarValue(
          results.current_page, v8_event_stat.result_name, 'ms',
          v8_event_stat.thread_duration,
          description=('Total thread duration spent in %s' %
                       v8_event_stat.result_description)))
      results.AddValue(scalar.ScalarValue(
          results.current_page, '%s_max' % v8_event_stat.result_name, 'ms',
          v8_event_stat.max_thread_duration,
          description=('Max thread duration spent in %s' %
                       v8_event_stat.result_description)))
      results.AddValue(scalar.ScalarValue(
          results.current_page, '%s_count' % v8_event_stat.result_name, 'count',
          v8_event_stat.count,
          description=('Number of %s' %
                       v8_event_stat.result_description)))
      average_thread_duration = statistics.DivideIfPossibleOrZero(
          v8_event_stat.thread_duration, v8_event_stat.count)
      results.AddValue(scalar.ScalarValue(
          results.current_page, '%s_average' % v8_event_stat.result_name, 'ms',
          average_thread_duration,
          description=('Average thread duration spent in %s' %
                       v8_event_stat.result_description)))
      results.AddValue(scalar.ScalarValue(
          results.current_page,
          '%s_outside_idle' %
          v8_event_stat.result_name, 'ms',
          v8_event_stat.thread_duration_outside_idle,
          description=(
              'Total thread duration spent in %s outside of idle tasks' %
              v8_event_stat.result_description)))
      results.AddValue(
          scalar.ScalarValue(
              results.current_page,
              '%s_idle_deadline_overrun' %
              v8_event_stat.result_name, 'ms',
              v8_event_stat.idle_task_overrun_duration,
              description=(
                  'Total idle task deadline overrun for %s idle tasks' %
                  v8_event_stat.result_description)))
      results.AddValue(scalar.ScalarValue(
          results.current_page,
          '%s_percentage_idle' %
          v8_event_stat.result_name,
          'idle%',
          v8_event_stat.percentage_thread_duration_during_idle,
          description=(
              'Percentage of %s spent in idle time' %
              v8_event_stat.result_description)))

    # Add total metrics.
    gc_total = sum(x.thread_duration for x in v8_event_stats)
    gc_total_outside_idle = sum(
        x.thread_duration_outside_idle for x in v8_event_stats)
    gc_total_idle_deadline_overrun = sum(
        x.idle_task_overrun_duration for x in v8_event_stats)
    gc_total_percentage_idle = statistics.DivideIfPossibleOrZero(
        100 * (gc_total - gc_total_outside_idle), gc_total)

    results.AddValue(
        scalar.ScalarValue(
            results.current_page, 'v8_gc_total', 'ms',
            gc_total,
            description=('Total thread duration of all garbage '
                         'collection events')))
    results.AddValue(
        scalar.ScalarValue(
            results.current_page, 'v8_gc_total_outside_idle',
            'ms', gc_total_outside_idle,
            description=(
                'Total thread duration of all garbage collection events '
                'outside of idle tasks')))
    results.AddValue(
        scalar.ScalarValue(
            results.current_page,
            'v8_gc_total_idle_deadline_overrun', 'ms',
            gc_total_idle_deadline_overrun,
            description=(
                'Total idle task deadline overrun for all idle tasks garbage '
                'collection events')))
    results.AddValue(
        scalar.ScalarValue(
            results.current_page,
            'v8_gc_total_percentage_idle', 'idle%',
            gc_total_percentage_idle,
            description=(
                'Percentage of the thread duration of all garbage collection '
                'events spent inside of idle tasks')))

  def _AddCpuTimeStatsToResults(self, thread, results):
    """Reports wall-clock span and CPU time of |thread|'s top-level slices."""
    if thread.toplevel_slices:
      start_time = min(s.start for s in thread.toplevel_slices)
      end_time = max(s.end for s in thread.toplevel_slices)
      duration = end_time - start_time
      cpu_time = sum(s.thread_duration for s in thread.toplevel_slices)
    else:
      # No top-level slices were recorded at all; report zeros.
      duration = cpu_time = 0

    results.AddValue(scalar.ScalarValue(
        results.current_page, 'duration', 'ms', duration))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'cpu_time', 'ms', cpu_time))
def _FindV8EventStatForEvent(v8_event_stats_list, event_name):
  """Returns the stat whose src_event_name matches |event_name|, else None."""
  return next(
      (stat for stat in v8_event_stats_list
       if stat.src_event_name == event_name),
      None)
def _ParentIdleTask(event):
  """Returns the nearest ancestor slice that is an idle task, or None.

  Walks up the parent_slice chain from |event| looking for the idle-task
  runner slice name.
  """
  ancestor = event.parent_slice
  while ancestor:
    # pylint: disable=protected-access
    if ancestor.name == V8GCTimes._IDLE_TASK_PARENT:
      return ancestor
    ancestor = ancestor.parent_slice
  return None
class V8EventStat(object):
  """Accumulated timing statistics for a single V8 GC trace event type."""

  def __init__(self, src_event_name, result_name, result_description):
    # Identity: which trace event this stat tracks and how it is reported.
    self.src_event_name = src_event_name
    self.result_name = result_name
    self.result_description = result_description
    # Running totals, updated as matching trace events are processed.
    self.thread_duration = 0.0
    self.thread_duration_inside_idle = 0.0
    self.idle_task_overrun_duration = 0.0
    self.max_thread_duration = 0.0
    self.count = 0

  @property
  def thread_duration_outside_idle(self):
    # Whatever was not attributed to idle tasks happened outside of them.
    return self.thread_duration - self.thread_duration_inside_idle

  @property
  def percentage_thread_duration_during_idle(self):
    # Guarded division: returns 0 when no thread duration was recorded.
    return statistics.DivideIfPossibleOrZero(
        100 * self.thread_duration_inside_idle, self.thread_duration)
This diff is collapsed.
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment