Commit c034b8d8 authored by Ned Nguyen, committed by Commit Bot

Remove measurements that are no longer used by any benchmark

Bug: 891705
Change-Id: Ied9506803dd2dd09a0bcfbff73274cc96d014715
Reviewed-on: https://chromium-review.googlesource.com/c/1258965
Reviewed-by: Caleb Rouleau <crouleau@chromium.org>
Commit-Queue: Ned Nguyen <nednguyen@google.com>
Cr-Commit-Position: refs/heads/master@{#596252}
parent 5e40eec4
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import legacy_page_test
from telemetry.timeline import model
from telemetry.timeline import tracing_config
from telemetry.value import scalar
class DrawProperties(legacy_page_test.LegacyPageTest):
  """Reports the average cost of property-tree processing.

  Traces the disabled-by-default cc.debug.cdp-perf category and averages
  the durations of the ComputeVisibleRectsWithPropertyTrees events.
  """

  def __init__(self):
    super(DrawProperties, self).__init__()

  def CustomizeBrowserOptions(self, options):
    # Prefer compositing so the compositor code paths under test stay active.
    options.AppendExtraBrowserArgs(
        ['--enable-prefer-compositing-to-lcd-text'])

  def WillNavigateToPage(self, page, tab):
    del page  # unused
    trace_options = tracing_config.TracingConfig()
    trace_options.chrome_trace_config.category_filter.AddDisabledByDefault(
        'disabled-by-default-cc.debug.cdp-perf')
    trace_options.enable_chrome_trace = True
    tab.browser.platform.tracing_controller.StartTracing(trace_options)

  def ComputeAverageOfDurations(self, timeline_model, name):
    """Returns the mean duration of all events named |name| in the model."""
    event_durations = [e.duration
                       for e in timeline_model.GetAllEventsOfName(name)]
    assert event_durations, 'Failed to find durations'
    return sum(event_durations) / len(event_durations)

  def ValidateAndMeasurePage(self, page, tab, results):
    del page  # unused
    timeline_data = tab.browser.platform.tracing_controller.StopTracing()[0]
    timeline_model = model.TimelineModel(timeline_data)
    pt_avg = self.ComputeAverageOfDurations(
        timeline_model,
        'LayerTreeHostCommon::ComputeVisibleRectsWithPropertyTrees')
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'PT_avg_cost', 'ms', pt_avg,
        description='Average time spent processing property trees'))

  def DidRunPage(self, platform):
    # Make sure tracing is shut down even when the page failed before
    # ValidateAndMeasurePage could stop it.
    tracing_controller = platform.tracing_controller
    if tracing_controller.is_tracing_running:
      tracing_controller.StopTracing()
# Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import legacy_page_test
from telemetry.timeline import model
from telemetry.timeline import tracing_config
from telemetry.value import scalar
from metrics import power
class ImageDecoding(legacy_page_test.LegacyPageTest):
  """Measures average image decode/load times and power usage.

  Works with pages that decode images repeatedly and expose the
  isDone / minIterations / averageLoadingTimeMs() JavaScript hooks
  (see chrome/test/data/image_decoding).
  """

  def __init__(self):
    super(ImageDecoding, self).__init__()
    self._power_metric = None  # Created per browser in WillStartBrowser.

  def CustomizeBrowserOptions(self, options):
    # gpuBenchmarking is required for the clearImageCache() call below.
    options.AppendExtraBrowserArgs('--enable-gpu-benchmarking')
    power.PowerMetric.CustomizeBrowserOptions(options)

  def WillStartBrowser(self, platform):
    self._power_metric = power.PowerMetric(platform)

  def WillNavigateToPage(self, page, tab):
    # Start with a cold image cache so every decode is actually performed.
    tab.ExecuteJavaScript("""
        if (window.chrome &&
            chrome.gpuBenchmarking &&
            chrome.gpuBenchmarking.clearImageCache) {
          chrome.gpuBenchmarking.clearImageCache();
        }
    """)
    self._power_metric.Start(page, tab)
    config = tracing_config.TracingConfig()
    # FIXME: Remove the timeline category when impl-side painting is on
    # everywhere.
    # FIXME: Remove webkit.console when blink.console lands in chromium and
    # the ref builds are updated. crbug.com/386847
    # FIXME: Remove the devtools.timeline category when impl-side painting is
    # on everywhere.
    config.chrome_trace_config.category_filter.AddDisabledByDefault(
        'disabled-by-default-devtools.timeline')
    for c in ['blink', 'devtools.timeline', 'webkit.console', 'blink.console']:
      config.chrome_trace_config.category_filter.AddIncludedCategory(c)
    config.enable_chrome_trace = True
    tab.browser.platform.tracing_controller.StartTracing(config)

  def ValidateAndMeasurePage(self, page, tab, results):
    # Stop tracing first so the trace covers only the page run, then close
    # out the power metric.
    timeline_data = tab.browser.platform.tracing_controller.StopTracing()[0]
    timeline_model = model.TimelineModel(timeline_data)
    self._power_metric.Stop(page, tab)
    self._power_metric.AddResults(tab, results)

    def _IsDone():
      return tab.EvaluateJavaScript('isDone')

    decode_image_events = timeline_model.GetAllEventsOfName(
        'ImageFrameGenerator::decode')
    # FIXME: Remove this when impl-side painting is on everywhere.
    if not decode_image_events:
      decode_image_events = timeline_model.GetAllEventsOfName('Decode Image')
    # If it is a real image page, then store only the last-minIterations
    # decode tasks.
    if (hasattr(
        page,
        'image_decoding_measurement_limit_results_to_min_iterations') and
        page.image_decoding_measurement_limit_results_to_min_iterations):
      assert _IsDone()
      min_iterations = tab.EvaluateJavaScript('minIterations')
      decode_image_events = decode_image_events[-min_iterations:]

    durations = [d.duration for d in decode_image_events]
    assert durations, 'Failed to find image decode trace events.'
    image_decoding_avg = sum(durations) / len(durations)
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'ImageDecoding_avg', 'ms', image_decoding_avg,
        description='Average decode time for images in 4 different '
                    'formats: gif, png, jpg, and webp. The image files are '
                    'located at chrome/test/data/image_decoding.'))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'ImageLoading_avg', 'ms',
        tab.EvaluateJavaScript('averageLoadingTimeMs()')))

  def DidRunPage(self, platform):
    # Always release the power metric and stop tracing, even after failures.
    self._power_metric.Close()
    if platform.tracing_controller.is_tracing_running:
      platform.tracing_controller.StopTracing()
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Harness injected into DHTML benchmark pages: records a page-load
// timestamp in |window.__pc_load_time| and forces GC between page visits.
(function() {

// The Mozilla DHTML performance tests need to explicitly call a function to
// trigger the next page visit, rather than directly using the onload handler.
// To meet needs of the DHTML performance tests without forking this head.js
// file, use a variable |__install_onload_handler| to indicate whether the
// |__onload| handler should be added to onload event listener.
// Install |__onload| by default if there is no pre-configuration.
// NOTE: the |var| below is hoisted to function scope, so this only assigns
// a default when the page has not predefined the variable.
if (typeof(__install_onload_handler) == 'undefined')
  var __install_onload_handler = true;

// This is the timeout used in setTimeout inside the DHTML tests. Chrome has
// a much more accurate timer resolution than other browsers do. This results
// in Chrome running these tests much faster than other browsers. In order to
// compare Chrome with other browsers on DHTML performance alone, set this
// value to ~15.
var __test_timeout = 0;

// Sets a cookie visible to the whole origin.
function __set_cookie(name, value) {
  document.cookie = name + "=" + value + "; path=/";
}

function __onbeforeunload() {
  // Call GC twice to cleanup JS heap before starting a new test.
  if (window.gc) {
    window.gc();
    window.gc();
  }
}

// The function |__onload| is used by the DHTML tests.
window.__onload = function() {
  if (!__install_onload_handler && !performance.timing.loadEventEnd)
    return;

  var unused = document.body.offsetHeight;  // force layout

  window.__pc_load_time = window.performance.now();
};

// The function |__eval_later| now is only used by the DHTML tests.
window.__eval_later = function(expression) {
  setTimeout(expression, __test_timeout);
};

if (window.parent == window) {  // Ignore subframes.
  window.__pc_load_time = null;
  addEventListener("load", __onload);
  addEventListener("beforeunload", __onbeforeunload);
}

})();
\ No newline at end of file
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import legacy_page_test
from telemetry.timeline.model import TimelineModel
from telemetry.timeline import tracing_config
from telemetry.util import statistics
from telemetry.value import scalar
class TaskExecutionTime(legacy_page_test.LegacyPageTest):
  """Reports the slowest trace tasks and the idle/normal time split.

  Traces a set of scheduler-related categories while the page runs, then
  emits the median self-duration of the slowest tasks per thread, plus the
  percentage of task time spent in each section (IDLE vs NORMAL).
  """

  # Slice name that marks entry into an idle task; slices nested beneath it
  # are bucketed into the IDLE section.
  IDLE_SECTION_TRIGGER = 'SingleThreadIdleTaskRunner::RunTask'
  IDLE_SECTION = 'IDLE'
  NORMAL_SECTION = 'NORMAL'

  _TIME_OUT_IN_SECONDS = 60  # Tracing timeout passed to StartTracing.
  _NUMBER_OF_RESULTS_TO_DISPLAY = 10  # Top-N slowest tasks reported.

  _BROWSER_THREADS = ['Chrome_ChildIOThread',
                      'Chrome_IOThread']
  _RENDERER_THREADS = ['Chrome_ChildIOThread',
                       'Chrome_IOThread',
                       'CrRendererMain']

  _CATEGORIES = ['benchmark',
                 'blink',
                 'blink.console',
                 'blink_gc',
                 'cc',
                 'gpu',
                 'ipc',
                 'renderer.scheduler',
                 'toplevel',
                 'v8',
                 'webkit.console']

  def __init__(self):
    super(TaskExecutionTime, self).__init__()
    # Both processes are extracted from the trace in ValidateAndMeasurePage.
    self._renderer_process = None
    self._browser_process = None
    # Results object cached for the _Add* helpers below.
    self._results = None

  def WillNavigateToPage(self, page, tab):
    del page  # unused
    config = tracing_config.TracingConfig()
    for category in self._CATEGORIES:
      config.chrome_trace_config.category_filter.AddIncludedCategory(
          category)
    config.enable_chrome_trace = True
    tab.browser.platform.tracing_controller.StartTracing(
        config, self._TIME_OUT_IN_SECONDS)

  def ValidateAndMeasurePage(self, page, tab, results):
    del page  # unused
    trace_data = tab.browser.platform.tracing_controller.StopTracing()[0]
    timeline_model = TimelineModel(trace_data)
    self._renderer_process = timeline_model.GetFirstRendererProcess(tab.id)
    self._browser_process = timeline_model.browser_process
    self._AddResults(results)

  def _AddResults(self, results):
    """Reports task data for all configured browser and renderer threads."""
    self._results = results
    for thread in self._BROWSER_THREADS:
      self._AddTasksFromThreadToResults(self._browser_process, thread)
    for thread in self._RENDERER_THREADS:
      self._AddTasksFromThreadToResults(self._renderer_process, thread)

  def _AddTasksFromThreadToResults(self, process, thread_name):
    """Reports section percentages and slowest tasks for one thread."""
    if process is None:
      return
    sections = TaskExecutionTime._GetSectionsForThread(process, thread_name)
    self._ReportSectionPercentages(sections.values(),
                                   '%s:%s' % (process.name, thread_name))
    # Create list with top |_NUMBER_OF_RESULTS_TO_DISPLAY| for each section.
    for section in sections.itervalues():
      if section.name == TaskExecutionTime.IDLE_SECTION:
        # Skip sections we don't report.
        continue
      self._AddSlowestTasksToResults(section.tasks.values())

  def _AddSlowestTasksToResults(self, tasks):
    """Emits the top-N tasks by median self-duration, slowest first."""
    sorted_tasks = sorted(
        tasks,
        key=lambda slice: slice.median_self_duration,
        reverse=True)
    for task in sorted_tasks[:self.GetExpectedResultCount()]:
      self._results.AddValue(scalar.ScalarValue(
          self._results.current_page,
          task.name,
          'ms',
          task.median_self_duration,
          description='Slowest tasks'))

  def _ReportSectionPercentages(self, section_values, metric_prefix):
    """Emits each section's share (%) of the thread's total task time."""
    all_sectionstotal_duration = sum(
        section.total_duration for section in section_values)
    if not all_sectionstotal_duration:
      # Nothing was recorded, so early out.
      return
    for section in section_values:
      section_name = section.name or TaskExecutionTime.NORMAL_SECTION
      section_percentage_of_total = (
          (section.total_duration * 100.0) / all_sectionstotal_duration)
      self._results.AddValue(scalar.ScalarValue(
          self._results.current_page,
          '%s:Section_%s' % (metric_prefix, section_name),
          '%',
          section_percentage_of_total,
          description='Idle task percentage'))

  @staticmethod
  def _GetSectionsForThread(process, target_thread):
    """Returns {section_name: Section} built from |target_thread|'s slices."""
    sections = {}
    for thread in process.threads.itervalues():
      if thread.name != target_thread:
        continue
      for task_slice in thread.IterAllSlices():
        _ProcessTasksForThread(
            sections,
            '%s:%s' % (process.name, thread.name),
            task_slice)
    return sections

  @staticmethod
  def GetExpectedResultCount():
    return TaskExecutionTime._NUMBER_OF_RESULTS_TO_DISPLAY
def _ProcessTasksForThread(
    sections,
    thread_name,
    task_slice,
    section_name=None):
  """Recursively folds |task_slice| and its sub-slices into |sections|.

  Each slice's self thread time is recorded against a human-readable task
  name under the current section (NORMAL by default, IDLE once an idle
  trigger slice has been crossed).
  """
  if task_slice.self_thread_time is None:
    # TRACE_EVENT_INSTANT slices carry no duration; nothing to record.
    return

  # Note: By setting a different section below we split off this task into
  # a different sorting bucket. To add extra granularity (e.g. tasks executed
  # during page loading) add logic to set a different section name here. The
  # section name is set before the slice's data is recorded so the triggering
  # event will be included in its own section (i.e. the idle trigger will be
  # recorded as an idle event).
  if task_slice.name == TaskExecutionTime.IDLE_SECTION_TRIGGER:
    section_name = TaskExecutionTime.IDLE_SECTION

  # Build the human-readable task name: thread, optional section, then the
  # most specific identifier available in the slice's args.
  name_parts = [thread_name]
  if section_name:
    name_parts.append(section_name)
  slice_args = task_slice.args
  if 'src_func' in slice_args:
    # Data contains the name of the timed function, use it as the name.
    name_parts.append(slice_args['src_func'])
  elif 'line' in slice_args:
    # Data contains IPC class and line numbers, use these as the name.
    name_parts.append('IPC_Class_' + str(slice_args['class']) +
                      ':Line_' + str(slice_args['line']))
  else:
    # Fall back to use the name of the task slice.
    name_parts.append(task_slice.name.lower())
  # Replace any '.'s with '_'s as V8 uses them and it confuses the dashboard.
  reported_name = ':'.join(name_parts).replace('.', '_')

  # If this task is in a new section create a section object and add it to
  # the section dictionary.
  if section_name not in sections:
    sections[section_name] = Section(section_name)
  sections[section_name].AddTask(reported_name, task_slice.self_thread_time)

  # Process sub slices recursively, passing the current section down.
  for child_slice in task_slice.sub_slices:
    _ProcessTasksForThread(
        sections,
        thread_name,
        child_slice,
        section_name)
class NameAndDurations(object):
  """Collects every self-duration observed for a single task name."""

  def __init__(self, name, self_duration):
    self.name = name
    # Start the sample list with the first observed duration.
    self.self_durations = [self_duration]

  def Update(self, self_duration):
    # Another slice of the same task: remember its duration as well.
    self.self_durations.append(self_duration)

  @property
  def median_self_duration(self):
    """Median of all recorded self-durations (robust against outliers)."""
    return statistics.Median(self.self_durations)
class Section(object):
  """Bucket of tasks plus their accumulated total duration.

  A section holds a dictionary, keyed on task name, of all the tasks that
  exist within it and the total duration of those tasks.
  """

  def __init__(self, name):
    self.name = name
    self.tasks = {}
    self.total_duration = 0

  def AddTask(self, name, duration):
    """Records |duration| against task |name| and the section total."""
    existing_task = self.tasks.get(name)
    if existing_task is not None:
      # Already seen this task (e.g. from an earlier slice); add the new
      # duration so we can calculate a median value later.
      existing_task.Update(duration)
    else:
      # This is a new task so create a new entry for it.
      self.tasks[name] = NameAndDurations(name, duration)
    # Accumulate total duration for all tasks in this section.
    self.total_duration += duration
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import decorators
from telemetry.internal.results import page_test_results
from telemetry.page import page as page_module
from telemetry.testing import options_for_unittests
from telemetry.testing import page_test_test_case
from telemetry.timeline import model as model_module
from telemetry.timeline import slice as slice_data
from telemetry.util import wpr_modes
from measurements import task_execution_time
class TestTaskExecutionTimePage(page_module.Page):
  """Minimal local page used to drive the task_execution_time measurement."""

  def __init__(self, page_set, base_dir):
    super(TestTaskExecutionTimePage, self).__init__(
        'file://blank.html', page_set, base_dir, name='blank.html')

  def RunPageInteractions(self, action_runner):
    # Scroll the page so the renderer produces some tasks to measure.
    with action_runner.CreateGestureInteraction('ScrollAction'):
      action_runner.ScrollPage()
# Disable for accessing private API of task_execution_time.
# pylint: disable=protected-access
class TaskExecutionTimeUnitTest(page_test_test_case.PageTestTestCase):
  """Tests for TaskExecutionTime using both real runs and mocked slices."""

  def setUp(self):
    self._options = options_for_unittests.GetCopy()
    self._options.browser_options.wpr_mode = wpr_modes.WPR_OFF
    # The first renderer thread name doubles as the mock thread's name.
    self._first_thread_name = (
        task_execution_time.TaskExecutionTime._RENDERER_THREADS[0])
    self._measurement = None
    self._page_set = None

  @decorators.Enabled('android')
  def testSomeResultsReturnedFromDummyPage(self):
    self._GenerateDataForEmptyPageSet()
    results = self.RunMeasurement(self._measurement,
                                  self._page_set,
                                  options=self._options)
    self.assertGreater(len(results.all_page_specific_values), 0)

  # http://crbug.com/466994
  @decorators.Disabled('all')
  def testSlicesConformToRequiredNamingConventionsUsingDummyPage(self):
    """This test ensures the presence of required keywords.

    Some arbitrary keywords are required to generate the names of the top 10
    tasks. The code has a weak dependency on 'src_func', 'class' and 'line'
    existing; if they exist in a slice's args they are used to generate a
    name, if they don't exist the code falls back to using the name of the
    slice, which is less clear.

    If the code has been refactored and these keywords no longer exist
    the code that relies on them in task_execution_time.py should be
    updated to use the appropriate technique for ascertaining this data
    (and this test changed in the same way).
    """
    self._GenerateDataForEmptyPageSet()
    self.RunMeasurement(self._measurement,
                        self._page_set,
                        options=self._options)
    required_keywords = {'src_func': 0, 'class': 0, 'line': 0}
    # Check all slices and count the uses of the required keywords.
    for thread in self._measurement._renderer_process.threads.itervalues():
      for slice_info in thread.IterAllSlices():
        _CheckSliceForKeywords(slice_info, required_keywords)
    # Confirm that all required keywords have at least one instance.
    for use_counts in required_keywords.itervalues():
      self.assertGreater(use_counts, 0)

  def testMockedResultsCorrectlyReturned(self):
    data = self._GenerateResultsFromMockedData()
    # Confirm we get back 4 results (3 tasks and a section-use %).
    self.assertEqual(len(data.results.all_page_specific_values), 4)
    # Check that the 3 tasks we added exist in the resulting output
    # sorted.
    task_prefix = 'process 1:%s:' % (self._first_thread_name)
    slow_result = self._findResultFromName(task_prefix + 'slow', data)
    self.assertEqual(slow_result.value, 1000)
    medium_result = self._findResultFromName(task_prefix + 'medium', data)
    self.assertEqual(medium_result.value, 500)
    fast_result = self._findResultFromName(task_prefix + 'fast', data)
    self.assertEqual(fast_result.value, 1)

  def testNonIdlePercentagesAreCorrect(self):
    data = self._GenerateResultsFromMockedData()
    # Confirm that 100% of tasks are in the normal section.
    percentage_result = self._findResultFromName(
        'process 1:%s:Section_%s' % (
            self._first_thread_name,
            task_execution_time.TaskExecutionTime.NORMAL_SECTION),
        data)
    self.assertEqual(percentage_result.value, 100)

  def testIdleTasksAreReported(self):
    data = self._GenerateResultsFromMockedIdleData()
    # The 'slow_sub_slice' should be inside the Idle section and therefore
    # removed from the results.
    for result in data.results.all_page_specific_values:
      if 'slow_sub_slice' in result.name:
        self.fail('Tasks within idle section should not be reported')
    # The 'not_idle' slice should not have the IDLE_SECTION added to its name
    # and should exist.
    for result in data.results.all_page_specific_values:
      if 'not_idle' in result.name:
        self.assertTrue(
            task_execution_time.TaskExecutionTime.IDLE_SECTION
            not in result.name)
        break
    else:
      self.fail('Task was incorrectly marked as Idle')

  def testIdlePercentagesAreCorrect(self):
    data = self._GenerateResultsFromMockedIdleData()
    # Check the percentage section usage is correctly calculated.
    # Total = 1000 (idle) + 250 (normal), so normal = (250 * 100) / 1250 = 20%.
    normal_percentage_result = self._findResultFromName(
        'process 1:%s:Section_%s' % (
            self._first_thread_name,
            task_execution_time.TaskExecutionTime.NORMAL_SECTION),
        data)
    self.assertEqual(normal_percentage_result.value, 20)
    # Check the percentage section usage is correctly calculated.
    idle_percentage_result = self._findResultFromName(
        'process 1:%s:Section_%s' % (
            self._first_thread_name,
            task_execution_time.TaskExecutionTime.IDLE_SECTION),
        data)
    self.assertEqual(idle_percentage_result.value, 80)

  def testTopNTasksAreCorrectlyReported(self):
    data = self._GenerateDataForEmptyPageSet()
    # Add too many increasing-duration tasks and confirm we only get the
    # slowest _NUMBER_OF_RESULTS_TO_DISPLAY tasks reported back.
    duration = 0
    extra = 5
    for duration in xrange(
        task_execution_time.TaskExecutionTime._NUMBER_OF_RESULTS_TO_DISPLAY +
        extra):
      data.AddSlice('task' + str(duration), 0, duration)
    # Run the code we are testing.
    self._measurement._AddResults(data.results)
    # Check that the last (i.e. biggest) _NUMBER_OF_RESULTS_TO_DISPLAY get
    # returned in the results.
    for duration in xrange(
        extra,
        extra +
        task_execution_time.TaskExecutionTime._NUMBER_OF_RESULTS_TO_DISPLAY):
      self._findResultFromName(
          'process 1:%s:task%s' % (self._first_thread_name, str(duration)),
          data)

  def _findResultFromName(self, name, data):
    """Returns the result named |name|, failing the test if it is missing."""
    for result in data.results.all_page_specific_values:
      if result.name == name:
        return result
    self.fail('Expected result "%s" missing.' % (name))

  def _GenerateResultsFromMockedData(self):
    """Adds fast/medium/slow mock slices and runs _AddResults over them."""
    data = self._GenerateDataForEmptyPageSet()
    data.AddSlice('fast', 0, 1)
    data.AddSlice('medium', 0, 500)
    data.AddSlice('slow', 0, 1000)
    # Run the code we are testing and return results.
    self._measurement._AddResults(data.results)
    return data

  def _GenerateResultsFromMockedIdleData(self):
    """Builds an idle parent slice with a nested slice, plus a normal task."""
    data = self._GenerateDataForEmptyPageSet()
    # Make a slice that looks like an idle task parent.
    slice_start_time = 0
    slow_slice_duration = 1000
    fast_slice_duration = 250
    parent_slice = data.AddSlice(
        task_execution_time.TaskExecutionTime.IDLE_SECTION_TRIGGER,
        slice_start_time,
        slow_slice_duration)
    # Add a sub-slice, this should be reported back as occurring in idle time.
    sub_slice = slice_data.Slice(
        None,
        'category',
        'slow_sub_slice',
        slice_start_time,
        slow_slice_duration,
        slice_start_time,
        slow_slice_duration,
        [])
    parent_slice.sub_slices.append(sub_slice)
    # Add a non-idle task.
    data.AddSlice('not_idle', slice_start_time, fast_slice_duration)
    # Run the code we are testing.
    self._measurement._AddResults(data.results)
    return data

  def _GenerateDataForEmptyPageSet(self):
    """Creates the measurement, a one-page page set, and mock thread data."""
    self._measurement = task_execution_time.TaskExecutionTime()
    self._page_set = self.CreateEmptyPageSet()
    page = TestTaskExecutionTimePage(self._page_set, self._page_set.base_dir)
    self._page_set.AddStory(page)
    # Get the name of a thread used by task_execution_time metric and set up
    # some dummy execution data pretending to be from that thread & process.
    data = TaskExecutionTestData(self._first_thread_name)
    self._measurement._renderer_process = data._renderer_process
    # Pretend we are about to run the tests to silence lower level asserts.
    data.results.WillRunPage(page)
    return data
def _CheckSliceForKeywords(slice_info, required_keywords):
for argument in slice_info.args:
if argument in required_keywords:
required_keywords[argument] += 1
# recurse into our sub-slices.
for sub_slice in slice_info.sub_slices:
_CheckSliceForKeywords(sub_slice, required_keywords)
class TaskExecutionTestData(object):
  """Holds a mock renderer process/thread and a results object for tests."""

  def __init__(self, thread_name):
    self._model = model_module.TimelineModel()
    self._renderer_process = self._model.GetOrCreateProcess(1)
    self._renderer_thread = self._renderer_process.GetOrCreateThread(2)
    self._renderer_thread.name = thread_name
    self._results = page_test_results.PageTestResults()

  @property
  def results(self):
    return self._results

  def AddSlice(self, name, timestamp, duration):
    """Appends a slice to the mock renderer thread and returns it.

    Wall-clock and thread timestamps/durations are deliberately identical.
    """
    new_slice = slice_data.Slice(
        None,
        'category',
        name,
        timestamp,
        duration,
        timestamp,
        duration,
        [])
    self._renderer_thread.all_slices.append(new_slice)
    return new_slice
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import legacy_page_test
from telemetry.timeline.model import TimelineModel
from telemetry.timeline import tracing_config
from telemetry.util import statistics
from telemetry.value import scalar
class V8GCTimes(legacy_page_test.LegacyPageTest):
  """Measures thread time spent in V8 garbage collection, split by idle time.

  Aggregates V8 GC trace slices on the renderer main thread into per-type
  totals/max/count/average scalars, attributes time inside idle tasks, and
  emits overall totals plus CPU-time stats for the thread.
  """

  _TIME_OUT_IN_SECONDS = 60  # Tracing timeout passed to StartTracing.
  _CATEGORIES = ['blink.console',
                 'renderer.scheduler',
                 'v8',
                 'webkit.console']
  _RENDERER_MAIN_THREAD = 'CrRendererMain'
  # Slice marking an idle task; GC events nested under it ran during idle.
  _IDLE_TASK_PARENT = 'SingleThreadIdleTaskRunner::RunTask'

  def __init__(self):
    super(V8GCTimes, self).__init__()

  def WillNavigateToPage(self, page, tab):
    del page  # unused
    config = tracing_config.TracingConfig()
    for category in self._CATEGORIES:
      config.chrome_trace_config.category_filter.AddIncludedCategory(
          category)
    config.enable_chrome_trace = True
    tab.browser.platform.tracing_controller.StartTracing(
        config, self._TIME_OUT_IN_SECONDS)

  def ValidateAndMeasurePage(self, page, tab, results):
    del page  # unused
    trace_data = tab.browser.platform.tracing_controller.StopTracing()[0]
    timeline_model = TimelineModel(trace_data)
    renderer_process = timeline_model.GetFirstRendererProcess(tab.id)
    self._AddV8MetricsToResults(renderer_process, results)

  def DidRunPage(self, platform):
    # Stop tracing if ValidateAndMeasurePage never got the chance to
    # (e.g. the page failed).
    if platform.tracing_controller.is_tracing_running:
      platform.tracing_controller.StopTracing()

  def _AddV8MetricsToResults(self, process, results):
    """Reports GC and CPU-time metrics for the renderer main thread."""
    if process is None:
      return
    for thread in process.threads.values():
      if thread.name != self._RENDERER_MAIN_THREAD:
        continue
      self._AddV8EventStatsToResults(thread, results)
      self._AddCpuTimeStatsToResults(thread, results)

  def _AddV8EventStatsToResults(self, thread, results):
    """Aggregates V8 GC slices on |thread| and emits per-type scalars."""
    v8_event_stats = [
        V8EventStat('V8.GCIncrementalMarking',
                    'v8_gc_incremental_marking',
                    'incremental marking steps'),
        V8EventStat('V8.GCScavenger',
                    'v8_gc_scavenger',
                    'scavenges'),
        V8EventStat('V8.GCCompactor',
                    'v8_gc_mark_compactor',
                    'mark-sweep-compactor'),
        V8EventStat('V8.GCFinalizeMC',
                    'v8_gc_finalize_incremental',
                    'finalization of incremental marking'),
        V8EventStat('V8.GCFinalizeMCReduceMemory',
                    'v8_gc_finalize_incremental_reduce_memory',
                    'finalization of incremental marking with memory reducer')]

    # Find all V8 GC events in the trace.
    for event in thread.IterAllSlices():
      event_stat = _FindV8EventStatForEvent(v8_event_stats, event.name)
      if not event_stat:
        continue
      event_stat.thread_duration += event.thread_duration
      event_stat.max_thread_duration = max(event_stat.max_thread_duration,
                                           event.thread_duration)
      event_stat.count += 1

      parent_idle_task = _ParentIdleTask(event)
      if parent_idle_task:
        allotted_idle_time = parent_idle_task.args['allotted_time_ms']
        idle_task_wall_overrun = 0
        if event.duration > allotted_idle_time:
          idle_task_wall_overrun = event.duration - allotted_idle_time
        # Don't count time over the deadline as being inside idle time.
        # Since the deadline should be relative to wall clock we compare
        # allotted_time_ms with wall duration instead of thread duration, and
        # then assume the thread duration was inside idle for the same
        # percentage of time.
        inside_idle = event.thread_duration * statistics.DivideIfPossibleOrZero(
            event.duration - idle_task_wall_overrun, event.duration)
        event_stat.thread_duration_inside_idle += inside_idle
        event_stat.idle_task_overrun_duration += idle_task_wall_overrun

    # Emit the per-type scalar values.
    for v8_event_stat in v8_event_stats:
      results.AddValue(scalar.ScalarValue(
          results.current_page, v8_event_stat.result_name, 'ms',
          v8_event_stat.thread_duration,
          description=('Total thread duration spent in %s' %
                       v8_event_stat.result_description)))
      results.AddValue(scalar.ScalarValue(
          results.current_page, '%s_max' % v8_event_stat.result_name, 'ms',
          v8_event_stat.max_thread_duration,
          description=('Max thread duration spent in %s' %
                       v8_event_stat.result_description)))
      results.AddValue(scalar.ScalarValue(
          results.current_page, '%s_count' % v8_event_stat.result_name, 'count',
          v8_event_stat.count,
          description=('Number of %s' %
                       v8_event_stat.result_description)))
      average_thread_duration = statistics.DivideIfPossibleOrZero(
          v8_event_stat.thread_duration, v8_event_stat.count)
      results.AddValue(scalar.ScalarValue(
          results.current_page, '%s_average' % v8_event_stat.result_name, 'ms',
          average_thread_duration,
          description=('Average thread duration spent in %s' %
                       v8_event_stat.result_description)))
      results.AddValue(scalar.ScalarValue(
          results.current_page,
          '%s_outside_idle' %
          v8_event_stat.result_name, 'ms',
          v8_event_stat.thread_duration_outside_idle,
          description=(
              'Total thread duration spent in %s outside of idle tasks' %
              v8_event_stat.result_description)))
      results.AddValue(
          scalar.ScalarValue(
              results.current_page,
              '%s_idle_deadline_overrun' %
              v8_event_stat.result_name, 'ms',
              v8_event_stat.idle_task_overrun_duration,
              description=(
                  'Total idle task deadline overrun for %s idle tasks' %
                  v8_event_stat.result_description)))
      results.AddValue(scalar.ScalarValue(
          results.current_page,
          '%s_percentage_idle' %
          v8_event_stat.result_name,
          'idle%',
          v8_event_stat.percentage_thread_duration_during_idle,
          description=(
              'Percentage of %s spent in idle time' %
              v8_event_stat.result_description)))

    # Add total metrics.
    gc_total = sum(x.thread_duration for x in v8_event_stats)
    gc_total_outside_idle = sum(
        x.thread_duration_outside_idle for x in v8_event_stats)
    gc_total_idle_deadline_overrun = sum(
        x.idle_task_overrun_duration for x in v8_event_stats)
    gc_total_percentage_idle = statistics.DivideIfPossibleOrZero(
        100 * (gc_total - gc_total_outside_idle), gc_total)

    results.AddValue(
        scalar.ScalarValue(
            results.current_page, 'v8_gc_total', 'ms',
            gc_total,
            description=('Total thread duration of all garbage '
                         'collection events')))
    results.AddValue(
        scalar.ScalarValue(
            results.current_page, 'v8_gc_total_outside_idle',
            'ms', gc_total_outside_idle,
            description=(
                'Total thread duration of all garbage collection events '
                'outside of idle tasks')))
    results.AddValue(
        scalar.ScalarValue(
            results.current_page,
            'v8_gc_total_idle_deadline_overrun', 'ms',
            gc_total_idle_deadline_overrun,
            description=(
                'Total idle task deadline overrun for all idle tasks garbage '
                'collection events')))
    results.AddValue(
        scalar.ScalarValue(
            results.current_page,
            'v8_gc_total_percentage_idle', 'idle%',
            gc_total_percentage_idle,
            description=(
                'Percentage of the thread duration of all garbage collection '
                'events spent inside of idle tasks')))

  def _AddCpuTimeStatsToResults(self, thread, results):
    """Reports wall-clock span and total CPU time of top-level slices."""
    if thread.toplevel_slices:
      start_time = min(s.start for s in thread.toplevel_slices)
      end_time = max(s.end for s in thread.toplevel_slices)
      duration = end_time - start_time
      cpu_time = sum(s.thread_duration for s in thread.toplevel_slices)
    else:
      duration = cpu_time = 0
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'duration', 'ms', duration))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'cpu_time', 'ms', cpu_time))
def _FindV8EventStatForEvent(v8_event_stats_list, event_name):
for v8_event_stat in v8_event_stats_list:
if v8_event_stat.src_event_name == event_name:
return v8_event_stat
return None
def _ParentIdleTask(event):
  """Returns the nearest enclosing idle-task slice of |event|, if any."""
  ancestor = event.parent_slice
  while ancestor:
    # pylint: disable=protected-access
    if ancestor.name == V8GCTimes._IDLE_TASK_PARENT:
      return ancestor
    ancestor = ancestor.parent_slice
  return None
class V8EventStat(object):
  """Accumulator for one V8 GC event type's durations and counts."""

  def __init__(self, src_event_name, result_name, result_description):
    self.src_event_name = src_event_name
    self.result_name = result_name
    self.result_description = result_description
    # Running totals; updated as matching trace slices are processed.
    self.thread_duration = 0.0
    self.thread_duration_inside_idle = 0.0
    self.idle_task_overrun_duration = 0.0
    self.max_thread_duration = 0.0
    self.count = 0

  @property
  def thread_duration_outside_idle(self):
    """Thread time not attributable to idle tasks."""
    return self.thread_duration - self.thread_duration_inside_idle

  @property
  def percentage_thread_duration_during_idle(self):
    """Share (0-100) of thread time that ran inside idle tasks."""
    return statistics.DivideIfPossibleOrZero(
        100 * self.thread_duration_inside_idle, self.thread_duration)
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import decorators
from telemetry.internal.results import page_test_results
from telemetry.page import page as page_module
from telemetry.testing import options_for_unittests
from telemetry.testing import page_test_test_case
from telemetry.timeline import model as model_module
from telemetry.util import wpr_modes
from measurements import v8_gc_times
class V8GCTimesTestPageHelper(object):
  """Builds a fake renderer timeline so V8GCTimes can be measured offline."""

  def __init__(self, page_set):
    self._page_set = page_set
    self._model = model_module.TimelineModel()
    self._renderer_process = self._model.GetOrCreateProcess(1)
    self._renderer_thread = self._renderer_process.GetOrCreateThread(2)
    # Must match V8GCTimes._RENDERER_MAIN_THREAD for the metric to pick
    # up the slices added below.
    self._renderer_thread.name = 'CrRendererMain'

  def AddEvent(self, category, name, thread_start, thread_duration,
               args=None, wall_start=None, wall_duration=None):
    """Appends a complete slice; wall times default to the thread times."""
    wall_start = wall_start or thread_start
    wall_duration = wall_duration or thread_duration
    self._renderer_thread.BeginSlice(category, name, wall_start, thread_start,
                                     args=args)
    self._renderer_thread.EndSlice(wall_start + wall_duration,
                                   thread_start + thread_duration)

  class MockV8GCTimesPage(page_module.Page):
    # Minimal page object; only used so results can be attached to a story.
    def __init__(self, page_set):
      super(V8GCTimesTestPageHelper.MockV8GCTimesPage, self).__init__(
          'file://blank.html', page_set, page_set.base_dir, name='blank.html')

  def MeasureFakePage(self):
    """Runs the V8GCTimes metric over the fake renderer process."""
    # Create a fake page and add it to the page set.
    results = page_test_results.PageTestResults()
    page = V8GCTimesTestPageHelper.MockV8GCTimesPage(self._page_set)
    self._page_set.AddStory(page)
    # Pretend we're about to run the tests to silence lower level asserts.
    results.WillRunPage(page)
    v8_gc_times_metric = v8_gc_times.V8GCTimes()
    # pylint: disable=protected-access
    v8_gc_times_metric._renderer_process = self._renderer_process
    # Finalize the timeline import.
    self._model.FinalizeImport()
    # Measure the V8GCTimes metric and return the results
    # pylint: disable=protected-access
    v8_gc_times_metric._AddV8MetricsToResults(self._renderer_process, results)
    results.DidRunPage(page)
    return results
class V8GCTimesTests(page_test_test_case.PageTestTestCase):
  """Unit tests for the V8GCTimes measurement, driven by synthetic traces."""

  def setUp(self):
    self._options = options_for_unittests.GetCopy()
    self._options.browser_options.wpr_mode = wpr_modes.WPR_OFF

  def testWithNoTraceEvents(self):
    """An empty timeline yields all-zero results."""
    test_page_helper = V8GCTimesTestPageHelper(
        self.CreateEmptyPageSet())
    results = test_page_helper.MeasureFakePage()
    self._AssertResultsEqual(_GetEmptyResults(), _ActualValues(results))

  def testWithNoGarbageCollectionEvents(self):
    """Top-level events contribute to duration/cpu_time but no GC stats."""
    test_page_helper = V8GCTimesTestPageHelper(
        self.CreateEmptyPageSet())
    test_page_helper.AddEvent(
        'toplevel', 'PostMessage', thread_start=0, thread_duration=14,
        wall_start=5, wall_duration=35)
    results = test_page_helper.MeasureFakePage()
    expected = _GetEmptyResults()
    expected['duration'] = ('ms', 35)
    expected['cpu_time'] = ('ms', 14)
    self._AssertResultsEqual(expected, _ActualValues(results))

  def testWithGarbageCollectionEvents(self):
    """GC events outside idle tasks are summed fully into *_outside_idle."""
    test_page_helper = V8GCTimesTestPageHelper(
        self.CreateEmptyPageSet())
    test_page_helper.AddEvent(
        'toplevel', 'PostMessage', thread_start=0, thread_duration=77,
        wall_start=5, wall_duration=88)
    test_page_helper.AddEvent('v8', 'V8.GCScavenger', 5, 4)
    test_page_helper.AddEvent('v8', 'V8.GCScavenger', 15, 3)
    test_page_helper.AddEvent('v8', 'V8.GCIncrementalMarking', 23, 4)
    test_page_helper.AddEvent('v8', 'V8.GCIncrementalMarking', 34, 2)
    test_page_helper.AddEvent('v8', 'V8.GCFinalizeMC', 38, 2)
    test_page_helper.AddEvent('v8', 'V8.GCFinalizeMC', 42, 3)
    test_page_helper.AddEvent('v8', 'V8.GCFinalizeMCReduceMemory', 46, 4)
    test_page_helper.AddEvent('v8', 'V8.GCFinalizeMCReduceMemory', 51, 5)
    test_page_helper.AddEvent('v8', 'V8.GCCompactor', 62, 4)
    test_page_helper.AddEvent('v8', 'V8.GCCompactor', 72, 5)
    results = test_page_helper.MeasureFakePage()
    expected = _GetEmptyResults()
    expected['duration'] = ('ms', 88)
    expected['cpu_time'] = ('ms', 77)
    expected['v8_gc_incremental_marking'] = ('ms', 6.0)
    expected['v8_gc_incremental_marking_average'] = ('ms', 3.0)
    expected['v8_gc_incremental_marking_count'] = ('count', 2)
    expected['v8_gc_incremental_marking_max'] = ('ms', 4.0)
    expected['v8_gc_incremental_marking_outside_idle'] = ('ms', 6.0)
    expected['v8_gc_finalize_incremental'] = ('ms', 5.0)
    expected['v8_gc_finalize_incremental_average'] = ('ms', 2.5)
    expected['v8_gc_finalize_incremental_count'] = ('count', 2)
    expected['v8_gc_finalize_incremental_max'] = ('ms', 3.0)
    expected['v8_gc_finalize_incremental_outside_idle'] = ('ms', 5.0)
    expected['v8_gc_finalize_incremental_reduce_memory'] = ('ms', 9.0)
    expected['v8_gc_finalize_incremental_reduce_memory_average'] = ('ms', 4.5)
    expected['v8_gc_finalize_incremental_reduce_memory_count'] = ('count', 2)
    expected['v8_gc_finalize_incremental_reduce_memory_max'] = ('ms', 5.0)
    expected['v8_gc_finalize_incremental_reduce_memory_outside_idle'] = (
        'ms', 9.0)
    expected['v8_gc_scavenger'] = ('ms', 7.0)
    expected['v8_gc_scavenger_average'] = ('ms', 3.5)
    expected['v8_gc_scavenger_count'] = ('count', 2)
    expected['v8_gc_scavenger_max'] = ('ms', 4.0)
    expected['v8_gc_scavenger_outside_idle'] = ('ms', 7.0)
    expected['v8_gc_mark_compactor'] = ('ms', 9.0)
    expected['v8_gc_mark_compactor_average'] = ('ms', 4.5)
    expected['v8_gc_mark_compactor_count'] = ('count', 2)
    expected['v8_gc_mark_compactor_max'] = ('ms', 5.0)
    expected['v8_gc_mark_compactor_outside_idle'] = ('ms', 9.0)
    expected['v8_gc_total'] = ('ms', 36.0)
    expected['v8_gc_total_outside_idle'] = ('ms', 36.0)
    self._AssertResultsEqual(expected, _ActualValues(results))

  def testWithIdleTaskGarbageCollectionEvents(self):
    """GC time nested under idle tasks is split into idle vs outside-idle."""
    test_page_helper = V8GCTimesTestPageHelper(
        self.CreateEmptyPageSet())
    test_page_helper.AddEvent(
        'toplevel', 'PostMessage', thread_start=0, thread_duration=57,
        wall_start=5, wall_duration=68)
    test_page_helper.AddEvent('v8', 'V8.GCScavenger', 5, 4)
    test_page_helper.AddEvent(
        'renderer.scheduler', 'SingleThreadIdleTaskRunner::RunTask',
        15, 4, {'allotted_time_ms': 12})
    test_page_helper.AddEvent('v8', 'V8.GCScavenger', 15, 3)
    test_page_helper.AddEvent('v8', 'V8.GCIncrementalMarking', 23, 4)
    test_page_helper.AddEvent(
        'renderer.scheduler', 'SingleThreadIdleTaskRunner::RunTask',
        34, 3, {'allotted_time_ms': 12})
    test_page_helper.AddEvent('v8', 'V8.GCIncrementalMarking', 34, 2)
    test_page_helper.AddEvent('v8', 'V8.GCCompactor', 42, 4)
    test_page_helper.AddEvent(
        'renderer.scheduler', 'SingleThreadIdleTaskRunner::RunTask',
        52, 6, {'allotted_time_ms': 12})
    test_page_helper.AddEvent('v8', 'V8.GCCompactor', 52, 5)
    results = test_page_helper.MeasureFakePage()
    expected = _GetEmptyResults()
    expected['duration'] = ('ms', 68)
    expected['cpu_time'] = ('ms', 57)
    expected['v8_gc_incremental_marking'] = ('ms', 6.0)
    expected['v8_gc_incremental_marking_average'] = ('ms', 3.0)
    expected['v8_gc_incremental_marking_count'] = ('count', 2)
    expected['v8_gc_incremental_marking_max'] = ('ms', 4.0)
    expected['v8_gc_incremental_marking_outside_idle'] = ('ms', 4.0)
    expected['v8_gc_incremental_marking_percentage_idle'] = (
        'idle%', 100 * 2 / 6.0)
    expected['v8_gc_scavenger'] = ('ms', 7.0)
    expected['v8_gc_scavenger_average'] = ('ms', 3.5)
    expected['v8_gc_scavenger_count'] = ('count', 2)
    expected['v8_gc_scavenger_max'] = ('ms', 4.0)
    expected['v8_gc_scavenger_outside_idle'] = ('ms', 4.0)
    expected['v8_gc_scavenger_percentage_idle'] = ('idle%', 100 * 3 / 7.0)
    expected['v8_gc_mark_compactor'] = ('ms', 9.0)
    expected['v8_gc_mark_compactor_average'] = ('ms', 4.5)
    expected['v8_gc_mark_compactor_count'] = ('count', 2)
    expected['v8_gc_mark_compactor_max'] = ('ms', 5.0)
    expected['v8_gc_mark_compactor_outside_idle'] = ('ms', 4.0)
    expected['v8_gc_mark_compactor_percentage_idle'] = ('idle%', 100 * 5 / 9.0)
    expected['v8_gc_total'] = ('ms', 22.0)
    expected['v8_gc_total_outside_idle'] = ('ms', 12.0)
    expected['v8_gc_total_percentage_idle'] = ('idle%', 100 * 10 / 22.0)
    self._AssertResultsEqual(expected, _ActualValues(results))

  def testWithIdleTaskOverruns(self):
    """GC running past an idle task's allotted time counts as overrun."""
    test_page_helper = V8GCTimesTestPageHelper(
        self.CreateEmptyPageSet())
    test_page_helper.AddEvent(
        'toplevel', 'PostMessage', thread_start=0, thread_duration=80,
        wall_start=5, wall_duration=92)
    test_page_helper.AddEvent(
        'renderer.scheduler', 'SingleThreadIdleTaskRunner::RunTask',
        15, 15, {'allotted_time_ms': 8})
    test_page_helper.AddEvent('v8', 'V8.GCScavenger', 15, 14)
    test_page_helper.AddEvent(
        'renderer.scheduler', 'SingleThreadIdleTaskRunner::RunTask',
        34, 15, {'allotted_time_ms': 6})
    test_page_helper.AddEvent('v8', 'V8.GCIncrementalMarking', 34, 14)
    test_page_helper.AddEvent(
        'renderer.scheduler', 'SingleThreadIdleTaskRunner::RunTask',
        52, 23, {'allotted_time_ms': 9})
    test_page_helper.AddEvent('v8', 'V8.GCCompactor', 52, 22)
    results = test_page_helper.MeasureFakePage()
    expected = _GetEmptyResults()
    expected['duration'] = ('ms', 92)
    expected['cpu_time'] = ('ms', 80)
    expected['v8_gc_incremental_marking'] = ('ms', 14.0)
    expected['v8_gc_incremental_marking_average'] = ('ms', 14.0)
    expected['v8_gc_incremental_marking_count'] = ('count', 1)
    expected['v8_gc_incremental_marking_max'] = ('ms', 14.0)
    expected['v8_gc_incremental_marking_outside_idle'] = ('ms', 8.0)
    expected['v8_gc_incremental_marking_idle_deadline_overrun'] = ('ms', 8.0)
    expected['v8_gc_incremental_marking_percentage_idle'] = (
        'idle%', 100 * 6 / 14.0)
    expected['v8_gc_scavenger'] = ('ms', 14.0)
    expected['v8_gc_scavenger_average'] = ('ms', 14.0)
    expected['v8_gc_scavenger_count'] = ('count', 1)
    expected['v8_gc_scavenger_max'] = ('ms', 14.0)
    expected['v8_gc_scavenger_outside_idle'] = ('ms', 6.0)
    expected['v8_gc_scavenger_idle_deadline_overrun'] = ('ms', 6.0)
    expected['v8_gc_scavenger_percentage_idle'] = ('idle%', 100 * 8 / 14.0)
    expected['v8_gc_mark_compactor'] = ('ms', 22.0)
    expected['v8_gc_mark_compactor_average'] = ('ms', 22.0)
    expected['v8_gc_mark_compactor_count'] = ('count', 1)
    expected['v8_gc_mark_compactor_max'] = ('ms', 22.0)
    expected['v8_gc_mark_compactor_outside_idle'] = ('ms', 13.0)
    expected['v8_gc_mark_compactor_idle_deadline_overrun'] = ('ms', 13.0)
    expected['v8_gc_mark_compactor_percentage_idle'] = ('idle%', 100 * 9 / 22.0)
    expected['v8_gc_total'] = ('ms', 50.0)
    expected['v8_gc_total_outside_idle'] = ('ms', 27.0)
    expected['v8_gc_total_idle_deadline_overrun'] = ('ms', 27.0)
    expected['v8_gc_total_percentage_idle'] = ('idle%', 100 * 23 / 50.0)
    self._AssertResultsEqual(expected, _ActualValues(results))

  def testWithIdleTaskWallDurationOverruns(self):
    """Wall-clock overrun of the idle deadline scales the idle attribution."""
    test_page_helper = V8GCTimesTestPageHelper(
        self.CreateEmptyPageSet())
    test_page_helper.AddEvent(
        'toplevel', 'PostMessage', thread_start=0, thread_duration=80,
        wall_start=5, wall_duration=92)
    test_page_helper.AddEvent(
        'renderer.scheduler', 'SingleThreadIdleTaskRunner::RunTask',
        15, 15, {'allotted_time_ms': 8})
    test_page_helper.AddEvent(
        'v8', 'V8.GCScavenger', thread_start=15, thread_duration=4,
        wall_start=15, wall_duration=14)
    results = test_page_helper.MeasureFakePage()
    expected = _GetEmptyResults()
    expected['duration'] = ('ms', 92)
    expected['cpu_time'] = ('ms', 80)
    expected['v8_gc_scavenger'] = ('ms', 4.0)
    expected['v8_gc_scavenger_average'] = ('ms', 4.0)
    expected['v8_gc_scavenger_count'] = ('count', 1)
    expected['v8_gc_scavenger_max'] = ('ms', 4.0)
    expected_outside_idle = 4.0 - (4.0 * 8 / 14)
    expected['v8_gc_scavenger_outside_idle'] = ('ms', expected_outside_idle)
    expected['v8_gc_scavenger_idle_deadline_overrun'] = ('ms', 6.0)
    expected['v8_gc_scavenger_percentage_idle'] = (
        'idle%', 100 * (4.0 - expected_outside_idle) / 4.0)
    expected['v8_gc_total'] = expected['v8_gc_scavenger']
    expected['v8_gc_total_outside_idle'] = (
        expected['v8_gc_scavenger_outside_idle'])
    expected['v8_gc_total_idle_deadline_overrun'] = (
        expected['v8_gc_scavenger_idle_deadline_overrun'])
    expected['v8_gc_total_percentage_idle'] = (
        expected['v8_gc_scavenger_percentage_idle'])
    self._AssertResultsEqual(expected, _ActualValues(results))

  def _AssertResultsEqual(self, expected, actual):
    """Asserts every expected (units, value) pair is present in actual."""
    # Plain iteration over the dict works under both Python 2 and Python 3;
    # dict.iterkeys() was removed in Python 3.
    for key in expected:
      self.assertIn(key, actual)
      self.assertEqual(expected[key], actual[key],
                       'Result for [' + key + '] - expected ' +
                       str(expected[key]) + ' but got ' + str(actual[key]))

  @decorators.Disabled('win')  # crbug.com/416502
  def testCleanUpTrace(self):
    self.TestTracingCleanedUp(v8_gc_times.V8GCTimes, self._options)
def _ActualValues(results):
return dict(list(
(v.name, (v.units, v.value))
for v in results.all_page_specific_values
))
def _GetEmptyResults():
return {'cpu_time': ('ms', 0.0),
'duration': ('ms', 0.0),
'v8_gc_incremental_marking': ('ms', 0.0),
'v8_gc_incremental_marking_average': ('ms', 0.0),
'v8_gc_incremental_marking_count': ('count', 0),
'v8_gc_incremental_marking_max': ('ms', 0.0),
'v8_gc_incremental_marking_idle_deadline_overrun': ('ms', 0.0),
'v8_gc_incremental_marking_outside_idle': ('ms', 0.0),
'v8_gc_incremental_marking_percentage_idle': ('idle%', 0.0),
'v8_gc_finalize_incremental': ('ms', 0.0),
'v8_gc_finalize_incremental_average': ('ms', 0.0),
'v8_gc_finalize_incremental_count': ('count', 0),
'v8_gc_finalize_incremental_max': ('ms', 0.0),
'v8_gc_finalize_incremental_idle_deadline_overrun': ('ms', 0.0),
'v8_gc_finalize_incremental_outside_idle': ('ms', 0.0),
'v8_gc_finalize_incremental_percentage_idle': ('idle%', 0.0),
'v8_gc_finalize_incremental_reduce_memory': ('ms', 0.0),
'v8_gc_finalize_incremental_reduce_memory_average': ('ms', 0.0),
'v8_gc_finalize_incremental_reduce_memory_count': ('count', 0),
'v8_gc_finalize_incremental_reduce_memory_max': ('ms', 0.0),
'v8_gc_finalize_incremental_reduce_memory_idle_deadline_overrun':
('ms', 0.0),
'v8_gc_finalize_incremental_reduce_memory_outside_idle': ('ms', 0.0),
'v8_gc_finalize_incremental_reduce_memory_percentage_idle':
('idle%', 0.0),
'v8_gc_mark_compactor': ('ms', 0.0),
'v8_gc_mark_compactor_average': ('ms', 0.0),
'v8_gc_mark_compactor_count': ('count', 0),
'v8_gc_mark_compactor_max': ('ms', 0.0),
'v8_gc_mark_compactor_idle_deadline_overrun': ('ms', 0.0),
'v8_gc_mark_compactor_outside_idle': ('ms', 0.0),
'v8_gc_mark_compactor_percentage_idle': ('idle%', 0.0),
'v8_gc_scavenger': ('ms', 0.0),
'v8_gc_scavenger_average': ('ms', 0.0),
'v8_gc_scavenger_count': ('count', 0),
'v8_gc_scavenger_max': ('ms', 0.0),
'v8_gc_scavenger_idle_deadline_overrun': ('ms', 0.0),
'v8_gc_scavenger_outside_idle': ('ms', 0.0),
'v8_gc_scavenger_percentage_idle': ('idle%', 0.0),
'v8_gc_total': ('ms', 0.0),
'v8_gc_total_idle_deadline_overrun': ('ms', 0.0),
'v8_gc_total_outside_idle': ('ms', 0.0)}
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment