Commit 07192d53 authored by Juan Antonio Navarro Perez, committed by Commit Bot

[tools/perf] Migrate press benchmarks to AddMeasurement

This is mostly an API change (*), the underlying implementation of
AddMeasurement is still the same (via legacy values in Telemetry), so
no behavior changes are expected.

(*) Except for a few measurement descriptions which were tweaked for
clarity.

Depends on:
https://chromium-review.googlesource.com/c/catapult/+/1840835

TBR=hablich@chromium.org

Bug: 999484
Change-Id: I01c79f1741dbbde6d179c33bb91defade8fd0155
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1841551
Commit-Queue: Juan Antonio Navarro Pérez <perezju@chromium.org>
Auto-Submit: Juan Antonio Navarro Pérez <perezju@chromium.org>
Reviewed-by: Caleb Rouleau <crouleau@chromium.org>
Cr-Commit-Position: refs/heads/master@{#705086}
parent 23739adc
......@@ -7,12 +7,10 @@ from telemetry.web_perf import timeline_based_measurement
class DualMetricMeasurement(story_test.StoryTest):
"""Test class for a benchmark that aggregates all metrics.
"""Test class supporting both ad hoc measurements and trace based metrics.
Assumes both javascript as well as tracing metrics might be defined.
All pages associated with this measurement must implement
GetJavascriptMetricValues().
Currently only works with PressStory pages, which implement
GetMeasurements().
"""
def __init__(self, tbm_options):
super(DualMetricMeasurement, self).__init__()
......@@ -43,8 +41,8 @@ class DualMetricMeasurement(story_test.StoryTest):
for histogram in results.current_page.GetJavascriptMetricHistograms():
results.AddHistogram(histogram)
else:
for value in results.current_page.GetJavascriptMetricValues():
results.AddValue(value)
for value in results.current_page.GetMeasurements():
results.AddMeasurement(**value)
# This call is necessary to convert the current ScalarValues to
# histograms before more histograms are added. If we don't,
# when histograms get added by TBM2 page_test_results will see those and
......
......@@ -4,7 +4,7 @@
from page_sets import press_story
from telemetry import story
from telemetry.value import scalar
class Jetstream2Story(press_story.PressStory):
URL = 'http://browserbench.org/JetStream/'
......@@ -57,25 +57,19 @@ class Jetstream2Story(press_story.PressStory):
})();"""
)
self.AddJavascriptMetricValue(
scalar.ScalarValue(self, 'Score', 'score', score))
self.AddMeasurement('Score', 'score', score)
for k, v in result.iteritems():
# Replace '.' in the benchmark name, because '.' is interpreted
# as a sub-category of the metric
benchmark = str(k).replace('.', '_')
self.AddJavascriptMetricValue(scalar.ScalarValue(
self, benchmark, 'score', v['Score'],
important=False,
description='Geometric mean of the iterations'))
self.AddJavascriptMetricValue(scalar.ScalarValue(
self, benchmark+'.Iterations', 'number', v['Iterations'],
important=False,
description='Total number of iterations'))
self.AddMeasurement(
benchmark, 'score', v['Score'],
description='Geometric mean of the iterations')
self.AddMeasurement(
'%s.Iterations' % benchmark, 'number', v['Iterations'],
description='Total number of iterations')
for sub_k, sub_v in v['SubResults'].iteritems():
self.AddJavascriptMetricValue(scalar.ScalarValue(
self, benchmark+'.'+str(sub_k), 'score', sub_v,
important=False))
self.AddMeasurement('%s.%s' % (benchmark, sub_k), 'score', sub_v)
class Jetstream2StorySet(story.StorySet):
......
......@@ -6,7 +6,6 @@ import json
from page_sets import press_story
from telemetry import story
from telemetry.util import statistics
from telemetry.value import list_of_scalar_values
class JetstreamStory(press_story.PressStory):
......@@ -41,9 +40,7 @@ class JetstreamStory(press_story.PressStory):
all_score_lists = []
for k, v in result.iteritems():
self.AddJavascriptMetricValue(list_of_scalar_values.ListOfScalarValues(
self, k.replace('.', '_'), 'score', v['result'],
important=False))
self.AddMeasurement(k.replace('.', '_'), 'score', v['result'])
# Collect all test scores to compute geometric mean.
for i, score in enumerate(v['result']):
if len(all_score_lists) <= i:
......@@ -52,9 +49,7 @@ class JetstreamStory(press_story.PressStory):
all_scores = []
for score_list in all_score_lists:
all_scores.append(statistics.GeometricMean(score_list))
self.AddJavascriptMetricValue(
list_of_scalar_values.ListOfScalarValues(
self, 'Score', 'score', all_scores))
self.AddMeasurement('Score', 'score', all_scores)
class JetstreamStorySet(story.StorySet):
......
......@@ -6,9 +6,6 @@ import json
from page_sets import press_story
from telemetry import story
from telemetry.value import list_of_scalar_values
from telemetry.value import scalar
DESCRIPTIONS = {
'ai-astar':
......@@ -78,18 +75,16 @@ class KrakenStory(press_story.PressStory):
for key in result_dict:
if key == 'v':
continue
self.AddJavascriptMetricValue(list_of_scalar_values.ListOfScalarValues(
self, key, 'ms', result_dict[key], important=False,
description=DESCRIPTIONS.get(key)))
self.AddMeasurement(key, 'ms', result_dict[key],
description=DESCRIPTIONS.get(key))
total += _Mean(result_dict[key])
# TODO(tonyg/nednguyen): This measurement shouldn't calculate Total. The
# results system should do that for us.
self.AddJavascriptMetricValue(scalar.ScalarValue(
self, 'Total', 'ms', total,
description='Total of the means of the results for each type '
'of benchmark in [Mozilla\'s Kraken JavaScript benchmark]'
'(http://krakenbenchmark.mozilla.org/)'))
self.AddMeasurement(
'Total', 'ms', total,
description='Sum of the mean runtime for each type of benchmark in '
"Mozilla's Kraken JavaScript benchmark")
class KrakenStorySet(story.StorySet):
......
......@@ -2,12 +2,11 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import story
from telemetry.util import statistics
from telemetry.value import scalar
from page_sets import press_story
_GB = 1024 * 1024 * 1024
DESCRIPTIONS = {
......@@ -91,18 +90,15 @@ class OctaneStory(press_story.PressStory):
if 'Skipped' not in score_and_name[1]:
name = score_and_name[0]
score = float(score_and_name[1])
self.AddJavascriptMetricValue(scalar.ScalarValue(
self, name, 'score', score, important=False,
description=DESCRIPTIONS.get(name)))
self.AddMeasurement(name, 'score', score,
description=DESCRIPTIONS.get(name))
# Collect all test scores to compute geometric mean.
all_scores.append(score)
total = statistics.GeometricMean(all_scores)
self.AddJavascriptMetricValue(
scalar.ScalarValue(self, 'Total.Score', 'score', total,
description='Geometric mean of the scores of each '
'individual benchmark in the Octane '
'benchmark collection.'))
self.AddMeasurement('Total.Score', 'score', total,
description='Geometric mean of the scores of each '
'individual benchmark in the Octane collection.')
class OctaneStorySet(story.StorySet):
......
......@@ -14,14 +14,16 @@ class PressStory(page_module.Page):
Example Implementation:
class FooPressStory:
URL = 'http://foo'
URL = 'http://example.com/foo_story'
NAME = 'FooStory'
def ExecuteTest(self, action_runner):
//Execute some javascript
// Execute some javascript
def ParseTestResults(self, action_runner):
some_value = action_runner.EvaluateJavascript("some javascript")
self.AddJavascriptMetricValue(some_value)
js_code = 'some_js_expression;'
self.AddJavaScriptMeasurement(name, unit, js_code)
"""
URL = None
DETERMINISTIC_JS = False
......@@ -33,19 +35,59 @@ class PressStory(page_module.Page):
base_dir=ps.base_dir,
make_javascript_deterministic=self.DETERMINISTIC_JS,
name=self.NAME if self.NAME else self.URL)
self._values = []
self._measurements = []
self._histogram_values = []
def GetJavascriptMetricValues(self):
return self._values
def AddJavascriptMetricValue(self, value):
self._values.append(value)
self._action_runner = None
def AddMeasurement(self, name, unit, samples, description=None):
  """Record an ad-hoc measurement for this story.

  Args:
    name: A string with the name of the measurement (e.g. 'score',
      'runtime', etc).
    unit: A string specifying the unit used for measurements (e.g. 'ms',
      'count', etc).
    samples: Either a single numeric value or a list of numeric values to
      record as part of this measurement.
    description: An optional string with a short human readable description
      of the measurement.
  """
  # TODO(crbug.com/999484): Ideally, these should be recorded directly into
  # the results object, rather than held on this temporary list. That needs,
  # however, another slight refactor to make the results object available at
  # this point.
  measurement = {
      'name': name,
      'unit': unit,
      'samples': samples,
      'description': description,
  }
  self._measurements.append(measurement)
def AddJavaScriptMeasurement(self, name, unit, code, **kwargs):
  """Run some JavaScript to obtain and record an ad-hoc measurement.

  The given code is evaluated on the current tab, and its result is recorded
  via AddMeasurement.

  Args:
    name: A string with the name of the measurement (e.g. 'score',
      'runtime', etc).
    unit: A string specifying the unit used for measurements (e.g. 'ms',
      'count', etc).
    code: A piece of JavaScript code to run on the current tab; it must
      return either a single or a list of numeric values. These are the
      values for the measurement to be recorded.
    description: An optional string with a short human readable description
      of the measurement.
    Other keyword arguments provide values to be interpolated within
      the JavaScript code. See telemetry.util.js_template for details.
  """
  options = dict(kwargs)
  # 'description' is consumed here; all remaining kwargs are interpolation
  # values for the JavaScript template.
  description = options.pop('description', None)
  samples = self._action_runner.EvaluateJavaScript(code, **options)
  self.AddMeasurement(name, unit, samples, description)
def GetMeasurements(self):
  """Return the list of measurement dicts recorded via AddMeasurement."""
  return self._measurements
def GetJavascriptMetricHistograms(self):
  """DEPRECATED: Use measurements instead.

  Returns the list of histogram objects previously recorded via
  AddJavascriptMetricHistogram.
  """
  return self._histogram_values
def AddJavascriptMetricHistogram(self, value):
  """DEPRECATED: Use measurements instead.

  Stores a pre-built histogram object for later retrieval via
  GetJavascriptMetricHistograms().
  """
  self._histogram_values.append(value)
def ExecuteTest(self, action_runner):
......@@ -55,5 +97,9 @@ class PressStory(page_module.Page):
pass
def RunPageInteractions(self, action_runner):
self.ExecuteTest(action_runner)
self.ParseTestResults(action_runner)
self._action_runner = action_runner
try:
self.ExecuteTest(action_runner)
self.ParseTestResults(action_runner)
finally:
self._action_runner = None
......@@ -3,10 +3,9 @@
# found in the LICENSE file.
from telemetry import story
from telemetry.value import list_of_scalar_values
from page_sets import press_story
class SpeedometerStory(press_story.PressStory):
URL='http://browserbench.org/Speedometer/'
......@@ -44,30 +43,24 @@ class SpeedometerStory(press_story.PressStory):
timeout=600)
def ParseTestResults(self, action_runner):
self.AddJavascriptMetricValue(list_of_scalar_values.ListOfScalarValues(
self, 'Total', 'ms',
action_runner.EvaluateJavaScript('benchmarkClient._timeValues'),
important=True))
self.AddJavascriptMetricValue(list_of_scalar_values.ListOfScalarValues(
self, 'RunsPerMinute', 'score',
action_runner.EvaluateJavaScript(
'[parseFloat(document.getElementById("result-number").innerText)];'
),
important=True))
self.AddJavaScriptMeasurement('Total', 'ms', 'benchmarkClient._timeValues')
self.AddJavaScriptMeasurement(
'RunsPerMinute', 'score',
'[parseFloat(document.getElementById("result-number").innerText)];')
# Extract the timings for each suite
for suite_name in self.enabled_suites:
self.AddJavascriptMetricValue(list_of_scalar_values.ListOfScalarValues(
self, suite_name, 'ms',
action_runner.EvaluateJavaScript("""
var suite_times = [];
for(var i = 0; i < benchmarkClient.iterationCount; i++) {
suite_times.push(
benchmarkClient._measuredValues[i].tests[{{ key }}].total);
};
suite_times;
""",
key=suite_name), important=False))
self.AddJavaScriptMeasurement(
suite_name, 'ms',
"""
var suite_times = [];
for(var i = 0; i < benchmarkClient.iterationCount; i++) {
suite_times.push(
benchmarkClient._measuredValues[i].tests[{{ key }}].total);
};
suite_times;
""",
key=suite_name)
class SpeedometerStorySet(story.StorySet):
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment