Commit a3d2110e authored by eakuefner, committed by Commit bot

[Telemetry] Add option to output Chart JSON summary.

This CL adds a --chartjson option to the test runner that summarizes results into
the Chart JSON format the perf dashboard has been modified to accept. Passing
--chartjson causes the --output-format option to be ignored.
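For context, a rough sketch of how the new flag might be consumed; the runner path, the benchmark name, and the assumption that only the Chart JSON summary is printed to stdout are hypothetical, not taken from this CL:

import json
import subprocess

# Hypothetical invocation; the script path and benchmark name are
# placeholders, and the runner is assumed to print nothing but the
# Chart JSON summary to stdout for this parse to succeed.
output = subprocess.check_output(
    ['tools/perf/run_benchmark', 'dummy_benchmark', '--chartjson'])

# With --chartjson the summary is a single JSON document, so it can be
# loaded straight into a dict.
chart_dict = json.loads(output)
print(sorted(chart_dict.keys()))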

Review URL: https://codereview.chromium.org/531973002

Cr-Commit-Position: refs/heads/master@{#293089}
parent 40921469
@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
+import json
 import logging
 import optparse
 import os
@@ -17,6 +18,7 @@ from telemetry.page import page_runner
 from telemetry.page import page_set
 from telemetry.page import page_test
 from telemetry.page import test_expectations
+from telemetry.results import chart_json
 from telemetry.results import results_options
 from telemetry.util import cloud_storage
@@ -89,13 +91,20 @@ class Benchmark(command_line.Command):
     self._DownloadGeneratedProfileArchive(finder_options)

-    results = results_options.CreateResults(self.GetMetadata(), finder_options)
+    benchmark_metadata = self.GetMetadata()
+    results = results_options.CreateResults(benchmark_metadata, finder_options)
     try:
       page_runner.Run(pt, ps, expectations, finder_options, results)
     except page_test.TestNotSupportedOnPlatformFailure as failure:
       logging.warning(str(failure))
-    results.PrintSummary()
+    if finder_options.chartjson:
+      print json.dumps(chart_json.ResultsAsChartDict(
+          benchmark_metadata,
+          results.all_page_specific_values,
+          results.all_summary_values))
+    else:
+      results.PrintSummary()
     return len(results.failures)

   def _DownloadGeneratedProfileArchive(self, options):
...
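For orientation, the dict serialized by the new --chartjson path above has roughly the shape sketched below. The key names and values are illustrative assumptions about the Chart JSON format, not output captured from this CL:

# Illustrative only: an assumed shape for the serialized Chart JSON with a
# single scalar value per page; every key and value here is an example.
example_chart_dict = {
    'format_version': '1.0',
    'benchmark_name': 'dummy_benchmark',
    'charts': {
        'warm_times': {
            'http://example.com/': {
                'type': 'scalar',
                'units': 'ms',
                'value': 9.0,
            },
        },
    },
}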
@@ -22,6 +22,8 @@ _OUTPUT_FORMAT_CHOICES = ('html', 'buildbot', 'block', 'csv', 'gtest', 'json',
 def AddResultsOptions(parser):
   group = optparse.OptionGroup(parser, 'Results options')
+  group.add_option('--chartjson', action='store_true',
+                   help='Output Chart JSON. Ignores --output-format.')
   group.add_option('--output-format',
                    default=_OUTPUT_FORMAT_CHOICES[0],
                    choices=_OUTPUT_FORMAT_CHOICES,
@@ -46,7 +48,7 @@ def AddResultsOptions(parser):
   parser.add_option_group(group)

-def CreateResults(metadata, options):
+def CreateResults(benchmark_metadata, options):
   """
   Args:
     options: Contains the options specified in AddResultsOptions.
@@ -73,7 +75,7 @@ def CreateResults(metadata, options):
   output_formatters = []
   output_skipped_tests_summary = True
   reporter = None
-  if options.output_format == 'none':
+  if options.output_format == 'none' or options.chartjson:
     pass
   elif options.output_format == 'csv':
     output_formatters.append(csv_output_formatter.CsvOutputFormatter(
@@ -95,12 +97,13 @@ def CreateResults(metadata, options):
     output_formatters.append(buildbot_output_formatter.BuildbotOutputFormatter(
         sys.stdout, trace_tag=options.output_trace_tag))
     output_formatters.append(html_output_formatter.HtmlOutputFormatter(
-        output_stream, metadata, options.reset_results,
+        output_stream, benchmark_metadata, options.reset_results,
         options.upload_results, options.browser_type,
         options.results_label, trace_tag=options.output_trace_tag))
   elif options.output_format == 'json':
     output_formatters.append(
-        json_output_formatter.JsonOutputFormatter(output_stream, metadata))
+        json_output_formatter.JsonOutputFormatter(output_stream,
+                                                  benchmark_metadata))
   else:
     # Should never be reached. The parser enforces the choices.
     raise Exception('Invalid --output-format "%s". Valid choices are: %s'
...
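To make the option interaction concrete, here is a minimal standalone sketch of the precedence logic; it mirrors the names in the diff but is a simplified reproduction, not the actual Telemetry code:

import optparse

# Simplified reproduction of the options added above, to show why
# --chartjson makes the runner ignore --output-format.
parser = optparse.OptionParser()
group = optparse.OptionGroup(parser, 'Results options')
group.add_option('--chartjson', action='store_true',
                 help='Output Chart JSON. Ignores --output-format.')
group.add_option('--output-format', default='html',
                 choices=('html', 'buildbot', 'csv', 'json', 'none'))
parser.add_option_group(group)

options, _ = parser.parse_args(['--chartjson', '--output-format', 'html'])

# Mirrors the check in CreateResults: when --chartjson is passed, no
# regular output formatter is created, regardless of --output-format.
if options.output_format == 'none' or options.chartjson:
    output_formatters = []
else:
    output_formatters = ['formatter for %s' % options.output_format]

print(output_formatters)  # prints [] because --chartjson takes precedence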