Commit 33202a51 authored by Mikhail Khokhlov, committed by Commit Bot

[tools/perf] Factor out functions that work with trace processor

We have two different functions that call trace_processor in two
different modules. This CL factors them out into a single module
so that they are easier to reuse in Results Processor.

Bug: 990304
Change-Id: I1c94b0d48e520841b5c6e8bed08dec07138f02ac
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1939806
Commit-Queue: Mikhail Khokhlov <khokhlov@google.com>
Reviewed-by: Juan Antonio Navarro Pérez <perezju@chromium.org>
Cr-Commit-Position: refs/heads/master@{#720573}
parent 6ff04804
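For orientation, here is a minimal usage sketch (not part of this CL) of the consolidated core.tbmv3.trace_processor module introduced below; the trace_processor_shell location and trace paths are placeholders, and 'console_error_metric' is the example metric name used in the docstrings in this CL:

from core.tbmv3 import trace_processor

# Placeholder paths for illustration only.
TP_PATH = '/path/to/out/Debug/trace_processor_shell'
PROTO_TRACE = '/path/to/trace.pb'

# Convert a proto trace to the legacy JSON trace format.
json_trace = trace_processor.ConvertProtoTraceToJson(
    TP_PATH, PROTO_TRACE, '/path/to/trace.json')

# Compute a TBMv3 metric; returns a tracing HistogramSet.
histograms = trace_processor.RunMetric(
    TP_PATH, PROTO_TRACE, 'console_error_metric')
for hist in histograms:
  print('%s: %s' % (hist.name, hist.sample_values))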
......@@ -19,7 +19,7 @@ from py_utils import cloud_storage
from core import path_util
from core.results_processor import formatters
from core.results_processor import util
from core.results_processor import trace_processor
from core.tbmv3 import trace_processor
def ArgumentParser(standalone=False):
......
......@@ -22,8 +22,8 @@ from py_utils import cloud_storage
from core.results_processor import command_line
from core.results_processor import compute_metrics
from core.results_processor import formatters
from core.results_processor import trace_processor
from core.results_processor import util
from core.tbmv3 import trace_processor
from tracing.trace_data import trace_data
from tracing.value.diagnostics import all_diagnostics
......@@ -193,8 +193,8 @@ def ConvertProtoTraces(test_result, trace_processor_path):
                       CONVERTED_JSON_SUFFIX)
    json_trace_name = (posixpath.splitext(proto_trace_name)[0] +
                       CONVERTED_JSON_SUFFIX)
    trace_processor.ConvertProtoTracesToJson(
        trace_processor_path, [proto_file_path], json_file_path)
    trace_processor.ConvertProtoTraceToJson(
        trace_processor_path, proto_file_path, json_file_path)
    artifacts[json_trace_name] = {
        'filePath': json_file_path,
        'contentType': 'application/json',
......
# Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import gzip
import logging
import os
import shutil
import subprocess
from py_utils import tempfile_ext
TP_BINARY_NAME = 'trace_processor_shell'
EXPORT_JSON_QUERY_TEMPLATE = 'select export_json(%s)\n'
def _SqlString(s):
  """Produce a valid SQL string constant."""
  return "'%s'" % s.replace("'", "''")


def ConvertProtoTracesToJson(trace_processor_path, proto_files, json_path):
  if trace_processor_path is None:
    raise RuntimeError('Trace processor executable is not supplied. '
                       'Please use the --trace-processor-path flag.')
  if not os.path.isfile(trace_processor_path):
    raise RuntimeError("Can't find trace processor executable at %s" %
                       trace_processor_path)

  with tempfile_ext.NamedTemporaryFile() as concatenated_trace:
    for trace_file in proto_files:
      if trace_file.endswith('.pb.gz'):
        with gzip.open(trace_file, 'rb') as f:
          shutil.copyfileobj(f, concatenated_trace)
      else:
        with open(trace_file, 'rb') as f:
          shutil.copyfileobj(f, concatenated_trace)
    concatenated_trace.close()

    with tempfile_ext.NamedTemporaryFile() as query_file:
      query_file.write(EXPORT_JSON_QUERY_TEMPLATE % _SqlString(json_path))
      query_file.close()
      subprocess.check_call([
          trace_processor_path,
          concatenated_trace.name,
          '-q', query_file.name,
      ])

  logging.info('Converted json trace written to %s', json_path)
  return json_path
# Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import gzip
import os
import shutil
import tempfile
import unittest
from core.results_processor import trace_processor
import mock
class TraceProcessorTestCase(unittest.TestCase):
  def setUp(self):
    self.temp_dir = tempfile.mkdtemp()

  def tearDown(self):
    shutil.rmtree(self.temp_dir)

  def testConvertProtoTracesToJson(self):
    trace_plain = os.path.join(self.temp_dir, 'trace1.pb')
    with open(trace_plain, 'w') as f:
      f.write('a')
    trace_gzipped = os.path.join(self.temp_dir, 'trace2.pb.gz')
    with gzip.open(trace_gzipped, 'w') as f:
      f.write('b')

    with mock.patch('os.path.isfile', return_value=True):
      with mock.patch('subprocess.check_call'):
        trace_processor.ConvertProtoTracesToJson(
            '/path/to/tp', [trace_plain, trace_gzipped], '/path/to/json')
......@@ -8,11 +8,8 @@ from __future__ import print_function
import argparse
import json
import os
import subprocess
import sys
from collections import namedtuple
# TODO(crbug.com/1012687): Adding tools/perf to path. We can remove this when
# we have a wrapper script under tools/perf that sets up import paths more
# nicely.
......@@ -22,97 +19,13 @@ from core import path_util
path_util.AddPyUtilsToPath()
path_util.AddTracingToPath()
from tracing.value import histogram_set
from core.tbmv3 import trace_processor
_CHROMIUM_SRC_PATH = os.path.join(
    os.path.dirname(__file__), '..', '..', '..', '..')
_DEFAULT_TP_PATH = os.path.realpath(os.path.join(
    _CHROMIUM_SRC_PATH, 'out', 'Debug', 'trace_processor_shell'))
_METRICS_PATH = os.path.realpath(os.path.join(os.path.dirname(__file__),
                                              'metrics'))
MetricFiles = namedtuple('MetricFiles', ('sql', 'proto', 'config'))
def CreateMetricFiles(metric_name):
  return MetricFiles(
      sql=os.path.join(_METRICS_PATH, metric_name + '.sql'),
      proto=os.path.join(_METRICS_PATH, metric_name + '.proto'),
      config=os.path.join(_METRICS_PATH, metric_name + '_config.json'))


class MetricFileNotFound(Exception):
  pass


class TraceProcessorNotFound(Exception):
  pass


class TraceProcessorError(Exception):
  pass


def _CheckFilesExist(trace_processor_path, metric_files):
  if not os.path.exists(trace_processor_path):
    raise TraceProcessorNotFound('Could not find trace processor shell at %s'
                                 % trace_processor_path)

  # Currently assuming all metric files live in tbmv3/metrics directory. We
  # will revise this decision later.
  for filetype, path in metric_files._asdict().iteritems():
    if not os.path.exists(path):
      raise MetricFileNotFound('metric %s file not found at %s'
                               % (filetype, path))


def _RunTraceProcessorMetric(trace_processor_path, trace, metric_files):
  trace_processor_args = [trace_processor_path, trace,
                          '--run-metrics', metric_files.sql,
                          '--metrics-output=json',
                          '--extra-metrics', _METRICS_PATH]
  try:
    output = subprocess.check_output(trace_processor_args)
  except subprocess.CalledProcessError:
    raise TraceProcessorError(
        'Failed to compute metrics. Check trace processor logs.')
  return json.loads(output)
def _ScopedHistogramName(metric_name, histogram_name):
  """Returns the scoped histogram name by prepending the metric name.

  This is useful for avoiding histogram name collisions. The '_metric' suffix
  of the metric name is dropped from the scoped name. Example:
  _ScopedHistogramName("console_error_metric", "js_errors")
  => "console_error::js_errors"
  """
  metric_suffix = '_metric'
  suffix_length = len(metric_suffix)
  # TODO(crbug.com/1012687): Decide whether metrics should always have a
  # '_metric' suffix.
  if metric_name[-suffix_length:] == metric_suffix:
    scope = metric_name[:-suffix_length]
  else:
    scope = metric_name
  return '::'.join([scope, histogram_name])
def _ProduceHistograms(metric_name, metric_files, measurements):
  histograms = histogram_set.HistogramSet()
  with open(metric_files.config) as f:
    config = json.load(f)
  metric_root_field = 'perfetto.protos.' + metric_name
  for histogram_config in config['histograms']:
    histogram_name = histogram_config['name']
    samples = measurements[metric_root_field][histogram_name]
    scoped_histogram_name = _ScopedHistogramName(metric_name, histogram_name)
    description = histogram_config['description']
    histograms.CreateHistogram(scoped_histogram_name,
                               histogram_config['unit'], samples,
                               description=description)
  return histograms
def _WriteHistogramSetToFile(histograms, outfile):
......@@ -129,19 +42,15 @@ def Main():
                      help='Trace file you want to compute metric on')
  parser.add_argument('--metric', required=True,
                      help=('Name of the metric you want to run'))
  parser.add_argument('--trace_processor_path', default=_DEFAULT_TP_PATH,
  parser.add_argument('--trace-processor-path', default=_DEFAULT_TP_PATH,
                      help='Path to trace processor shell. '
                           'Default: %(default)s')
  parser.add_argument('--outfile', default='results.json',
                      help='Path to output file. Default: %(default)s')
  args = parser.parse_args()

  metric_files = CreateMetricFiles(args.metric)
  _CheckFilesExist(args.trace_processor_path, metric_files)
  measurements = _RunTraceProcessorMetric(args.trace_processor_path,
                                          args.trace, metric_files)
  histograms = _ProduceHistograms(args.metric, metric_files, measurements)
  histograms = trace_processor.RunMetric(args.trace_processor_path,
                                         args.trace, args.metric)
  _WriteHistogramSetToFile(histograms, args.outfile)
  print('JSON result created in file://%s' % (args.outfile))
......
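A note on the flag rename above: argparse converts dashes in option names to underscores when it builds the args namespace, so the existing args.trace_processor_path accesses keep working with the new --trace-processor-path spelling. A standalone sketch illustrating this behavior (not part of this CL; the paths are placeholders):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--trace-processor-path', default='/default/tp')
args = parser.parse_args(['--trace-processor-path', '/path/to/tp'])
# Dashes in the flag name become underscores in the attribute name.
assert args.trace_processor_path == '/path/to/tp'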
# Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import logging
import os
import subprocess
from collections import namedtuple
from py_utils import tempfile_ext
from tracing.value import histogram_set
TP_BINARY_NAME = 'trace_processor_shell'
EXPORT_JSON_QUERY_TEMPLATE = 'select export_json(%s)\n'
METRICS_PATH = os.path.realpath(os.path.join(os.path.dirname(__file__),
                                             'metrics'))
MetricFiles = namedtuple('MetricFiles', ('sql', 'proto', 'config'))
def _SqlString(s):
  """Produce a valid SQL string constant."""
  return "'%s'" % s.replace("'", "''")


def _CheckTraceProcessor(trace_processor_path):
  if trace_processor_path is None:
    raise RuntimeError('Trace processor executable is not supplied. '
                       'Please use the --trace-processor-path flag.')
  if not os.path.isfile(trace_processor_path):
    raise RuntimeError("Can't find trace processor executable at %s" %
                       trace_processor_path)


def _CreateMetricFiles(metric_name):
  # Currently assuming all metric files live in tbmv3/metrics directory. We
  # will revise this decision later.
  metric_files = MetricFiles(
      sql=os.path.join(METRICS_PATH, metric_name + '.sql'),
      proto=os.path.join(METRICS_PATH, metric_name + '.proto'),
      config=os.path.join(METRICS_PATH, metric_name + '_config.json'))
  for filetype, path in metric_files._asdict().iteritems():
    if not os.path.isfile(path):
      raise RuntimeError('metric %s file not found at %s' % (filetype, path))
  return metric_files
def _ScopedHistogramName(metric_name, histogram_name):
  """Returns the scoped histogram name by prepending the metric name.

  This is useful for avoiding histogram name collisions. The '_metric' suffix
  of the metric name is dropped from the scoped name. Example:
  _ScopedHistogramName("console_error_metric", "js_errors")
  => "console_error::js_errors"
  """
  metric_suffix = '_metric'
  suffix_length = len(metric_suffix)
  # TODO(crbug.com/1012687): Decide whether metrics should always have a
  # '_metric' suffix.
  if metric_name[-suffix_length:] == metric_suffix:
    scope = metric_name[:-suffix_length]
  else:
    scope = metric_name
  return '::'.join([scope, histogram_name])
def RunMetric(trace_processor_path, trace_file, metric_name):
  """Run a TBMv3 metric using trace processor.

  Args:
    trace_processor_path: path to the trace_processor executable.
    trace_file: path to the trace file.
    metric_name: the metric name (the corresponding files must exist in
        tbmv3/metrics directory).

  Returns:
    A HistogramSet with metric results.
  """
  _CheckTraceProcessor(trace_processor_path)
  metric_files = _CreateMetricFiles(metric_name)
  output = subprocess.check_output([
      trace_processor_path,
      trace_file,
      '--run-metrics', metric_files.sql,
      '--metrics-output=json',
      '--extra-metrics', METRICS_PATH
  ])
  measurements = json.loads(output)

  histograms = histogram_set.HistogramSet()
  with open(metric_files.config) as f:
    config = json.load(f)
  metric_root_field = 'perfetto.protos.' + metric_name
  for histogram_config in config['histograms']:
    histogram_name = histogram_config['name']
    samples = measurements[metric_root_field][histogram_name]
    scoped_histogram_name = _ScopedHistogramName(metric_name, histogram_name)
    description = histogram_config['description']
    histograms.CreateHistogram(scoped_histogram_name,
                               histogram_config['unit'], samples,
                               description=description)
  return histograms


def ConvertProtoTraceToJson(trace_processor_path, proto_file, json_path):
  """Convert proto trace to json using trace processor.

  Args:
    trace_processor_path: path to the trace_processor executable.
    proto_file: path to the proto trace file.
    json_path: path to the output file.

  Returns:
    Output path.
  """
  _CheckTraceProcessor(trace_processor_path)
  with tempfile_ext.NamedTemporaryFile() as query_file:
    query_file.write(EXPORT_JSON_QUERY_TEMPLATE % _SqlString(json_path))
    query_file.close()
    subprocess.check_call([
        trace_processor_path,
        proto_file,
        '-q', query_file.name,
    ])
  logging.info('Converted json trace written to %s', json_path)
  return json_path
# Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import os
import shutil
import tempfile
import unittest
from core.tbmv3 import trace_processor
import mock
class TraceProcessorTestCase(unittest.TestCase):
  def setUp(self):
    self.temp_dir = tempfile.mkdtemp()
    self.tp_path = os.path.join(self.temp_dir, 'trace_processor_shell')
    with open(self.tp_path, 'w'):
      pass
    with open(os.path.join(self.temp_dir, 'dummy_metric.sql'), 'w'):
      pass
    with open(os.path.join(self.temp_dir, 'dummy_metric.proto'), 'w'):
      pass
    with open(os.path.join(self.temp_dir,
                           'dummy_metric_config.json'), 'w') as config:
      json.dump(
          {
              'name': 'Dummy Metric',
              'histograms': [{
                  'name': 'value',
                  'description': 'dummy value',
                  'unit': 'count_smallerIsBetter',
              }],
          },
          config,
      )

  def tearDown(self):
    shutil.rmtree(self.temp_dir)

  def testConvertProtoTraceToJson(self):
    with mock.patch('subprocess.check_call'):
      trace_processor.ConvertProtoTraceToJson(
          self.tp_path, '/path/to/proto', '/path/to/json')

  def testRunMetric(self):
    metric_output = '{"perfetto.protos.dummy_metric": {"value": 7}}'
    with mock.patch('core.tbmv3.trace_processor.METRICS_PATH', self.temp_dir):
      with mock.patch('subprocess.check_output') as check_output_patch:
        check_output_patch.return_value = metric_output
        histograms = trace_processor.RunMetric(
            self.tp_path, '/path/to/proto', 'dummy_metric')
        hist = histograms.GetHistogramNamed('dummy::value')
        self.assertEqual(hist.unit, 'count_smallerIsBetter')
        self.assertEqual(hist.sample_values, [7])