Commit 878e2919 authored by Mikhail Khokhlov, committed by Commit Bot

[tools/perf] Improve logging in Results Processor

Removes trace processor output from logs. Unifies log format. Leaves
at most one info log message per processing stage.

Bug: 981349
Change-Id: I7cc5abe2e8d06504ca2ff885800d23798251d10b
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1953605
Reviewed-by: Eric Seckler <eseckler@chromium.org>
Commit-Queue: Mikhail Khokhlov <khokhlov@google.com>
Cr-Commit-Position: refs/heads/master@{#723381}
parent f961468b
......@@ -24,9 +24,6 @@ def _RunMetric(test_result, metrics):
html_local_path = html_trace['filePath']
html_remote_url = html_trace.get('remoteUrl')
logging.info('%s: Starting to compute metrics on trace.',
test_result['testPath'])
start = time.time()
# The timeout needs to be coordinated with the Swarming IO timeout for the
# task that runs this code. If this timeout is longer or close in length
# to the swarming IO timeout then we risk being forcibly killed for not
......@@ -37,8 +34,6 @@ def _RunMetric(test_result, metrics):
html_local_path, metrics, canonical_url=html_remote_url,
timeout=TEN_MINUTES,
extra_import_options={'trackDetailedModelStats': True})
logging.info('%s: Computing metrics took %.3f seconds.' % (
test_result['testPath'], time.time() - start))
if mre_result.failures:
util.SetUnexpectedFailure(test_result)
......@@ -83,7 +78,10 @@ def ComputeTBMv2Metrics(test_result):
test_result['testPath'], trace_size_in_mib)
return
start = time.time()
test_result['_histograms'].ImportDicts(_RunMetric(test_result, metrics))
logging.info('%s: Computing TBMv2 metrics took %.3f seconds.' % (
test_result['testPath'], time.time() - start))
def ComputeTBMv3Metrics(test_result, trace_processor_path):
......@@ -107,9 +105,11 @@ def ComputeTBMv3Metrics(test_result, trace_processor_path):
test_result['testPath'])
return
start = time.time()
for metric in metrics:
logging.info('%s: Computing metric %s.', test_result['testPath'], metric)
histograms = trace_processor.RunMetric(
trace_processor_path, artifacts[CONCATENATED_PROTO_NAME]['filePath'],
metric)
test_result['_histograms'].Merge(histograms)
logging.info('%s: Computing TBMv3 metrics took %.3f seconds.' % (
test_result['testPath'], time.time() - start))
......@@ -194,8 +194,6 @@ def ConvertProtoTraces(test_result, trace_processor_path):
# individual ones.
for proto_trace_name in proto_traces:
proto_file_path = artifacts[proto_trace_name]['filePath']
logging.info('%s: Converting proto trace %s.',
test_result['testPath'], proto_file_path)
json_file_path = (os.path.splitext(proto_file_path)[0] +
CONVERTED_JSON_SUFFIX)
json_trace_name = (posixpath.splitext(proto_trace_name)[0] +
......@@ -206,6 +204,8 @@ def ConvertProtoTraces(test_result, trace_processor_path):
'filePath': json_file_path,
'contentType': 'application/json',
}
logging.info('%s: Proto trace converted. Source: %s. Destination: %s.',
test_result['testPath'], proto_file_path, json_file_path)
def AggregateTBMv2Traces(test_result):
......@@ -220,13 +220,13 @@ def AggregateTBMv2Traces(test_result):
if traces:
trace_files = [artifacts[name]['filePath'] for name in traces]
html_path = _BuildOutputPath(trace_files, compute_metrics.HTML_TRACE_NAME)
logging.info('%s: Aggregating traces %s.',
test_result['testPath'], trace_files)
trace_data.SerializeAsHtml(trace_files, html_path)
artifacts[compute_metrics.HTML_TRACE_NAME] = {
'filePath': html_path,
'contentType': 'text/html',
}
logging.info('%s: TBMv2 traces aggregated. Sources: %s. Destination: %s.',
test_result['testPath'], trace_files, html_path)
for name in traces:
del artifacts[name]
......@@ -256,6 +256,8 @@ def AggregateTBMv3Traces(test_result):
'filePath': concatenated_path,
'contentType': 'application/x-protobuf',
}
logging.info('%s: Proto traces aggregated. Sources: %s. Destination: %s.',
test_result['testPath'], proto_files, concatenated_path)
for name in traces:
del artifacts[name]
......@@ -291,7 +293,7 @@ def UploadArtifacts(test_result, upload_bucket, run_identifier):
urlsafe_remote_name = re.sub(r'[^A-Za-z0-9/.-]+', '_', remote_name)
artifact['remoteUrl'] = cloud_storage.Insert(
upload_bucket, urlsafe_remote_name, artifact['filePath'])
logging.info('Uploaded %s of %s to %s', name, test_result['testPath'],
logging.info('%s: Uploaded %s to %s', test_result['testPath'], name,
artifact['remoteUrl'])
......
......@@ -3,7 +3,6 @@
# found in the LICENSE file.
import json
import logging
import os
import subprocess
......@@ -35,6 +34,18 @@ def _CheckTraceProcessor(trace_processor_path):
trace_processor_path)
def _RunTraceProcessor(*args):
"""Run trace processor shell with given command line arguments."""
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
if p.returncode == 0:
return stdout
else:
raise RuntimeError(
'Running trace processor failed. Command line:\n%s\nStderr:\n%s\n' %
(' '.join(args), stderr))
def _CreateMetricFiles(metric_name):
# Currently assuming all metric files live in tbmv3/metrics directory. We will
# revise this decision later.
......@@ -81,13 +92,13 @@ def RunMetric(trace_processor_path, trace_file, metric_name):
"""
_CheckTraceProcessor(trace_processor_path)
metric_files = _CreateMetricFiles(metric_name)
output = subprocess.check_output([
output = _RunTraceProcessor(
trace_processor_path,
trace_file,
'--run-metrics', metric_files.sql,
'--metrics-output=json',
'--extra-metrics', METRICS_PATH
])
)
measurements = json.loads(output)
histograms = histogram_set.HistogramSet()
......@@ -120,12 +131,10 @@ def ConvertProtoTraceToJson(trace_processor_path, proto_file, json_path):
with tempfile_ext.NamedTemporaryFile() as query_file:
query_file.write(EXPORT_JSON_QUERY_TEMPLATE % _SqlString(json_path))
query_file.close()
subprocess.check_call([
_RunTraceProcessor(
trace_processor_path,
proto_file,
'-q', query_file.name,
])
logging.info('Converted json trace written to %s', json_path)
)
return json_path
......@@ -12,6 +12,7 @@ from core.tbmv3 import trace_processor
import mock
RUN_METHOD = 'core.tbmv3.trace_processor._RunTraceProcessor'
class TraceProcessorTestCase(unittest.TestCase):
def setUp(self):
......@@ -41,7 +42,7 @@ class TraceProcessorTestCase(unittest.TestCase):
shutil.rmtree(self.temp_dir)
def testConvertProtoTraceToJson(self):
with mock.patch('subprocess.check_call'):
with mock.patch(RUN_METHOD):
trace_processor.ConvertProtoTraceToJson(
self.tp_path, '/path/to/proto', '/path/to/json')
......@@ -49,8 +50,8 @@ class TraceProcessorTestCase(unittest.TestCase):
metric_output = '{"perfetto.protos.dummy_metric": {"value": 7}}'
with mock.patch('core.tbmv3.trace_processor.METRICS_PATH', self.temp_dir):
with mock.patch('subprocess.check_output') as check_output_patch:
check_output_patch.return_value = metric_output
with mock.patch(RUN_METHOD) as run_patch:
run_patch.return_value = metric_output
histograms = trace_processor.RunMetric(
self.tp_path, '/path/to/proto', 'dummy_metric')
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment