Commit 07e12129 authored by Mikhail Khokhlov, committed by Commit Bot

[tools/perf] Implement artifact uploading in Results Processor

Bug: 981349, 956308
Change-Id: I887cd6752df75d33babfb187d8da1ad78112f4f8
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1824088
Reviewed-by: Juan Antonio Navarro Pérez <perezju@chromium.org>
Commit-Queue: Mikhail Khokhlov <khokhlov@google.com>
Cr-Commit-Position: refs/heads/master@{#702008}
parent 49afa593
@@ -9,8 +9,12 @@ the standalone version of Results Processor.
 """
 
 import json
+import logging
 import os
+import random
+import re
 
+from py_utils import cloud_storage
 from core.results_processor import command_line
 from core.results_processor import compute_metrics
 from core.results_processor import formatters
@@ -43,7 +47,8 @@ def ProcessResults(options):
   _AggregateTraces(intermediate_results)
 
-  _UploadArtifacts(intermediate_results, options.upload_bucket)
+  UploadArtifacts(
+      intermediate_results, options.upload_bucket, options.results_label)
 
   if any(fmt in FORMATS_WITH_METRICS for fmt in options.output_formats):
     histogram_dicts = _ComputeMetrics(intermediate_results,
@@ -88,20 +93,48 @@ def _AggregateTraces(intermediate_results):
       del artifacts[trace]
 
 
-def _UploadArtifacts(intermediate_results, upload_bucket):
+def _RemoteName(results_label, start_time, test_path, artifact_name):
+  """Construct a name for a given artifact, under which it will be
+  stored in the cloud.
+  """
+  if results_label:
+    identifier_parts = [re.sub(r'\W+', '_', results_label)]
+  else:
+    identifier_parts = []
+  # Time is rounded to seconds and delimiters are removed.
+  # The first 19 chars of the string match 'YYYY-MM-DDTHH:MM:SS'.
+  identifier_parts.append(re.sub(r'\W+', '', start_time[:19]))
+  identifier_parts.append(str(random.randint(1, 1e5)))
+  run_identifier = '_'.join(identifier_parts)
+  return '/'.join([run_identifier, test_path, artifact_name])
+
+
+def UploadArtifacts(intermediate_results, upload_bucket, results_label):
   """Upload all artifacts to cloud.
 
   For each test run, uploads all its artifacts to cloud and sets remoteUrl
   fields in intermediate_results.
   """
-  if upload_bucket is not None:
-    for result in intermediate_results['testResults']:
-      artifacts = result.get('artifacts', {})
-      for artifact in artifacts.values():
-        # For now, the uploading is done by Telemetry, so we just check that
-        # remoteUrls are set.
-        # TODO(crbug.com/981349): replace this with actual uploading code
-        assert 'remoteUrl' in artifact
+  if upload_bucket is None:
+    return
+
+  start_time = intermediate_results['benchmarkRun']['startTime']
+  for result in intermediate_results['testResults']:
+    artifacts = result.get('artifacts', {})
+    for name, artifact in artifacts.iteritems():
+      if 'remoteUrl' in artifact:
+        continue
+      # TODO(crbug.com/981349): Remove this check after Telemetry does not
+      # save histograms as an artifact anymore.
+      if name == compute_metrics.HISTOGRAM_DICTS_FILE:
+        continue
+      artifact['remoteUrl'] = cloud_storage.Insert(
+          upload_bucket,
+          _RemoteName(results_label, start_time, result['testPath'], name),
+          artifact['filePath'],
+      )
+      logging.info('Uploaded %s of %s to %s\n' % (
+          name, result['testPath'], artifact['remoteUrl']))
 
 
 def _ComputeMetrics(intermediate_results, results_label):
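For illustration only, a minimal standalone sketch of the naming scheme that _RemoteName implements (written in Python 3 for brevity, while the CL itself targets Python 2; the helper name remote_name and the sample values are assumptions, not part of the change): the optional results label and the benchmark start time are sanitized with re.sub, a random suffix is appended, and the pieces are joined with the test path and artifact name.

import random
import re

def remote_name(results_label, start_time, test_path, artifact_name):
  # Sanitize the optional label: runs of non-word characters collapse to '_'.
  parts = [re.sub(r'\W+', '_', results_label)] if results_label else []
  # Keep only 'YYYY-MM-DDTHH:MM:SS' of the ISO timestamp, delimiters removed.
  parts.append(re.sub(r'\W+', '', start_time[:19]))
  # Random suffix to keep runs with the same label and start time apart.
  parts.append(str(random.randint(1, 100000)))
  return '/'.join(['_'.join(parts), test_path, artifact_name])

print(remote_name('src@abc + 123', '2019-10-01T12:00:00.123456Z',
                  'benchmark/story', 'trace.html'))
# Prints something like:
# src_abc_123_20191001T120000_54321/benchmark/story/trace.html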
@@ -6,6 +6,8 @@
 import unittest
 
+import mock
+
 from core.results_processor import processor
 from core.results_processor import testing
@@ -37,3 +39,56 @@ class ResultsProcessorUnitTests(unittest.TestCase):
     self.assertIn(['linux'], diag_values)
     self.assertIn([['documentation', 'url']], diag_values)
     self.assertIn(['label'], diag_values)
+
+  def testUploadArtifacts(self):
+    in_results = testing.IntermediateResults(
+        test_results=[
+            testing.TestResult(
+                'benchmark/story',
+                artifacts={'log': testing.Artifact('/log.log')},
+            ),
+            testing.TestResult(
+                'benchmark/story',
+                artifacts={
+                    'trace.html': testing.Artifact('/trace.html'),
+                    'screenshot': testing.Artifact('/screenshot.png'),
+                },
+            ),
+        ],
+    )
+
+    with mock.patch('py_utils.cloud_storage.Insert') as cloud_patch:
+      cloud_patch.return_value = 'gs://url'
+      processor.UploadArtifacts(in_results, 'bucket', None)
+      cloud_patch.assert_has_calls([
+          mock.call('bucket', mock.ANY, '/log.log'),
+          mock.call('bucket', mock.ANY, '/trace.html'),
+          mock.call('bucket', mock.ANY, '/screenshot.png'),
+        ],
+        any_order=True,
+      )
+
+    for result in in_results['testResults']:
+      for artifact in result['artifacts'].itervalues():
+        self.assertEqual(artifact['remoteUrl'], 'gs://url')
+
+  def testUploadArtifacts_CheckRemoteUrl(self):
+    in_results = testing.IntermediateResults(
+        test_results=[
+            testing.TestResult(
+                'benchmark/story',
+                artifacts={'trace.html': testing.Artifact('/trace.html')},
+            ),
+        ],
+        start_time='2019-10-01T12:00:00.123456Z',
+    )
+
+    with mock.patch('py_utils.cloud_storage.Insert') as cloud_patch:
+      with mock.patch('random.randint') as randint_patch:
+        randint_patch.return_value = 54321
+        processor.UploadArtifacts(in_results, 'bucket', 'src@abc + 123')
+        cloud_patch.assert_called_once_with(
+            'bucket',
+            'src_abc_123_20191001T120000_54321/benchmark/story/trace.html',
+            '/trace.html'
+        )
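As a usage note, here is a minimal sketch of the intermediate results dict that UploadArtifacts consumes, inferred from the code above and the testing helpers; the literal values are placeholders, not taken from the CL.

# UploadArtifacts reads benchmarkRun.startTime, each test result's testPath,
# and each artifact's filePath, then writes the URL returned by
# cloud_storage.Insert back into the artifact's remoteUrl field.
intermediate_results = {
    'benchmarkRun': {'startTime': '2019-10-01T12:00:00.123456Z'},
    'testResults': [
        {
            'testPath': 'benchmark/story',
            'artifacts': {
                'trace.html': {'filePath': '/trace.html'},
            },
        },
    ],
}
# After UploadArtifacts(intermediate_results, 'bucket', 'label'), the
# 'trace.html' artifact also carries a 'remoteUrl' entry (mocked to
# 'gs://url' in the unit tests above).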