Commit bb5fcecd authored by John Chen, committed by Commit Bot

Revert "[tools/perf] Add stubs for aggregation and uploading methods"

This reverts commit 654f9cbd.

Reason for revert: Large number of failures in Android perf tests

Original change's description:
> [tools/perf] Add stubs for aggregation and uploading methods
> 
> This CL adds two methods to ResultsProcessor: for aggregating traces and
> uploading artifacts. They do (almost) nothing so far, because the
> corresponding work is done by Telemetry. Implementing full versions of the
> methods will allow us to gradually transfer the responsibility for these
> tasks from Telemetry to ResultsProcessor without major refactoring.
> 
> Bug: 981349
> Change-Id: I5b2cb3f55dffe5f12e0e8ec0f3aaa1641e009f09
> Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1781152
> Commit-Queue: Mikhail Khokhlov <khokhlov@google.com>
> Reviewed-by: Juan Antonio Navarro Pérez <perezju@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#694725}

TBR=perezju@chromium.org,khokhlov@google.com

Change-Id: Ia4e87e5e23460d0e0b9c02c8b85a09af1e0c15ca
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: 981349, 1002272
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1793927
Reviewed-by: John Chen <johnchen@chromium.org>
Commit-Queue: John Chen <johnchen@chromium.org>
Cr-Commit-Position: refs/heads/master@{#694997}
parent 45b950f1
...@@ -19,7 +19,6 @@ from py_utils import cloud_storage ...@@ -19,7 +19,6 @@ from py_utils import cloud_storage
from core.results_processor import json3_output from core.results_processor import json3_output
HTML_TRACE_NAME = 'trace.html'
TELEMETRY_RESULTS = '_telemetry_results.jsonl' TELEMETRY_RESULTS = '_telemetry_results.jsonl'
SUPPORTED_FORMATS = { SUPPORTED_FORMATS = {
'none': NotImplemented, 'none': NotImplemented,
...@@ -147,10 +146,6 @@ def ProcessResults(options): ...@@ -147,10 +146,6 @@ def ProcessResults(options):
intermediate_results = _LoadIntermediateResults( intermediate_results = _LoadIntermediateResults(
os.path.join(options.intermediate_dir, TELEMETRY_RESULTS)) os.path.join(options.intermediate_dir, TELEMETRY_RESULTS))
_AggregateTraces(intermediate_results)
_UploadArtifacts(intermediate_results, options.upload_bucket)
for output_format in options.output_formats: for output_format in options.output_formats:
if output_format not in SUPPORTED_FORMATS: if output_format not in SUPPORTED_FORMATS:
raise NotImplementedError(output_format) raise NotImplementedError(output_format)
...@@ -185,40 +180,6 @@ def _LoadIntermediateResults(intermediate_file): ...@@ -185,40 +180,6 @@ def _LoadIntermediateResults(intermediate_file):
return results return results
def _AggregateTraces(intermediate_results):
"""Replace individual traces with an aggregate one for each test result.
For each test run with traces, generates an aggregate HTML trace. Removes
all entries for individual traces and adds one entry for aggregate one.
"""
for result in intermediate_results['testResults']:
artifacts = result.get('artifacts', {})
traces = [name for name in artifacts if name.startswith('trace/')]
if len(traces) > 0:
# For now, the html trace is generated by Telemetry, so it should be there
# already. All we need to do is remove individual traces from the dict.
# TODO(crbug.com/981349): replace this with actual aggregation code.
assert HTML_TRACE_NAME in artifacts
for trace in traces:
del artifacts[trace]
def _UploadArtifacts(intermediate_results, upload_bucket):
"""Upload all artifacts to cloud.
For each test run, uploads all its artifacts to cloud and sets remoteUrl
fields in intermediate_results.
"""
if upload_bucket is not None:
for result in intermediate_results['testResults']:
artifacts = result.get('artifacts', {})
for artifact in artifacts.values():
# For now, the uploading is done by Telemetry, so we just check that
# remoteUrls are set.
# TODO(crbug.com/981349): replace this with actual uploading code
assert 'remoteUrl' in artifact
def _DefaultOutputDir(): def _DefaultOutputDir():
"""Default output directory. """Default output directory.
......
...@@ -20,7 +20,7 @@ from core.results_processor import processor ...@@ -20,7 +20,7 @@ from core.results_processor import processor
from core.results_processor import testing from core.results_processor import testing
class ResultsProcessorIntegrationTests(unittest.TestCase): class ResultProcessorIntegrationTests(unittest.TestCase):
def setUp(self): def setUp(self):
self.output_dir = tempfile.mkdtemp() self.output_dir = tempfile.mkdtemp()
self.intermediate_dir = os.path.join( self.intermediate_dir = os.path.join(
...@@ -66,33 +66,3 @@ class ResultsProcessorIntegrationTests(unittest.TestCase): ...@@ -66,33 +66,3 @@ class ResultsProcessorIntegrationTests(unittest.TestCase):
self.assertEqual(test_result['times'], [1.1, 1.2]) self.assertEqual(test_result['times'], [1.1, 1.2])
self.assertEqual(test_result['time'], 1.1) self.assertEqual(test_result['time'], 1.1)
self.assertEqual(test_result['shard'], 7) self.assertEqual(test_result['shard'], 7)
def testJson3OutputWithArtifacts(self):
  """Checks that artifact remote URLs are carried into json3 output."""
  self.SerializeIntermediateResults([
      testing.TestResult(
          'benchmark/story',
          artifacts={
              'logs': testing.Artifact('/logs.txt', 'gs://logs.txt'),
              'trace/telemetry': testing.Artifact('/telemetry.json'),
              'trace.html':
                  testing.Artifact('/trace.html', 'gs://trace.html'),
          },
      )])

  processor.main([
      '--output-format', 'json-test-results',
      '--output-dir', self.output_dir,
      '--intermediate-dir', self.intermediate_dir])

  output_path = os.path.join(self.output_dir, json3_output.OUTPUT_FILENAME)
  with open(output_path) as json_file:
    results = json.load(json_file)

  self.assertIn('benchmark', results['tests'])
  self.assertIn('story', results['tests']['benchmark'])
  self.assertIn('artifacts', results['tests']['benchmark']['story'])

  # Only artifacts with a remote URL should appear; the individual
  # 'trace/telemetry' artifact has none and must be absent.
  uploaded = results['tests']['benchmark']['story']['artifacts']
  self.assertEqual(len(uploaded), 2)
  self.assertEqual(uploaded['logs'], ['gs://logs.txt'])
  self.assertEqual(uploaded['trace.html'], ['gs://trace.html'])
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment