Commit 9310bd64 authored by Ned Nguyen, committed by Commit Bot

Refactor token generation logic into tools/perf/core/results_dashboard.py

This has several benefits:
1) Authentication tokens are no longer generated too early. Before this
CL, we generated a token for every benchmark's results up front, then
processed and uploaded each set of perf results; with that logic, the
token for the last benchmark to be uploaded could have been generated
long before it was actually used.
2) A new token can now be generated for every retry attempt in
core/results_dashboard.py.
(A usage sketch of the refactored API follows the commit metadata
below.)

Bug:864565
Cq-Include-Trybots: master.tryserver.chromium.perf:obbs_fyi
Change-Id: I8b2d681efdaa241a8af19a7ffbf126523caa12a9

NOTRY=true # CQ flakes

Change-Id: I8b2d681efdaa241a8af19a7ffbf126523caa12a9
Reviewed-on: https://chromium-review.googlesource.com/1150080
Commit-Queue: Ned Nguyen <nednguyen@google.com>
Reviewed-by: Simon Hatch <simonhatch@chromium.org>
Reviewed-by: Juan Antonio Navarro Pérez <perezju@chromium.org>
Cr-Commit-Position: refs/heads/master@{#577916}
parent e975a3e7
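To make the new flow concrete, here is a minimal usage sketch (not part of
the CL itself): callers now pass a service-account file rather than a
pre-generated OAuth token, and core/results_dashboard.py generates a fresh
token inside every retry attempt. The payload, the service-account path and
the FakeTokenGenerator stand-in below are illustrative assumptions; the
default callback, LuciAuthTokenGeneratorCallback, shells out to
"luci-auth token".

from core import results_dashboard

def FakeTokenGenerator(service_account_file, token_expiration_in_minutes):
  # Stand-in for LuciAuthTokenGeneratorCallback, handy when no Chrome Ops
  # credentials are available locally.
  del token_expiration_in_minutes  # unused
  return 'fake-token-for-%s' % service_account_file

# Illustrative values only; real callers pass HistogramSet JSON and the
# service-account file deployed on the bot.
results_dashboard.SendResults(
    {'foo': 1, 'bar': 2},
    'https://chromeperf.appspot.com',
    send_as_histograms=True,
    service_account_file='/creds/service_accounts/service-account.json',
    token_generator_callback=FakeTokenGenerator,
    num_retries=3)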
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""API for generating OAuth2 access tokens from service account
keys predeployed to Chrome Ops bots via Puppet.
"""
import contextlib
import os
import subprocess
import tempfile


@contextlib.contextmanager
def with_access_token(
    service_account_json, prefix, token_expiration_in_minutes):
  """Yields the path to a file containing an access token for the account.

  Args:
    service_account_json: The path to the service account JSON file.
    prefix: The prefix of the temporary token file.
    token_expiration_in_minutes: The integer expiration time of the token,
      in minutes.
  """
  fd, path = tempfile.mkstemp(suffix='.json', prefix=prefix)
  try:
    args = ['luci-auth', 'token']
    if service_account_json:
      args += ['-service-account-json', service_account_json]
    args += ['-lifetime', '%im' % token_expiration_in_minutes]
    # Write the token to the temporary file and close it before yielding the
    # path to the caller.
    subprocess.check_call(args, stdout=fd)
    os.close(fd)
    fd = None
    yield path
  finally:
    if fd is not None:
      os.close(fd)
    os.remove(path)
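For context, a rough sketch of how this helper was consumed before the
refactor (the assumed usage mirrors the call this CL removes from
process_perf_results.py further down); the service-account path is a
placeholder:

from core import oauth_api

# Placeholder path; on Chrome Ops bots the key is predeployed via Puppet.
service_account_json = '/creds/service_accounts/service-account.json'

with oauth_api.with_access_token(
    service_account_json, 'example_tok',
    token_expiration_in_minutes=30) as token_file:
  # token_file points at a temporary file holding the OAuth2 access token;
  # it is deleted automatically when the with-block exits.
  with open(token_file) as f:
    print f.read().strip()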
...
@@ -34,10 +34,7 @@ psutil = external_modules.ImportOptionalModule('psutil')
 # The paths in the results dashboard URLs for sending results.
 SEND_RESULTS_PATH = '/add_point'
 SEND_HISTOGRAMS_PATH = '/add_histograms'
-ERROR_NO_OAUTH_TOKEN = (
-    'No oauth token provided, cannot upload HistogramSet. Discarding.')
+DEFAULT_TOKEN_TIMEOUT_IN_MINUTES = 30


 class SendResultException(Exception):
@@ -52,7 +49,22 @@ class SendResultsFatalException(SendResultException):
   pass


-def SendResults(data, url, send_as_histograms=False, oauth_token=None,
+def LuciAuthTokenGeneratorCallback(
+    service_account_file, token_expiration_in_minutes):
+  args = ['luci-auth', 'token',
+          '-service-account-json', service_account_file,
+          '-lifetime', '%im' % token_expiration_in_minutes]
+  p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+  if p.wait() == 0:
+    return p.stdout.read()
+  else:
+    raise RuntimeError(
+        'Error generating authentication token.\nStdout: %s\nStderr: %s' %
+        (p.stdout.read(), p.stderr.read()))
+
+
+def SendResults(data, url, send_as_histograms=False, service_account_file=None,
+                token_generator_callback=LuciAuthTokenGeneratorCallback,
                 num_retries=3):
   """Sends results to the Chrome Performance Dashboard.
@@ -62,18 +74,31 @@ def SendResults(data, url, send_as_histograms=False, oauth_token=None,
     data: The data to try to send. Must be JSON-serializable.
     url: Performance Dashboard URL (including schema).
     send_as_histograms: True if result is to be sent to /add_histograms.
-    oauth_token: string; used for flushing oauth uploads from cache. Note that
-      client is responsible for making sure that the oauth_token doesn't expire
-      when using this API.
+    service_account_file: string; path to the service account file used for
+      authentication when uploading data to the perf dashboard.
+    token_generator_callback: a callback for generating the authentication
+      token used to upload to the perf dashboard. The callback takes two
+      parameters (service_account_file, token_expiration_in_minutes) and
+      returns the token string. If |token_generator_callback| is not
+      specified, it defaults to LuciAuthTokenGeneratorCallback.
     num_retries: Number of times to retry uploading to the perf dashboard upon
       recoverable error.
   """
   start = time.time()
+  if send_as_histograms and not service_account_file:
+    raise ValueError(
+        'Must set a valid service_account_file for uploading histogram set '
+        'data')
+
   # Send all the results from this run and the previous cache to the
   # dashboard.
   errors, all_data_uploaded = _SendResultsToDashboard(
-      data, url, oauth_token, is_histogramset=send_as_histograms,
+      data, url, is_histogramset=send_as_histograms,
+      service_account_file=service_account_file,
+      token_generator_callback=token_generator_callback,
       num_retries=num_retries)

   print 'Time spent sending results to %s: %s' % (url, time.time() - start)
@@ -85,15 +110,12 @@ def SendResults(data, url, send_as_histograms=False, oauth_token=None,

 def _SendResultsToDashboard(
-    dashboard_data, url, oauth_token, is_histogramset, num_retries):
+    dashboard_data, url, is_histogramset, service_account_file,
+    token_generator_callback, num_retries):
   """Tries to send perf dashboard data to |url|.

   Args:
-    perf_results_file_path: A file name.
-    url: The instance URL to which to post results.
-    oauth_token: An oauth token to use for histogram uploads. Might be None.
-    num_retries: Number of time to retry uploading to the perf dashboard upon
-      recoverable error.
+    See arguments in SendResults method above.

   Returns:
     A tuple (errors, all_data_uploaded), whereas:
@@ -107,19 +129,19 @@ def _SendResultsToDashboard(
   data_type = ('histogram' if is_histogramset else 'chartjson')
+  dashboard_data_str = json.dumps(dashboard_data)

   for i in xrange(1, num_retries + 1):
     try:
       print 'Sending %s result to dashboard (attempt %i out of %i).' % (
           data_type, i, num_retries)
       if is_histogramset:
-        # TODO(eakuefner): Remove this discard logic once all bots use
-        # histograms.
-        if oauth_token is None:
-          raise SendResultsFatalException(ERROR_NO_OAUTH_TOKEN)
-        _SendHistogramJson(url, json.dumps(dashboard_data), oauth_token)
+        oauth_token = token_generator_callback(
+            service_account_file, DEFAULT_TOKEN_TIMEOUT_IN_MINUTES)
+        _SendHistogramJson(url, dashboard_data_str, oauth_token)
       else:
-        _SendResultsJson(url, json.dumps(dashboard_data))
+        # TODO(eakuefner): Remove this logic once all bots use histograms.
+        _SendResultsJson(url, dashboard_data_str)
       all_data_uploaded = True
       break
     except SendResultsRetryException as e:
...
@@ -11,34 +11,38 @@ from core import results_dashboard

 class ResultsDashboardTest(unittest.TestCase):
   def setUp(self):
-    self.fake_oath = 'fake-oath'
+    self.fake_service = '/foo/bar/kingsman-service-account'
+    self.dummy_token_generator = lambda service_file, timeout: 'Arthur-Merlin'
     self.perf_data = {'foo': 1, 'bar': 2}
     self.dashboard_url = 'https://chromeperf.appspot.com'

   def testRetryForSendResultRetryException(self):
-    def raise_retry_exception(url, histogramset_json, oauth_token):
-      del url, histogramset_json, oauth_token  # unused
+    def raise_retry_exception(url, histogramset_json, service_account_file):
+      del url, histogramset_json, service_account_file  # unused
       raise results_dashboard.SendResultsRetryException('Should retry')

     with mock.patch('core.results_dashboard._SendHistogramJson',
                     side_effect=raise_retry_exception) as m:
       upload_result = results_dashboard.SendResults(
           self.perf_data, self.dashboard_url, send_as_histograms=True,
-          oauth_token=self.fake_oath, num_retries=5)
+          service_account_file=self.fake_service,
+          token_generator_callback=self.dummy_token_generator, num_retries=5)
       self.assertFalse(upload_result)
       self.assertEqual(m.call_count, 5)

   def testNoRetryForSendResultFatalException(self):
-    def raise_retry_exception(url, histogramset_json, oauth_token):
-      del url, histogramset_json, oauth_token  # unused
+    def raise_retry_exception(url, histogramset_json, service_account_file):
+      del url, histogramset_json, service_account_file  # unused
       raise results_dashboard.SendResultsFatalException('Do not retry')

     with mock.patch('core.results_dashboard._SendHistogramJson',
                     side_effect=raise_retry_exception) as m:
       upload_result = results_dashboard.SendResults(
           self.perf_data, self.dashboard_url, send_as_histograms=True,
-          oauth_token=self.fake_oath, num_retries=5)
+          service_account_file=self.fake_service,
+          token_generator_callback=self.dummy_token_generator,
+          num_retries=5)
       self.assertFalse(upload_result)
       self.assertEqual(m.call_count, 1)
@@ -46,15 +50,17 @@ class ResultsDashboardTest(unittest.TestCase):
     with mock.patch('core.results_dashboard._SendHistogramJson') as m:
       upload_result = results_dashboard.SendResults(
           self.perf_data, self.dashboard_url, send_as_histograms=True,
-          oauth_token=self.fake_oath, num_retries=5)
+          service_account_file=self.fake_service,
+          token_generator_callback=self.dummy_token_generator,
+          num_retries=5)
       self.assertTrue(upload_result)
       self.assertEqual(m.call_count, 1)

   def testNoRetryAfterSucessfulSendResult(self):
     counter = [0]
     def raise_retry_exception_first_two_times(
-        url, histogramset_json, oauth_token):
-      del url, histogramset_json, oauth_token  # unused
+        url, histogramset_json, service_account_file):
+      del url, histogramset_json, service_account_file  # unused
       counter[0] += 1
       if counter[0] <= 2:
         raise results_dashboard.SendResultsRetryException('Please retry')
@@ -63,6 +69,8 @@ class ResultsDashboardTest(unittest.TestCase):
                     side_effect=raise_retry_exception_first_two_times) as m:
       upload_result = results_dashboard.SendResults(
           self.perf_data, self.dashboard_url, send_as_histograms=True,
-          oauth_token=self.fake_oath, num_retries=5)
+          service_account_file=self.fake_service,
+          token_generator_callback=self.dummy_token_generator,
+          num_retries=5)
       self.assertTrue(upload_result)
       self.assertEqual(m.call_count, 3)
...
@@ -101,7 +101,7 @@ def _CreateParser():
   parser.add_option('--git-revision')
   parser.add_option('--output-json-dashboard-url')
   parser.add_option('--send-as-histograms', action='store_true')
-  parser.add_option('--oauth-token-file')
+  parser.add_option('--service-account-file')
   return parser

@@ -115,11 +115,7 @@ def main(args):
   if not options.configuration_name or not options.results_url:
     parser.error('configuration_name and results_url are required.')

-  if options.oauth_token_file:
-    with open(options.oauth_token_file) as f:
-      oauth_token = f.readline()
-  else:
-    oauth_token = None
+  service_account_file = options.service_account_file or None

   if not options.perf_dashboard_machine_group:
     print 'Error: Invalid perf dashboard machine group'

@@ -148,7 +144,7 @@ def main(args):
         dashboard_json,
         options.results_url,
         send_as_histograms=options.send_as_histograms,
-        oauth_token=oauth_token):
+        service_account_file=service_account_file):
       return 1
   else:
     # The upload didn't fail since there was no data to upload.
...
@@ -16,7 +16,6 @@ import tempfile
 import time
 import uuid

-from core import oauth_api
 from core import path_util
 from core import upload_results_to_perf_dashboard
 from core import results_merger

@@ -64,7 +63,7 @@ def _GetMachineGroup(build_properties):

 def _upload_perf_results(json_to_upload, name, configuration_name,
-                         build_properties, oauth_file, output_json_file):
+                         build_properties, service_account_file, output_json_file):
   """Upload the contents of result JSON(s) to the perf dashboard."""
   args= [
     '--buildername', build_properties['buildername'],

@@ -76,7 +75,7 @@ def _upload_perf_results(json_to_upload, name, configuration_name,
     '--got-revision-cp', build_properties['got_revision_cp'],
     '--got-v8-revision', build_properties['got_v8_revision'],
     '--got-webrtc-revision', build_properties['got_webrtc_revision'],
-    '--oauth-token-file', oauth_file,
+    '--service-account-file', service_account_file,
     '--output-json-file', output_json_file,
     '--perf-dashboard-machine-group', _GetMachineGroup(build_properties)
   ]

@@ -350,20 +349,15 @@ def _upload_individual(
       results_filename = join(directories[0], 'perf_results.json')

     print 'Uploading perf results from %s benchmark' % benchmark_name
-    # We generate an oauth token for every benchmark upload in the event
-    # the token could time out, see crbug.com/854162
-    with oauth_api.with_access_token(
-        service_account_file, ('%s_tok' % benchmark_name),
-        token_expiration_in_minutes=30) as oauth_file:
-      with open(output_json_file, 'w') as oj:
-        upload_fail = _upload_perf_results(
-            results_filename,
-            benchmark_name, configuration_name, build_properties,
-            oauth_file, oj)
-      upload_end_time = time.time()
-      print_duration(('%s upload time' % (benchmark_name)),
-                     upload_begin_time, upload_end_time)
-      return (benchmark_name, upload_fail)
+    with open(output_json_file, 'w') as oj:
+      upload_fail = _upload_perf_results(
+          results_filename,
+          benchmark_name, configuration_name, build_properties,
+          service_account_file, oj)
+    upload_end_time = time.time()
+    print_duration(('%s upload time' % (benchmark_name)),
+                   upload_begin_time, upload_end_time)
+    return (benchmark_name, upload_fail)
   finally:
     shutil.rmtree(tmpfile_dir)
...
@@ -8,7 +8,6 @@ import shutil
 import sys
 import tempfile
 import json
-import contextlib
 import unittest

 from core import path_util

@@ -44,35 +43,22 @@ class ProcessPerfResultsIntegrationTest(unittest.TestCase):
     self.task_output_dir = os.path.join(
         os.path.dirname(__file__), 'testdata', 'task_output_dir')

-    @contextlib.contextmanager
-    def mocked_with_access_token(service_account_json, append):
-      del service_account_json  # unused
-      test_token_file = os.path.join(self.test_dir, 'token-%s' % append)
-      with open(test_token_file, 'w') as f:
-        f.write('1234')
-      yield test_token_file
-
-    m1 = mock.patch('process_perf_results.oauth_api.with_access_token',
-                    side_effect=mocked_with_access_token)
+    m1 = mock.patch(
+        'process_perf_results.logdog_helper.text',
+        return_value = 'http://foo.link')
     m1.start()
     self.addCleanup(m1.stop)

-    m2 = mock.patch(
-        'process_perf_results.logdog_helper.text',
-        return_value = 'http://foo.link')
+    m2 = mock.patch(
+        'process_perf_results.logdog_helper.open_text',
+        return_value=_FakeLogdogStream())
     m2.start()
     self.addCleanup(m2.stop)

-    m3 = mock.patch(
-        'process_perf_results.logdog_helper.open_text',
-        return_value=_FakeLogdogStream())
+    m3 = mock.patch('core.results_dashboard.SendResults')
     m3.start()
     self.addCleanup(m3.stop)

-    m4 = mock.patch('core.results_dashboard.SendResults')
-    m4.start()
-    self.addCleanup(m4.stop)
-
   def tearDown(self):
     shutil.rmtree(self.test_dir)
...