Commit 9310bd64 authored by Ned Nguyen, committed by Commit Bot

Refactor token generation logic into tools/perf/core/results_dashboard.py

This has several benefits:
1) The authentication token for each benchmark's results is no longer
generated too early. Before this CL, we generated a token for every
benchmark's results up front, then processed and uploaded each set of perf
results; with that logic, the token for the last benchmark to be uploaded
could have been generated long before it was actually used.
2) A new token can now be generated for every retry attempt in
core/results_dashboard.py (see the sketch below).
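
A minimal sketch of the per-attempt token generation this CL introduces; the
helper and function names below are illustrative only, not the exact CL code:

# Simplified sketch, not the exact CL code: a fresh token is generated on
# every retry attempt instead of once up front.
import subprocess

def _generate_token(service_account_file, lifetime_minutes=30):
  # Illustrative helper mirroring what LuciAuthTokenGeneratorCallback does.
  return subprocess.check_output(
      ['luci-auth', 'token',
       '-service-account-json', service_account_file,
       '-lifetime', '%im' % lifetime_minutes])

def upload_with_retries(send, data, service_account_file, num_retries=3):
  # `send` is any callable that raises on a recoverable upload failure.
  for attempt in xrange(1, num_retries + 1):
    token = _generate_token(service_account_file)  # fresh token per attempt
    try:
      send(data, token)
      return True
    except Exception:
      print 'Attempt %i of %i failed, retrying.' % (attempt, num_retries)
  return False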

Bug:864565
Cq-Include-Trybots: master.tryserver.chromium.perf:obbs_fyi

NOTRY=true # CQ flakes

Change-Id: I8b2d681efdaa241a8af19a7ffbf126523caa12a9
Reviewed-on: https://chromium-review.googlesource.com/1150080
Commit-Queue: Ned Nguyen <nednguyen@google.com>
Reviewed-by: Simon Hatch <simonhatch@chromium.org>
Reviewed-by: Juan Antonio Navarro Pérez <perezju@chromium.org>
Cr-Commit-Position: refs/heads/master@{#577916}
parent e975a3e7
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""API for generating OAuth2 access tokens from service account
keys predeployed to Chrome Ops bots via Puppet.
"""

import contextlib
import os
import subprocess
import tempfile


@contextlib.contextmanager
def with_access_token(
    service_account_json, prefix, token_expiration_in_minutes):
  """Yields an access token for the service account.

  Args:
    service_account_json: The path to the service account JSON file.
    prefix: the prefix of the token file
    token_expiration_in_minutes: the integer expiration time of the token
  """
  fd, path = tempfile.mkstemp(suffix='.json', prefix=prefix)
  try:
    args = ['luci-auth', 'token']
    if service_account_json:
      args += ['-service-account-json', service_account_json]
    args += ['-lifetime', '%im' % token_expiration_in_minutes]
    subprocess.check_call(args, stdout=fd)
    os.close(fd)
    fd = None
    yield path
  finally:
    if fd is not None:
      os.close(fd)
    os.remove(path)
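
For context, a minimal usage sketch of this (pre-CL) context manager; it
assumes luci-auth is on PATH, and the service-account path is a placeholder.
The real caller was process_perf_results.py, shown further down in this diff:

from core import oauth_api

# Placeholder path; on real bots the key is deployed via Puppet.
SERVICE_ACCOUNT_JSON = '/path/to/service-account.json'

with oauth_api.with_access_token(
    SERVICE_ACCOUNT_JSON, 'example_tok',
    token_expiration_in_minutes=30) as token_file:
  # token_file is a temporary file holding the token; it is deleted on exit.
  with open(token_file) as f:
    access_token = f.read().strip()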
@@ -34,10 +34,7 @@ psutil = external_modules.ImportOptionalModule('psutil')
# The paths in the results dashboard URLs for sending results.
SEND_RESULTS_PATH = '/add_point'
SEND_HISTOGRAMS_PATH = '/add_histograms'
ERROR_NO_OAUTH_TOKEN = (
    'No oauth token provided, cannot upload HistogramSet. Discarding.')
DEFAULT_TOKEN_TIMEOUT_IN_MINUTES = 30
class SendResultException(Exception):
@@ -52,7 +49,22 @@ class SendResultsFatalException(SendResultException):
  pass


def SendResults(data, url, send_as_histograms=False, oauth_token=None,
def LuciAuthTokenGeneratorCallback(
    service_account_file, token_expiration_in_minutes):
  args = ['luci-auth', 'token',
          '-service-account-json', service_account_file,
          '-lifetime', '%im' % token_expiration_in_minutes]
  p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
  if p.wait() == 0:
    return p.stdout.read()
  else:
    raise RuntimeError(
        'Error generating authentication token.\nStdout: %s\nStderr: %s' %
        (p.stdout.read(), p.stderr.read()))


def SendResults(data, url, send_as_histograms=False, service_account_file=None,
                token_generator_callback=LuciAuthTokenGeneratorCallback,
                num_retries=3):
  """Sends results to the Chrome Performance Dashboard.
@@ -62,18 +74,31 @@ def SendResults(data, url, send_as_histograms=False, oauth_token=None,
    data: The data to try to send. Must be JSON-serializable.
    url: Performance Dashboard URL (including schema).
    send_as_histograms: True if result is to be sent to /add_histograms.
    oauth_token: string; used for flushing oauth uploads from cache. Note that
      client is responsible for making sure that the oauth_token doesn't expire
      when using this API.
    service_account_file: string; path to the service account file used for
      authenticating when uploading data to the perf dashboard.
    token_generator_callback: a callback for generating the authentication
      token used to upload to the perf dashboard. The callback takes two
      parameters (service_account_file, token_expiration_in_minutes) and
      returns the token string. If |token_generator_callback| is not
      specified, it defaults to LuciAuthTokenGeneratorCallback.
    num_retries: Number of times to retry uploading to the perf dashboard upon
      recoverable error.
  """
  start = time.time()

  if send_as_histograms and not service_account_file:
    raise ValueError(
        'Must set a valid service_account_file for uploading histogram set '
        'data')

  # Send all the results from this run and the previous cache to the
  # dashboard.
  errors, all_data_uploaded = _SendResultsToDashboard(
      data, url, oauth_token, is_histogramset=send_as_histograms,
      data, url, is_histogramset=send_as_histograms,
      service_account_file=service_account_file,
      token_generator_callback=token_generator_callback,
      num_retries=num_retries)

  print 'Time spent sending results to %s: %s' % (url, time.time() - start)
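
For illustration, a hedged sketch of calling the new signature; the payload and
file path are placeholders. Tests can substitute token_generator_callback, as
the updated unit tests further down in this diff do:

from core import results_dashboard

histograms_json = []  # placeholder HistogramSet payload
results_dashboard.SendResults(
    histograms_json,
    'https://chromeperf.appspot.com',
    send_as_histograms=True,
    service_account_file='/path/to/service-account.json',  # placeholder
    num_retries=3)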
@@ -85,15 +110,12 @@ def SendResults(data, url, send_as_histograms=False, oauth_token=None,
def _SendResultsToDashboard(
    dashboard_data, url, oauth_token, is_histogramset, num_retries):
    dashboard_data, url, is_histogramset, service_account_file,
    token_generator_callback, num_retries):
  """Tries to send perf dashboard data to |url|.

  Args:
    perf_results_file_path: A file name.
    url: The instance URL to which to post results.
    oauth_token: An oauth token to use for histogram uploads. Might be None.
    num_retries: Number of times to retry uploading to the perf dashboard upon
      recoverable error.
    See the arguments of the SendResults method above.

  Returns:
    A tuple (errors, all_data_uploaded), where:
@@ -107,19 +129,19 @@ def _SendResultsToDashboard(
  data_type = ('histogram' if is_histogramset else 'chartjson')
  dashboard_data_str = json.dumps(dashboard_data)
  for i in xrange(1, num_retries + 1):
    try:
      print 'Sending %s result to dashboard (attempt %i out of %i).' % (
          data_type, i, num_retries)
      if is_histogramset:
        # TODO(eakuefner): Remove this discard logic once all bots use
        # histograms.
        if oauth_token is None:
          raise SendResultsFatalException(ERROR_NO_OAUTH_TOKEN)
        _SendHistogramJson(url, json.dumps(dashboard_data), oauth_token)
        oauth_token = token_generator_callback(
            service_account_file, DEFAULT_TOKEN_TIMEOUT_IN_MINUTES)
        _SendHistogramJson(url, dashboard_data_str, oauth_token)
      else:
        _SendResultsJson(url, json.dumps(dashboard_data))
        # TODO(eakuefner): Remove this logic once all bots use histograms.
        _SendResultsJson(url, dashboard_data_str)
      all_data_uploaded = True
      break
    except SendResultsRetryException as e:
@@ -11,34 +11,38 @@ from core import results_dashboard

class ResultsDashboardTest(unittest.TestCase):

  def setUp(self):
    self.fake_oath = 'fake-oath'
    self.fake_service = '/foo/bar/kingsman-service-account'
    self.dummy_token_generator = lambda service_file, timeout: 'Arthur-Merlin'
    self.perf_data = {'foo': 1, 'bar': 2}
    self.dashboard_url = 'https://chromeperf.appspot.com'

  def testRetryForSendResultRetryException(self):
    def raise_retry_exception(url, histogramset_json, oauth_token):
      del url, histogramset_json, oauth_token  # unused
    def raise_retry_exception(url, histogramset_json, service_account_file):
      del url, histogramset_json, service_account_file  # unused
      raise results_dashboard.SendResultsRetryException('Should retry')

    with mock.patch('core.results_dashboard._SendHistogramJson',
                    side_effect=raise_retry_exception) as m:
      upload_result = results_dashboard.SendResults(
          self.perf_data, self.dashboard_url, send_as_histograms=True,
          oauth_token=self.fake_oath, num_retries=5)
          service_account_file=self.fake_service,
          token_generator_callback=self.dummy_token_generator, num_retries=5)
      self.assertFalse(upload_result)
      self.assertEqual(m.call_count, 5)
  def testNoRetryForSendResultFatalException(self):
    def raise_retry_exception(url, histogramset_json, oauth_token):
      del url, histogramset_json, oauth_token  # unused
    def raise_retry_exception(url, histogramset_json, service_account_file):
      del url, histogramset_json, service_account_file  # unused
      raise results_dashboard.SendResultsFatalException('Do not retry')

    with mock.patch('core.results_dashboard._SendHistogramJson',
                    side_effect=raise_retry_exception) as m:
      upload_result = results_dashboard.SendResults(
          self.perf_data, self.dashboard_url, send_as_histograms=True,
          oauth_token=self.fake_oath, num_retries=5)
          service_account_file=self.fake_service,
          token_generator_callback=self.dummy_token_generator,
          num_retries=5)
      self.assertFalse(upload_result)
      self.assertEqual(m.call_count, 1)
@@ -46,15 +50,17 @@ class ResultsDashboardTest(unittest.TestCase):
    with mock.patch('core.results_dashboard._SendHistogramJson') as m:
      upload_result = results_dashboard.SendResults(
          self.perf_data, self.dashboard_url, send_as_histograms=True,
          oauth_token=self.fake_oath, num_retries=5)
          service_account_file=self.fake_service,
          token_generator_callback=self.dummy_token_generator,
          num_retries=5)
      self.assertTrue(upload_result)
      self.assertEqual(m.call_count, 1)
  def testNoRetryAfterSucessfulSendResult(self):
    counter = [0]
    def raise_retry_exception_first_two_times(
        url, histogramset_json, oauth_token):
      del url, histogramset_json, oauth_token  # unused
        url, histogramset_json, service_account_file):
      del url, histogramset_json, service_account_file  # unused
      counter[0] += 1
      if counter[0] <= 2:
        raise results_dashboard.SendResultsRetryException('Please retry')
@@ -63,6 +69,8 @@ class ResultsDashboardTest(unittest.TestCase):
                    side_effect=raise_retry_exception_first_two_times) as m:
      upload_result = results_dashboard.SendResults(
          self.perf_data, self.dashboard_url, send_as_histograms=True,
          oauth_token=self.fake_oath, num_retries=5)
          service_account_file=self.fake_service,
          token_generator_callback=self.dummy_token_generator,
          num_retries=5)
      self.assertTrue(upload_result)
      self.assertEqual(m.call_count, 3)
@@ -101,7 +101,7 @@ def _CreateParser():
  parser.add_option('--git-revision')
  parser.add_option('--output-json-dashboard-url')
  parser.add_option('--send-as-histograms', action='store_true')
  parser.add_option('--oauth-token-file')
  parser.add_option('--service-account-file')
  return parser
@@ -115,11 +115,7 @@ def main(args):
  if not options.configuration_name or not options.results_url:
    parser.error('configuration_name and results_url are required.')

  if options.oauth_token_file:
    with open(options.oauth_token_file) as f:
      oauth_token = f.readline()
  else:
    oauth_token = None
  service_account_file = options.service_account_file or None

  if not options.perf_dashboard_machine_group:
    print 'Error: Invalid perf dashboard machine group'
@@ -148,7 +144,7 @@ def main(args):
        dashboard_json,
        options.results_url,
        send_as_histograms=options.send_as_histograms,
        oauth_token=oauth_token):
        service_account_file=service_account_file):
      return 1
  else:
    # The upload didn't fail since there was no data to upload.
@@ -16,7 +16,6 @@ import tempfile
import time
import uuid
from core import oauth_api
from core import path_util
from core import upload_results_to_perf_dashboard
from core import results_merger
@@ -64,7 +63,7 @@ def _GetMachineGroup(build_properties):

def _upload_perf_results(json_to_upload, name, configuration_name,
                         build_properties, oauth_file, output_json_file):
                         build_properties, service_account_file, output_json_file):
  """Upload the contents of result JSON(s) to the perf dashboard."""
  args = [
      '--buildername', build_properties['buildername'],
@@ -76,7 +75,7 @@ def _upload_perf_results(json_to_upload, name, configuration_name,
      '--got-revision-cp', build_properties['got_revision_cp'],
      '--got-v8-revision', build_properties['got_v8_revision'],
      '--got-webrtc-revision', build_properties['got_webrtc_revision'],
      '--oauth-token-file', oauth_file,
      '--service-account-file', service_account_file,
      '--output-json-file', output_json_file,
      '--perf-dashboard-machine-group', _GetMachineGroup(build_properties)
  ]
@@ -350,16 +349,11 @@ def _upload_individual(
    results_filename = join(directories[0], 'perf_results.json')

  print 'Uploading perf results from %s benchmark' % benchmark_name
  # We generate an oauth token for every benchmark upload in the event
  # the token could time out, see crbug.com/854162
  with oauth_api.with_access_token(
      service_account_file, ('%s_tok' % benchmark_name),
      token_expiration_in_minutes=30) as oauth_file:
    with open(output_json_file, 'w') as oj:
      upload_fail = _upload_perf_results(
          results_filename,
          benchmark_name, configuration_name, build_properties,
          oauth_file, oj)
          service_account_file, oj)
  upload_end_time = time.time()
  print_duration(('%s upload time' % (benchmark_name)),
                 upload_begin_time, upload_end_time)
@@ -8,7 +8,6 @@ import shutil
import sys
import tempfile
import json
import contextlib
import unittest
from core import path_util
@@ -44,35 +43,22 @@ class ProcessPerfResultsIntegrationTest(unittest.TestCase):
    self.task_output_dir = os.path.join(
        os.path.dirname(__file__), 'testdata', 'task_output_dir')

    @contextlib.contextmanager
    def mocked_with_access_token(service_account_json, append):
      del service_account_json  # unused
      test_token_file = os.path.join(self.test_dir, 'token-%s' % append)
      with open(test_token_file, 'w') as f:
        f.write('1234')
      yield test_token_file

    m1 = mock.patch('process_perf_results.oauth_api.with_access_token',
                    side_effect=mocked_with_access_token)
    m1 = mock.patch(
        'process_perf_results.logdog_helper.text',
        return_value = 'http://foo.link')
    m1.start()
    self.addCleanup(m1.stop)

    m2 = mock.patch(
        'process_perf_results.logdog_helper.text',
        return_value = 'http://foo.link')
        'process_perf_results.logdog_helper.open_text',
        return_value=_FakeLogdogStream())
    m2.start()
    self.addCleanup(m2.stop)

    m3 = mock.patch(
        'process_perf_results.logdog_helper.open_text',
        return_value=_FakeLogdogStream())
    m3 = mock.patch('core.results_dashboard.SendResults')
    m3.start()
    self.addCleanup(m3.stop)

    m4 = mock.patch('core.results_dashboard.SendResults')
    m4.start()
    self.addCleanup(m4.stop)

  def tearDown(self):
    shutil.rmtree(self.test_dir)