Commit f95e460c authored by sclittle, committed by Commit bot

Added several new bypass telemetry tests for the data reduction proxy

New tests:
- Added new ChromeProxyExplicitBypass integration tests that exercise the "bypass=0", "bypass=<X>", "block=0", and "block=<X>" Chrome-Proxy directives using the test server (see the sketch after this list)
- Added a new ChromeProxyFallback integration test that gets a "bypass=0" directive from the real data reduction proxy
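
For reference, these directives take the form "bypass=<seconds>" or "block=<seconds>", where a value of 0 asks Chrome to use its default bypass duration; per the new page set, "bypass" is expected to bypass only the primary data reduction proxy, while "block" is expected to bypass both the primary and fallback proxies. Below is a minimal, hypothetical sketch (not part of this CL; the helper name and parsing details are illustrative only) of how such a directive value maps to a bypass duration:

# Hypothetical helper, for illustration only: interpret a Chrome-Proxy header
# value such as "bypass=3600" or "block=0". A duration of 0 means "use the
# client's default bypass duration", which the telemetry metrics check against
# the DEFAULT_BYPASS_MIN_SECONDS/DEFAULT_BYPASS_MAX_SECONDS bounds.
def ParseBypassDirective(chrome_proxy_header_value):
  """Returns (directive_name, duration_seconds or None) for bypass/block."""
  for directive in chrome_proxy_header_value.split(','):
    directive = directive.strip()
    for name in ('bypass', 'block'):
      if directive.startswith(name + '='):
        seconds = int(directive[len(name) + 1:])
        return name, (seconds if seconds > 0 else None)
  return None, None

assert ParseBypassDirective('bypass=3600') == ('bypass', 3600)
assert ParseBypassDirective('block=0') == ('block', None)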

Cleanup:
- Rewrote the VerifyBadProxies logic, adding support for per-proxy retry times (see the sketch after this list)
- Also changed VerifyBadProxies to raise an exception instead of returning False when the actual and expected bad proxy lists differ; silently returning False was causing all the existing bypass tests to pass in cases where they should have failed
- Fixed a typo in the ChromeProxyCorsBypass test; it passes now
- Some general Python style cleanup
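
For context, the rewritten check treats each expected bad proxy as a dict with optional 'retry_seconds_low'/'retry_seconds_high' entries and verifies that the proxy's reported retry timestamp falls inside [now + low, now + high], falling back to the default bypass bounds when an entry is missing. A standalone, hypothetical sketch of that check (the constant values below are placeholders; the real bounds are the constants defined in chrome_proxy_metrics.py, and ProxyRetryTimeInRange in the real code may allow extra slack):

import datetime
import time

# Placeholder values for illustration only; see chrome_proxy_metrics.py for
# the real DEFAULT_BYPASS_MIN_SECONDS/DEFAULT_BYPASS_MAX_SECONDS constants.
DEFAULT_BYPASS_MIN_SECONDS = 60
DEFAULT_BYPASS_MAX_SECONDS = 5 * 60

def RetryTimeInExpectedWindow(retry_ms, expected_bad_proxy):
  """retry_ms is a badProxies[i]['retry'] value in milliseconds since epoch."""
  low = expected_bad_proxy.get('retry_seconds_low', DEFAULT_BYPASS_MIN_SECONDS)
  high = expected_bad_proxy.get('retry_seconds_high',
                                DEFAULT_BYPASS_MAX_SECONDS)
  now = datetime.datetime.now()
  got_retry_time = datetime.datetime.fromtimestamp(int(retry_ms) / 1000)
  return (now + datetime.timedelta(seconds=low) <= got_retry_time <=
          now + datetime.timedelta(seconds=high))

# Example: a proxy bypassed by "bypass=3600" should report a retry time
# roughly an hour from now.
retry_ms = (time.time() + 3600) * 1000
assert RetryTimeInExpectedWindow(
    retry_ms, {'proxy': 'https://proxy.example.com:443',
               'retry_seconds_low': 3500,
               'retry_seconds_high': 3700})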

BUG=374397

Review URL: https://codereview.chromium.org/659333004

Cr-Commit-Position: refs/heads/master@{#300295}
parent 899f630a
@@ -39,6 +39,7 @@ class ChromeProxyDataSaving(benchmark.Benchmark):
   tag = 'data_saving'
   test = measurements.ChromeProxyDataSaving
   page_set = pagesets.Top20PageSet
+
   def CustomizeBrowserOptions(self, options):
     options.AppendExtraBrowserArgs('--enable-spdy-proxy-auth')
@@ -66,24 +67,35 @@ class ChromeProxyHeaderValidation(benchmark.Benchmark):
   test = measurements.ChromeProxyHeaders
   page_set = pagesets.Top20PageSet
 
 
 @benchmark.Enabled('android')
 class ChromeProxyClientVersion(benchmark.Benchmark):
   tag = 'client_version'
   test = measurements.ChromeProxyClientVersion
   page_set = pagesets.SyntheticPageSet
 
 
 @benchmark.Enabled('android')
 class ChromeProxyBypass(benchmark.Benchmark):
   tag = 'bypass'
   test = measurements.ChromeProxyBypass
   page_set = pagesets.BypassPageSet
 
 
+@benchmark.Enabled('android')
+class ChromeProxyFallback(benchmark.Benchmark):
+  tag = 'fallback'
+  test = measurements.ChromeProxyFallback
+  page_set = pagesets.FallbackPageSet
+
+
 @benchmark.Enabled('android')
 class ChromeProxyCorsBypass(benchmark.Benchmark):
   tag = 'bypass'
   test = measurements.ChromeProxyCorsBypass
   page_set = pagesets.CorsBypassPageSet
 
 
 @benchmark.Enabled('android')
 class ChromeProxyBlockOnce(benchmark.Benchmark):
   tag = 'block_once'
@@ -119,6 +131,13 @@ class ChromeProxyHTTPToDirectFallback(benchmark.Benchmark):
   page_set = pagesets.HTTPToDirectFallbackPageSet
 
 
+@benchmark.Enabled('android')
+class ChromeProxyExplicitBypass(benchmark.Benchmark):
+  tag = 'explicit-bypass'
+  test = measurements.ChromeProxyExplicitBypass
+  page_set = pagesets.ExplicitBypassPageSet
+
+
 @benchmark.Enabled('android')
 class ChromeProxySmoke(benchmark.Benchmark):
   tag = 'smoke'
......
@@ -112,8 +112,18 @@ class ChromeProxyBypass(ChromeProxyValidation):
     self._metrics.AddResultsForBypass(tab, results)
 
 
+class ChromeProxyFallback(ChromeProxyValidation):
+  """Correctness measurement for proxy fallback responses."""
+
+  def __init__(self):
+    super(ChromeProxyFallback, self).__init__(restart_after_each_page=True)
+
+  def AddResults(self, tab, results):
+    self._metrics.AddResultsForFallback(tab, results)
+
+
 class ChromeProxyCorsBypass(ChromeProxyValidation):
-  """Correctness measurement for bypass responses."""
+  """Correctness measurement for bypass responses for CORS requests."""
 
   def __init__(self):
     super(ChromeProxyCorsBypass, self).__init__(restart_after_each_page=True)
@@ -123,7 +133,7 @@ class ChromeProxyCorsBypass(ChromeProxyValidation):
     # finishes.
     tab.WaitForJavaScriptExpression('window.xhrRequestCompleted', 15000)
     super(ChromeProxyCorsBypass,
-          self).ValidateAndMeasurePag1Ge(page, tab, results)
+          self).ValidateAndMeasurePage(page, tab, results)
 
   def AddResults(self, tab, results):
     self._metrics.AddResultsForCorsBypass(tab, results)
@@ -163,7 +173,8 @@ _TEST_SERVER_DEFAULT_URL = 'http://' + _TEST_SERVER + '/default'
 #
 # The test server allow request to override response status, headers, and
 # body through query parameters. See GetResponseOverrideURL.
-def GetResponseOverrideURL(url, respStatus=0, respHeader="", respBody=""):
+def GetResponseOverrideURL(url=_TEST_SERVER_DEFAULT_URL, respStatus=0,
+                           respHeader="", respBody=""):
   """ Compose the request URL with query parameters to override
   the chromeproxy-test server response.
   """
@@ -201,7 +212,6 @@ class ChromeProxyHTTPFallbackProbeURL(ChromeProxyValidation):
     # Use the test server probe URL which returns the response
     # body as specified by respBody.
     probe_url = GetResponseOverrideURL(
-        _TEST_SERVER_DEFAULT_URL,
         respBody='not OK')
     options.AppendExtraBrowserArgs(
         '--data-reduction-proxy-probe-url=%s' % probe_url)
@@ -284,6 +294,49 @@ class ChromeProxyHTTPToDirectFallback(ChromeProxyValidation):
     self._metrics.AddResultsForHTTPToDirectFallback(tab, results)
 
 
+class ChromeProxyExplicitBypass(ChromeProxyValidation):
+  """Correctness measurement for explicit proxy bypasses.
+
+  In this test, the configured proxy is the chromeproxy-test server which
+  will send back a response without the expected Via header. Chrome is
+  expected to use the fallback proxy and add the configured proxy to the
+  bad proxy list.
+  """
+
+  def __init__(self):
+    super(ChromeProxyExplicitBypass, self).__init__(
+        restart_after_each_page=True)
+
+  def CustomizeBrowserOptions(self, options):
+    super(ChromeProxyExplicitBypass,
+          self).CustomizeBrowserOptions(options)
+    options.AppendExtraBrowserArgs('--ignore-certificate-errors')
+    options.AppendExtraBrowserArgs(
+        '--spdy-proxy-auth-origin=http://%s' % _TEST_SERVER)
+    options.AppendExtraBrowserArgs(
+        '--spdy-proxy-auth-value=%s' % _FAKE_PROXY_AUTH_VALUE)
+
+  def AddResults(self, tab, results):
+    bad_proxies = [{
+        'proxy': _TEST_SERVER + ':80',
+        'retry_seconds_low': self._page.bypass_seconds_low,
+        'retry_seconds_high': self._page.bypass_seconds_high
+    }]
+    if self._page.num_bypassed_proxies == 2:
+      bad_proxies.append({
+          'proxy': self._metrics.effective_proxies['fallback'],
+          'retry_seconds_low': self._page.bypass_seconds_low,
+          'retry_seconds_high': self._page.bypass_seconds_high
+      })
+    else:
+      # Even if the test page only causes the primary proxy to be bypassed,
+      # Chrome will attempt to fetch the favicon for the test server through
+      # the data reduction proxy, which will cause a "block=0" bypass.
+      bad_proxies.append({'proxy': self._metrics.effective_proxies['fallback']})
+
+    self._metrics.AddResultsForExplicitBypass(tab, results, bad_proxies)
+
+
 class ChromeProxySmoke(ChromeProxyValidation):
   """Smoke measurement for basic chrome proxy correctness."""
......
@@ -169,6 +169,11 @@ class ChromeProxyMetric(network_metrics.NetworkMetric):
     results.AddValue(scalar.ScalarValue(
         results.current_page, 'version_test', 'count', 1))
 
+  def ProxyListForDev(self, proxies):
+    return [self.effective_proxies['proxy-dev']
+            if proxy == self.effective_proxies['proxy']
+            else proxy for proxy in proxies]
+
   def IsProxyBypassed(self, tab):
     """Get whether all configured proxies are bypassed.
@@ -191,8 +196,7 @@ class ChromeProxyMetric(network_metrics.NetworkMetric):
     proxies = [self.effective_proxies['proxy'],
               self.effective_proxies['fallback']]
     proxies.sort()
-    proxies_dev = [self.effective_proxies['proxy-dev'],
-                   self.effective_proxies['fallback']]
+    proxies_dev = self.ProxyListForDev(proxies)
     proxies_dev.sort()
     if bad_proxies == proxies:
       return True, proxies
@@ -200,38 +204,54 @@ class ChromeProxyMetric(network_metrics.NetworkMetric):
       return True, proxies_dev
     return False, []
 
-  @staticmethod
-  def VerifyBadProxies(
-      badProxies, expected_proxies,
-      retry_seconds_low = DEFAULT_BYPASS_MIN_SECONDS,
-      retry_seconds_high = DEFAULT_BYPASS_MAX_SECONDS):
-    """Verify the bad proxy list and their retry times are expected. """
-    if not badProxies or (len(badProxies) != len(expected_proxies)):
-      return False
-
-    # Check all expected proxies.
-    proxies = [p['proxy'] for p in badProxies]
-    expected_proxies.sort()
-    proxies.sort()
-    if not expected_proxies == proxies:
-      raise ChromeProxyMetricException, (
-          'Bad proxies: got %s want %s' % (
-              str(badProxies), str(expected_proxies)))
-
-    # Check retry time
-    for p in badProxies:
+  def VerifyBadProxies(self, bad_proxies, expected_bad_proxies):
+    """Verify the bad proxy list and their retry times are expected.
+
+    Args:
+      bad_proxies: the list of actual bad proxies and their retry times.
+      expected_bad_proxies: a list of dictionaries in the form:
+
+          {'proxy': <proxy origin>,
+           'retry_seconds_low': <minimum bypass duration in seconds>,
+           'retry_seconds_high': <maximum bypass duration in seconds>}
+
+          If an element in the list is missing either the 'retry_seconds_low'
+          entry or the 'retry_seconds_high' entry, the default bypass minimum
+          and maximum durations respectively will be used for that element.
+    """
+    if not bad_proxies:
+      bad_proxies = []
+
+    # Check that each of the proxy origins and retry times match.
+    for bad_proxy, expected_bad_proxy in map(None, bad_proxies,
+                                             expected_bad_proxies):
+      # Check if the proxy origins match, allowing for the proxy-dev origin in
+      # the place of the HTTPS proxy origin.
+      if (bad_proxy['proxy'] != expected_bad_proxy['proxy'] and
+          bad_proxy['proxy'] != expected_bad_proxy['proxy'].replace(
+              self.effective_proxies['proxy'],
+              self.effective_proxies['proxy-dev'])):
+        raise ChromeProxyMetricException, (
+            'Actual and expected bad proxies should match: %s vs. %s' % (
+                str(bad_proxy), str(expected_bad_proxy)))
+
+      # Check that the retry times match.
+      retry_seconds_low = expected_bad_proxy.get('retry_seconds_low',
+                                                 DEFAULT_BYPASS_MIN_SECONDS)
+      retry_seconds_high = expected_bad_proxy.get('retry_seconds_high',
+                                                  DEFAULT_BYPASS_MAX_SECONDS)
+
       retry_time_low = (datetime.datetime.now() +
                         datetime.timedelta(seconds=retry_seconds_low))
       retry_time_high = (datetime.datetime.now() +
                         datetime.timedelta(seconds=retry_seconds_high))
-      got_retry_time = datetime.datetime.fromtimestamp(int(p['retry'])/1000)
+      got_retry_time = datetime.datetime.fromtimestamp(
+          int(bad_proxy['retry'])/1000)
       if not ProxyRetryTimeInRange(
           got_retry_time, retry_time_low, retry_time_high):
         raise ChromeProxyMetricException, (
             'Bad proxy %s retry time (%s) should be within range (%s-%s).' % (
-                p['proxy'], str(got_retry_time), str(retry_time_low),
+                bad_proxy['proxy'], str(got_retry_time), str(retry_time_low),
                 str(retry_time_high)))
-    return True
 
   def VerifyAllProxiesBypassed(self, tab):
     if tab:
@@ -243,7 +263,8 @@ class ChromeProxyMetric(network_metrics.NetworkMetric):
       if not is_bypassed:
         raise ChromeProxyMetricException, (
             'Chrome proxy should be bypassed. proxy info: %s' % info)
-      self.VerifyBadProxies(info['badProxies'], expected_bad_proxies)
+      self.VerifyBadProxies(info['badProxies'],
+                            [{'proxy': p} for p in expected_bad_proxies])
 
   def AddResultsForBypass(self, tab, results):
     bypass_count = 0
@@ -259,6 +280,31 @@ class ChromeProxyMetric(network_metrics.NetworkMetric):
     results.AddValue(scalar.ScalarValue(
         results.current_page, 'bypass', 'count', bypass_count))
 
+  def AddResultsForFallback(self, tab, results):
+    via_proxy_count = 0
+    bypass_count = 0
+    for resp in self.IterResponses(tab):
+      if resp.HasChromeProxyViaHeader():
+        via_proxy_count += 1
+      elif resp.ShouldHaveChromeProxyViaHeader():
+        bypass_count += 1
+
+    if bypass_count != 1:
+      raise ChromeProxyMetricException, (
+          'Only the triggering response should have bypassed all proxies.')
+
+    info = GetProxyInfoFromNetworkInternals(tab)
+    if not 'enabled' in info or not info['enabled']:
+      raise ChromeProxyMetricException, (
+          'Chrome proxy should be enabled. proxy info: %s' % info)
+    self.VerifyBadProxies(info['badProxies'],
+                          [{'proxy': self.effective_proxies['proxy']}])
+
+    results.AddValue(scalar.ScalarValue(
+        results.current_page, 'via_proxy', 'count', via_proxy_count))
+    results.AddValue(scalar.ScalarValue(
+        results.current_page, 'bypass', 'count', bypass_count))
+
   def AddResultsForCorsBypass(self, tab, results):
     eligible_response_count = 0
     bypass_count = 0
@@ -352,11 +398,6 @@ class ChromeProxyMetric(network_metrics.NetworkMetric):
         'Safebrowsing failed (count=%d, safebrowsing_count=%d)\n' % (
             count, safebrowsing_count))
 
-  def ProxyListForDev(self, proxies):
-    return [self.effective_proxies['proxy-dev']
-            if proxy == self.effective_proxies['proxy']
-            else proxy for proxy in proxies]
-
   def VerifyProxyInfo(self, tab, expected_proxies, expected_bad_proxies):
     info = GetProxyInfoFromNetworkInternals(tab)
     if not 'enabled' in info or not info['enabled']:
@@ -395,3 +436,22 @@ class ChromeProxyMetric(network_metrics.NetworkMetric):
     self.VerifyAllProxiesBypassed(tab)
     results.AddValue(scalar.ScalarValue(
         results.current_page, 'direct_fallback', 'boolean', True))
+
+  def AddResultsForExplicitBypass(self, tab, results, expected_bad_proxies):
+    """Verify results for an explicit bypass test.
+
+    Args:
+      tab: the tab for the test.
+      results: the results object to add the results values to.
+      expected_bad_proxies: A list of dictionary objects representing
+          expected bad proxies and their expected retry time windows.
+          See the definition of VerifyBadProxies for details.
+    """
+    info = GetProxyInfoFromNetworkInternals(tab)
+    if not 'enabled' in info or not info['enabled']:
+      raise ChromeProxyMetricException, (
+          'Chrome proxy should be enabled. proxy info: %s' % info)
+    self.VerifyBadProxies(info['badProxies'],
+                          expected_bad_proxies)
+    results.AddValue(scalar.ScalarValue(
+        results.current_page, 'explicit_bypass', 'boolean', True))
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from integration_tests import chrome_proxy_measurements as measurements
from integration_tests import chrome_proxy_metrics as metrics
from telemetry.page import page as page_module
from telemetry.page import page_set as page_set_module


class ExplicitBypassPage(page_module.Page):
  """A test page for the explicit bypass tests.

  Attributes:
    num_bypassed_proxies: The number of proxies that should be bypassed as a
        direct result of loading this test page. 1 indicates that only the
        primary data reduction proxy should be bypassed, while 2 indicates
        that both the primary and fallback data reduction proxies should be
        bypassed.
    bypass_seconds_low: The minimum number of seconds that the bypass
        triggered by loading this page should last.
    bypass_seconds_high: The maximum number of seconds that the bypass
        triggered by loading this page should last.
  """

  def __init__(self,
               url,
               page_set,
               num_bypassed_proxies,
               bypass_seconds_low,
               bypass_seconds_high):
    super(ExplicitBypassPage, self).__init__(url=url, page_set=page_set)
    self.num_bypassed_proxies = num_bypassed_proxies
    self.bypass_seconds_low = bypass_seconds_low
    self.bypass_seconds_high = bypass_seconds_high


class ExplicitBypassPageSet(page_set_module.PageSet):
  """ Chrome proxy test sites """

  def __init__(self):
    super(ExplicitBypassPageSet, self).__init__()

    # Test page for "Chrome-Proxy: bypass=0".
    self.AddPage(ExplicitBypassPage(
        url=measurements.GetResponseOverrideURL(
            respHeader='{"Chrome-Proxy":["bypass=0"],'
                       '"Via":["1.1 Chrome-Compression-Proxy"]}'),
        page_set=self,
        num_bypassed_proxies=1,
        bypass_seconds_low=metrics.DEFAULT_BYPASS_MIN_SECONDS,
        bypass_seconds_high=metrics.DEFAULT_BYPASS_MAX_SECONDS))

    # Test page for "Chrome-Proxy: bypass=3600".
    self.AddPage(ExplicitBypassPage(
        url=measurements.GetResponseOverrideURL(
            respHeader='{"Chrome-Proxy":["bypass=3600"],'
                       '"Via":["1.1 Chrome-Compression-Proxy"]}'),
        page_set=self,
        num_bypassed_proxies=1,
        bypass_seconds_low=3600,
        bypass_seconds_high=3600))

    # Test page for "Chrome-Proxy: block=0".
    self.AddPage(ExplicitBypassPage(
        url=measurements.GetResponseOverrideURL(
            respHeader='{"Chrome-Proxy":["block=0"],'
                       '"Via":["1.1 Chrome-Compression-Proxy"]}'),
        page_set=self,
        num_bypassed_proxies=2,
        bypass_seconds_low=metrics.DEFAULT_BYPASS_MIN_SECONDS,
        bypass_seconds_high=metrics.DEFAULT_BYPASS_MAX_SECONDS))

    # Test page for "Chrome-Proxy: block=3600".
    self.AddPage(ExplicitBypassPage(
        url=measurements.GetResponseOverrideURL(
            respHeader='{"Chrome-Proxy":["block=3600"],'
                       '"Via":["1.1 Chrome-Compression-Proxy"]}'),
        page_set=self,
        num_bypassed_proxies=2,
        bypass_seconds_low=3600,
        bypass_seconds_high=3600))
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from telemetry.page import page as page_module
from telemetry.page import page_set as page_set_module


class FallbackPage(page_module.Page):

  def __init__(self, url, page_set):
    super(FallbackPage, self).__init__(url=url, page_set=page_set)


class FallbackPageSet(page_set_module.PageSet):
  """ Chrome proxy test sites """

  def __init__(self):
    super(FallbackPageSet, self).__init__()

    urls_list = [
        'http://check.googlezip.net/fallback',
    ]

    for url in urls_list:
      self.AddPage(FallbackPage(url, self))