Commit f7ff9cf8 authored by Tarun Bansal, committed by Commit Bot

Remove deprecated data reduction proxy webdriver tests

The tests are being removed completely instead of
just annotating them to enable them for only past milestones.
This is simpler since it removes the dependency
on the server being enabled and working.

Bug: 1048736
Change-Id: Ib20a3c48a2ee2e6a90888517977cc45444ebf7f1
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2109631
Reviewed-by: Robert Ogden <robertogden@chromium.org>
Commit-Queue: Tarun Bansal <tbansal@chromium.org>
Cr-Commit-Position: refs/heads/master@{#751755}
parent 76dfc1d5
This diff is collapsed.
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import common
from common import TestDriver
from common import IntegrationTest
from decorators import ChromeVersionEqualOrAfterM
from decorators import SkipIfForcedBrowserArg
import json
class ClientConfig(IntegrationTest):
  """Integration tests for the data reduction proxy client config fetch."""

  # Substrings that must all appear in a serialized netlog event for it to
  # count as a client config fetch request carrying the x-client-data
  # (variations) header.
  _VARIATION_EVENT_MARKERS = (
      'datasaver.',
      '.googleapis.com',
      'clientConfigs',
      'headers',
      'accept-encoding',
      'x-client-data',
  )

  def _CountVariationHeaderEvents(self, net_log):
    """Counts netlog events that look like a client config fetch request
    containing the x-client-data header.

    Args:
      net_log: parsed netlog dict, as returned by
        TestDriver.StopAndGetNetLog().

    Returns:
      The number of matching events.
    """
    count = 0
    for event in net_log["events"]:
      dumped_event = json.dumps(event)
      if all(marker in dumped_event
             for marker in self._VARIATION_EVENT_MARKERS):
        count += 1
    return count

  # Ensure client config is fetched at the start of the Chrome session, and
  # the session ID is correctly set in the chrome-proxy request header.
  def testClientConfig(self):
    with TestDriver() as t:
      t.AddChromeArg('--enable-spdy-proxy-auth')
      t.SleepUntilHistogramHasEntry(
          'DataReductionProxy.ConfigService.FetchResponseCode')
      t.LoadURL('http://check.googlezip.net/test.html')
      responses = t.GetHTTPResponses()
      self.assertEqual(2, len(responses))
      for response in responses:
        # Verify that the proxy server honored the session ID.
        self.assertHasProxyHeaders(response)
        self.assertEqual(200, response.status)

  # Ensure Chrome uses a direct connection when no valid client config is
  # given.
  @SkipIfForcedBrowserArg('data-reduction-proxy-config-url')
  def testNoClientConfigUseDirect(self):
    with TestDriver() as t:
      t.AddChromeArg('--enable-spdy-proxy-auth')
      # The test server won't respond with a valid client config.
      t.UseNetLog()
      t.AddChromeArg('--data-reduction-proxy-config-url='
                     'https://chromeproxy-test.appspot.com')
      t.SleepUntilHistogramHasEntry(
          'DataReductionProxy.ConfigService.FetchResponseCode')
      t.LoadURL('http://check.googlezip.net/test.html')
      responses = t.GetHTTPResponses()
      self.assertEqual(2, len(responses))
      for response in responses:
        self.assertNotHasChromeProxyViaHeader(response)

  # Ensure client config is fetched at the start of the Chrome session, and
  # the variations ID is set in the request.
  # Disabled on android because the net log is not copied yet. crbug.com/761507
  @ChromeVersionEqualOrAfterM(62)
  def testClientConfigVariationsHeader(self):
    with TestDriver() as t:
      t.UseNetLog()
      t.AddChromeArg('--enable-spdy-proxy-auth')
      # Force set the variations ID, so they are sent along with the client
      # config fetch request.
      t.AddChromeArg('--force-variation-ids=42')
      t.LoadURL('http://check.googlezip.net/test.html')
      # Look for the request made to the data saver client config server.
      variation_header_count = self._CountVariationHeaderEvents(
          t.StopAndGetNetLog())
      # Variation IDs are set. x-client-data should be present in the request
      # headers.
      self.assertLessEqual(1, variation_header_count)

  # Ensure client config is fetched at the start of the Chrome session, and
  # the variations ID is not set in the request.
  # Disabled on android because the net log is not copied yet. crbug.com/761507
  @ChromeVersionEqualOrAfterM(62)
  def testClientConfigNoVariationsHeader(self):
    with TestDriver() as t:
      t.UseNetLog()
      t.AddChromeArg('--enable-spdy-proxy-auth')
      t.LoadURL('http://check.googlezip.net/test.html')
      # Look for the request made to the data saver client config server.
      variation_header_count = self._CountVariationHeaderEvents(
          t.StopAndGetNetLog())
      # Variation IDs are not set. x-client-data should not be present in the
      # request headers.
      self.assertEqual(0, variation_header_count)
# Allow invoking this test file directly from the command line.
if __name__ == '__main__':
  IntegrationTest.RunAllTests()
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import common
from common import TestDriver
from common import IntegrationTest
class Fallback(IntegrationTest):
  """Tests that Chrome falls back from the secure proxy to the HTTP proxy,
  and from the HTTP proxy to a direct connection, when proxies fail."""

  # Ensure that when a carrier blocks using the secure proxy, requests fall
  # back to the HTTP proxy server.
  def testSecureProxyProbeFallback(self):
    with TestDriver() as driver:
      driver.AddChromeArg('--enable-spdy-proxy-auth')
      # Set the secure proxy check URL to the google.com favicon, which will be
      # interpreted as a secure proxy check failure since the response body is
      # not "OK". The google.com favicon is used because it will load reliably
      # fast, and there have been problems with chromeproxy-test.appspot.com
      # being slow and causing tests to flake.
      driver.AddChromeArg(
          '--data-reduction-proxy-secure-proxy-check-url='
          'http://www.google.com/favicon.ico')
      # Start chrome to begin the secure proxy check.
      driver.LoadURL('about:blank')
      probe_recorded = driver.SleepUntilHistogramHasEntry(
          "DataReductionProxy.ProbeURL")
      self.assertTrue(probe_recorded)
      driver.LoadURL('http://check.googlezip.net/test.html')
      responses = driver.GetHTTPResponses()
      self.assertNotEqual(0, len(responses))
      # Verify that DataReductionProxy.ProbeURL histogram has one entry in
      # FAILED_PROXY_DISABLED, which is bucket=1.
      histogram = driver.GetBrowserHistogram('DataReductionProxy.ProbeURL')
      self.assertGreaterEqual(histogram['count'], 1)
      self.assertGreaterEqual(histogram['buckets'][0]['low'], 1)
      for response in responses:
        self.assertHasProxyHeaders(response)
        # TODO(rajendrant): Fix the correct protocol received.
        # self.assertEqual(u'http/2+quic/43', response.protocol)

  # DataSaver uses a https proxy by default; if that fails it will fall back
  # to a http proxy, and if that fails, it will fall back to a direct
  # connection.
  def testHTTPToDirectFallback(self):
    with TestDriver() as driver:
      driver.AddChromeArg('--enable-spdy-proxy-auth')
      driver.AddChromeArg('--data-reduction-proxy-http-proxies='
                          'http://nonexistent.googlezip.net;'
                          'http://compress.googlezip.net')
      # All responses here should come over port 80 (the HTTP proxy).
      driver.LoadURL('http://check.googlezip.net/fallback/')
      responses = driver.GetHTTPResponses()
      self.assertNotEqual(0, len(responses))
      for response in responses:
        self.assertEqual(80, response.port)
      # After the block page, no response should carry the proxy via header.
      driver.LoadURL('http://check.googlezip.net/block/')
      responses = driver.GetHTTPResponses()
      self.assertNotEqual(0, len(responses))
      for response in responses:
        self.assertNotHasChromeProxyViaHeader(response)
# Allow invoking this test file directly from the command line.
if __name__ == '__main__':
  IntegrationTest.RunAllTests()
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import common
from common import TestDriver
from common import IntegrationTest
class HTML5(IntegrationTest):
  """Smoke test that an HTML5-capable page loads through the proxy."""

  # This test site has a div with id="pointsPanel" that is rendered if the
  # browser is capable of using HTML5.
  def testHTML5(self):
    # The main page may be reached under either of these URLs.
    main_page_urls = ('http://html5test.com/',
                      'http://html5test.com/index.html')
    with TestDriver() as t:
      t.AddChromeArg('--enable-spdy-proxy-auth')
      t.LoadURL('http://html5test.com/')
      t.WaitForJavascriptExpression(
          'document.getElementsByClassName("pointsPanel")', 15)
      checked_main_page = False
      # Site has a lot on it, just check the main page.
      for response in t.GetHTTPResponses():
        if response.url in main_page_urls:
          self.assertHasProxyHeaders(response)
          checked_main_page = True
      if not checked_main_page:
        self.fail("Did not check any page!")
# Allow invoking this test file directly from the command line.
if __name__ == '__main__':
  IntegrationTest.RunAllTests()
This diff is collapsed.
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import common
from common import TestDriver
from common import IntegrationTest
from decorators import ChromeVersionBetweenInclusiveM
from decorators import ChromeVersionEqualOrAfterM
from emulation_server import BlackHoleHandler
from emulation_server import InvalidTLSHandler
from emulation_server import TCPResetHandler
from emulation_server import TLSResetHandler
class ProxyConnection(IntegrationTest):
  """Tests that Chrome bypasses the data reduction proxy when the connection
  to the proxy is broken in various ways (invalid TLS data, TCP reset,
  TLS reset, blackholed connection)."""

  def VerifyWarmupHistogram(self, test_driver, is_secure_proxy):
    """Asserts that a successful warmup-fetch-callback histogram was recorded.

    Args:
      test_driver: the TestDriver for the running Chrome session.
      is_secure_proxy: True to check the SecureProxy histogram variant,
        False for the InsecureProxy variant.
    """
    is_histogram_found = False
    # Either the Core or the NonCore variant may have been recorded.
    for histogram_part in ['Core', 'NonCore']:
      histogram_name = 'DataReductionProxy.WarmupURLFetcherCallback.' + \
        'SuccessfulFetch.%s.%s' % (
          'SecureProxy' if is_secure_proxy else 'InsecureProxy',
          histogram_part)
      histogram = test_driver.GetBrowserHistogram(histogram_name)
      if histogram:
        self.assertLessEqual(1, histogram['count'])
        is_histogram_found = True
    self.assertTrue(is_histogram_found)

  def _AssertTestPageLoadedDirect(self, test_driver):
    """Loads the check page and asserts both responses bypassed the proxy.

    Expect responses with a bypass on a bad proxy. If the proxy was not
    bypassed, the length assertion fails because there are no responses.
    """
    test_driver.LoadURL('http://check.googlezip.net/test.html')
    responses = test_driver.GetHTTPResponses()
    self.assertEqual(2, len(responses))
    for response in responses:
      self.assertNotHasChromeProxyViaHeader(response)

  @ChromeVersionEqualOrAfterM(63)
  def testTLSInjectionAfterHandshake(self):
    port = common.GetOpenPort()
    with TestDriver() as t:
      t.AddChromeArg('--enable-spdy-proxy-auth')
      # The server should be 127.0.0.1, not localhost because the two are
      # treated differently in Chrome internals. Using localhost invalidates
      # the test.
      t.AddChromeArg(
          '--data-reduction-proxy-http-proxies=https://127.0.0.1:%d' % port)
      t.AddChromeArg(
          '--force-fieldtrials=DataReductionProxyConfigService/Disabled')
      t.UseEmulationServer(InvalidTLSHandler, port=port)
      self._AssertTestPageLoadedDirect(t)
      self.assertTrue(
          t.SleepUntilHistogramHasEntry('DataReductionProxy.WarmupURL.NetError'))
      self.VerifyWarmupHistogram(t, True)

  @ChromeVersionEqualOrAfterM(74)
  def testTCPReset(self):
    port = common.GetOpenPort()
    with TestDriver() as t:
      t.AddChromeArg('--enable-spdy-proxy-auth')
      # The server should be 127.0.0.1, not localhost because the two are
      # treated differently in Chrome internals. Using localhost invalidates
      # the test.
      t.UseNetLog()
      t.AddChromeArg(
          '--data-reduction-proxy-http-proxies=http://127.0.0.1:%d' % port)
      t.AddChromeArg(
          '--force-fieldtrials=DataReductionProxyConfigService/Disabled')
      t.UseEmulationServer(TCPResetHandler, port=port)
      self._AssertTestPageLoadedDirect(t)
      self.assertTrue(
          t.SleepUntilHistogramHasEntry('DataReductionProxy.WarmupURL.NetError',
                                        sleep_intervals=10))
      self.VerifyWarmupHistogram(t, False)

  @ChromeVersionEqualOrAfterM(63)
  def testTLSReset(self):
    port = common.GetOpenPort()
    with TestDriver() as t:
      t.AddChromeArg('--enable-spdy-proxy-auth')
      t.AddChromeArg('--allow-insecure-localhost')
      # The server should be 127.0.0.1, not localhost because the two are
      # treated differently in Chrome internals. Using localhost invalidates
      # the test.
      t.AddChromeArg(
          '--data-reduction-proxy-http-proxies=https://127.0.0.1:%d' % port)
      t.AddChromeArg(
          '--force-fieldtrials=DataReductionProxyConfigService/Disabled')
      t.UseEmulationServer(TLSResetHandler, port=port)
      self._AssertTestPageLoadedDirect(t)

  @ChromeVersionEqualOrAfterM(74)
  def testTCPBlackhole(self):
    port = common.GetOpenPort()
    with TestDriver() as t:
      t.UseNetLog()
      t.AddChromeArg('--enable-spdy-proxy-auth')
      t.EnableChromeFeature(
        'DataReductionProxyRobustConnection<DataReductionProxyRobustConnection')
      t.AddChromeArg('--force-fieldtrials='
                     'DataReductionProxyRobustConnection/Enabled')
      t.AddChromeArg('--force-fieldtrial-params='
                     'DataReductionProxyRobustConnection.Enabled:'
                     'warmup_fetch_callback_enabled/true')
      t.AddChromeArg('--force-effective-connection-type=4G')
      # The server should be 127.0.0.1, not localhost because the two are
      # treated differently in Chrome internals. Using localhost invalidates
      # the test.
      t.AddChromeArg(
          '--data-reduction-proxy-http-proxies=http://127.0.0.1:%d' % port)
      t.UseEmulationServer(BlackHoleHandler, port=port)
      # Start Chrome and wait for the warmup fetcher timeout (30 seconds).
      t.LoadURL('data:,')
      self.assertTrue(
          t.SleepUntilHistogramHasEntry('DataReductionProxy.WarmupURL.NetError',
                                        sleep_intervals=40))
      # Check the WarmupURL Callback was called.
      self.VerifyWarmupHistogram(t, False)
      # Verify DRP was not used.
      self._AssertTestPageLoadedDirect(t)
# Allow invoking this test file directly from the command line.
if __name__ == '__main__':
  IntegrationTest.RunAllTests()
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import time
import common
from common import TestDriver
from common import IntegrationTest
from decorators import ChromeVersionEqualOrAfterM
class Quic(IntegrationTest):
  """Tests DataSaver behavior when QUIC is enabled."""

  def _EnableDataSaverWithQuic(self, t):
    """Adds the Chrome args that enable DataSaver with QUIC to |t|."""
    t.AddChromeArg('--enable-spdy-proxy-auth')
    # Enable QUIC (including for non-core HTTPS proxies).
    t.AddChromeArg('--enable-quic')
    t.AddChromeArg('--force-fieldtrials=DataReductionProxyUseQuic/Enabled')
    t.AddChromeArg('--force-fieldtrial-params='
      'DataReductionProxyUseQuic.Enabled:enable_quic_non_core_proxies/true')
    # Enable usage of QUIC for non-core proxies via switch for older versions
    # of Chrome (M-59 and prior).
    t.AddChromeArg('--data-reduction-proxy-enable-quic-on-non-core-proxies')

  # Ensure Chrome uses DataSaver when QUIC is enabled. This test should pass
  # even if QUIC is disabled on the server side. In that case, Chrome should
  # fallback to using the non-QUIC proxies.
  def testCheckPageWithQuicProxy(self):
    with TestDriver() as t:
      self._EnableDataSaverWithQuic(t)
      t.LoadURL('http://check.googlezip.net/test.html')
      responses = t.GetHTTPResponses()
      self.assertEqual(2, len(responses))
      for response in responses:
        self.assertHasProxyHeaders(response)

  # Ensure Chrome uses QUIC DataSaver proxy when QUIC is enabled. This test
  # may fail if QUIC is disabled on the server side.
  @ChromeVersionEqualOrAfterM(76)
  def testCheckPageWithQuicProxyTransaction(self):
    with TestDriver() as t:
      self._EnableDataSaverWithQuic(t)
      t.LoadURL('http://check.googlezip.net/test.html')
      responses = t.GetHTTPResponses()
      self.assertEqual(2, len(responses))
      for response in responses:
        self.assertHasProxyHeaders(response)
      t.SleepUntilHistogramHasEntry('PageLoad.Clients.DataReductionProxy.'
                                    'ParseTiming.NavigationToParseStart')
      # Verify that histogram DataReductionProxy.Quic.ProxyStatus has at
      # least 1 sample. This sample must be in bucket 0
      # (QUIC_PROXY_STATUS_AVAILABLE).
      proxy_status = t.GetHistogram('DataReductionProxy.Quic.ProxyStatus')
      self.assertLessEqual(1, proxy_status['count'])
      self.assertEqual(0, proxy_status['sum'])
# Allow invoking this test file directly from the command line.
if __name__ == '__main__':
  IntegrationTest.RunAllTests()
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import time
import common
from common import TestDriver
from common import IntegrationTest
from decorators import Slow
class ReenableAfterBypass(IntegrationTest):
  """Tests for ensuring that DRPs are reenabled after bypasses expire.

  These tests take a very long time to run since they wait for their
  respective bypasses to expire. These tests have been separated out into
  their own file in order to make it easier to run these tests separately
  from the others.
  """

  # Ordinary page used to observe whether the proxy is currently in use.
  _TEST_URL = 'http://check.googlezip.net/test.html'

  def _LoadAndCheckProxyUsed(self, test_driver, url, expect_proxied):
    """Loads |url| and asserts whether its responses went through the proxy.

    Args:
      test_driver: the running TestDriver.
      url: the URL to load.
      expect_proxied: if True, every response must carry the proxy headers;
        if False, no response may carry the Chrome-Proxy via header.
    """
    test_driver.LoadURL(url)
    responses = test_driver.GetHTTPResponses()
    self.assertNotEqual(0, len(responses))
    for response in responses:
      if expect_proxied:
        self.assertHasProxyHeaders(response)
      else:
        self.assertNotHasChromeProxyViaHeader(response)

  # Verify that longer bypasses triggered by the Data Reduction Proxy only
  # last as long as they're supposed to, and that the proxy is used once
  # again after the bypass has ended.
  @Slow
  def testReenableAfterSetBypass(self):
    with TestDriver() as test_driver:
      test_driver.AddChromeArg('--enable-spdy-proxy-auth')
      # Load URL that triggers a 20-second bypass of all proxies.
      self._LoadAndCheckProxyUsed(
          test_driver, 'http://check.googlezip.net/block20/', False)
      # Verify that the Data Reduction Proxy is still bypassed.
      self._LoadAndCheckProxyUsed(test_driver, self._TEST_URL, False)
      # Verify that the Data Reduction Proxy is no longer bypassed after 20
      # seconds.
      time.sleep(20)
      self._LoadAndCheckProxyUsed(test_driver, self._TEST_URL, True)

  # Verify that when the Data Reduction Proxy responds with the "block=0"
  # directive, Chrome bypasses all proxies for the next 1-5 minutes.
  @Slow
  def testReenableAfterBypass(self):
    with TestDriver() as test_driver:
      test_driver.AddChromeArg('--enable-spdy-proxy-auth')
      # Load URL that triggers a bypass of all proxies that lasts between 1
      # and 5 minutes.
      self._LoadAndCheckProxyUsed(
          test_driver, 'http://check.googlezip.net/block/', False)
      # Verify that the Data Reduction Proxy is still bypassed after 30
      # seconds.
      time.sleep(30)
      self._LoadAndCheckProxyUsed(test_driver, self._TEST_URL, False)
      # Verify that the Data Reduction Proxy is no longer bypassed 5 minutes
      # after the original bypass was triggered.
      time.sleep(60 * 4 + 30)
      self._LoadAndCheckProxyUsed(test_driver, self._TEST_URL, True)
# Allow invoking this test file directly from the command line.
if __name__ == '__main__':
  IntegrationTest.RunAllTests()
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import common
from common import TestDriver
from common import IntegrationTest
from decorators import AndroidOnly
from decorators import NotAndroid
from decorators import ChromeVersionBeforeM
from decorators import ChromeVersionEqualOrAfterM
from selenium.common.exceptions import TimeoutException
class SafeBrowsing(IntegrationTest):
  """Tests SafeBrowsing interstitial behavior with DataSaver enabled."""

  # URL that serves known SafeBrowsing test malware content.
  _MALWARE_URL = 'http://testsafebrowsing.appspot.com/s/malware.html'

  @AndroidOnly
  @ChromeVersionBeforeM(73)
  def testSafeBrowsingOn(self):
    with TestDriver() as driver:
      driver.AddChromeArg('--enable-spdy-proxy-auth')
      # Starting in M63 LoadURL will timeout when the safebrowsing
      # interstitial appears.
      try:
        driver.LoadURL(self._MALWARE_URL)
        responses = driver.GetHTTPResponses()
        self.assertEqual(0, len(responses))
      except TimeoutException:
        pass

  @AndroidOnly
  @ChromeVersionEqualOrAfterM(74)
  def testSafeBrowsingMalwareWithOnDeviceChecksOn(self):
    with TestDriver() as driver:
      driver.AddChromeArg('--enable-spdy-proxy-auth')
      # Starting in M63 LoadURL will timeout when the safebrowsing
      # interstitial appears.
      try:
        driver.LoadURL(self._MALWARE_URL)
        responses = driver.GetHTTPResponses()
        self.assertEqual(0, len(responses))
      except TimeoutException:
        # Verify that on device safebrowsing records unsafe for mainframe
        # request at bucket=0.
        unsafe_resources = driver.GetBrowserHistogram(
            'SB2.ResourceTypes2.Unsafe')
        self.assertEqual(1, unsafe_resources['count'])
        self.assertEqual(1, unsafe_resources['buckets'][0]['count'])
# Allow invoking this test file directly from the command line.
if __name__ == '__main__':
  IntegrationTest.RunAllTests()
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import common
import time
from common import TestDriver
from common import IntegrationTest
from decorators import NotAndroid
from decorators import ChromeVersionBeforeM
from decorators import ChromeVersionEqualOrAfterM
import json
class Smoke(IntegrationTest):
  """Smoke tests for basic DataSaver behavior: incognito, holdback, normal
  mode, pingback, page IDs, block pages, and image compression."""

  # Ensure Chrome does not use DataSaver in Incognito mode.
  # Clank does not honor the --incognito flag.
  @NotAndroid
  def testCheckPageWithIncognito(self):
    with TestDriver() as t:
      t.AddChromeArg('--enable-spdy-proxy-auth')
      t.AddChromeArg('--incognito')
      t.LoadURL('http://check.googlezip.net/test.html')
      responses = t.GetHTTPResponses()
      self.assertNotEqual(0, len(responses))
      for response in responses:
        self.assertNotHasChromeProxyViaHeader(response)

  # Ensure Chrome does not use DataSaver when holdback is enabled.
  @ChromeVersionBeforeM(74)
  def testCheckPageWithHoldback(self):
    with TestDriver() as t:
      t.AddChromeArg('--enable-spdy-proxy-auth')
      t.AddChromeArg('--force-fieldtrials=DataCompressionProxyHoldback/'
                     'Enabled')
      t.LoadURL('http://check.googlezip.net/test.html')
      responses = t.GetHTTPResponses()
      self.assertNotEqual(0, len(responses))
      num_chrome_proxy_request_headers = 0
      for response in responses:
        self.assertNotHasChromeProxyViaHeader(response)
        if 'chrome-proxy' in response.request_headers:
          num_chrome_proxy_request_headers += 1
      # DataSaver histograms must still be logged.
      t.SleepUntilHistogramHasEntry('PageLoad.Clients.DataReductionProxy.'
                                    'ParseTiming.NavigationToParseStart')
      self.assertEqual(num_chrome_proxy_request_headers, 0)
      # Ensure that Chrome did not attempt to use DataSaver and got a bypass.
      histogram = t.GetHistogram('DataReductionProxy.BypassedBytes.'
                                 'Status502HttpBadGateway', 5)
      self.assertEqual(histogram, {})
      histogram = t.GetHistogram('DataReductionProxy.BlockTypePrimary', 5)
      self.assertEqual(histogram, {})
      histogram = t.GetHistogram('DataReductionProxy.BypassTypePrimary', 5)
      self.assertEqual(histogram, {})

  # Ensure Chrome uses DataSaver in normal mode.
  def testCheckPageWithNormalMode(self):
    with TestDriver() as t:
      t.AddChromeArg('--enable-spdy-proxy-auth')
      t.LoadURL('http://check.googlezip.net/test.html')
      responses = t.GetHTTPResponses()
      self.assertNotEqual(0, len(responses))
      num_chrome_proxy_request_headers = 0
      for response in responses:
        self.assertHasProxyHeaders(response)
        if 'chrome-proxy' in response.request_headers:
          num_chrome_proxy_request_headers += 1
      t.SleepUntilHistogramHasEntry('PageLoad.Clients.DataReductionProxy.'
                                    'ParseTiming.NavigationToParseStart')
      self.assertGreater(num_chrome_proxy_request_headers, 0)

  # Ensure pageload metric pingback with DataSaver.
  @ChromeVersionBeforeM(79)
  def testPingback(self):
    with TestDriver() as t:
      t.AddChromeArg('--enable-spdy-proxy-auth')
      t.AddChromeArg('--enable-data-reduction-proxy-force-pingback')
      t.LoadURL('http://check.googlezip.net/test.html')
      t.LoadURL('http://check.googlezip.net/test.html')
      t.SleepUntilHistogramHasEntry("DataReductionProxy.Pingback.Succeeded")
      t.SleepUntilHistogramHasEntry("DataReductionProxy.Pingback.Attempted")
      # Verify one pingback attempt that was successful.
      attempted = t.GetBrowserHistogram('DataReductionProxy.Pingback.Attempted')
      self.assertEqual(1, attempted['count'])
      succeeded = t.GetBrowserHistogram('DataReductionProxy.Pingback.Succeeded')
      self.assertEqual(1, succeeded['count'])

  # Ensure pageload metric pingback with DataSaver has the variations header.
  @ChromeVersionEqualOrAfterM(62)
  @ChromeVersionBeforeM(79)
  def testPingbackHasVariations(self):
    with TestDriver() as t:
      t.AddChromeArg('--enable-spdy-proxy-auth')
      t.AddChromeArg('--enable-data-reduction-proxy-force-pingback')
      t.UseNetLog()
      # Force set the variations ID, so they are sent along with the pingback
      # request.
      t.AddChromeArg('--force-variation-ids=42')
      t.LoadURL('http://check.googlezip.net/test.html')
      t.LoadURL('http://check.googlezip.net/test.html')
      t.SleepUntilHistogramHasEntry("DataReductionProxy.Pingback.Succeeded")
      # Look for the request made to the data saver pingback server.
      data = t.StopAndGetNetLog()
      variation_header_count = 0
      for event in data["events"]:
        dumped_event = json.dumps(event)
        if ("datasaver.googleapis.com" in dumped_event and
            "recordPageloadMetrics" in dumped_event and
            "headers" in dumped_event and
            "accept-encoding" in dumped_event and
            "x-client-data" in dumped_event):
          variation_header_count += 1
      # Variation IDs are set. x-client-data should be present in the request
      # headers.
      self.assertLessEqual(1, variation_header_count)

  # Verify unique page IDs are sent in the Chrome-Proxy header.
  @ChromeVersionEqualOrAfterM(59)
  def testPageID(self):
    with TestDriver() as t:
      t.AddChromeArg('--enable-spdy-proxy-auth')
      page_identifiers = []
      page_loads = 5
      for _ in range(page_loads):
        t.LoadURL('http://check.googlezip.net/test.html')
        responses = t.GetHTTPResponses()
        self.assertEqual(2, len(responses))
        pid_in_page_count = 0
        page_id = ''
        for response in responses:
          # Skip responses with no recorded request headers.
          if not response.request_headers:
            continue
          self.assertHasProxyHeaders(response)
          self.assertEqual(200, response.status)
          chrome_proxy_header = response.request_headers['chrome-proxy']
          chrome_proxy_directives = chrome_proxy_header.split(',')
          for directive in chrome_proxy_directives:
            if 'pid=' in directive:
              pid_in_page_count += 1
              page_id = directive.split('=')[1]
              # Each page load must produce a new, never-seen page ID.
              self.assertNotEqual('', page_id)
              self.assertNotIn(page_id, page_identifiers)
              page_identifiers.append(page_id)
        self.assertEqual(1, pid_in_page_count)

  # Ensure that block causes resources to load from the origin directly.
  def testCheckBlockIsWorking(self):
    with TestDriver() as t:
      t.AddChromeArg('--enable-spdy-proxy-auth')
      t.LoadURL('http://check.googlezip.net/block')
      responses = t.GetHTTPResponses()
      self.assertNotEqual(0, len(responses))
      for response in responses:
        self.assertNotHasChromeProxyViaHeader(response)

  # Ensure image resources are compressed.
  def testCheckImageIsCompressed(self):
    with TestDriver() as t:
      t.AddChromeArg('--enable-spdy-proxy-auth')
      t.LoadURL('http://check.googlezip.net/static')
      # http://check.googlezip.net/static is a test page that has
      # image resources.
      responses = t.GetHTTPResponses()
      self.assertNotEqual(0, len(responses))
      for response in responses:
        self.assertHasProxyHeaders(response)
# Allow invoking this test file directly from the command line.
if __name__ == '__main__':
  IntegrationTest.RunAllTests()
This diff is collapsed.
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment