Commit 97d8cd80 authored by lizeb, committed by Commit bot

tools: WPR support for testing speculative_prefetch_predictor.

This CL adds support for WPR and network throttling (through WPR) for
testing the speculative_prefetch_predictor. The modifications are:
- Generate a WPR archive in generate_database.py
- Use the archive and set up throttling in prefetch_benchmark.

Also adds the ability to test several configurations of the predictor in
prefetch_benchmark, and makes it loop.

BUG=655980

Review-Url: https://codereview.chromium.org/2561353002
Cr-Commit-Position: refs/heads/master@{#439131}
parent acce62cb
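The end-to-end workflow after this change is two commands: generate the
predictor database while recording a WPR archive, then benchmark against the
replayed archive under an emulated network. A sketch of the invocations,
assuming a --device flag as in the existing scripts; the file names, the URL,
and the 'Regular3G' condition name are hypothetical:

  # 1. Build the predictor database and record a WPR archive of the test URL.
  generate_database.py --device=DEVICE_ID --urls_filename=urls.txt \
      --output_filename=predictor.db --test_url=https://example.com \
      --wpr_archive=archive.wpr

  # 2. Replay the archive with network emulation, looping over prefetch delays.
  prefetch_benchmark.py --device=DEVICE_ID --database=predictor.db \
      --url=https://example.com --prefetch_delays_ms=-1,0,1000 \
      --output_filename=results.csv --wpr_archive=archive.wpr \
      --network_condition=Regular3G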
@@ -41,7 +41,7 @@ _INVALID_VALUE = -1
 # Command line arguments for Chrome.
-_CHROME_ARGS = [
+CHROME_ARGS = [
     # Disable background network requests that may pollute WPR archive, pollute
     # HTTP cache generation, and introduce noise in loading performance.
     '--disable-background-networking',
@@ -133,11 +133,10 @@ def RunOnce(device, url, warmup, speculation_mode, delay_to_may_launch_url,
   return match.group(1) if match is not None else None


-Result = collections.namedtuple('Result', ['warmup', 'speculation_mode',
-                                           'delay_to_may_launch_url',
-                                           'delay_to_launch_url',
-                                           'commit', 'plt',
-                                           'first_contentful_paint'])
+RESULT_FIELDS = ('warmup', 'speculation_mode', 'delay_to_may_launch_url',
+                 'delay_to_launch_url', 'commit', 'plt',
+                 'first_contentful_paint')
+Result = collections.namedtuple('Result', RESULT_FIELDS)


 def ParseResult(result_line):
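Sharing RESULT_FIELDS between the namedtuple and the CSV output keeps the
producer and the consumers of the results file in sync. A minimal standalone
sketch of the round trip (field values hypothetical):

  import collections

  RESULT_FIELDS = ('warmup', 'speculation_mode', 'delay_to_may_launch_url',
                   'delay_to_launch_url', 'commit', 'plt',
                   'first_contentful_paint')
  Result = collections.namedtuple('Result', RESULT_FIELDS)

  # One header row naming the fields, then one CSV row per data point.
  data_point = Result(True, 'speculative_prefetch', 2000, 1000, 123, 456, 789)
  with open('results.csv', 'w') as f:
    f.write(','.join(RESULT_FIELDS) + '\n')
    f.write(','.join(str(x) for x in data_point) + '\n')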
@@ -181,7 +180,7 @@ def LoopOnDevice(device, configs, output_filename, wpr_archive_path=None,
   try:
     while should_stop is None or not should_stop.is_set():
       config = configs[random.randint(0, len(configs) - 1)]
-      chrome_args = _CHROME_ARGS + wpr_attributes.chrome_args
+      chrome_args = CHROME_ARGS + wpr_attributes.chrome_args
       if config['speculation_mode'] == 'no_state_prefetch':
         # NoStatePrefetch is enabled through an experiment.
         chrome_args.extend([
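Each iteration of LoopOnDevice draws one configuration at random, so long runs
interleave the configurations being compared. The configs themselves are
dicts; a hypothetical list, assuming the keys mirror RunOnce's parameters:

  configs = [
      {'warmup': True, 'speculation_mode': 'speculative_prefetch',
       'delay_to_may_launch_url': 2000, 'delay_to_launch_url': 1000},
      {'warmup': True, 'speculation_mode': 'no_state_prefetch',
       'delay_to_may_launch_url': 2000, 'delay_to_launch_url': 1000},
  ]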
@@ -218,7 +217,7 @@ def ProcessOutput(filename):
     A numpy structured array.
   """
   import numpy as np
-  data = np.genfromtxt(filename, delimiter=',')
+  data = np.genfromtxt(filename, delimiter=',', skip_header=1)
   result = np.array(np.zeros(len(data)),
                     dtype=[('warmup', bool), ('speculation_mode', np.int32),
                            ('delay_to_may_launch_url', np.int32),
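prefetch_benchmark.py now writes a header row naming RESULT_FIELDS (see its
main() further below), so skip_header=1 stops np.genfromtxt from parsing that
row as data; without it, the text header would come back as a row of NaNs. A
minimal sketch with a hypothetical file name:

  import numpy as np

  # The first line of results.csv is 'warmup,speculation_mode,...'; skip it.
  data = np.genfromtxt('results.csv', delimiter=',', skip_header=1)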
...
@@ -6,6 +6,7 @@
 """Loads a set of web pages several times on a device, and extracts the
 predictor database.
+Also generates a WPR archive for another page.
 """

 import argparse
@@ -24,13 +25,14 @@ sys.path.append(os.path.join(_SRC_PATH, 'build', 'android'))
 import devil_chromium

 sys.path.append(os.path.join(_SRC_PATH, 'tools', 'android', 'loading'))
+import device_setup
 from options import OPTIONS
 import page_track
 import prefetch_predictor_common


-_PAGE_LOAD_TIMEOUT = 20
+_PAGE_LOAD_TIMEOUT = 40


 def _CreateArgumentParser():
@@ -43,14 +45,16 @@ def _CreateArgumentParser():
                       '(one per line). URLs can be repeated.')
   parser.add_argument('--output_filename',
                       help='File to store the database in.')
+  parser.add_argument('--test_url', help='URL to record an archive of.')
+  parser.add_argument('--wpr_archive', help='WPR archive path.')
   parser.add_argument('--url_repeat',
                       help=('Number of times each URL in the input '
-                            'file is loaded.'),
-                      default=3)
+                            'file is loaded.'), default=3)
   return parser


-def _Go(chrome_controller, urls_filename, output_filename, repeats):
+def _GenerateDatabase(chrome_controller, urls_filename, output_filename,
+                      repeats):
   urls = []
   with open(urls_filename) as f:
     urls = [line.strip() for line in f.readlines()]
@@ -71,6 +75,16 @@ def _Go(chrome_controller, urls_filename, output_filename, repeats):
                                      output_filename)


+def _GenerateWprArchive(device, url, archive_path):
+  with device_setup.RemoteWprHost(device, archive_path, record=True) as wpr:
+    chrome_controller = prefetch_predictor_common.Setup(
+        device, wpr.chrome_args)
+    with chrome_controller.Open() as connection:
+      page_track.PageTrack(connection)  # Registers the listeners.
+      connection.MonitorUrl(url, timeout_seconds=_PAGE_LOAD_TIMEOUT,
+                            stop_delay_multiplier=1.5)
+
+
 def main():
   devil_chromium.Initialize()
   logging.basicConfig(level=logging.INFO)
@@ -86,8 +100,9 @@ def main():
   chrome_controller = prefetch_predictor_common.Setup(
       device, ['--speculative-resource-prefetching=learning'])
-  _Go(chrome_controller, args.urls_filename, args.output_filename,
-      int(args.url_repeat))
+  _GenerateDatabase(chrome_controller, args.urls_filename,
+                    args.output_filename, int(args.url_repeat))
+  _GenerateWprArchive(device, args.test_url, args.wpr_archive)


 if __name__ == '__main__':
...
@@ -9,6 +9,7 @@
 import argparse
 import logging
 import os
+import random
 import sys
 import time
@@ -21,6 +22,7 @@ import customtabs_benchmark
 import device_setup

 sys.path.append(os.path.join(_SRC_PATH, 'tools', 'android', 'loading'))
+import controller
 from options import OPTIONS

 sys.path.append(os.path.join(_SRC_PATH, 'build', 'android'))
@@ -46,10 +48,16 @@ def _CreateArgumentParser():
                       help=('File containing the predictor database, as '
                             'obtained from generate_database.py.'))
   parser.add_argument('--url', help='URL to load.')
-  parser.add_argument('--prefetch_delay_ms',
-                      help='Prefetch delay in ms. -1 to disable prefetch.')
+  parser.add_argument('--prefetch_delays_ms',
+                      help='List of prefetch delays in ms. -1 to disable '
+                      'prefetch. Runs will randomly select one delay in the '
+                      'list.')
   parser.add_argument('--output_filename',
                       help='CSV file to append the result to.')
+  parser.add_argument('--network_condition',
+                      help='Network condition for emulation.')
+  parser.add_argument('--wpr_archive', help='WPR archive path.')
+  parser.add_argument('--once', help='Only run once.', action='store_true')
   return parser
@@ -88,20 +96,36 @@ def _Setup(device, database_filename):
   device.RunShellCommand(command, as_root=True)


-def _Go(device, url, prefetch_delay_ms):
+def _RunOnce(device, database_filename, url, prefetch_delay_ms,
+             output_filename, wpr_archive, network_condition):
+  _Setup(device, database_filename)
+
   disable_prefetch = prefetch_delay_ms == -1
   # Startup tracing to ease debugging.
   chrome_args = (customtabs_benchmark.CHROME_ARGS
                  + ['--trace-startup', '--trace-startup-duration=20'])
   if not disable_prefetch:
     chrome_args.append(_EXTERNAL_PREFETCH_FLAG)
-  prefetch_mode = 'disabled' if disable_prefetch else 'speculative_prefetch'
-  result = customtabs_benchmark.RunOnce(
-      device, url, warmup=True, speculation_mode=prefetch_mode,
-      delay_to_may_launch_url=2000,
-      delay_to_launch_url=prefetch_delay_ms, cold=False,
-      chrome_args=chrome_args, reset_chrome_state=False)
-  return customtabs_benchmark.ParseResult(result)
+
+  chrome_controller = controller.RemoteChromeController(device)
+  device.ForceStop(OPTIONS.ChromePackage().package)
+  chrome_controller.AddChromeArguments(chrome_args)
+
+  with device_setup.RemoteWprHost(
+      device, wpr_archive, record=False,
+      network_condition_name=network_condition) as wpr:
+    logging.info('WPR arguments: ' + ' '.join(wpr.chrome_args))
+    chrome_args += wpr.chrome_args
+    prefetch_mode = 'disabled' if disable_prefetch else 'speculative_prefetch'
+    result = customtabs_benchmark.RunOnce(
+        device, url, warmup=True, speculation_mode=prefetch_mode,
+        delay_to_may_launch_url=2000,
+        delay_to_launch_url=prefetch_delay_ms, cold=False,
+        chrome_args=chrome_args, reset_chrome_state=False)
+    data_point = customtabs_benchmark.ParseResult(result)
+    with open(output_filename, 'a') as f:
+      f.write(','.join(str(x) for x in data_point) + '\n')


 def main():
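main() below drives this in a loop; for a one-off measurement, an equivalent
direct call (file names and condition name hypothetical, device obtained from
prefetch_predictor_common.FindDevice) would be:

  _RunOnce(device, database_filename='predictor.db',
           url='https://example.com', prefetch_delay_ms=1000,
           output_filename='results.csv', wpr_archive='archive.wpr',
           network_condition='Regular3G')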
@@ -111,16 +135,27 @@ def main():
   parser = _CreateArgumentParser()
   args = parser.parse_args()
   OPTIONS.SetParsedArgs(args)
+
+  if os.path.exists(args.output_filename):
+    logging.error('Output file %s already exists.' % args.output_filename)
+    sys.exit(1)
+
   device = prefetch_predictor_common.FindDevice(args.device)
   if device is None:
     logging.error('Could not find device: %s.', args.device)
     sys.exit(1)
-  _Setup(device, args.database)
-  result = _Go(device, args.url, int(args.prefetch_delay_ms))
-  print result
-  with open(args.output_filename, 'a') as f:
-    f.write(','.join(str(x) for x in result) + '\n')
+
+  delays = [int(x) for x in args.prefetch_delays_ms.split(',')]
+
+  with open(args.output_filename, 'w') as f:
+    f.write(','.join(customtabs_benchmark.RESULT_FIELDS) + '\n')
+
+  while True:
+    delay = delays[random.randint(0, len(delays) - 1)]
+    _RunOnce(device, args.database, args.url, delay, args.output_filename,
+             args.wpr_archive, args.network_condition)
+    if args.once:
+      return


 if __name__ == '__main__':
...