Commit b2dac9b8 authored by Kenneth Russell, committed by Commit Bot

Support FindIt's command line arguments in Chrome GPU tests.

Add support for the --isolated-script-test-repeat,
--isolated-script-test-launcher-retry-limit and
--isolated-script-test-also-run-disabled-tests flags, and unit
tests for these flags' behavior.

Bug: 894258
Cq-Include-Trybots: luci.chromium.try:linux_optional_gpu_tests_rel;luci.chromium.try:mac_optional_gpu_tests_rel;luci.chromium.try:win_optional_gpu_tests_rel
Change-Id: I82cf1638779719b55b8b016c323100de572aae46
Reviewed-on: https://chromium-review.googlesource.com/c/1306662
Commit-Queue: Kenneth Russell <kbr@chromium.org>
Reviewed-by: Kai Ninomiya <kainino@chromium.org>
Cr-Commit-Position: refs/heads/master@{#604763}
parent fb31703c
...@@ -61,6 +61,7 @@ class CloudStorageIntegrationTestBase(gpu_integration_test.GpuIntegrationTest): ...@@ -61,6 +61,7 @@ class CloudStorageIntegrationTestBase(gpu_integration_test.GpuIntegrationTest):
@classmethod @classmethod
def AddCommandlineArgs(cls, parser): def AddCommandlineArgs(cls, parser):
super(CloudStorageIntegrationTestBase, cls).AddCommandlineArgs(parser)
parser.add_option( parser.add_option(
'--build-revision', '--build-revision',
help='Chrome revision being tested.', help='Chrome revision being tested.',
......
...@@ -60,6 +60,7 @@ class ContextLostIntegrationTest(gpu_integration_test.GpuIntegrationTest): ...@@ -60,6 +60,7 @@ class ContextLostIntegrationTest(gpu_integration_test.GpuIntegrationTest):
@classmethod @classmethod
def AddCommandlineArgs(cls, parser): def AddCommandlineArgs(cls, parser):
super(ContextLostIntegrationTest, cls).AddCommandlineArgs(parser)
parser.add_option('--is-asan', parser.add_option('--is-asan',
help='Indicates whether currently running an ASAN build', help='Indicates whether currently running an ASAN build',
action='store_true') action='store_true')
......
...@@ -15,6 +15,7 @@ class GpuIntegrationTest( ...@@ -15,6 +15,7 @@ class GpuIntegrationTest(
serially_executed_browser_test_case.SeriallyExecutedBrowserTestCase): serially_executed_browser_test_case.SeriallyExecutedBrowserTestCase):
_cached_expectations = None _cached_expectations = None
_also_run_disabled_tests = False
# Several of the tests in this directory need to be able to relaunch # Several of the tests in this directory need to be able to relaunch
# the browser on demand with a new set of command line arguments # the browser on demand with a new set of command line arguments
...@@ -37,6 +38,18 @@ class GpuIntegrationTest( ...@@ -37,6 +38,18 @@ class GpuIntegrationTest(
super(GpuIntegrationTest, cls).SetUpProcess() super(GpuIntegrationTest, cls).SetUpProcess()
cls._original_finder_options = cls._finder_options.Copy() cls._original_finder_options = cls._finder_options.Copy()
@classmethod
def AddCommandlineArgs(cls, parser):
  """Registers the harness-level command line options on |parser|.

  Subclasses overriding this method must invoke the superclass's
  version so these base options remain available.
  """
  parser.add_option(
      '--also-run-disabled-tests',
      dest='also_run_disabled_tests',
      default=False,
      action='store_true',
      help='Run disabled tests, ignoring Skip and Fail expectations')
@classmethod @classmethod
def CustomizeBrowserArgs(cls, browser_args): def CustomizeBrowserArgs(cls, browser_args):
"""Customizes the browser's command line arguments. """Customizes the browser's command line arguments.
...@@ -80,6 +93,7 @@ class GpuIntegrationTest( ...@@ -80,6 +93,7 @@ class GpuIntegrationTest(
@classmethod @classmethod
def GenerateTestCases__RunGpuTest(cls, options): def GenerateTestCases__RunGpuTest(cls, options):
cls._also_run_disabled_tests = options.also_run_disabled_tests
for test_name, url, args in cls.GenerateGpuTests(options): for test_name, url, args in cls.GenerateGpuTests(options):
yield test_name, (url, test_name, args) yield test_name, (url, test_name, args)
...@@ -124,6 +138,9 @@ class GpuIntegrationTest( ...@@ -124,6 +138,9 @@ class GpuIntegrationTest(
expectations = self.__class__.GetExpectations() expectations = self.__class__.GetExpectations()
expectation = expectations.GetExpectationForTest( expectation = expectations.GetExpectationForTest(
self.browser, url, test_name) self.browser, url, test_name)
if self.__class__._also_run_disabled_tests:
# Ignore test expectations if the user has requested it.
expectation = 'pass'
if expectation == 'skip': if expectation == 'skip':
# skipTest in Python's unittest harness raises an exception, so # skipTest in Python's unittest harness raises an exception, so
# aborts the control flow here. # aborts the control flow here.
......
...@@ -27,7 +27,8 @@ class GpuIntegrationTestUnittest(unittest.TestCase): ...@@ -27,7 +27,8 @@ class GpuIntegrationTestUnittest(unittest.TestCase):
'unittest_data.integration_tests.SimpleTest.unexpected_failure'], 'unittest_data.integration_tests.SimpleTest.unexpected_failure'],
['unittest_data.integration_tests.SimpleTest.expected_flaky', ['unittest_data.integration_tests.SimpleTest.expected_flaky',
'unittest_data.integration_tests.SimpleTest.expected_failure'], 'unittest_data.integration_tests.SimpleTest.expected_failure'],
['unittest_data.integration_tests.SimpleTest.expected_skip']) ['unittest_data.integration_tests.SimpleTest.expected_skip'],
[])
# It might be nice to be more precise about the order of operations # It might be nice to be more precise about the order of operations
# with these browser restarts, but this is at least a start. # with these browser restarts, but this is at least a start.
self.assertEquals(self._test_state['num_browser_starts'], 6) self.assertEquals(self._test_state['num_browser_starts'], 6)
...@@ -36,7 +37,7 @@ class GpuIntegrationTestUnittest(unittest.TestCase): ...@@ -36,7 +37,7 @@ class GpuIntegrationTestUnittest(unittest.TestCase):
self._RunIntegrationTest( self._RunIntegrationTest(
'browser_start_failure_integration_unittest', [], 'browser_start_failure_integration_unittest', [],
['unittest_data.integration_tests.BrowserStartFailureTest.restart'], ['unittest_data.integration_tests.BrowserStartFailureTest.restart'],
[]) [], [])
self.assertEquals(self._test_state['num_browser_crashes'], 2) self.assertEquals(self._test_state['num_browser_crashes'], 2)
self.assertEquals(self._test_state['num_browser_starts'], 3) self.assertEquals(self._test_state['num_browser_starts'], 3)
...@@ -44,11 +45,40 @@ class GpuIntegrationTestUnittest(unittest.TestCase): ...@@ -44,11 +45,40 @@ class GpuIntegrationTestUnittest(unittest.TestCase):
self._RunIntegrationTest( self._RunIntegrationTest(
'browser_crash_after_start_integration_unittest', [], 'browser_crash_after_start_integration_unittest', [],
[('unittest_data.integration_tests.BrowserCrashAfterStartTest.restart')], [('unittest_data.integration_tests.BrowserCrashAfterStartTest.restart')],
[]) [], [])
self.assertEquals(self._test_state['num_browser_crashes'], 2) self.assertEquals(self._test_state['num_browser_crashes'], 2)
self.assertEquals(self._test_state['num_browser_starts'], 3) self.assertEquals(self._test_state['num_browser_starts'], 3)
def _RunIntegrationTest(self, test_name, failures, successes, skips): def testRetryLimit(self):
self._RunIntegrationTest(
'test_retry_limit',
['unittest_data.integration_tests.TestRetryLimit.unexpected_failure'],
[],
[],
['--retry-limit=2'])
# The number of attempted runs is 1 + the retry limit.
self.assertEquals(self._test_state['num_test_runs'], 3)
def testRepeat(self):
  # With --repeat=3, the single always-passing test should be executed
  # exactly three times (see TestRepeat's num_test_runs counter).
  self._RunIntegrationTest(
      'test_repeat',
      [],
      ['unittest_data.integration_tests.TestRepeat.success'],
      [],
      ['--repeat=3'])
  self.assertEquals(self._test_state['num_test_runs'], 3)
def testAlsoRunDisabledTests(self):
  # TestAlsoRunDisabledTests marks its only test as Skip in its
  # expectations; with --also-run-disabled-tests it must run (once)
  # and be reported as a success rather than a skip.
  self._RunIntegrationTest(
      'test_also_run_disabled_tests',
      [],
      ['unittest_data.integration_tests.TestAlsoRunDisabledTests.success'],
      [],
      ['--also-run-disabled-tests'])
  self.assertEquals(self._test_state['num_test_runs'], 1)
def _RunIntegrationTest(self, test_name, failures, successes, skips,
additional_args):
config = chromium_config.ChromiumConfig( config = chromium_config.ChromiumConfig(
top_level_dir=path_util.GetGpuTestDir(), top_level_dir=path_util.GetGpuTestDir(),
benchmark_dirs=[ benchmark_dirs=[
...@@ -61,7 +91,7 @@ class GpuIntegrationTestUnittest(unittest.TestCase): ...@@ -61,7 +91,7 @@ class GpuIntegrationTestUnittest(unittest.TestCase):
config, config,
[test_name, [test_name,
'--write-full-results-to=%s' % test_results_path, '--write-full-results-to=%s' % test_results_path,
'--test-state-json-path=%s' % test_state_path]) '--test-state-json-path=%s' % test_state_path] + additional_args)
with open(test_results_path) as f: with open(test_results_path) as f:
test_result = json.load(f) test_result = json.load(f)
with open(test_state_path) as f: with open(test_state_path) as f:
...@@ -103,4 +133,3 @@ class GpuIntegrationTestUnittest(unittest.TestCase): ...@@ -103,4 +133,3 @@ class GpuIntegrationTestUnittest(unittest.TestCase):
('%s%s%s' % (full_test_name, delimiter, k), ('%s%s%s' % (full_test_name, delimiter, k),
test_dict[k])) test_dict[k]))
return successes, failures, skips return successes, failures, skips
...@@ -19,6 +19,7 @@ class InfoCollectionTest(gpu_integration_test.GpuIntegrationTest): ...@@ -19,6 +19,7 @@ class InfoCollectionTest(gpu_integration_test.GpuIntegrationTest):
@classmethod @classmethod
def AddCommandlineArgs(cls, parser): def AddCommandlineArgs(cls, parser):
super(InfoCollectionTest, cls).AddCommandlineArgs(parser)
parser.add_option('--expected-device-id', parser.add_option('--expected-device-id',
help='The expected device id') help='The expected device id')
parser.add_option('--expected-vendor-id', parser.add_option('--expected-vendor-id',
......
...@@ -119,6 +119,7 @@ class PowerMeasurementIntegrationTest(gpu_integration_test.GpuIntegrationTest): ...@@ -119,6 +119,7 @@ class PowerMeasurementIntegrationTest(gpu_integration_test.GpuIntegrationTest):
@classmethod @classmethod
def AddCommandlineArgs(cls, parser): def AddCommandlineArgs(cls, parser):
super(PowerMeasurementIntegrationTest, cls).AddCommandlineArgs(parser)
parser.add_option("--duration", default=60, type="int", parser.add_option("--duration", default=60, type="int",
help="specify how many seconds Intel Power Gadget " help="specify how many seconds Intel Power Gadget "
"measures. By default, 60 seconds is selected.") "measures. By default, 60 seconds is selected.")
......
...@@ -44,6 +44,7 @@ class ScreenshotSyncIntegrationTest(gpu_integration_test.GpuIntegrationTest): ...@@ -44,6 +44,7 @@ class ScreenshotSyncIntegrationTest(gpu_integration_test.GpuIntegrationTest):
@classmethod @classmethod
def AddCommandlineArgs(cls, parser): def AddCommandlineArgs(cls, parser):
super(ScreenshotSyncIntegrationTest, cls).AddCommandlineArgs(parser)
parser.add_option( parser.add_option(
'--dont-restore-color-profile-after-test', '--dont-restore-color-profile-after-test',
dest='dont_restore_color_profile_after_test', dest='dont_restore_color_profile_after_test',
......
...@@ -79,6 +79,7 @@ class WebGLConformanceIntegrationTest(gpu_integration_test.GpuIntegrationTest): ...@@ -79,6 +79,7 @@ class WebGLConformanceIntegrationTest(gpu_integration_test.GpuIntegrationTest):
@classmethod @classmethod
def AddCommandlineArgs(cls, parser): def AddCommandlineArgs(cls, parser):
super(WebGLConformanceIntegrationTest, cls).AddCommandlineArgs(parser)
parser.add_option('--webgl-conformance-version', parser.add_option('--webgl-conformance-version',
help='Version of the WebGL conformance tests to run.', help='Version of the WebGL conformance tests to run.',
default='1.0.4') default='1.0.4')
......
...@@ -23,8 +23,25 @@ from gpu_tests import gpu_test_expectations ...@@ -23,8 +23,25 @@ from gpu_tests import gpu_test_expectations
class _BaseSampleIntegrationTest(gpu_integration_test.GpuIntegrationTest): class _BaseSampleIntegrationTest(gpu_integration_test.GpuIntegrationTest):
_test_state = {} _test_state = {}
@classmethod
def SetUpProcess(cls):
  # Builds a fake browser environment for the sample integration tests
  # so the unittest harness never launches a real browser. (Hoisted to
  # this base class; previously duplicated in SimpleTest.)
  finder_options = fakes.CreateBrowserFinderOptions()
  finder_options.browser_options.platform = fakes.FakeLinuxPlatform()
  finder_options.output_formats = ['none']
  finder_options.suppress_gtest_report = True
  finder_options.output_dir = None
  finder_options.upload_bucket = 'public'
  finder_options.upload_results = False
  cls._finder_options = finder_options
  cls.platform = None
  cls.browser = None
  # Start the (fake) browser once per process, as the real harness does.
  cls.SetBrowserOptions(cls._finder_options)
  cls.StartBrowser()
@classmethod @classmethod
def AddCommandlineArgs(cls, parser): def AddCommandlineArgs(cls, parser):
super(_BaseSampleIntegrationTest, cls).AddCommandlineArgs(parser)
parser.add_option('--test-state-json-path', parser.add_option('--test-state-json-path',
help=('Where to dump the test state json (this is used by ' help=('Where to dump the test state json (this is used by '
'gpu_integration_test_unittest)')) 'gpu_integration_test_unittest)'))
...@@ -44,29 +61,10 @@ class SimpleTest(_BaseSampleIntegrationTest): ...@@ -44,29 +61,10 @@ class SimpleTest(_BaseSampleIntegrationTest):
'num_browser_starts': 0 'num_browser_starts': 0
} }
@classmethod @classmethod
def Name(cls): def Name(cls):
return 'simple_integration_unittest' return 'simple_integration_unittest'
def setUp(self):
super(SimpleTest, self).setUp()
@classmethod
def SetUpProcess(cls):
finder_options = fakes.CreateBrowserFinderOptions()
finder_options.browser_options.platform = fakes.FakeLinuxPlatform()
finder_options.output_formats = ['none']
finder_options.suppress_gtest_report = True
finder_options.output_dir = None
finder_options.upload_bucket = 'public'
finder_options.upload_results = False
cls._finder_options = finder_options
cls.platform = None
cls.browser = None
cls.SetBrowserOptions(cls._finder_options)
cls.StartBrowser()
@classmethod @classmethod
def GenerateGpuTests(cls, options): def GenerateGpuTests(cls, options):
yield ('expected_failure', 'failure.html', ()) yield ('expected_failure', 'failure.html', ())
...@@ -101,7 +99,6 @@ class SimpleTest(_BaseSampleIntegrationTest): ...@@ -101,7 +99,6 @@ class SimpleTest(_BaseSampleIntegrationTest):
class BrowserStartFailureTest(_BaseSampleIntegrationTest): class BrowserStartFailureTest(_BaseSampleIntegrationTest):
_test_state = { _test_state = {
'num_browser_crashes': 0, 'num_browser_crashes': 0,
'num_browser_starts': 0 'num_browser_starts': 0
...@@ -152,7 +149,6 @@ class BrowserStartFailureTest(_BaseSampleIntegrationTest): ...@@ -152,7 +149,6 @@ class BrowserStartFailureTest(_BaseSampleIntegrationTest):
class BrowserCrashAfterStartTest(_BaseSampleIntegrationTest): class BrowserCrashAfterStartTest(_BaseSampleIntegrationTest):
_test_state = { _test_state = {
'num_browser_crashes': 0, 'num_browser_crashes': 0,
'num_browser_starts': 0, 'num_browser_starts': 0,
...@@ -208,6 +204,81 @@ class BrowserCrashAfterStartTest(_BaseSampleIntegrationTest): ...@@ -208,6 +204,81 @@ class BrowserCrashAfterStartTest(_BaseSampleIntegrationTest):
pass pass
class TestRetryLimit(_BaseSampleIntegrationTest):
  # Sample suite used by GpuIntegrationTestUnittest.testRetryLimit:
  # a single unexpectedly-failing test, run under --retry-limit=2.
  # Counts every attempt (initial run + retries) across the process.
  _test_state = {
      'num_test_runs': 0,
  }

  @classmethod
  def Name(cls):
    return 'test_retry_limit'

  @classmethod
  def GenerateGpuTests(cls, options):
    # One test that always fails, so the harness exercises its retry path.
    yield ('unexpected_failure', 'failure.html', ())

  @classmethod
  def _CreateExpectations(cls):
    # Empty expectations: the failure is unexpected, hence retried.
    expectations = gpu_test_expectations.GpuTestExpectations()
    return expectations

  def RunActualGpuTest(self, file_path, *args):
    self._test_state['num_test_runs'] += 1
    if file_path == 'failure.html':
      self.fail('Expected failure')
    else:
      raise Exception('Unexpected test name ' + file_path)
class TestRepeat(_BaseSampleIntegrationTest):
  # Sample suite used by GpuIntegrationTestUnittest.testRepeat:
  # a single passing test, run under --repeat=3. Counts every run.
  _test_state = {
      'num_test_runs': 0,
  }

  @classmethod
  def Name(cls):
    return 'test_repeat'

  @classmethod
  def GenerateGpuTests(cls, options):
    yield ('success', 'success.html', ())

  @classmethod
  def _CreateExpectations(cls):
    # No expectations needed; the test simply passes each iteration.
    expectations = gpu_test_expectations.GpuTestExpectations()
    return expectations

  def RunActualGpuTest(self, file_path, *args):
    self._test_state['num_test_runs'] += 1
    if file_path != 'success.html':
      raise Exception('Unexpected test name ' + file_path)
class TestAlsoRunDisabledTests(_BaseSampleIntegrationTest):
  # Sample suite used by GpuIntegrationTestUnittest.testAlsoRunDisabledTests:
  # its only test carries a Skip expectation, so it runs only when the
  # harness is invoked with --also-run-disabled-tests.
  _test_state = {
      'num_test_runs': 0,
  }

  @classmethod
  def Name(cls):
    return 'test_also_run_disabled_tests'

  @classmethod
  def GenerateGpuTests(cls, options):
    yield ('success', 'success.html', ())

  @classmethod
  def _CreateExpectations(cls):
    # Mark the test as skipped; --also-run-disabled-tests overrides this.
    expectations = gpu_test_expectations.GpuTestExpectations()
    expectations.Skip('success')
    return expectations

  def RunActualGpuTest(self, file_path, *args):
    self._test_state['num_test_runs'] += 1
    if file_path != 'success.html':
      raise Exception('Unexpected test name ' + file_path)
def load_tests(loader, tests, pattern): def load_tests(loader, tests, pattern):
del loader, tests, pattern # Unused. del loader, tests, pattern # Unused.
return gpu_integration_test.LoadAllTestsInModule(sys.modules[__name__]) return gpu_integration_test.LoadAllTestsInModule(sys.modules[__name__])
...@@ -41,6 +41,9 @@ class GpuIntegrationTestAdapater(common.BaseIsolatedScriptArgsAdapter): ...@@ -41,6 +41,9 @@ class GpuIntegrationTestAdapater(common.BaseIsolatedScriptArgsAdapter):
def generate_test_output_args(self, output): def generate_test_output_args(self, output):
return ['--write-full-results-to', output] return ['--write-full-results-to', output]
def generate_test_also_run_disabled_tests_args(self):
  """Maps FindIt's also-run-disabled-tests request onto the GPU harness flag."""
  harness_flag = '--also-run-disabled-tests'
  return [harness_flag]
def generate_test_filter_args(self, test_filter_str): def generate_test_filter_args(self, test_filter_str):
filter_list = common.extract_filter_list(test_filter_str) filter_list = common.extract_filter_list(test_filter_str)
# isolated_script_test_filter comes in like: # isolated_script_test_filter comes in like:
...@@ -56,6 +59,12 @@ class GpuIntegrationTestAdapater(common.BaseIsolatedScriptArgsAdapter): ...@@ -56,6 +59,12 @@ class GpuIntegrationTestAdapater(common.BaseIsolatedScriptArgsAdapter):
return ['--total-shards=%d' % total_shards, return ['--total-shards=%d' % total_shards,
'--shard-index=%d' % shard_index] '--shard-index=%d' % shard_index]
def generate_test_launcher_retry_limit_args(self, retry_limit):
  """Translates the isolated-script retry limit into the harness's flag."""
  retry_flag = '--retry-limit=%d' % retry_limit
  return [retry_flag]
def generate_test_repeat_args(self, repeat_count):
  """Translates the isolated-script repeat count into the harness's flag."""
  repeat_flag = '--repeat=%d' % repeat_count
  return [repeat_flag]
def main(): def main():
adapter = GpuIntegrationTestAdapater() adapter = GpuIntegrationTestAdapater()
return adapter.run_test() return adapter.run_test()
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment