Commit 6f4ffc2a authored by Kyle Ju, committed by Commit Bot

Fix the issues with skipped tests and add support for --isolated-script-test-filter

1. Ignore skipped tests that are outside the test_path or not in the current shard;
2. Set the expected test status to SKIP if the actual status is SKIP;
3. Fix deselection of skipped tests: pytest's --deselect flag only works with a relative path and the pytest separator '::' between a test and its subtest;
4. Add support for --isolated-script-test-filter (see the example below).
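
A minimal sketch of how a filter string is expected to be translated into pytest test IDs (the test and subtest names are hypothetical; the actual logic lives in prepare_filtered_tests and get_relative_subtest_path below):

    # Hypothetical --isolated-script-test-filter value: test names separated
    # by '::', with '>>' attaching a subtest name to its test file.
    filter_string = 'webdriver/tests/get_title/user_prompts.py>>test_accept'
    for external_name in filter_string.split('::'):
        test_name, _, subtest = external_name.partition('>>')
        # pytest wants '<relative path>::<subtest>', resolved against --rootdir.
        print(test_name + ('::' + subtest if subtest else ''))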


Bug: 992569
Change-Id: Ifa73e4c6372a12f63e7543c85d006cc32c848c25
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1777070
Commit-Queue: Kyle Ju <kyleju@chromium.org>
Reviewed-by: Robert Ma <robertma@chromium.org>
Reviewed-by: John Chen <johnchen@chromium.org>
Cr-Commit-Position: refs/heads/master@{#707027}
parent 9a0aa1a7
@@ -73,15 +73,42 @@ def parse_webdriver_expectations(host, port):
         port, expectations_dict=expectations_dict)
     return expectations


-def preprocess_skipped_tests(test_results, expectations, path_finder):
+def prepare_filtered_tests(isolated_script_test_filter, finder, shard, port):
+    filter_list = isolated_script_test_filter.split('::')
+    filtered_tests = [get_relative_subtest_path(
+        test, finder, shard, port) for test in filter_list]
+    return filter(None, filtered_tests)
+
+
+def get_relative_subtest_path(external_test_path, finder, shard, port):
+    test_name, subtest_suffix = port.split_webdriver_test_name(
+        external_test_path)
+    abs_skipped_test_path = finder.path_from_web_tests(test_name)
+    if not shard.is_matched_test(abs_skipped_test_path):
+        return None
+
+    relative_path = os.path.relpath(abs_skipped_test_path)
+    relative_subtest_path = port.add_webdriver_subtest_pytest_suffix(
+        relative_path, subtest_suffix)
+    return relative_subtest_path
+
+
+def process_skip_list(skipped_tests, results, finder, port, test_path, shard):
     skip_list = []
-    skipped_tests = expectations.model().get_tests_with_result_type(
-        test_expectations.SKIP).copy()
+    abs_test_path = os.path.abspath(test_path)
+
     for skipped_test in skipped_tests:
-        test_results.append(WebDriverTestResult(
-            skipped_test, 'SKIP'))
-        skip_list.append(path_finder.strip_webdriver_tests_path(skipped_test))
+        test_name, subtest_suffix = port.split_webdriver_test_name(
+            skipped_test)
+        abs_path = finder.path_from_web_tests(test_name)
+        if not abs_path.startswith(abs_test_path):
+            continue
+        if not shard.is_matched_test(abs_path):
+            continue
+
+        pytest_subtest_path = port.add_webdriver_subtest_pytest_suffix(
+            test_name, subtest_suffix)
+        skip_list.append(pytest_subtest_path)
+        results.append(WebDriverTestResult(
+            skipped_test, 'SKIP'))
+
     return skip_list
@@ -172,7 +199,8 @@ def run_test(path, path_finder, port, skipped_tests=[]):
     skip_test_flag = ['--deselect=' +
                       skipped_test for skipped_test in skipped_tests]
-    pytest_args = [path] + skip_test_flag
+    pytest_args = [path] + skip_test_flag + \
+        ['--rootdir=' + path_finder.web_tests_dir()]
     pytest.main(pytest_args, plugins=[subtests])
     return subtests.result
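
With this change, run_test hands pytest an argument list roughly like the sketch below (the paths are illustrative, not taken from the change; --deselect and --rootdir are standard pytest options):

    pytest_args = [
        'webdriver/tests/get_title/user_prompts.py',
        '--deselect=webdriver/tests/get_title/user_prompts.py::test_accept',
        '--rootdir=/abs/path/to/web_tests',
    ]
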
@@ -198,6 +226,9 @@ if __name__ == '__main__':
     parser.add_argument(
         '--isolated-script-test-perf-output',
         help='JSON perf output file used by swarming, ignored')
+    parser.add_argument(
+        '--isolated-script-test-filter',
+        help='isolated script filter string with :: separators')
     parser.add_argument(
         '--test-path',
         required=True,
@@ -218,6 +249,7 @@ if __name__ == '__main__':
     test_shard = TestShard(total_shards, shard_index)
     test_results = []
+    test_path = options.test_path

     log_level = logging.DEBUG if options.verbose else logging.INFO
     configure_logging(logging_level=log_level, include_time=True)
@@ -236,8 +268,10 @@ if __name__ == '__main__':
     # WebDriverExpectations stores skipped and failed WebDriver tests.
     expectations = parse_webdriver_expectations(host, port)
-    skipped_tests = preprocess_skipped_tests(
-        test_results, expectations, path_finder)
+    skip_list = expectations.model().get_tests_with_result_type(
+        test_expectations.SKIP).copy()
+    skipped_tests = process_skip_list(
+        skip_list, test_results, path_finder, port, test_path, test_shard)

     options.chromedriver = util.GetAbsolutePathOfUserPath(options.chromedriver)
     if (not os.path.exists(options.chromedriver) and
@@ -261,14 +295,18 @@ if __name__ == '__main__':
         sys.exit(1)

     set_up_config(path_finder, chromedriver_server)
-    test_path = options.test_path

     start_time = time.time()
     sys.path.insert(0, WEBDRIVER_CLIENT_ABS_PATH)
     try:
-        if os.path.isfile(test_path):
-            test_results = run_test(test_path, path_finder, port, skipped_tests)
+        if options.isolated_script_test_filter:
+            filtered_tests = prepare_filtered_tests(
+                options.isolated_script_test_filter, path_finder, test_shard, port)
+            for filter_test in filtered_tests:
+                test_results += run_test(filter_test, path_finder, port)
+        elif os.path.isfile(test_path):
+            test_results += run_test(test_path, path_finder, port, skipped_tests)
         elif os.path.isdir(test_path):
             for root, dirnames, filenames in os.walk(test_path):
                 for filename in filenames:
@@ -276,7 +314,8 @@ if __name__ == '__main__':
                         continue

                     test_file = os.path.join(root, filename)
-                    if not test_shard.is_matched_test(test_file):
+                    if not test_shard.is_matched_test(
+                            os.path.abspath(test_file)):
                         continue
                     test_results += run_test(test_file, path_finder, port, skipped_tests)
         else:
......
@@ -182,6 +182,9 @@ class Port(object):
     # The following is used for concetenating WebDriver test names.
     WEBDRIVER_SUBTEST_SEPARATOR = '>>'

+    # The following is used for concetenating WebDriver test names in pytest format.
+    WEBDRIVER_SUBTEST_PYTEST_SEPARATOR = '::'
+
     # The following two constants must match. When adding a new WPT root, also
     # remember to add an alias rule to third_party/wpt/wpt.config.json.
     # WPT_DIRS maps WPT roots on the file system to URL prefixes on wptserve.
@@ -1814,6 +1817,9 @@ class Port(object):
             return test_name + Port.WEBDRIVER_SUBTEST_SEPARATOR + subtest_name
         return test_name

+    def add_webdriver_subtest_pytest_suffix(self, test_name, subtest_name):
+        return test_name + self.WEBDRIVER_SUBTEST_PYTEST_SEPARATOR + subtest_name
+

 class VirtualTestSuite(object):
......
@@ -983,14 +983,15 @@ class PortTest(LoggingTestCase):
         self.assertEqual(
             Port.add_webdriver_subtest_suffix("abd", None), "abd")

-    def test_add_webdriver_subtest_suffix(self):
+    def test_add_webdriver_subtest_pytest_suffix(self):
         port = self.make_port()
         wb_test_name = "abd"
         sub_test_name = "bar"
-        full_webdriver_name = port.add_webdriver_subtest_suffix(wb_test_name, sub_test_name)
+        full_webdriver_name = port.add_webdriver_subtest_pytest_suffix(
+            wb_test_name, sub_test_name)

-        self.assertEqual(full_webdriver_name, "abd>>bar")
+        self.assertEqual(full_webdriver_name, "abd::bar")


 class NaturalCompareTest(unittest.TestCase):
......