Commit a238a6bf authored by Dirk Pranke, committed by Commit Bot

Add a mechanism to skip the built in expectations in run_web_tests.

We are adding a variant of run_web_tests that the GPU team will
use to run a subset of the WPT tests that will only ever work
on bots with actual GPU cards. For this to work, we will always
skip (WontFix) those tests in the main run_web_tests runs and
run them separately.

To do that, we need a way to bypass the default expectations
(i.e., ignore the WontFix lines that will be in NeverFixTests).

This CL adds an --ignore-default-expectations flag to run_web_tests
to do so.
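
For illustration only (the test directory and expectations path below are
hypothetical, and the exact script location may differ), a GPU-specific run
could then combine the new flag with --additional-expectations, roughly:

  third_party/blink/tools/run_web_tests.py \
      --ignore-default-expectations \
      --additional-expectations=/path/to/gpu/expectations \
      external/wpt/webgpu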

R=rmhasan@google.com, kainino@chromium.org
BUG=984815

Change-Id: Ia855f70c0f8cdf3b272f700e7e08349b8c5b1694
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1710888
Reviewed-by: Rakib Hasan <rmhasan@google.com>
Reviewed-by: Kai Ninomiya <kainino@chromium.org>
Commit-Queue: Kai Ninomiya <kainino@chromium.org>
Cr-Commit-Position: refs/heads/master@{#681468}
parent 74d2e5bd
@@ -1059,24 +1059,25 @@ class TestExpectations(object):
         if not expectations_dict:
             expectations_dict = port.expectations_dict()
-        # Always parse the generic expectations (the generic file is required
-        # to be the first one in the expectations_dict, which must be an OrderedDict).
-        generic_path, generic_exps = expectations_dict.items()[0]
-        expectations = self._parser.parse(generic_path, generic_exps)
-        self._add_expectations(expectations, self._model)
-        self._expectations += expectations
-        # Now add the overrides if so requested.
-        if include_overrides:
-            for path, contents in expectations_dict.items()[1:]:
-                expectations = self._parser.parse(path, contents)
-                model = TestExpectationsModel(self._shorten_filename)
-                self._add_expectations(expectations, model)
-                self._expectations += expectations
-                flag_specific_match = re.match('.*' + port.FLAG_EXPECTATIONS_PREFIX + '(.*)', path)
-                if flag_specific_match is not None:
-                    self._model.append_flag_name(flag_specific_match.group(1))
-                self._model.merge_model(model, flag_specific_match is not None)
+        if expectations_dict:
+            # Always parse the generic expectations (the generic file is required
+            # to be the first one in the expectations_dict, which must be an OrderedDict).
+            generic_path, generic_exps = expectations_dict.items()[0]
+            expectations = self._parser.parse(generic_path, generic_exps)
+            self._add_expectations(expectations, self._model)
+            self._expectations += expectations
+            # Now add the overrides if so requested.
+            if include_overrides:
+                for path, contents in expectations_dict.items()[1:]:
+                    expectations = self._parser.parse(path, contents)
+                    model = TestExpectationsModel(self._shorten_filename)
+                    self._add_expectations(expectations, model)
+                    self._expectations += expectations
+                    flag_specific_match = re.match('.*' + port.FLAG_EXPECTATIONS_PREFIX + '(.*)', path)
+                    if flag_specific_match is not None:
+                        self._model.append_flag_name(flag_specific_match.group(1))
+                    self._model.merge_model(model, flag_specific_match is not None)
         self.add_extra_skipped_tests(set(port.get_option('ignore_tests', [])))
         self.add_expectations_from_bot()
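
As a rough, standalone sketch (not the real TestExpectations class; file names
are illustrative) of the ordering contract the guarded block above preserves:
the generic file must be the first entry of the OrderedDict and is always
parsed, every later entry is layered on top as an override, and an empty dict
(possible once --ignore-default-expectations suppresses the built-in files) is
simply a no-op:

from collections import OrderedDict

def parse_in_order(expectations_dict, include_overrides=True):
    """Return (path, contents) pairs in the order they take effect."""
    parsed = []
    if expectations_dict:  # may be empty when the defaults are ignored
        items = list(expectations_dict.items())
        parsed.append(items[0])        # the generic file is always parsed first
        if include_overrides:
            parsed.extend(items[1:])   # overrides apply in file order
    return parsed

print(parse_in_order(OrderedDict([('TestExpectations', '# generic'),
                                  ('NeverFixTests', '# WontFix overrides')])))
print(parse_in_order(OrderedDict()))   # [] -- nothing to parse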
@@ -1318,9 +1318,10 @@ class Port(object):
         # updated to know about the ordered dict.
         expectations = collections.OrderedDict()
-        for path in self.expectations_files():
-            if self._filesystem.exists(path):
-                expectations[path] = self._filesystem.read_text_file(path)
+        if not self.get_option('ignore_default_expectations', False):
+            for path in self.expectations_files():
+                if self._filesystem.exists(path):
+                    expectations[path] = self._filesystem.read_text_file(path)
         for path in self.get_option('additional_expectations', []):
             expanded_path = self._filesystem.expanduser(path)
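
A minimal sketch of the effect of the new guard (hypothetical helper, not the
real Port class; a plain dict stands in for the filesystem object): when
ignore_default_expectations is set, only --additional-expectations files end
up in the returned dict.

import collections

def build_expectations_dict(default_files, additional_files, file_contents, options):
    expectations = collections.OrderedDict()
    if not options.get('ignore_default_expectations', False):
        # The default files are skipped entirely when the option is set.
        for path in default_files:
            if path in file_contents:
                expectations[path] = file_contents[path]
    # Files passed via --additional-expectations always apply.
    for path in additional_files:
        if path in file_contents:
            expectations[path] = file_contents[path]
    return expectations

files = {'TestExpectations': '# defaults', 'NeverFixTests': '# wontfix', '/tmp/gpu_exps': '# gpu'}
print(build_expectations_dict(['TestExpectations', 'NeverFixTests'], ['/tmp/gpu_exps'],
                              files, {'ignore_default_expectations': True}))
# OrderedDict([('/tmp/gpu_exps', '# gpu')])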
@@ -158,6 +158,10 @@ def parse_args(args):
             default=[],
             help=('Path to a test_expectations file that will override previous '
                   'expectations. Specify multiple times for multiple sets of overrides.')),
+        optparse.make_option(
+            '--ignore-default-expectations',
+            action='store_true',
+            help=('Do not use the default set of TestExpectations files.')),
         optparse.make_option(
             '--additional-platform-directory',
             action='append',
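
For reference, a self-contained sketch of how optparse exposes the new flag
(the real option is registered via make_option inside a larger option list;
this only demonstrates the derived attribute name that Port.get_option() reads):

import optparse

parser = optparse.OptionParser()
parser.add_option('--ignore-default-expectations', action='store_true',
                  help='Do not use the default set of TestExpectations files.')
options, args = parser.parse_args(['--ignore-default-expectations', 'foo.html'])
print(options.ignore_default_expectations)  # True
print(args)                                 # ['foo.html']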
@@ -1218,6 +1218,10 @@ class RunTest(unittest.TestCase, StreamTestingMixin):
         json_failing_test_results = host.filesystem.read_text_file('/tmp/json_failing_results.json')
         self.assertEqual(json.loads(json_failing_test_results), details.summarized_failing_results)
+
+    def test_no_default_expectations(self):
+        self.assertTrue(passing_run(['failures/expected/text.html']))
+        self.assertFalse(passing_run(['--ignore-default-expectations', 'failures/expected/text.html']))
 
 
 class RebaselineTest(unittest.TestCase, StreamTestingMixin):
     """Tests for flags which cause new baselines to be written.