Commit 1a2ee799 authored by Kyle Ju, committed by Commit Bot

Activate the use of is_unexpected field in the output JSON file.

Check in bits of code from the pending two-way sync CL. It allows me to monitor the flakiness of webdriver_test_suite on Linux Tests (dbg)(1) while the other CL is being reviewed. The changes are taken from https://chromium-review.googlesource.com/c/chromium/src/+/1484616/36

Sample JSON output:
https://pastebin.com/CfuVTuP8

The is_unexpected fields for all of the three tests are false because their expectations exist in the WebDriverExpectations.

Bug: 934919
Change-Id: If068f2318f2f2cbe6b866f96f14cb97f941a4df0
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1574360
Reviewed-by: John Chen <johnchen@chromium.org>
Reviewed-by: Robert Ma <robertma@chromium.org>
Commit-Queue: Kyle Ju <kyleju@chromium.org>
Cr-Commit-Position: refs/heads/master@{#652292}
parent 040d96eb
......@@ -30,6 +30,7 @@ BLINK_TOOLS_PATH = 'third_party/blink/tools'
BLINK_TOOLS_ABS_PATH = os.path.join(SRC_DIR, BLINK_TOOLS_PATH)
sys.path.insert(0, BLINK_TOOLS_ABS_PATH)
from blinkpy.common import exit_codes
from blinkpy.common.host import Host
from blinkpy.common.system.log_utils import configure_logging
from blinkpy.web_tests.models import test_expectations
......@@ -66,9 +67,10 @@ def preprocess_skipped_tests(test_results, expectations, path_finder):
return skip_list
class SubtestResultRecorder(object):
def __init__(self, path):
def __init__(self, path, port):
self.result = []
self.test_path = path
self.port = port
def pytest_runtest_logreport(self, report):
if report.passed and report.when == "call":
......@@ -102,7 +104,8 @@ class SubtestResultRecorder(object):
# location is a (filesystempath, lineno, domaininfo) tuple
# https://docs.pytest.org/en/3.6.2/reference.html#_pytest.runner.TestReport.location
test_name = report.location[2]
output_name = self.test_path + '::' + test_name
output_name = self.port.add_webdriver_subtest_suffix(
self.test_path, test_name)
self.result.append(WebDriverTestResult(
output_name, status, message))
......@@ -137,10 +140,10 @@ def set_up_config(chromedriver_server):
"browser_host": "web-platform.test",
"ports": {"ws": [9001], "wss": [9444], "http": [8001], "https": [8444]}})
def run_test(path, path_finder, skipped_tests=[]):
def run_test(path, path_finder, port, skipped_tests=[]):
abs_path = os.path.abspath(path)
external_path = path_finder.strip_web_tests_path(abs_path)
subtests = SubtestResultRecorder(external_path)
subtests = SubtestResultRecorder(external_path, port)
skip_test_flag = ['--deselect=' +
skipped_test for skipped_test in skipped_tests]
......@@ -221,14 +224,14 @@ if __name__ == '__main__':
sys.path.insert(0, WEBDRIVER_CLIENT_ABS_PATH)
try:
if os.path.isfile(test_path):
test_results = run_test(test_path, path_finder, skipped_tests)
test_results = run_test(test_path, path_finder, port, skipped_tests)
elif os.path.isdir(test_path):
for root, dirnames, filenames in os.walk(test_path):
for filename in filenames:
if '__init__' in filename:
continue
test_file = os.path.join(root, filename)
test_results += run_test(test_file, path_finder, skipped_tests)
test_results += run_test(test_file, path_finder, port, skipped_tests)
else:
_log.error('%s is not a file nor directory.' % test_path)
sys.exit(1)
......@@ -238,6 +241,7 @@ if __name__ == '__main__':
chromedriver_server.Kill()
port.stop_wptserve()
exit_code = 0
if options.isolated_script_test_output:
output = {
'interrupted': False,
......@@ -251,16 +255,21 @@ if __name__ == '__main__':
success_count = 0
for test_result in test_results:
expected_result = test_result.test_status
if expectations.model().has_test(test_result.test_name):
expected_result = expectations.get_expectations_string(
test_result.test_name)
status = test_expectations.TestExpectations.expectation_from_string(
test_result.test_status)
is_unexpected = not expectations.matches_an_expected_result(
test_result.test_name, status, False)
else:
expected_result = 'PASS'
is_unexpected = (test_result.test_status != expected_result)
# TODO(crbug.com/934919): is_unexpected needs to be set for
# unexpected failures once expectations have been supported.
output['tests'][test_result.test_name] = {
'expected': expected_result,
'actual': test_result.test_status,
'is_unexpected': is_unexpected,
}
if test_result.message:
......@@ -269,6 +278,9 @@ if __name__ == '__main__':
if test_result.test_status == 'PASS':
success_count += 1
if is_unexpected:
exit_code += 1
output['num_failures_by_type']['PASS'] = success_count
output['num_failures_by_type']['SKIP'] = len(skipped_tests)
output['num_failures_by_type']['FAIL'] = len(
......@@ -277,7 +289,9 @@ if __name__ == '__main__':
with open(options.isolated_script_test_output, 'w') as fp:
json.dump(output, fp)
# TODO(crbug.com/934919): exit code should be non-zero once
# the runner is able to detect unexpected failures. Currently set
# to 0 because all failures are expected.
sys.exit(0)
if exit_code > exit_codes.MAX_FAILURES_EXIT_STATUS:
_log.warning('num regressions (%d) exceeds max exit status (%d)',
exit_code, exit_codes.MAX_FAILURES_EXIT_STATUS)
exit_code = exit_codes.MAX_FAILURES_EXIT_STATUS
sys.exit(exit_code)
\ No newline at end of file
......@@ -174,6 +174,9 @@ class Port(object):
FLAG_EXPECTATIONS_PREFIX = 'FlagExpectations'
# The following is used for concatenating WebDriver test names.
WEBDRIVER_SUBTEST_SEPARATOR = '>>'
# The following two constants must match. When adding a new WPT root, also
# remember to add an alias rule to third_party/wpt/wpt.config.json.
# WPT_DIRS maps WPT roots on the file system to URL prefixes on wptserve.
......@@ -1776,6 +1779,8 @@ class Port(object):
raise TestRunException(exit_codes.SYS_DEPS_EXIT_STATUS, message)
return result
def add_webdriver_subtest_suffix(self, test_name, subtest_name):
return test_name + self.WEBDRIVER_SUBTEST_SEPARATOR + subtest_name
class VirtualTestSuite(object):
......
......@@ -938,6 +938,15 @@ class PortTest(LoggingTestCase):
self.assertTrue(port.skips_test('failures/expected/image.html'))
def test_add_webdriver_subtest_suffix(self):
port = self.make_port()
wb_test_name = "abd"
sub_test_name = "bar"
full_webdriver_name = port.add_webdriver_subtest_suffix(wb_test_name, sub_test_name)
self.assertEqual(full_webdriver_name, "abd>>bar")
class NaturalCompareTest(unittest.TestCase):
def setUp(self):
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment