Commit 47c7aa5e authored by zhaoyangli, committed by Commit Bot

[iOS][test runner] Report test results to result sink.

Report testId, status, expected, summaryHtml and artifacts to ResultSink.

Bug: 1104247, 1132476
Change-Id: I425290ab52bcab302de3ae215606b33d0af15692
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2430626
Commit-Queue: Zhaoyang Li <zhaoyangli@chromium.org>
Reviewed-by: Justin Cohen <justincohen@chromium.org>
Reviewed-by: Chan Li <chanli@chromium.org>
Cr-Commit-Position: refs/heads/master@{#814941}
parent 1ed869ad
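For context (not part of the CL itself), this is roughly the JSON body that the new ResultSinkClient.post() sends for one passing test, following the luci.resultsink.v1.Sink/ReportTestResults protocol; the test id and log are illustrative values taken from the unit tests in this change:

import json

# Illustrative sketch only: the request body posted for a single test result.
example_request = {
    'testResults': [{
        'testId': 'TestCase/testSomething',
        'status': 'PASS',
        'expected': True,
        'summaryHtml': '<pre>Some logs.</pre>',
        'tags': [],
    }]
}
print(json.dumps(example_request))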
......@@ -15,6 +15,7 @@ def _RunTestRunnerUnitTests(input_api, output_api):
files = [
'coverage_util_test.py',
'iossim_util_test.py',
'result_sink_util_test.py',
'run_test.py',
'shard_util_test.py',
'standard_json_util_test.py',
......
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import base64
import cgi
import json
import logging
import os
import requests
LOGGER = logging.getLogger(__name__)
# Max summaryHtml length (4 KiB) from
# https://source.chromium.org/chromium/infra/infra/+/master:go/src/go.chromium.org/luci/resultdb/proto/v1/test_result.proto;drc=ca12b9f52b27f064b0fa47c39baa3b011ffa5790;l=96
MAX_REPORT_LEN = 4 * 1024
# VALID_STATUSES is the set of valid status values for test_result['status'].
# The full list can be obtained at
# https://source.chromium.org/chromium/infra/infra/+/master:go/src/go.chromium.org/luci/resultdb/proto/v1/test_result.proto;drc=ca12b9f52b27f064b0fa47c39baa3b011ffa5790;l=151-174
VALID_STATUSES = {"PASS", "FAIL", "CRASH", "ABORT", "SKIP"}
def compose_test_result(test_id, status, expected, test_log=None, tags=None):
"""Composes the test_result dict item to be posted to result sink.
Args:
test_id: (str) A unique identifier of the test in LUCI context.
status: (str) Status of the test. Must be one in |VALID_STATUSES|.
expected: (bool) Whether the status is expected.
test_log: (str) Log of the test. Optional.
tags: (list) List of tags. Each item in the list should be a length 2 tuple of
strings, as ("key", "value"). Optional.
Returns:
A dict of test results with input information, conforming to
https://source.chromium.org/chromium/infra/infra/+/master:go/src/go.chromium.org/luci/resultdb/sink/proto/v1/test_result.proto
"""
assert status in VALID_STATUSES, (
'%s is not a valid status (one in %s) for ResultSink.' %
(status, VALID_STATUSES))
for tag in tags or []:
assert len(tag) == 2, 'Items in tags should be length 2 tuples of strings'
assert isinstance(tag[0], str) and isinstance(
tag[1], str), ('Items in '
'tags should be length 2 tuples of strings')
test_result = {
'testId': test_id,
'status': status,
'expected': expected,
'tags': [{
'key': key,
'value': value
} for (key, value) in (tags or [])]
}
if test_log:
summary = '<pre>%s</pre>' % cgi.escape(test_log)
summary_trunc = ''
if len(summary) > MAX_REPORT_LEN:
summary_trunc = (
summary[:MAX_REPORT_LEN - 45] +
'...Full output in "Test Log" Artifact.</pre>')
test_result['summaryHtml'] = summary_trunc or summary
if summary_trunc:
test_result['artifacts'] = {
'Test Log': {
'contents': base64.b64encode(test_log)
},
}
return test_result
class ResultSinkClient(object):
"""Stores constants and handles posting to ResultSink."""
def __init__(self):
"""Initiates and stores constants to class."""
self.sink = None
luci_context_file = os.environ.get('LUCI_CONTEXT')
if not luci_context_file:
logging.warning('LUCI_CONTEXT not found in environment. ResultDB'
' integration disabled.')
return
with open(luci_context_file) as f:
self.sink = json.load(f).get('result_sink')
if not self.sink:
logging.warning('ResultSink constants not found in LUCI context.'
' ResultDB integration disabled.')
return
self.url = ('http://%s/prpc/luci.resultsink.v1.Sink/ReportTestResults' %
self.sink['address'])
self.headers = {
'Content-Type': 'application/json',
'Accept': 'application/json',
'Authorization': 'ResultSink %s' % self.sink['auth_token'],
}
def post(self, test_result):
"""Posts single test result to server.
Args:
test_result: (dict) A dict conforming to the protocol defined in
https://source.chromium.org/chromium/infra/infra/+/master:go/src/go.chromium.org/luci/resultdb/sink/proto/v1/test_result.proto
"""
if not self.sink:
return
res = requests.post(
url=self.url,
headers=self.headers,
data=json.dumps({'testResults': [test_result]}),
)
res.raise_for_status()
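For readers unfamiliar with ResultSink: the client above only reads two keys from the result_sink section of LUCI_CONTEXT. A minimal sketch of that section, with placeholder values, looks like:

import json

# Hypothetical LUCI_CONTEXT content consumed by ResultSinkClient.__init__().
# Only the keys the client reads are shown; both values are placeholders.
example_luci_context = {
    'result_sink': {
        'address': 'localhost:12345',   # host:port of the local ResultSink
        'auth_token': 'example-token',  # sent in the Authorization header
    }
}
print(json.dumps(example_luci_context))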
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import base64
import unittest
import result_sink_util
class UnitTest(unittest.TestCase):
def test_compose_test_result(self):
"""Tests compose_test_result function."""
# Test a test result without log_path.
test_result = result_sink_util.compose_test_result('TestCase/testSomething',
'PASS', True)
expected = {
'testId': 'TestCase/testSomething',
'status': 'PASS',
'expected': True,
'tags': [],
}
self.assertEqual(test_result, expected)
# Tests a test result with log_path.
test_result = result_sink_util.compose_test_result('TestCase/testSomething',
'PASS', True,
'Some logs.')
expected = {
'testId': 'TestCase/testSomething',
'status': 'PASS',
'expected': True,
'summaryHtml': '<pre>Some logs.</pre>',
'tags': [],
}
self.assertEqual(test_result, expected)
def test_long_test_log(self):
"""Tests long test log is reported as expected."""
len_32_str = 'This is a string in length of 32'
self.assertEqual(len(len_32_str), 32)
len_4128_str = (4 * 32 + 1) * len_32_str
self.assertEqual(len(len_4128_str), 4128)
expected_summary_html = ('<pre>' + len_32_str * 126 + 'This is a stri' +
'...Full output in "Test Log" Artifact.</pre>')
expected = {
'testId': 'TestCase/testSomething',
'status': 'PASS',
'expected': True,
'summaryHtml': expected_summary_html,
'artifacts': {
'Test Log': {
'contents': base64.b64encode(len_4128_str)
},
},
'tags': [],
}
test_result = result_sink_util.compose_test_result('TestCase/testSomething',
'PASS', True,
len_4128_str)
self.assertEqual(test_result, expected)
def test_compose_test_result_assertions(self):
"""Tests invalid status is rejected"""
with self.assertRaises(AssertionError):
test_result = result_sink_util.compose_test_result(
'TestCase/testSomething', 'SOME_INVALID_STATUS', True)
with self.assertRaises(AssertionError):
test_result = result_sink_util.compose_test_result(
'TestCase/testSomething', 'PASS', True, tags=('a', 'b'))
with self.assertRaises(AssertionError):
test_result = result_sink_util.compose_test_result(
'TestCase/testSomething',
'PASS',
True,
tags=[('a', 'b', 'c'), ('d', 'e')])
with self.assertRaises(AssertionError):
test_result = result_sink_util.compose_test_result(
'TestCase/testSomething', 'PASS', True, tags=[('a', 'b'), ('c', 3)])
def test_composed_with_tags(self):
"""Tests tags is in correct format."""
expected = {
'testId': 'TestCase/testSomething',
'status': 'SKIP',
'expected': True,
'tags': [{
'key': 'disabled_test',
'value': 'true',
}]
}
test_result = result_sink_util.compose_test_result(
'TestCase/testSomething',
'SKIP',
True,
tags=[('disabled_test', 'true')])
self.assertEqual(test_result, expected)
if __name__ == '__main__':
unittest.main()
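As a quick sanity check of the truncation constants exercised by test_long_test_log above (a worked example, not part of the change):

# MAX_REPORT_LEN is 4096; the 4128-char log wrapped in <pre></pre> is 4139
# chars, so compose_test_result keeps the first 4096 - 45 = 4051 chars and
# appends a 44-char suffix, ending at 4095 chars, just under the limit.
MAX_REPORT_LEN = 4 * 1024
suffix = '...Full output in "Test Log" Artifact.</pre>'
assert len(suffix) == 44
assert (MAX_REPORT_LEN - 45) + len(suffix) <= MAX_REPORT_LEN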
......@@ -5,6 +5,8 @@
import logging
from collections import OrderedDict
import result_sink_util
LOGGER = logging.getLogger(__name__)
......@@ -17,6 +19,7 @@ class StdJson():
"""
self.tests = OrderedDict()
self.result_sink = result_sink_util.ResultSinkClient()
if 'passed' in kwargs:
self.mark_all_passed(kwargs['passed'])
......@@ -26,7 +29,7 @@ class StdJson():
self.mark_all_passed(kwargs['flaked'], flaky=True)
def mark_passed(self, test, flaky=False):
"""Set test as passed
"""Sets test as passed
Params:
test (str): a test in format "{TestCase}/{testMethod}"
......@@ -38,6 +41,10 @@ class StdJson():
LOGGER.warn('Empty or None test name passed to standard_json_util')
return
result_sink_test_result = result_sink_util.compose_test_result(
test, 'PASS', True)
self.result_sink.post(result_sink_test_result)
if test in self.tests:
self.tests[test]['actual'] = self.tests[test]['actual'] + " PASS"
else:
......@@ -49,20 +56,25 @@ class StdJson():
self.tests[test].pop('is_unexpected', None)
def mark_all_passed(self, tests, flaky=False):
"""Mark all tests as PASS"""
"""Marks all tests as PASS"""
for test in tests:
self.mark_passed(test, flaky)
def mark_failed(self, test):
"""Set test(s) as failed.
def mark_failed(self, test, test_log=None):
"""Sets test(s) as failed.
Params:
test (str): a test in format "{TestCase}/{testMethod}"
test_log (str): log of the specific test
"""
if not test:
LOGGER.warn('Empty or None test name passed to standard_json_util')
return
result_sink_test_result = result_sink_util.compose_test_result(
test, 'FAIL', False, test_log=test_log)
self.result_sink.post(result_sink_test_result)
if test in self.tests:
self.tests[test]['actual'] = self.tests[test]['actual'] + " FAIL"
self.tests[test]['is_unexpected'] = True
......@@ -74,12 +86,12 @@ class StdJson():
}
def mark_all_failed(self, tests):
"""Mark all tests as FAIL"""
"""Marks all tests as FAIL"""
for test in tests:
self.mark_failed(test)
def mark_skipped(self, test):
"""Set test(s) as SKIP.
"""Sets test(s) as expected SKIP.
Params:
test (str): a test in format "{TestCase}/{testMethod}"
......@@ -88,6 +100,10 @@ class StdJson():
LOGGER.warn('Empty or None test name passed to standard_json_util')
return
result_sink_test_result = result_sink_util.compose_test_result(
test, 'SKIP', True, tags=[('disabled_test', 'true')])
self.result_sink.post(result_sink_test_result)
self.tests[test] = {'expected': 'SKIP', 'actual': 'SKIP'}
def mark_all_skipped(self, tests):
......@@ -95,7 +111,7 @@ class StdJson():
self.mark_skipped(test)
def mark_timeout(self, test):
"""Set test as TIMEOUT, which is used to indicate a test abort/timeout
"""Sets test as TIMEOUT, which is used to indicate a test abort/timeout
Params:
test (str): a test in format "{TestCase}/{testMethod}"
......@@ -104,6 +120,17 @@ class StdJson():
LOGGER.warn('Empty or None test name passed to standard_json_util')
return
# Timeout tests in the iOS test runner are tests that were unexpectedly not run.
test_log = ('The test is compiled in test target but was unexpectedly not'
' run or not finished.')
result_sink_test_result = result_sink_util.compose_test_result(
test,
'SKIP',
False,
test_log=test_log,
tags=[('disabled_test', 'false')])
self.result_sink.post(result_sink_test_result)
if test in self.tests:
self.tests[test]['actual'] = self.tests[test]['actual'] + " TIMEOUT"
self.tests[test]['is_unexpected'] = True
......
......@@ -82,6 +82,14 @@ class UnitTest(unittest.TestCase):
self.assertTrue(output.tests['e']['is_flaky'])
self.assertIsNot(output.tests['e'].get('is_unexpected'), True)
def test_skip(self):
"""Test setting expected skip."""
test = 'f'
output = sju.StdJson()
output.mark_skipped(test)
self.assertEqual(output.tests['f']['actual'], 'SKIP')
self.assertFalse(output.tests['f'].get('is_unexpected', False))
def test_timeout(self):
"""Test setting timeout"""
test = 'e'
......
......@@ -618,6 +618,7 @@ class TestRunner(object):
# pass before entering the retry block below.
# For each retry that passes, we want to mark it separately as passed
# (ie/ "FAIL PASS"), with is_flaky=True.
# TODO(crbug.com/1132476): Report failed GTest logs to ResultSink.
output = sju.StdJson(passed=passed, failed=failed, flaked=flaked)
# Retry failed test cases.
......
......@@ -455,7 +455,8 @@ class SimulatorParallelTestRunner(test_runner.SimulatorTestRunner):
for attempt, attempt_results in enumerate(shard_attempts):
for test in attempt_results['failed'].keys():
output.mark_failed(test)
output.mark_failed(
test, test_log='\n'.join(self.logs.get(test, [])).encode('utf8'))
# 'aborted tests' in logs is an array of strings, each string defined
# as "{TestCase}/{testMethod}"
......
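Putting the pieces together, the end-to-end flow added by this CL for one failed EG test is roughly the following (a simplified, hypothetical sketch: the test name and log lines are made up, and the post is a no-op when LUCI_CONTEXT has no result_sink section):

import result_sink_util

# Per-test logs collected by the runner, keyed by "{TestCase}/{testMethod}".
logs = {'TestCase/testSomething': ['step failed', 'assertion message']}
test = 'TestCase/testSomething'

sink_client = result_sink_util.ResultSinkClient()
result = result_sink_util.compose_test_result(
    test, 'FAIL', False, test_log='\n'.join(logs.get(test, [])))
sink_client.post(result)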