Commit 80816a67 authored by Brian Sheedy, committed by Chromium LUCI CQ

Output removed stale expectation bugs

Updates the GPU unexpected pass finder script to output a list of bugs
associated with removed stale expectations in several formats. This
will make it easier to close out bugs as necessary and to include the
affected bugs in the CL description.
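
As a rough, illustrative sketch (not part of this CL; the import path
assumes the unexpected_passes package used by the finder script), the new
output looks like:

  from unexpected_passes import result_output

  # URLs returned by expectations.RemoveExpectationsFromFile().
  removed_urls = {'crbug.com/1234', 'crbug.com/2345'}
  result_output.OutputRemovedUrls(removed_urls)
  # Prints both formats to stdout:
  #   Affected bugs: https://crbug.com/1234 https://crbug.com/2345
  #   Affected bugs:
  #   Bug: 1234, 2345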

Bug: 998329
Change-Id: Ia426974eb63c4123505787cd8bc010d6ccea1b91
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2594368
Auto-Submit: Brian Sheedy <bsheedy@chromium.org>
Reviewed-by: Yuly Novikov <ynovikov@chromium.org>
Commit-Queue: Brian Sheedy <bsheedy@chromium.org>
Cr-Commit-Position: refs/heads/master@{#838214}
parent cf758624
@@ -195,10 +195,11 @@ def main():
for _, expectation_map in stale.iteritems():
stale_expectations.extend(expectation_map.keys())
stale_expectations.extend(unused_expectations)
expectations.RemoveExpectationsFromFile(stale_expectations,
args.expectation_file)
removed_urls = expectations.RemoveExpectationsFromFile(
stale_expectations, args.expectation_file)
print('Stale expectations removed from %s. Stale comments, etc. may still '
'need to be removed.' % args.expectation_file)
result_output.OutputRemovedUrls(removed_urls)
if __name__ == '__main__':
......
@@ -201,6 +201,10 @@ def RemoveExpectationsFromFile(expectations, expectation_file):
expectations: A list of data_types.Expectations to remove.
expectation_file: A filepath pointing to an expectation file to remove lines
from.
Returns:
A set of strings containing URLs of bugs associated with the removed
expectations.
"""
header = validate_tag_consistency.TAG_HEADER
@@ -210,6 +214,7 @@ def RemoveExpectationsFromFile(expectations, expectation_file):
output_contents = ''
in_disable_block = False
disable_block_reason = ''
removed_urls = set()
for line in input_contents.splitlines(True):
# Auto-add any comments or empty lines
stripped_line = line.strip()
@@ -260,12 +265,18 @@ def RemoveExpectationsFromFile(expectations, expectation_file):
'Would have removed expectation %s, but it has an inline disable '
'comment with reason %s',
stripped_line.split('#')[0], _GetDisableReasonFromComment(line))
else:
reason = list_parser.expectations[0].reason
if reason:
removed_urls.add(reason)
else:
output_contents += line
with open(expectation_file, 'w') as f:
f.write(output_contents)
return removed_urls
def _GetDisableReasonFromComment(line):
return line.split(FINDER_DISABLE_COMMENT, 1)[1].strip()
@@ -365,7 +365,9 @@ crbug.com/2345 [ win ] foo/test [ RetryOnFailure ]
with open(self.filename, 'w') as f:
f.write(contents)
expectations.RemoveExpectationsFromFile(stale_expectations, self.filename)
removed_urls = expectations.RemoveExpectationsFromFile(
stale_expectations, self.filename)
self.assertEqual(removed_urls, set(['crbug.com/1234']))
with open(self.filename) as f:
self.assertEqual(f.read(), expected_contents)
@@ -397,23 +399,25 @@ crbug.com/1234 [ win ] foo/test [ Failure ]
contents = validate_tag_consistency.TAG_HEADER + """
crbug.com/1234 [ win ] foo/test [ Failure ]
# finder:disable
crbug.com/1234 [ win ] foo/test [ Failure ]
crbug.com/1234 [ win ] foo/test [ Failure ]
crbug.com/2345 [ win ] foo/test [ Failure ]
crbug.com/3456 [ win ] foo/test [ Failure ]
# finder:enable
crbug.com/1234 [ win ] foo/test [ Failure ]
crbug.com/4567 [ win ] foo/test [ Failure ]
"""
stale_expectations = [
data_types.Expectation('foo/test', ['win'], ['Failure'])
]
expected_contents = validate_tag_consistency.TAG_HEADER + """
# finder:disable
crbug.com/1234 [ win ] foo/test [ Failure ]
crbug.com/1234 [ win ] foo/test [ Failure ]
crbug.com/2345 [ win ] foo/test [ Failure ]
crbug.com/3456 [ win ] foo/test [ Failure ]
# finder:enable
"""
with open(self.filename, 'w') as f:
f.write(contents)
expectations.RemoveExpectationsFromFile(stale_expectations, self.filename)
removed_urls = expectations.RemoveExpectationsFromFile(
stale_expectations, self.filename)
self.assertEqual(removed_urls, set(['crbug.com/1234', 'crbug.com/4567']))
with open(self.filename) as f:
self.assertEqual(f.read(), expected_contents)
@@ -421,18 +425,20 @@ crbug.com/1234 [ win ] foo/test [ Failure ]
"""Tests that expectations with inline disable comments are not removed."""
contents = validate_tag_consistency.TAG_HEADER + """
crbug.com/1234 [ win ] foo/test [ Failure ]
crbug.com/1234 [ win ] foo/test [ Failure ] # finder:disable
crbug.com/1234 [ win ] foo/test [ Failure ]
crbug.com/2345 [ win ] foo/test [ Failure ] # finder:disable
crbug.com/3456 [ win ] foo/test [ Failure ]
"""
stale_expectations = [
data_types.Expectation('foo/test', ['win'], ['Failure'])
]
expected_contents = validate_tag_consistency.TAG_HEADER + """
crbug.com/1234 [ win ] foo/test [ Failure ] # finder:disable
crbug.com/2345 [ win ] foo/test [ Failure ] # finder:disable
"""
with open(self.filename, 'w') as f:
f.write(contents)
expectations.RemoveExpectationsFromFile(stale_expectations, self.filename)
removed_urls = expectations.RemoveExpectationsFromFile(
stale_expectations, self.filename)
self.assertEqual(removed_urls, set(['crbug.com/1234', 'crbug.com/3456']))
with open(self.filename) as f:
self.assertEqual(f.read(), expected_contents)
......
@@ -6,6 +6,7 @@
Also probably a good example of how to *not* write HTML.
"""
import collections
import logging
import sys
import tempfile
@@ -136,6 +137,9 @@ SECTION_UNMATCHED = ('Unmatched Results (An Expectation Existed When The Test '
SECTION_UNUSED = ('Unused Expectations (Indicative Of The Configuration No '
'Longer Being Tested Or Tags Changing)')
MAX_BUGS_PER_LINE = 5
MAX_CHARACTERS_PER_CL_LINE = 72
def OutputResults(stale_dict,
semi_stale_dict,
@@ -430,3 +434,81 @@ def _FormatExpectation(expectation):
def _AddStatsToStr(s, stats):
return '%s (%d/%d)' % (s, stats.passed_builds, stats.total_builds)
def OutputRemovedUrls(removed_urls):
"""Outputs URLs of removed expectations for easier consumption by the user.
Outputs both a string suitable for passing to Chrome via the command line to
open all bugs in the browser and a string suitable for copying into the CL
description to associate the CL with all the affected bugs.
Args:
removed_urls: A set or list of strings containing bug URLs.
"""
removed_urls = list(removed_urls)
removed_urls.sort()
_OutputUrlsForCommandLine(removed_urls)
_OutputUrlsForClDescription(removed_urls)
def _OutputUrlsForCommandLine(urls, file_handle=None):
"""Outputs |urls| for opening in a browser.
The output string is meant to be passed to a browser via the command line in
order to open all URLs in that browser, e.g.
`google-chrome https://crbug.com/1234 https://crbug.com/2345`
Args:
urls: A list of strings containing URLs to output.
file_handle: A file handle to write the string to. Defaults to stdout.
"""
file_handle = file_handle or sys.stdout
def _StartsWithHttp(url):
return url.startswith('https://') or url.startswith('http://')
urls = [u if _StartsWithHttp(u) else 'https://%s' % u for u in urls]
file_handle.write('Affected bugs: %s\n' % ' '.join(urls))
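# Illustrative example (not part of this change), matching
# OutputUrlsForCommandLineUnittest below: bare crbug.com URLs are given an
# https:// prefix and everything is joined onto one line, e.g.
#   _OutputUrlsForCommandLine(['crbug.com/1234', 'http://crbug.com/2345'])
#   prints: Affected bugs: https://crbug.com/1234 http://crbug.com/2345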
def _OutputUrlsForClDescription(urls, file_handle=None):
"""Outputs |urls| for use in a CL description.
Output adheres to the line length recommendation and max number of bugs per
line supported in Gerrit.
Args:
urls: A list of strings containing URLs to output.
file_handle: A file handle to write the string to. Defaults to stdout.
"""
file_handle = file_handle or sys.stdout
urls = collections.deque(urls)
output_str = ''
current_line = ''
bugs_on_line = 0
while len(urls):
current_bug = urls.popleft()
current_bug = current_bug.split('crbug.com/', 1)[1]
# Handles cases like crbug.com/angleproject/1234.
current_bug = current_bug.replace('/', ':')
# First bug on the line.
if not current_line:
current_line = 'Bug: %s' % current_bug
# Bug or length limit hit for line.
elif (len(current_line) + len(current_bug) + 2 > MAX_CHARACTERS_PER_CL_LINE
or bugs_on_line >= MAX_BUGS_PER_LINE):
output_str += current_line + '\n'
bugs_on_line = 0
current_line = 'Bug: %s' % current_bug
# Can add to current line.
else:
current_line += ', %s' % current_bug
bugs_on_line += 1
output_str += current_line + '\n'
file_handle.write('Affected bugs:\n%s' % output_str)
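A minimal end-to-end sketch of the CL-description format (illustrative only;
the expected text mirrors OutputUrlsForClDescriptionUnittest below, and the
import path is an assumption):

  import sys
  from unexpected_passes import result_output

  result_output._OutputUrlsForClDescription(
      ['crbug.com/1', 'crbug.com/2', 'crbug.com/3', 'crbug.com/4',
       'crbug.com/5', 'crbug.com/6'], sys.stdout)
  # With MAX_BUGS_PER_LINE = 5, at most five bugs share a "Bug:" line:
  #   Affected bugs:
  #   Bug: 1, 2, 3, 4, 5
  #   Bug: 6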
@@ -435,6 +435,117 @@ class OutputResultsUnittest(fake_filesystem_unittest.TestCase):
self._file_handle)
class OutputUrlsForCommandLineUnittest(fake_filesystem_unittest.TestCase):
def setUp(self):
self.setUpPyfakefs()
self._file_handle = tempfile.NamedTemporaryFile(delete=False)
self._filepath = self._file_handle.name
def testOutput(self):
"""Tests that the output is correct."""
urls = [
'https://crbug.com/1234',
'https://crbug.com/angleproject/1234',
'http://crbug.com/2345',
'crbug.com/3456',
]
result_output._OutputUrlsForCommandLine(urls, self._file_handle)
self._file_handle.close()
with open(self._filepath) as f:
self.assertEqual(f.read(), ('Affected bugs: '
'https://crbug.com/1234 '
'https://crbug.com/angleproject/1234 '
'http://crbug.com/2345 '
'https://crbug.com/3456\n'))
class OutputUrlsForClDescriptionUnittest(fake_filesystem_unittest.TestCase):
def setUp(self):
self.setUpPyfakefs()
self._file_handle = tempfile.NamedTemporaryFile(delete=False)
self._filepath = self._file_handle.name
def testSingleLine(self):
"""Tests when all bugs can fit on a single line."""
urls = [
'crbug.com/1234',
'https://crbug.com/angleproject/2345',
]
result_output._OutputUrlsForClDescription(urls, self._file_handle)
self._file_handle.close()
with open(self._filepath) as f:
self.assertEqual(f.read(), ('Affected bugs:\n'
'Bug: 1234, angleproject:2345\n'))
def testBugLimit(self):
"""Tests that only a certain number of bugs are allowed per line."""
urls = [
'crbug.com/1',
'crbug.com/2',
'crbug.com/3',
'crbug.com/4',
'crbug.com/5',
'crbug.com/6',
]
result_output._OutputUrlsForClDescription(urls, self._file_handle)
self._file_handle.close()
with open(self._filepath) as f:
self.assertEqual(f.read(), ('Affected bugs:\n'
'Bug: 1, 2, 3, 4, 5\n'
'Bug: 6\n'))
def testLengthLimit(self):
"""Tests that only a certain number of characters are allowed per line."""
urls = [
'crbug.com/averylongprojectthatwillgooverthelinelength/1',
'crbug.com/averylongprojectthatwillgooverthelinelength/2',
]
result_output._OutputUrlsForClDescription(urls, self._file_handle)
self._file_handle.close()
with open(self._filepath) as f:
self.assertEqual(f.read(),
('Affected bugs:\n'
'Bug: averylongprojectthatwillgooverthelinelength:1\n'
'Bug: averylongprojectthatwillgooverthelinelength:2\n'))
project_name = (result_output.MAX_CHARACTERS_PER_CL_LINE - len('Bug: ') -
len(':1, 2')) * 'a'
urls = [
'crbug.com/%s/1' % project_name,
'crbug.com/2',
]
with open(self._filepath, 'w') as f:
result_output._OutputUrlsForClDescription(urls, f)
with open(self._filepath) as f:
self.assertEqual(f.read(), ('Affected bugs:\n'
'Bug: %s:1, 2\n' % project_name))
project_name += 'a'
urls = [
'crbug.com/%s/1' % project_name,
'crbug.com/2',
]
with open(self._filepath, 'w') as f:
result_output._OutputUrlsForClDescription(urls, f)
with open(self._filepath) as f:
self.assertEqual(f.read(), ('Affected bugs:\n'
'Bug: %s:1\nBug: 2\n' % project_name))
def testSingleBugOverLineLimit(self):
"""Tests the behavior when a single bug by itself is over the line limit."""
project_name = result_output.MAX_CHARACTERS_PER_CL_LINE * 'a'
urls = [
'crbug.com/%s/1' % project_name,
'crbug.com/2',
]
result_output._OutputUrlsForClDescription(urls, self._file_handle)
self._file_handle.close()
with open(self._filepath) as f:
self.assertEqual(f.read(), ('Affected bugs:\n'
'Bug: %s:1\n'
'Bug: 2\n' % project_name))
def _Dedent(s):
output = ''
for line in s.splitlines(True):
......