Commit 884e41f0 authored by Roberto Carrillo, committed by Commit Bot

[code coverage] Remove results from corrupted shards.

This change makes the merge script remove the results files for bad
shards. This should make the rest of the pipeline properly treat those
shards as missing and retry them if appropriate.
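
In essence, the new mark_invalid_shards helper deletes any per-shard results
json whose path contains the task id of a bad shard, so downstream tooling
treats that shard's results as missing. Below is a minimal, self-contained
sketch of that behavior; the helper's name and signature follow the diff in
this CL, while the temporary directory layout and the task ids ('123abc',
'456def') are hypothetical examples.

import os
import tempfile


def mark_invalid_shards(bad_shards, jsons_to_merge):
  # Delete any results json whose path contains the task id of a bad shard,
  # so the rest of the pipeline sees that shard as missing.
  for path in jsons_to_merge:
    if any(task_id in path for task_id in bad_shards):
      os.remove(path)


# Hypothetical usage: shard '123abc' is corrupted, shard '456def' is fine.
tmp_dir = tempfile.mkdtemp()
result_paths = []
for task_id in ('123abc', '456def'):
  shard_dir = os.path.join(tmp_dir, task_id)
  os.mkdir(shard_dir)
  path = os.path.join(shard_dir, 'output.json')
  open(path, 'w').close()
  result_paths.append(path)

mark_invalid_shards(['123abc'], result_paths)
# Only the healthy shard's results json is left on the local machine.
assert [p for p in result_paths if os.path.exists(p)] == [result_paths[1]]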

R=liaoyuke,martiniss

Bug: 955012
Change-Id: I4bddc82fe1219b68adeb11d76a299962dddf65c3
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1719265
Commit-Queue: Roberto Carrillo <robertocn@chromium.org>
Reviewed-by: Stephen Martinis <martiniss@chromium.org>
Reviewed-by: Yuke Liao <liaoyuke@chromium.org>
Cr-Commit-Position: refs/heads/master@{#682934}
parent 0a13421c
@@ -85,6 +85,9 @@ def main():
'w') as f:
json.dump(invalid_profiles, f)
mark_invalid_shards(
coverage_merger.get_shards_to_retry(invalid_profiles),
params.jsons_to_merge)
logging.info('Merging %d test results', len(params.jsons_to_merge))
failed = False
@@ -114,15 +117,11 @@ def main():
failed = True
logging.warning('Additional merge script %s exited with %s' %
(params.additional_merge_script, rc))
mark_invalid_shards(coverage_merger.get_shards_to_retry(invalid_profiles),
params.summary_json, params.output_json)
elif len(params.jsons_to_merge) == 1:
logging.info("Only one output needs to be merged; directly copying it.")
with open(params.jsons_to_merge[0]) as f_read:
with open(params.output_json, 'w') as f_write:
f_write.write(f_read.read())
mark_invalid_shards(coverage_merger.get_shards_to_retry(invalid_profiles),
params.summary_json, params.output_json)
else:
logging.warning(
"This script was told to merge %d test results, but no additional "
@@ -130,35 +129,27 @@ def main():
return 1 if (failed or bool(invalid_profiles)) else 0
def mark_invalid_shards(bad_shards, summary_file, output_file):
def mark_invalid_shards(bad_shards, jsons_to_merge):
"""Removes results json files from bad shards.
This is needed so that the caller (e.g. recipe) knows to retry, or otherwise
treat the tests in that shard as not having valid results. Note that this only
removes the results from the local machine, as these are expected to remain in
the shard's isolated output.
Args:
bad_shards: list of task_ids of the shards that are bad or corrupted.
jsons_to_merge: Paths to the json files with the results of the tests.
"""
if not bad_shards:
return
shard_indices = []
try:
with open(summary_file) as f:
summary = json.load(f)
except (OSError, ValueError):
logging.warning('Could not read summary.json, not marking invalid shards')
return
for i in range(len(summary['shards'])):
shard_info = summary['shards'][i]
shard_id = (shard_info['task_id']
if shard_info and 'task_id' in shard_info
else 'unknown')
if shard_id in bad_shards or shard_id == 'unknown':
shard_indices.append(i)
try:
with open(output_file) as f:
output = json.load(f)
except (OSError, ValueError):
logging.warning('Invalid/missing output.json, overwriting')
output = {}
output.setdefault('missing_shards', [])
output['missing_shards'].extend(shard_indices)
with open(output_file, 'w') as f:
json.dump(output, f)
for f in jsons_to_merge:
for task_id in bad_shards:
if task_id in f:
# Remove results json if it corresponds to a bad shard.
os.remove(f)
break
if __name__ == '__main__':
......
@@ -206,57 +206,16 @@ class MergeProfilesTest(unittest.TestCase):
stderr=-2,
), mock_exec_cmd.call_args)
@mock.patch('__builtin__.open',
new_callable=mock.mock_open,
read_data=json.dumps(
{'shards': [{'task_id': '1234567890abcdeff'}]}))
def test_mark_invalid_shards(self, mo):
mock_result = mock.mock_open()
mock_write = mock.MagicMock()
mock_result.return_value.write = mock_write
mo.side_effect = (
mo.return_value,
mock.mock_open(read_data='{}').return_value,
mock_result.return_value,
)
merge_results.mark_invalid_shards(['1234567890abcdeff'], 'dummy.json',
'o.json')
written = json.loads(''.join(c[0][0] for c in mock_write.call_args_list))
self.assertIn('missing_shards', written)
self.assertEqual(written['missing_shards'], [0])
@mock.patch('__builtin__.open',
new_callable=mock.mock_open,
read_data='invalid data')
def test_mark_invalid_shards_bad_summary(self, _mo):
# Should not raise an exception.
raised_exception = False
try:
merge_results.mark_invalid_shards(['1234567890abcdeff'], 'dummy.json',
'o.json')
except: # pylint: disable=bare-except
raised_exception = True
self.assertFalse(raised_exception, 'unexpected exception')
@mock.patch('__builtin__.open',
new_callable=mock.mock_open,
read_data=json.dumps(
{'shards': [{'task_id': '1234567890abcdeff'}]}))
def test_mark_invalid_shards_bad_output(self, mo):
mock_result = mock.mock_open()
mock_write = mock.MagicMock()
mock_result.return_value.write = mock_write
mo.side_effect = (
mo.return_value,
mock.mock_open(read_data='invalid_data').return_value,
mock_result.return_value,
)
# Should succeed anyway.
merge_results.mark_invalid_shards(['1234567890abcdeff'], 'dummy.json',
'o.json')
written = json.loads(''.join(c[0][0] for c in mock_write.call_args_list))
self.assertIn('missing_shards', written)
self.assertEqual(written['missing_shards'], [0])
@mock.patch('os.remove')
def test_mark_invalid_shards(self, mock_rm):
merge_results.mark_invalid_shards(['123abc'], [
'/tmp/123abc/dummy.json', '/tmp/123abc/dummy2.json',
'/tmp/1234abc/dummy.json'
])
self.assertEqual([
mock.call('/tmp/123abc/dummy.json'),
mock.call('/tmp/123abc/dummy2.json')
], mock_rm.call_args_list)
def test_get_shards_to_retry(self):
bad_profiles = [
......