Commit 3c3b0e53 authored by Stephen Martinis's avatar Stephen Martinis Committed by Commit Bot

Add merge script directory

Most of these merge scripts lived in tools/build. These are moving
source side so that:
* Devs can find it easily
* Tryjobs can test changes to it
* Different trigger scripts can share and re-use code

Bug: 928577
Change-Id: I6a78164a3b5dfe142983ec111756bd60f838b2a6
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1479171
Reviewed-by: Dirk Pranke <dpranke@chromium.org>
Reviewed-by: default avatarJohn Budorick <jbudorick@chromium.org>
Reviewed-by: default avatarMarc-Antoine Ruel <maruel@chromium.org>
Reviewed-by: default avatarKenneth Russell <kbr@chromium.org>
Commit-Queue: Stephen Martinis <martiniss@chromium.org>
Cr-Commit-Position: refs/heads/master@{#638207}
parent dc31b374
jbudorick@chromium.org
kbr@chromium.org
martiniss@chromium.org
maruel@chromium.org
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Top-level presubmit script for testing/merge_scripts.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into depot_tools.
"""
def CommonChecks(input_api, output_api):
  """Runs every *_unittest.py in this directory via the canned check."""
  unittest_pattern = [r'^.+_unittest\.py$']
  return input_api.canned_checks.RunUnitTestsInDirectory(
      input_api, output_api, '.', unittest_pattern)
def CheckChangeOnUpload(input_api, output_api):
  """Presubmit hook run on upload; delegates to CommonChecks."""
  return CommonChecks(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
  """Presubmit hook run on commit; delegates to CommonChecks."""
  return CommonChecks(input_api, output_api)
# Swarming merge scripts
This directory contains Swarming merge scripts. Merge scripts run to collect the
results of a swarming run of a test suite. Their basic operation is to merge
together test results from the shard runs. They can also perform other
post-processing steps, such as uploading some test results to another
test-result server.
There are a few merge scripts here which are simple wrappers around other
scripts. These exist just so that every merge script we know about lives in a
centralized location.
Merge scripts are documented here:
https://cs.chromium.org/search/?q=file:swarming/api.py+%22*+merge:%22&type=cs
# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import os
import shutil
import tempfile
import unittest
class CommandLineTest(unittest.TestCase):
def __init__(self, methodName, module):
super(CommandLineTest, self).__init__(methodName)
self._module = module
def setUp(self):
self.temp_dir = tempfile.mkdtemp(prefix='common_merge_script_tests')
def tearDown(self):
shutil.rmtree(self.temp_dir)
def test_accepts_task_output_dir(self):
task_output_dir = os.path.join(self.temp_dir, 'task_output_dir')
shard0_dir = os.path.join(task_output_dir, '0')
os.makedirs(shard0_dir)
summary_json = os.path.join(task_output_dir, 'summary.json')
with open(summary_json, 'w') as summary_file:
summary_contents = {
u'shards': [
{
u'state': u'COMPLETED',
},
],
}
json.dump(summary_contents, summary_file)
shard0_json = os.path.join(shard0_dir, 'output.json')
with open(shard0_json, 'w') as shard0_file:
json.dump({}, shard0_file)
output_json = os.path.join(self.temp_dir, 'merged.json')
raw_args = [
'--task-output-dir', task_output_dir,
'--summary-json', summary_json,
'--output-json', output_json,
shard0_json,
]
self.assertEquals(0, self._module.main(raw_args))
# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
def ArgumentParser(*args, **kwargs):
  """Creates an argument parser and adds the merge API arguments to it.

  See collect_task.collect_task for more on the merge script API.
  """
  parser = argparse.ArgumentParser(*args, **kwargs)
  # All merge-API flags are hidden from --help: they form the internal
  # contract between the swarming recipe code and the merge scripts.
  for hidden_flag in ('--build-properties', '--summary-json',
                      '--task-output-dir'):
    parser.add_argument(hidden_flag, help=argparse.SUPPRESS)
  parser.add_argument('-o', '--output-json', required=True,
                      help=argparse.SUPPRESS)
  parser.add_argument('jsons_to_merge', nargs='*', help=argparse.SUPPRESS)
  return parser
#!/usr/bin/env python
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import json
import shutil
import sys
import merge_api
def noop_merge(output_json, jsons_to_merge):
  """Use the first supplied JSON as the output JSON.

  Primarily intended for unsharded tasks.

  Args:
    output_json: A path to a JSON file to which the results should be written.
    jsons_to_merge: A list of paths to JSON files.

  Returns:
    0 on success, 1 if more than one input JSON was supplied.
  """
  if len(jsons_to_merge) > 1:
    # sys.stderr.write instead of the Python-2-only ``print >>`` statement,
    # so the script also runs under Python 3.
    sys.stderr.write(
        'Multiple JSONs provided: %s\n' % ','.join(jsons_to_merge))
    return 1
  if jsons_to_merge:
    shutil.copyfile(jsons_to_merge[0], output_json)
  else:
    # No inputs at all: emit an empty JSON object.
    with open(output_json, 'w') as f:
      json.dump({}, f)
  return 0
def main(raw_args):
  """Parses merge-API flags and runs the no-op merge.

  Args:
    raw_args: Command-line arguments, excluding argv[0].

  Returns:
    The exit code from noop_merge (0 on success).
  """
  args = merge_api.ArgumentParser().parse_args(raw_args)
  return noop_merge(args.output_json, args.jsons_to_merge)


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
#!/usr/bin/env python
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import os
import shutil
import sys
import tempfile
import unittest
import common_merge_script_tests
THIS_DIR = os.path.dirname(__file__)
sys.path.insert(
0, os.path.abspath(os.path.join(THIS_DIR, '..', 'resources')))
import noop_merge
class NoopMergeTest(unittest.TestCase):
  """Unit tests for noop_merge.noop_merge."""

  def setUp(self):
    super(NoopMergeTest, self).setUp()
    self.temp_dir = tempfile.mkdtemp()

  def tearDown(self):
    shutil.rmtree(self.temp_dir)
    super(NoopMergeTest, self).tearDown()

  def _write_json(self, name, contents):
    # Writes |contents| as JSON under self.temp_dir and returns the path.
    path = os.path.join(self.temp_dir, name)
    with open(path, 'w') as f:
      json.dump(contents, f)
    return path

  def _read_json(self, path):
    with open(path) as f:
      return json.load(f)

  def test_copies_first_json(self):
    input_json_contents = {'foo': ['bar', 'baz']}
    input_json = self._write_json('input.json', input_json_contents)
    output_json = os.path.join(self.temp_dir, 'output.json')
    self.assertEqual(0, noop_merge.noop_merge(output_json, [input_json]))
    self.assertEqual(input_json_contents, self._read_json(output_json))

  def test_no_jsons(self):
    output_json = os.path.join(self.temp_dir, 'output.json')
    self.assertEqual(0, noop_merge.noop_merge(output_json, []))
    self.assertEqual({}, self._read_json(output_json))

  def test_multiple_jsons(self):
    input_json1 = self._write_json('input1.json', {'test1': ['foo1', 'bar1']})
    input_json2 = self._write_json('input2.json', {'test2': ['foo2', 'bar2']})
    output_json = os.path.join(self.temp_dir, 'output.json')
    self.assertNotEqual(
        0, noop_merge.noop_merge(output_json, [input_json1, input_json2]))
class CommandLineTest(common_merge_script_tests.CommandLineTest):
  """Runs the shared command-line smoke test against noop_merge."""

  def __init__(self, methodName='runTest'):
    super(CommandLineTest, self).__init__(methodName, noop_merge)


if __name__ == '__main__':
  unittest.main()
#!/usr/bin/env python
# Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
# Locate the chromium src/ root (three directory levels up from this script)
# so that tools/perf can be imported without any installation step.
SRC_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(
    __file__))))

sys.path.append(os.path.join(SRC_DIR, 'tools', 'perf'))

import process_perf_results

if __name__ == '__main__':
  # Thin wrapper: all argument parsing and merging logic lives in
  # tools/perf/process_perf_results.py.
  sys.exit(process_perf_results.main())
#!/usr/bin/env python
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import copy
import json
import sys
# These fields must appear in the test result output of every shard
# (Chromium JSON test results format version 3).
REQUIRED = {
    'interrupted',
    'num_failures_by_type',
    'seconds_since_epoch',
    'tests',
}

# These fields are optional, but must have the same value on all shards;
# merging raises MergeException if two shards disagree.
OPTIONAL_MATCHING = (
    'builder_name',
    'build_number',
    'chromium_revision',
    'has_pretty_patch',
    'has_wdiff',
    'path_delimiter',
    'pixel_tests_enabled',
    'random_order_seed',
)

# These fields are optional and copied through without comparison
# (the value from the last shard that has the key wins).
OPTIONAL_IGNORED = (
    'layout_tests_dir',
)

# These fields are optional and will be summed together across shards.
OPTIONAL_COUNTS = (
    'fixable',
    'num_flaky',
    'num_passes',
    'num_regressions',
    'skipped',
    'skips',
)
class MergeException(Exception):
  """Raised when shard results cannot be merged."""
def merge_test_results(shard_results_list):
  """ Merge list of results.

  Args:
    shard_results_list: list of results to merge. All the results must have the
      same format. Supported format are simplified JSON format & Chromium JSON
      test results format version 3 (see
      https://www.chromium.org/developers/the-json-test-results-format)

  Returns:
    a dictionary that represent the merged results. Its format follow the same
    format of all results in |shard_results_list|.
  """
  non_empty = [shard for shard in shard_results_list if shard]
  if not non_empty:
    return {}

  # A 'seconds_since_epoch' key marks the full JSON test results format;
  # anything else is treated as the legacy simplified format.
  if 'seconds_since_epoch' in non_empty[0]:
    return _merge_json_test_result_format(non_empty)
  return _merge_simplified_json_format(non_empty)
def _merge_simplified_json_format(shard_results_list):
# This code is specialized to the "simplified" JSON format that used to be
# the standard for recipes.
# These are the only keys we pay attention to in the output JSON.
merged_results = {
'successes': [],
'failures': [],
'valid': True,
}
for result_json in shard_results_list:
successes = result_json.get('successes', [])
failures = result_json.get('failures', [])
valid = result_json.get('valid', True)
if (not isinstance(successes, list) or not isinstance(failures, list) or
not isinstance(valid, bool)):
raise MergeException(
'Unexpected value type in %s' % result_json) # pragma: no cover
merged_results['successes'].extend(successes)
merged_results['failures'].extend(failures)
merged_results['valid'] = merged_results['valid'] and valid
return merged_results
def _merge_json_test_result_format(shard_results_list):
  """Merges shards that use the Chromium JSON test results format (v3).

  Works on deep copies only, so callers' inputs are never mutated.

  Raises:
    MergeException: on a version other than 3, on missing required keys,
        on mismatched OPTIONAL_MATCHING values, or if unknown keys remain
        after merging.
  """
  # This code is specialized to the Chromium JSON test results format version 3:
  # https://www.chromium.org/developers/the-json-test-results-format

  # These are required fields for the JSON test result format version 3.
  merged_results = {
      'tests': {},
      'interrupted': False,
      'version': 3,
      # Sentinel so min() in the seconds_since_epoch merge picks the first
      # real value.
      'seconds_since_epoch': float('inf'),
      'num_failures_by_type': {
      }
  }
  # To make sure that we don't mutate existing shard_results_list.
  shard_results_list = copy.deepcopy(shard_results_list)
  for result_json in shard_results_list:
    # TODO(tansell): check whether this deepcopy is actually neccessary.
    result_json = copy.deepcopy(result_json)

    # Check the version first
    version = result_json.pop('version', -1)
    if version != 3:
      raise MergeException(  # pragma: no cover (covered by
                             # results_merger_unittest).
          'Unsupported version %s. Only version 3 is supported' % version)

    # Check the results for each shard have the required keys
    missing = REQUIRED - set(result_json)
    if missing:
      raise MergeException(  # pragma: no cover (covered by
                             # results_merger_unittest).
          'Invalid json test results (missing %s)' % missing)

    # Curry merge_values for this result_json. Note that merge_value pops
    # the merged key out of result_json; the final 'if result_json' check
    # below relies on that to detect unhandled keys.
    merge = lambda key, merge_func: merge_value(
        result_json, merged_results, key, merge_func)

    # Traverse the result_json's test trie & merged_results's test tries in
    # DFS order & add the n to merged['tests'].
    merge('tests', merge_tries)

    # If any were interrupted, we are interrupted.
    merge('interrupted', lambda x,y: x|y)

    # Use the earliest seconds_since_epoch value
    merge('seconds_since_epoch', min)

    # Sum the number of failure types
    merge('num_failures_by_type', sum_dicts)

    # Optional values must match
    for optional_key in OPTIONAL_MATCHING:
      if optional_key not in result_json:
        continue

      if optional_key not in merged_results:
        # Set this value to None, then blindly copy over it.
        merged_results[optional_key] = None
        merge(optional_key, lambda src, dst: src)
      else:
        merge(optional_key, ensure_match)

    # Optional values ignored
    for optional_key in OPTIONAL_IGNORED:
      if optional_key in result_json:
        merged_results[optional_key] = result_json.pop(
            # pragma: no cover (covered by
            # results_merger_unittest).
            optional_key)

    # Sum optional value counts
    for count_key in OPTIONAL_COUNTS:
      if count_key in result_json:  # pragma: no cover
        # TODO(mcgreevy): add coverage.
        merged_results.setdefault(count_key, 0)
        merge(count_key, lambda a, b: a+b)

    # Any keys still left in result_json were not handled above; refuse to
    # guess how to merge them.
    if result_json:
      raise MergeException(  # pragma: no cover (covered by
                             # results_merger_unittest).
          'Unmergable values %s' % result_json.keys())

  return merged_results
def merge_tries(source, dest):
  """ Merges test tries.

  This is intended for use as a merge_func parameter to merge_value.

  Args:
    source: A result json test trie.
    dest: A json test trie merge destination.

  Returns:
    dest, with source merged into it.

  Raises:
    MergeException: if a leaf value in one trie collides with a subtrie or
        a differing leaf in the other.
  """
  # merge_tries merges source into dest by performing a lock-step depth-first
  # traversal of dest and source.
  # pending_nodes contains a list of all sub-tries which have been reached but
  # need further merging.
  # Each element consists of a trie prefix, and a sub-trie from each of dest
  # and source which is reached via that prefix.
  pending_nodes = [('', dest, source)]
  while pending_nodes:
    prefix, dest_node, curr_node = pending_nodes.pop()
    # items() (not the Python-2-only iteritems()) keeps this portable.
    for k, v in curr_node.items():
      if k in dest_node:
        # Check BOTH sides: the original only checked v, so a dict-vs-leaf
        # collision crashed later with an AttributeError instead of raising
        # a MergeException (see the TODO in results_merger_unittest).
        if not isinstance(v, dict) or not isinstance(dest_node[k], dict):
          raise MergeException(
              "%s:%s: %r not mergable, curr_node: %r\ndest_node: %r" % (
                  prefix, k, v, curr_node, dest_node))
        pending_nodes.append(("%s:%s" % (prefix, k), dest_node[k], v))
      else:
        dest_node[k] = v
  return dest
def ensure_match(source, dest):
  """ Returns source if it matches dest.

  This is intended for use as a merge_func parameter to merge_value.

  Raises:
    MergeException if source != dest
  """
  if source == dest:
    return source
  raise MergeException(  # pragma: no cover (covered by
      # results_merger_unittest).
      "Values don't match: %s, %s" % (source, dest))
def sum_dicts(source, dest):
  """ Adds values from source to corresponding values in dest.

  This is intended for use as a merge_func parameter to merge_value.

  Returns:
    dest, with every count from source added in (missing keys start at 0).
  """
  # items() (not the Python-2-only iteritems()) keeps this portable.
  for k, v in source.items():
    dest.setdefault(k, 0)
    dest[k] += v

  return dest
def merge_value(source, dest, key, merge_func):
  """ Merges a value from source to dest.

  The value is deleted from source.

  Args:
    source: A dictionary from which to pull a value, identified by key.
    dest: The dictionary into to which the value is to be merged.
    key: The key which identifies the value to be merged.
    merge_func(src, dst): A function which merges its src into dst,
        and returns the result. May modify dst. May raise a MergeException.

  Raises:
    MergeException if the values can not be merged.
  """
  try:
    merged = merge_func(source[key], dest[key])
  except MergeException as e:
    # Prepend the offending key so the failure is easier to locate.
    e.message = "MergeFailure for %s\n%s" % (key, e.message)
    e.args = tuple([e.message] + list(e.args[1:]))
    raise
  else:
    dest[key] = merged
  del source[key]
def main(files):
  """Merges the given JSON result files and prints the merged result.

  Args:
    files: Paths of two or more JSON result files to merge.

  Returns:
    0 on success, 1 if fewer than two files were supplied.
  """
  if len(files) < 2:
    sys.stderr.write("Not enough JSON files to merge.\n")
    return 1

  sys.stderr.write('Starting with %s\n' % files[0])
  # 'with' closes each input promptly; the original leaked the handles.
  with open(files[0]) as fp:
    result = json.load(fp)
  for f in files[1:]:
    sys.stderr.write('Merging %s\n' % f)
    with open(f) as fp:
      result = merge_test_results([result, json.load(fp)])
  # sys.stdout.write works under Python 2 and 3, unlike the original
  # Python-2-only print statement.
  sys.stdout.write(json.dumps(result) + '\n')
  return 0


if __name__ == "__main__":
  sys.exit(main(sys.argv[1:]))
#!/usr/bin/env vpython
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import copy
import os
import sys
import unittest
THIS_DIR = os.path.dirname(os.path.abspath(__file__))
# For results_merger.
sys.path.insert(0, os.path.join(THIS_DIR, '..', 'resources'))
import results_merger
# Shard 0: two car tests and one computer test, all matching expectations;
# not interrupted; has the ignored optional key 'layout_tests_dir'.
GOOD_JSON_TEST_RESULT_0 = {
    'tests': {
        'car': {
            'honda': {
                'expected': 'PASS',
                'actual': 'PASS'
            },
            'toyota': {
                'expected': 'FAIL',
                'actual': 'FAIL'
            }
        },
        'computer': {
            'dell': {
                'expected': 'PASS',
                'actual': 'PASS'
            }
        },
    },
    'interrupted': False,
    'version': 3,
    'seconds_since_epoch': 1406662289.76,
    'num_failures_by_type': {
        'FAIL': 0,
        'PASS': 2
    },
    'layout_tests_dir': 'abc'
}

# Shard 1: disjoint tests from shard 0 (plus a different layout_tests_dir,
# which is allowed because the key is in OPTIONAL_IGNORED).
GOOD_JSON_TEST_RESULT_1 = {
    'tests': {
        'car': {
            'tesla': {
                'expected': 'PASS',
                'actual': 'PASS'
            },
        },
        'burger': {
            'mcdonald': {
                'expected': 'PASS',
                'actual': 'PASS'
            }
        },
    },
    'interrupted': False,
    'version': 3,
    'seconds_since_epoch': 1406662283.11,
    'num_failures_by_type': {
        'FAIL': 0,
        'PASS': 2
    },
    'layout_tests_dir': '123'
}

# Shard 2: contains an unexpected failure and is interrupted; it also has
# the earliest seconds_since_epoch, which the merge should keep.
GOOD_JSON_TEST_RESULT_2 = {
    'tests': {
        'car': {
            'mercedes': {
                'expected': 'PASS',
                'actual': 'FAIL'
            },
        },
        'burger': {
            'in n out': {
                'expected': 'PASS',
                'actual': 'PASS'
            }
        },
    },
    'interrupted': True,
    'version': 3,
    'seconds_since_epoch': 1406662200.01,
    'num_failures_by_type': {
        'FAIL': 1,
        'PASS': 1
    }
}

# The expected result of merging shards 0, 1 and 2 above: union of the test
# tries, OR of 'interrupted', min of 'seconds_since_epoch', summed failure
# counts, and the last shard's 'layout_tests_dir'.
GOOD_JSON_TEST_RESULT_MERGED = {
    'tests': {
        'car': {
            'tesla': {
                'expected': 'PASS',
                'actual': 'PASS'
            },
            'mercedes': {
                'expected': 'PASS',
                'actual': 'FAIL'
            },
            'honda': {
                'expected': 'PASS',
                'actual': 'PASS'
            },
            'toyota': {
                'expected': 'FAIL',
                'actual': 'FAIL'
            }
        },
        'computer': {
            'dell': {
                'expected': 'PASS',
                'actual': 'PASS'
            }
        },
        'burger': {
            'mcdonald': {
                'expected': 'PASS',
                'actual': 'PASS'
            },
            'in n out': {
                'expected': 'PASS',
                'actual': 'PASS'
            }
        }
    },
    'interrupted': True,
    'version': 3,
    'seconds_since_epoch': 1406662200.01,
    'num_failures_by_type': {
        'FAIL': 1,
        'PASS': 5
    },
    'layout_tests_dir': '123'
}
def extend(initial, add):
  """Returns a deep copy of |initial| updated with the entries of |add|."""
  merged = copy.deepcopy(initial)
  merged.update(add)
  return merged
def remove(initial, keys):
  """Returns a deep copy of |initial| with every key in |keys| deleted."""
  pruned = copy.deepcopy(initial)
  for key in keys:
    del pruned[key]
  return pruned
# These unittests are run in PRESUBMIT, but not by recipe_simulation_test, hence
# to avoid false alert on missing coverage by recipe_simulation_test, we mark
# these code as no cover.
class MergingTest(unittest.TestCase):  # pragma: no cover
  """Tests for results_merger's trie merging and full-format merging."""

  maxDiff = None  # Show full diff if assertion fail

  # assertEqual throughout, rather than the deprecated assertEquals alias
  # (removed in Python 3.12); matches the style of noop_merge_test.py.

  def test_merge_tries(self):
    self.assertEqual(
        {'a': 'A', 'b': {'c': 'C'}},
        results_merger.merge_tries(
            {'a': 'A', 'b': {}}, {'b': {'c': 'C'}}))

  def test_merge_tries_unmergable(self):
    with self.assertRaisesRegexp(results_merger.MergeException, "a:b"):
      results_merger.merge_tries(
          {'a': {'b': 'A'}}, {'a': {'b': 'C'}})

  def test_merge_json_test_results_nop(self):
    good_json_results = (
        GOOD_JSON_TEST_RESULT_0,
        GOOD_JSON_TEST_RESULT_1,
        GOOD_JSON_TEST_RESULT_2,
        GOOD_JSON_TEST_RESULT_MERGED)
    for j in good_json_results:
      # Clone so we can check the input dictionaries are not modified
      a = copy.deepcopy(j)
      self.assertEqual(results_merger.merge_test_results([a]), j)
      self.assertEqual(a, j)

  def test_merge_json_test_results_invalid_version(self):
    with self.assertRaises(results_merger.MergeException):
      results_merger.merge_test_results([
          extend(GOOD_JSON_TEST_RESULT_0, {'version': 5}),
          ])

    with self.assertRaises(results_merger.MergeException):
      results_merger.merge_test_results([
          GOOD_JSON_TEST_RESULT_0,
          extend(GOOD_JSON_TEST_RESULT_1, {'version': 5}),
          ])

  def test_merge_json_test_results_missing_version(self):
    with self.assertRaises(results_merger.MergeException):
      results_merger.merge_test_results([
          remove(GOOD_JSON_TEST_RESULT_0, ['version']),
          ])

    with self.assertRaises(results_merger.MergeException):
      results_merger.merge_test_results([
          GOOD_JSON_TEST_RESULT_0,
          remove(GOOD_JSON_TEST_RESULT_1, ['version']),
          ])

  def test_merge_json_test_results_invalid_extra(self):
    with self.assertRaises(results_merger.MergeException):
      results_merger.merge_test_results([
          extend(GOOD_JSON_TEST_RESULT_0, {'extra': True}),
          ])

    with self.assertRaises(results_merger.MergeException):
      results_merger.merge_test_results([
          GOOD_JSON_TEST_RESULT_0,
          extend(GOOD_JSON_TEST_RESULT_1, {'extra': True}),
          ])

  def test_merge_json_test_results_missing_required(self):
    with self.assertRaises(results_merger.MergeException):
      results_merger.merge_test_results([
          remove(GOOD_JSON_TEST_RESULT_0, ['interrupted']),
          ])

    with self.assertRaises(results_merger.MergeException):
      results_merger.merge_test_results([
          GOOD_JSON_TEST_RESULT_0,
          remove(GOOD_JSON_TEST_RESULT_1, ['interrupted']),
          ])

  def test_merge_json_test_results_multiple(self):
    self.assertEqual(
        results_merger.merge_test_results([
            GOOD_JSON_TEST_RESULT_0,
            GOOD_JSON_TEST_RESULT_1,
            GOOD_JSON_TEST_RESULT_2,
            ]),
        GOOD_JSON_TEST_RESULT_MERGED)

  def test_merge_json_test_results_optional_matches(self):
    self.assertEqual(
        results_merger.merge_test_results([
            extend(GOOD_JSON_TEST_RESULT_0, {'path_delimiter': '.'}),
            extend(GOOD_JSON_TEST_RESULT_1, {'path_delimiter': '.'}),
            extend(GOOD_JSON_TEST_RESULT_2, {'path_delimiter': '.'}),
            ]),
        extend(GOOD_JSON_TEST_RESULT_MERGED, {'path_delimiter': '.'}))

  def test_merge_json_test_results_optional_differs(self):
    with self.assertRaises(results_merger.MergeException):
      results_merger.merge_test_results([
          extend(GOOD_JSON_TEST_RESULT_0, {'path_delimiter': '.'}),
          extend(GOOD_JSON_TEST_RESULT_1, {'path_delimiter': '.'}),
          extend(GOOD_JSON_TEST_RESULT_2, {'path_delimiter': '/'}),
          ])

  def test_merge_json_test_results_optional_count(self):
    self.assertEqual(
        results_merger.merge_test_results([
            extend(GOOD_JSON_TEST_RESULT_0, {'fixable': 1}),
            extend(GOOD_JSON_TEST_RESULT_1, {'fixable': 2}),
            extend(GOOD_JSON_TEST_RESULT_2, {'fixable': 3}),
            ]),
        extend(GOOD_JSON_TEST_RESULT_MERGED, {'fixable': 6}))

  def test_merge_nothing(self):
    self.assertEqual(
        results_merger.merge_test_results([]),
        {})

  # TODO(tansell): Make this test fail properly, currently fails with an
  # AttributeError.
  # def test_merge_test_name_conflict(self):
  #   self.maxDiff = None  # Show full diff if assertion fail
  #   with self.assertRaises(results_merger.MergeException):
  #     results_merger.merge_test_results(
  #         [GOOD_JSON_TEST_RESULT_0, GOOD_JSON_TEST_RESULT_0])


if __name__ == '__main__':
  unittest.main()  # pragma: no cover
#!/usr/bin/env python
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import json
import os
import shutil
import sys
import tempfile
import traceback
import merge_api
# Emitted (via emit_warning) when one or more shards produced no output;
# %s is filled with a comma-separated list of the missing shard indices.
MISSING_SHARDS_MSG = r"""Missing results from the following shard(s): %s
This can happen in following cases:
* Test failed to start (missing *.dll/*.so dependency for example)
* Test crashed or hung
* Task expired because there are not enough bots available and are all used
* Swarming service experienced problems
Please examine logs to figure out what happened.
"""
def emit_warning(title, log=None):
  """Emits a buildbot warning annotation, optionally with an attached log.

  Args:
    title: The step warning title; also used as the log name.
    log: Optional multi-line text emitted as STEP_LOG_LINE annotations.
  """
  # sys.stdout.write works under Python 2 and 3, unlike the original
  # Python-2-only print statements.
  sys.stdout.write('@@@STEP_WARNINGS@@@\n')
  sys.stdout.write('%s\n' % title)
  if log:
    title = title.rstrip()
    for line in log.splitlines():
      sys.stdout.write('@@@STEP_LOG_LINE@%s@%s@@@\n' % (title, line.rstrip()))
    sys.stdout.write('@@@STEP_LOG_END@%s@@@\n' % title)
def merge_shard_results(summary_json, jsons_to_merge):
  """Reads JSON test output from all shards and combines them into one.

  Args:
    summary_json: Path to the summary.json produced by swarming.
    jsons_to_merge: Paths to the per-shard output.json files.

  Returns dict with merged test output on success or None on failure. Emits
  annotations.
  """
  # summary.json is produced by swarming.py itself. We are mostly interested
  # in the number of shards.
  try:
    with open(summary_json) as f:
      summary = json.load(f)
  except (IOError, ValueError):
    emit_warning(
        'summary.json is missing or can not be read',
        'Something is seriously wrong with swarming_client/ or the bot.')
    return None

  # Merge all JSON files together. Keep track of missing shards.
  merged = {
      'all_tests': set(),
      'disabled_tests': set(),
      'global_tags': set(),
      'missing_shards': [],
      'per_iteration_data': [],
      'swarming_summary': summary,
      'test_locations': {},
  }
  for index, result in enumerate(summary['shards']):
    if result is None:
      merged['missing_shards'].append(index)
      continue

    # Author note: this code path doesn't trigger convert_to_old_format() in
    # client/swarming.py, which means the state enum is saved in its string
    # name form, not in the number form.
    state = result.get('state')
    if state == u'BOT_DIED':
      emit_warning('Shard #%d had a Swarming internal failure' % index)
    elif state == u'EXPIRED':
      emit_warning('There wasn\'t enough capacity to run your test')
    elif state == u'TIMED_OUT':
      emit_warning(
          'Test runtime exceeded allocated time',
          'Either it ran for too long (hard timeout) or it didn\'t produce '
          'I/O for an extended period of time (I/O timeout)')
    elif state != u'COMPLETED':
      emit_warning('Invalid Swarming task state: %s' % state)

    json_data, err_msg = load_shard_json(index, result.get('task_id'),
                                         jsons_to_merge)
    if json_data:
      # Set-like fields.
      for key in ('all_tests', 'disabled_tests', 'global_tags'):
        # BUGFIX: the default belongs inside .get(). The original passed []
        # as a second positional argument to set.update(), so a shard json
        # missing one of these keys crashed with set.update(None, []).
        merged[key].update(json_data.get(key, []))

      # Dict-like fields.
      for key in ('test_locations',):
        merged[key].update(json_data.get(key, {}))

      # 'per_iteration_data' is a list of dicts. Dicts should be merged
      # together, not the 'per_iteration_data' list itself.
      merged['per_iteration_data'] = merge_list_of_dicts(
          merged['per_iteration_data'],
          json_data.get('per_iteration_data', []))
    else:
      merged['missing_shards'].append(index)
      emit_warning('No result was found: %s' % err_msg)

  # If some shards are missing, make it known. Continue parsing anyway. Step
  # should be red anyway, since swarming.py return non-zero exit code in that
  # case.
  if merged['missing_shards']:
    as_str = ', '.join(map(str, merged['missing_shards']))
    emit_warning(
        'some shards did not complete: %s' % as_str,
        MISSING_SHARDS_MSG % as_str)
    # Not all tests run, combined JSON summary can not be trusted.
    merged['global_tags'].add('UNRELIABLE_RESULTS')

  # Convert to jsonish dict.
  for key in ('all_tests', 'disabled_tests', 'global_tags'):
    merged[key] = sorted(merged[key])

  return merged
# Refuse to load per-shard output.json files larger than this.
OUTPUT_JSON_SIZE_LIMIT = 100 * 1024 * 1024  # 100 MB


def load_shard_json(index, task_id, jsons_to_merge):
  """Reads JSON output of the specified shard.

  Args:
    index: The index of the shard to load data for; with the old api the
        output lives in a directory named after the shard number.
    task_id: The swarming task id of the shard to load data for; with the
        new api the output lives in a directory named after the task id.
    jsons_to_merge: A list of candidate JSON file paths.

  Returns: A tuple containing:
    * The contents of path, deserialized into a python object.
    * An error string.
    (exactly one of the tuple elements will be non-None).
  """
  # 'output.json' is set in swarming/api.py, gtest_task method.
  matching_json_files = [
      j for j in jsons_to_merge
      if (os.path.basename(j) == 'output.json' and
          (os.path.basename(os.path.dirname(j)) == str(index) or
           os.path.basename(os.path.dirname(j)) == task_id))]

  # sys.stderr.write throughout: works under Python 2 and 3, unlike the
  # original Python-2-only print statements.
  if not matching_json_files:
    sys.stderr.write('shard %s test output missing\n' % index)
    return (None, 'shard %s test output was missing' % index)
  elif len(matching_json_files) > 1:
    sys.stderr.write('duplicate test output for shard %s\n' % index)
    return (None, 'shard %s test output was duplicated' % index)

  path = matching_json_files[0]

  try:
    filesize = os.stat(path).st_size
    if filesize > OUTPUT_JSON_SIZE_LIMIT:
      sys.stderr.write(
          'output.json is %d bytes. Max size is %d\n' % (
              filesize, OUTPUT_JSON_SIZE_LIMIT))
      return (None, 'shard %s test output exceeded the size limit' % index)

    with open(path) as f:
      return (json.load(f), None)
  except (IOError, ValueError, OSError) as e:
    sys.stderr.write('Missing or invalid gtest JSON file: %s\n' % path)
    sys.stderr.write('%s: %s\n' % (type(e).__name__, e))
    return (None, 'shard %s test output was missing or invalid' % index)
def merge_list_of_dicts(left, right):
  """Merges dicts left[0] with right[0], left[1] with right[1], etc.

  The longer list pads the shorter one with empty dicts; on key collisions
  the value from |right| wins.
  """
  output = []
  # range (not the Python-2-only xrange) keeps this portable.
  for i in range(max(len(left), len(right))):
    left_dict = left[i] if i < len(left) else {}
    right_dict = right[i] if i < len(right) else {}
    merged_dict = left_dict.copy()
    merged_dict.update(right_dict)
    output.append(merged_dict)
  return output
def standard_gtest_merge(
    output_json, summary_json, jsons_to_merge):
  """Merges gtest shard results and writes the merged JSON to output_json.

  Returns:
    0 (the script's exit code); the merged payload is None on merge failure.
  """
  output = merge_shard_results(summary_json, jsons_to_merge)
  # Text mode, not 'wb': json.dump writes str, which breaks with a binary
  # file object under Python 3.
  with open(output_json, 'w') as f:
    json.dump(output, f)

  return 0
def main(raw_args):
  """Command-line entry point: parses merge-API args and merges the shards."""
  options = merge_api.ArgumentParser().parse_args(raw_args)
  return standard_gtest_merge(
      options.output_json, options.summary_json, options.jsons_to_merge)


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
#!/usr/bin/env vpython
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import cStringIO
import json
import logging
import os
import shutil
import sys
import tempfile
import unittest
import common_merge_script_tests
THIS_DIR = os.path.dirname(os.path.abspath(__file__))
# For 'standard_gtest_merge.py'.
sys.path.insert(
0, os.path.abspath(os.path.join(THIS_DIR, '..', 'resources')))
import mock
import standard_gtest_merge
# gtest json output for successfully finished shard #0.
# Staged as '0/output.json' in the merge tests below.
GOOD_GTEST_JSON_0 = {
  'all_tests': [
    'AlignedMemoryTest.DynamicAllocation',
    'AlignedMemoryTest.ScopedDynamicAllocation',
    'AlignedMemoryTest.StackAlignment',
    'AlignedMemoryTest.StaticAlignment',
  ],
  'disabled_tests': [
    'ConditionVariableTest.TimeoutAcrossSetTimeOfDay',
    'FileTest.TouchGetInfo',
    'MessageLoopTestTypeDefault.EnsureDeletion',
  ],
  'global_tags': ['CPU_64_BITS', 'MODE_DEBUG', 'OS_LINUX', 'OS_POSIX'],
  'per_iteration_data': [{
    'AlignedMemoryTest.DynamicAllocation': [{
      'elapsed_time_ms': 0,
      'losless_snippet': True,
      'output_snippet': 'blah\\n',
      'output_snippet_base64': 'YmxhaAo=',
      'status': 'SUCCESS',
    }],
    'AlignedMemoryTest.ScopedDynamicAllocation': [{
      'elapsed_time_ms': 0,
      'losless_snippet': True,
      'output_snippet': 'blah\\n',
      'output_snippet_base64': 'YmxhaAo=',
      'status': 'SUCCESS',
    }],
  }],
  'test_locations': {
    'AlignedMemoryTest.DynamicAllocation': {
      'file': 'foo/bar/allocation_test.cc',
      'line': 123,
    },
    'AlignedMemoryTest.ScopedDynamicAllocation': {
      'file': 'foo/bar/allocation_test.cc',
      'line': 456,
    },
    # This is a test from a different shard, but this happens in practice and
    # we should not fail if information is repeated.
    'AlignedMemoryTest.StaticAlignment': {
      'file': 'foo/bar/allocation_test.cc',
      'line': 12,
    },
  },
}
# gtest json output for successfully finished shard #1.
# Staged as '1/output.json' in the merge tests below.
GOOD_GTEST_JSON_1 = {
  'all_tests': [
    'AlignedMemoryTest.DynamicAllocation',
    'AlignedMemoryTest.ScopedDynamicAllocation',
    'AlignedMemoryTest.StackAlignment',
    'AlignedMemoryTest.StaticAlignment',
  ],
  'disabled_tests': [
    'ConditionVariableTest.TimeoutAcrossSetTimeOfDay',
    'FileTest.TouchGetInfo',
    'MessageLoopTestTypeDefault.EnsureDeletion',
  ],
  'global_tags': ['CPU_64_BITS', 'MODE_DEBUG', 'OS_LINUX', 'OS_POSIX'],
  'per_iteration_data': [{
    'AlignedMemoryTest.StackAlignment': [{
      'elapsed_time_ms': 0,
      'losless_snippet': True,
      'output_snippet': 'blah\\n',
      'output_snippet_base64': 'YmxhaAo=',
      'status': 'SUCCESS',
    }],
    'AlignedMemoryTest.StaticAlignment': [{
      'elapsed_time_ms': 0,
      'losless_snippet': True,
      'output_snippet': 'blah\\n',
      'output_snippet_base64': 'YmxhaAo=',
      'status': 'SUCCESS',
    }],
  }],
  'test_locations': {
    'AlignedMemoryTest.StackAlignment': {
      'file': 'foo/bar/allocation_test.cc',
      'line': 789,
    },
    'AlignedMemoryTest.StaticAlignment': {
      'file': 'foo/bar/allocation_test.cc',
      'line': 12,
    },
  },
}
# gtest json output for shard #1 that ran out of time: StackAlignment is
# marked FAILURE ('timed out') and StaticAlignment never ran (NOTRUN).
TIMED_OUT_GTEST_JSON_1 = {
  'disabled_tests': [],
  'global_tags': [],
  'all_tests': [
    'AlignedMemoryTest.DynamicAllocation',
    'AlignedMemoryTest.ScopedDynamicAllocation',
    'AlignedMemoryTest.StackAlignment',
    'AlignedMemoryTest.StaticAlignment',
  ],
  'per_iteration_data': [{
    'AlignedMemoryTest.StackAlignment': [{
      'elapsed_time_ms': 54000,
      'losless_snippet': True,
      'output_snippet': 'timed out',
      'output_snippet_base64': '',
      'status': 'FAILURE',
    }],
    'AlignedMemoryTest.StaticAlignment': [{
      'elapsed_time_ms': 0,
      'losless_snippet': True,
      'output_snippet': '',
      'output_snippet_base64': '',
      'status': 'NOTRUN',
    }],
  }],
  'test_locations': {
    'AlignedMemoryTest.StackAlignment': {
      'file': 'foo/bar/allocation_test.cc',
      'line': 789,
    },
    'AlignedMemoryTest.StaticAlignment': {
      'file': 'foo/bar/allocation_test.cc',
      'line': 12,
    },
  },
}
# GOOD_GTEST_JSON_0 and GOOD_GTEST_JSON_1 merged.  Also carries the fields the
# merge adds on top of the shard data: 'missing_shards' and 'swarming_summary'.
GOOD_GTEST_JSON_MERGED = {
  'all_tests': [
    'AlignedMemoryTest.DynamicAllocation',
    'AlignedMemoryTest.ScopedDynamicAllocation',
    'AlignedMemoryTest.StackAlignment',
    'AlignedMemoryTest.StaticAlignment',
  ],
  'disabled_tests': [
    'ConditionVariableTest.TimeoutAcrossSetTimeOfDay',
    'FileTest.TouchGetInfo',
    'MessageLoopTestTypeDefault.EnsureDeletion',
  ],
  'global_tags': ['CPU_64_BITS', 'MODE_DEBUG', 'OS_LINUX', 'OS_POSIX'],
  'missing_shards': [],
  'per_iteration_data': [{
    'AlignedMemoryTest.DynamicAllocation': [{
      'elapsed_time_ms': 0,
      'losless_snippet': True,
      'output_snippet': 'blah\\n',
      'output_snippet_base64': 'YmxhaAo=',
      'status': 'SUCCESS',
    }],
    'AlignedMemoryTest.ScopedDynamicAllocation': [{
      'elapsed_time_ms': 0,
      'losless_snippet': True,
      'output_snippet': 'blah\\n',
      'output_snippet_base64': 'YmxhaAo=',
      'status': 'SUCCESS',
    }],
    'AlignedMemoryTest.StackAlignment': [{
      'elapsed_time_ms': 0,
      'losless_snippet': True,
      'output_snippet': 'blah\\n',
      'output_snippet_base64': 'YmxhaAo=',
      'status': 'SUCCESS',
    }],
    'AlignedMemoryTest.StaticAlignment': [{
      'elapsed_time_ms': 0,
      'losless_snippet': True,
      'output_snippet': 'blah\\n',
      'output_snippet_base64': 'YmxhaAo=',
      'status': 'SUCCESS',
    }],
  }],
  'swarming_summary': {
    u'shards': [
      {
        u'state': u'COMPLETED',
        u'outputs_ref': {
          u'view_url': u'blah',
        },
      }
    ],
  },
  'test_locations': {
    'AlignedMemoryTest.StackAlignment': {
      'file': 'foo/bar/allocation_test.cc',
      'line': 789,
    },
    'AlignedMemoryTest.StaticAlignment': {
      'file': 'foo/bar/allocation_test.cc',
      'line': 12,
    },
    'AlignedMemoryTest.DynamicAllocation': {
      'file': 'foo/bar/allocation_test.cc',
      'line': 123,
    },
    'AlignedMemoryTest.ScopedDynamicAllocation': {
      'file': 'foo/bar/allocation_test.cc',
      'line': 456,
    },
  },
}
# Only shard #1 finished. UNRELIABLE_RESULTS is set.
# Expected merge result when shard #0 produced no usable output.
BAD_GTEST_JSON_ONLY_1_SHARD = {
  'all_tests': [
    'AlignedMemoryTest.DynamicAllocation',
    'AlignedMemoryTest.ScopedDynamicAllocation',
    'AlignedMemoryTest.StackAlignment',
    'AlignedMemoryTest.StaticAlignment',
  ],
  'disabled_tests': [
    'ConditionVariableTest.TimeoutAcrossSetTimeOfDay',
    'FileTest.TouchGetInfo',
    'MessageLoopTestTypeDefault.EnsureDeletion',
  ],
  'global_tags': [
    'CPU_64_BITS',
    'MODE_DEBUG',
    'OS_LINUX',
    'OS_POSIX',
    'UNRELIABLE_RESULTS',
  ],
  'missing_shards': [0],
  'per_iteration_data': [{
    'AlignedMemoryTest.StackAlignment': [{
      'elapsed_time_ms': 0,
      'losless_snippet': True,
      'output_snippet': 'blah\\n',
      'output_snippet_base64': 'YmxhaAo=',
      'status': 'SUCCESS',
    }],
    'AlignedMemoryTest.StaticAlignment': [{
      'elapsed_time_ms': 0,
      'losless_snippet': True,
      'output_snippet': 'blah\\n',
      'output_snippet_base64': 'YmxhaAo=',
      'status': 'SUCCESS',
    }],
  }],
  'test_locations': {
    'AlignedMemoryTest.StackAlignment': {
      'file': 'foo/bar/allocation_test.cc',
      'line': 789,
    },
    'AlignedMemoryTest.StaticAlignment': {
      'file': 'foo/bar/allocation_test.cc',
      'line': 12,
    },
  },
}
# GOOD_GTEST_JSON_0 and TIMED_OUT_GTEST_JSON_1 merged.  The timed-out shard's
# FAILURE/NOTRUN entries are preserved alongside shard #0's successes.
TIMED_OUT_GTEST_JSON_MERGED = {
  'all_tests': [
    'AlignedMemoryTest.DynamicAllocation',
    'AlignedMemoryTest.ScopedDynamicAllocation',
    'AlignedMemoryTest.StackAlignment',
    'AlignedMemoryTest.StaticAlignment',
  ],
  'disabled_tests': [
    'ConditionVariableTest.TimeoutAcrossSetTimeOfDay',
    'FileTest.TouchGetInfo',
    'MessageLoopTestTypeDefault.EnsureDeletion',
  ],
  'global_tags': ['CPU_64_BITS', 'MODE_DEBUG', 'OS_LINUX', 'OS_POSIX'],
  'missing_shards': [],
  'per_iteration_data': [{
    'AlignedMemoryTest.DynamicAllocation': [{
      'elapsed_time_ms': 0,
      'losless_snippet': True,
      'output_snippet': 'blah\\n',
      'output_snippet_base64': 'YmxhaAo=',
      'status': 'SUCCESS',
    }],
    'AlignedMemoryTest.ScopedDynamicAllocation': [{
      'elapsed_time_ms': 0,
      'losless_snippet': True,
      'output_snippet': 'blah\\n',
      'output_snippet_base64': 'YmxhaAo=',
      'status': 'SUCCESS',
    }],
    'AlignedMemoryTest.StackAlignment': [{
      'elapsed_time_ms': 54000,
      'losless_snippet': True,
      'output_snippet': 'timed out',
      'output_snippet_base64': '',
      'status': 'FAILURE',
    }],
    'AlignedMemoryTest.StaticAlignment': [{
      'elapsed_time_ms': 0,
      'losless_snippet': True,
      'output_snippet': '',
      'output_snippet_base64': '',
      'status': 'NOTRUN',
    }],
  }],
  'swarming_summary': {
    u'shards': [
      {
        u'state': u'COMPLETED',
      },
      {
        u'state': u'TIMED_OUT',
      },
    ],
  },
  'test_locations': {
    'AlignedMemoryTest.StackAlignment': {
      'file': 'foo/bar/allocation_test.cc',
      'line': 789,
    },
    'AlignedMemoryTest.StaticAlignment': {
      'file': 'foo/bar/allocation_test.cc',
      'line': 12,
    },
    'AlignedMemoryTest.DynamicAllocation': {
      'file': 'foo/bar/allocation_test.cc',
      'line': 123,
    },
    'AlignedMemoryTest.ScopedDynamicAllocation': {
      'file': 'foo/bar/allocation_test.cc',
      'line': 456,
    },
  },
}
class _StandardGtestMergeTest(unittest.TestCase):
def setUp(self):
self.temp_dir = tempfile.mkdtemp()
def tearDown(self):
shutil.rmtree(self.temp_dir)
def _write_temp_file(self, path, content):
abs_path = os.path.join(self.temp_dir, path.replace('/', os.sep))
if not os.path.exists(os.path.dirname(abs_path)):
os.makedirs(os.path.dirname(abs_path))
with open(abs_path, 'w') as f:
if isinstance(content, dict):
json.dump(content, f)
else:
assert isinstance(content, str)
f.write(content)
return abs_path
class LoadShardJsonTest(_StandardGtestMergeTest):
  """Tests for standard_gtest_merge.load_shard_json shard-file lookup."""

  def _make_shard_outputs(self, dir_name_fmt):
    """Writes 15 per-shard output.json files; returns their paths in order."""
    paths = []
    for shard in range(15):
      shard_dir = os.path.join(self.temp_dir, dir_name_fmt % shard)
      if not os.path.exists(shard_dir):
        os.makedirs(shard_dir)
      out_path = os.path.join(shard_dir, 'output.json')
      with open(out_path, 'w') as out_file:
        json.dump({'all_tests': ['LoadShardJsonTest.test%d' % shard]},
                  out_file)
      paths.append(out_path)
    return paths

  def test_double_digit_jsons(self):
    # Directories named by shard index; '1' must not shadow '12'.
    paths = self._make_shard_outputs('%d')
    for shard in (0, 12):
      content, err = standard_gtest_merge.load_shard_json(shard, None, paths)
      self.assertEqual(
          {'all_tests': ['LoadShardJsonTest.test%d' % shard]}, content)
      self.assertIsNone(err)

  def test_double_task_id_jsons(self):
    # Directories named by swarming task id instead of shard index.
    paths = self._make_shard_outputs('deadbeef%d')
    for shard in (0, 12):
      content, err = standard_gtest_merge.load_shard_json(
          shard, 'deadbeef%d' % shard, paths)
      self.assertEqual(
          {'all_tests': ['LoadShardJsonTest.test%d' % shard]}, content)
      self.assertIsNone(err)
class MergeShardResultsTest(_StandardGtestMergeTest):
  """Tests for merge_shard_results function."""

  def setUp(self):
    super(MergeShardResultsTest, self).setUp()
    # Path of the staged summary.json, and the staged per-shard output files.
    self.summary = None
    self.test_files = []

  def stage(self, summary, files):
    """Writes summary.json plus the given shard output files to disk."""
    self.summary = self._write_temp_file('summary.json', summary)
    for path, content in files.iteritems():
      abs_path = self._write_temp_file(path, content)
      self.test_files.append(abs_path)

  def call(self):
    """Runs merge_shard_results, returning (merged dict, captured stdout)."""
    stdout = cStringIO.StringIO()
    with mock.patch('sys.stdout', stdout):
      merged = standard_gtest_merge.merge_shard_results(
          self.summary, self.test_files)
    return merged, stdout.getvalue().strip()

  def assertUnicodeEquals(self, expectation, result):
    # JSON loading yields unicode strings (Python 2) while the fixtures mix
    # str and unicode; normalize both sides so the comparison is stable.
    def convert_to_unicode(key_or_value):
      if isinstance(key_or_value, str):
        return unicode(key_or_value)
      if isinstance(key_or_value, dict):
        return {convert_to_unicode(k): convert_to_unicode(v)
                for k, v in key_or_value.items()}
      if isinstance(key_or_value, list):
        return [convert_to_unicode(x) for x in key_or_value]
      return key_or_value

    unicode_expectations = convert_to_unicode(expectation)
    unicode_result = convert_to_unicode(result)
    self.assertEquals(unicode_expectations, unicode_result)

  def test_ok(self):
    # Two shards, both successfully finished.
    self.stage({
      u'shards': [
        {
          u'state': u'COMPLETED',
        },
        {
          u'state': u'COMPLETED',
        },
      ],
    },
    {
      '0/output.json': GOOD_GTEST_JSON_0,
      '1/output.json': GOOD_GTEST_JSON_1,
    })
    merged, stdout = self.call()
    merged['swarming_summary'] = {
      'shards': [
        {
          u'state': u'COMPLETED',
          u'outputs_ref': {
            u'view_url': u'blah',
          },
        }
      ],
    }
    self.assertUnicodeEquals(GOOD_GTEST_JSON_MERGED, merged)
    self.assertEqual('', stdout)

  def test_timed_out(self):
    # Two shards; shard #1 timed out, which must be surfaced on stdout.
    self.stage({
      'shards': [
        {
          'state': 'COMPLETED',
        },
        {
          'state': 'TIMED_OUT',
        },
      ],
    },
    {
      '0/output.json': GOOD_GTEST_JSON_0,
      '1/output.json': TIMED_OUT_GTEST_JSON_1,
    })
    merged, stdout = self.call()
    self.assertUnicodeEquals(TIMED_OUT_GTEST_JSON_MERGED, merged)
    self.assertIn(
        'Test runtime exceeded allocated time\n', stdout)

  def test_missing_summary_json(self):
    # summary.json is missing, should return None and emit warning.
    self.summary = os.path.join(self.temp_dir, 'summary.json')
    merged, output = self.call()
    self.assertEqual(None, merged)
    self.assertIn('@@@STEP_WARNINGS@@@', output)
    self.assertIn('summary.json is missing or can not be read', output)

  def test_unfinished_shards(self):
    # Only one shard (#1) finished. Shard #0 did not.
    self.stage({
      u'shards': [
        None,
        {
          u'state': u'COMPLETED',
        },
      ],
    },
    {
      u'1/output.json': GOOD_GTEST_JSON_1,
    })
    merged, stdout = self.call()
    merged.pop('swarming_summary')
    self.assertUnicodeEquals(BAD_GTEST_JSON_ONLY_1_SHARD, merged)
    self.assertIn(
        '@@@STEP_WARNINGS@@@\nsome shards did not complete: 0\n', stdout)
    self.assertIn(
        '@@@STEP_LOG_LINE@some shards did not complete: 0@'
        'Missing results from the following shard(s): 0@@@\n', stdout)

  def test_missing_output_json(self):
    # Shard #0 output json is missing.
    self.stage({
      u'shards': [
        {
          u'state': u'COMPLETED',
        },
        {
          u'state': u'COMPLETED',
        },
      ],
    },
    {
      u'1/output.json': GOOD_GTEST_JSON_1,
    })
    merged, stdout = self.call()
    merged.pop('swarming_summary')
    self.assertUnicodeEquals(BAD_GTEST_JSON_ONLY_1_SHARD, merged)
    self.assertIn(
        'No result was found: '
        'shard 0 test output was missing', stdout)

  def test_large_output_json(self):
    # a shard is too large.
    self.stage({
      u'shards': [
        {
          u'state': u'COMPLETED',
        },
        {
          u'state': u'COMPLETED',
        },
      ],
    },
    {
      '0/output.json': GOOD_GTEST_JSON_0,
      '1/output.json': GOOD_GTEST_JSON_1,
    })
    old_json_limit = standard_gtest_merge.OUTPUT_JSON_SIZE_LIMIT
    len0 = len(json.dumps(GOOD_GTEST_JSON_0))
    len1 = len(json.dumps(GOOD_GTEST_JSON_1))
    large_shard = "0" if len0 > len1 else "1"
    try:
      # Override max output.json size just for this test.
      standard_gtest_merge.OUTPUT_JSON_SIZE_LIMIT = min(len0,len1)
      merged, stdout = self.call()
      merged.pop('swarming_summary')
      self.assertUnicodeEquals(BAD_GTEST_JSON_ONLY_1_SHARD, merged)
      self.assertIn(
          'No result was found: '
          'shard %s test output exceeded the size limit' % large_shard, stdout)
    finally:
      # Always restore the module-level limit so other tests are unaffected.
      standard_gtest_merge.OUTPUT_JSON_SIZE_LIMIT = old_json_limit
class CommandLineTest(common_merge_script_tests.CommandLineTest):
  """Shared command-line tests, bound to the standard_gtest_merge module."""

  def __init__(self, methodName='runTest'):
    super(CommandLineTest, self).__init__(methodName, standard_gtest_merge)
if __name__ == '__main__':
  # '-v' enables debug logging and unbounded unittest failure diffs.
  logging.basicConfig(
      level=logging.DEBUG if '-v' in sys.argv else logging.ERROR)
  if '-v' in sys.argv:
    unittest.TestCase.maxDiff = None
  unittest.main()
#!/usr/bin/env python
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import json
import os
import sys
import merge_api
import results_merger
def StandardIsolatedScriptMerge(output_json, summary_json, jsons_to_merge):
  """Merge the contents of one or more results JSONs into a single JSON.

  Args:
    output_json: A path to a JSON file to which the merged results should be
      written.
    summary_json: A path to swarming's summary.json for the task; used to
      determine how many shards ran and which ones produced no output.
    jsons_to_merge: A list of paths to JSON files that should be merged.
  Returns:
    0 on success, 1 when summary.json is missing or unreadable.
  """
  # summary.json is produced by swarming.py itself. We are mostly interested
  # in the number of shards.
  try:
    with open(summary_json) as f:
      summary = json.load(f)
  except (IOError, ValueError):
    # sys.stderr.write keeps this runnable under both Python 2 and 3 (the
    # old `print >> sys.stderr, (a, b)` was Python-2-only syntax and also
    # printed the tuple's repr rather than two lines).
    sys.stderr.write(
        'summary.json is missing or can not be read\n'
        'Something is seriously wrong with swarming_client/ or the bot.\n')
    return 1

  missing_shards = []
  shard_results_list = []
  for index, result in enumerate(summary['shards']):
    output_path = None
    if result:
      output_path = find_shard_output_path(index, result.get('task_id'),
                                           jsons_to_merge)
    if not output_path:
      missing_shards.append(index)
      continue

    with open(output_path) as f:
      try:
        json_contents = json.load(f)
      except ValueError:
        # Name the offending file.  The previous code referenced an undefined
        # variable (`j`) here, raising NameError instead of this ValueError.
        raise ValueError('Failed to parse JSON from %s' % output_path)
    shard_results_list.append(json_contents)

  merged_results = results_merger.merge_test_results(shard_results_list)
  if missing_shards:
    # Flag the merged result as incomplete so consumers can tell.
    merged_results['missing_shards'] = missing_shards
    if 'global_tags' not in merged_results:
      merged_results['global_tags'] = []
    merged_results['global_tags'].append('UNRELIABLE_RESULTS')

  with open(output_json, 'w') as f:
    json.dump(merged_results, f)

  return 0
def find_shard_output_path(index, task_id, jsons_to_merge):
  """Finds the shard matching the index/task-id.

  Args:
    index: The index of the shard to load data for, this is for old api.
    task_id: The directory of the shard to load data for, this is for new api.
    jsons_to_merge: A container of file paths for shards that emitted output.
  Returns:
    * The matching path, or None when it is missing or ambiguous.
  """
  # 'output.json' is set in swarming/api.py, gtest_task method.  A shard's
  # file lives in a directory named either after its index (old api) or its
  # swarming task id (new api).
  matching_json_files = [
      j for j in jsons_to_merge
      if (os.path.basename(j) == 'output.json' and
          (os.path.basename(os.path.dirname(j)) == str(index) or
           os.path.basename(os.path.dirname(j)) == task_id))]

  if not matching_json_files:
    # sys.stderr.write keeps this file runnable under both Python 2 and 3;
    # the previous `print >> sys.stderr` form is Python-2-only syntax.
    sys.stderr.write('shard %s test output missing\n' % index)
    return None
  if len(matching_json_files) > 1:
    sys.stderr.write('duplicate test output for shard %s\n' % index)
    return None
  return matching_json_files[0]
def main(raw_args):
  """Parses the merge-script command line and delegates to the merger."""
  options = merge_api.ArgumentParser().parse_args(raw_args)
  return StandardIsolatedScriptMerge(
      options.output_json, options.summary_json, options.jsons_to_merge)
if __name__ == '__main__':
  # Script entry point; main()'s return value becomes the exit code.
  sys.exit(main(sys.argv[1:]))
#!/usr/bin/env vpython
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import itertools
import json
import os
import shutil
import sys
import tempfile
import unittest
import mock
import common_merge_script_tests
THIS_DIR = os.path.dirname(__file__)
sys.path.insert(
0, os.path.abspath(os.path.join(THIS_DIR, '..', '..', '..', 'unittests')))
import test_env
sys.path.insert(
0, os.path.abspath(os.path.join(THIS_DIR, '..', 'resources')))
import standard_isolated_script_merge
# swarming summary.json fixture: two shards, both in state COMPLETED.
TWO_COMPLETED_SHARDS = {
  u'shards': [
    {
      u'state': u'COMPLETED',
    },
    {
      u'state': u'COMPLETED',
    },
  ],
}
class StandardIsolatedScriptMergeTest(unittest.TestCase):
  """Base fixture: scratch directory plus helpers to stage shard outputs."""

  def setUp(self):
    self.temp_dir = tempfile.mkdtemp()
    self.test_files = []
    self.summary = None

  def tearDown(self):
    shutil.rmtree(self.temp_dir)
    super(StandardIsolatedScriptMergeTest, self).tearDown()

  def _write_temp_file(self, path, content):
    """Writes `content` (dict -> JSON, str -> verbatim) under the scratch dir.

    Returns the absolute path of the written file; '/'-separated parent
    directories are created on demand.
    """
    abs_path = os.path.join(self.temp_dir, path.replace('/', os.sep))
    parent = os.path.dirname(abs_path)
    if not os.path.exists(parent):
      os.makedirs(parent)
    with open(abs_path, 'w') as out_file:
      if isinstance(content, dict):
        json.dump(content, out_file)
      else:
        assert isinstance(content, str)
        out_file.write(content)
    return abs_path

  def _stage(self, summary, files):
    """Materializes summary.json and the per-shard files for a merge call."""
    self.summary = self._write_temp_file('summary.json', summary)
    for path, content in files.iteritems():
      self.test_files.append(self._write_temp_file(path, content))
class OutputTest(StandardIsolatedScriptMergeTest):
  """End-to-end checks of the JSON written by StandardIsolatedScriptMerge."""

  def test_success_and_failure(self):
    # Both shards present: successes concatenate, failures are kept.
    self._stage(TWO_COMPLETED_SHARDS,
        {
          '0/output.json':
            {
              'successes': ['fizz', 'baz'],
            },
          '1/output.json':
            {
              'successes': ['buzz', 'bar'],
              'failures': ['failing_test_one']
            }
        })
    output_json_file = os.path.join(self.temp_dir, 'output.json')
    standard_isolated_script_merge.StandardIsolatedScriptMerge(
        output_json_file, self.summary, self.test_files)

    with open(output_json_file, 'r') as f:
      results = json.load(f)
      self.assertEquals(results['successes'], ['fizz', 'baz', 'buzz', 'bar'])
      self.assertEquals(results['failures'], ['failing_test_one'])
      self.assertTrue(results['valid'])

  def test_missing_shard(self):
    # Shard #1 produced no output: results are tagged UNRELIABLE_RESULTS
    # and the shard index is recorded under 'missing_shards'.
    self._stage(TWO_COMPLETED_SHARDS,
        {
          '0/output.json':
            {
              'successes': ['fizz', 'baz'],
            },
        })
    output_json_file = os.path.join(self.temp_dir, 'output.json')
    standard_isolated_script_merge.StandardIsolatedScriptMerge(
        output_json_file, self.summary, self.test_files)

    with open(output_json_file, 'r') as f:
      results = json.load(f)
      self.assertEquals(results['successes'], ['fizz', 'baz'])
      self.assertEquals(results['failures'], [])
      self.assertTrue(results['valid'])
      self.assertEquals(results['global_tags'], ['UNRELIABLE_RESULTS'])
      self.assertEquals(results['missing_shards'], [1])
class InputParsingTest(StandardIsolatedScriptMergeTest):
  """Verifies the shard lists handed to results_merger.merge_test_results."""

  def setUp(self):
    super(InputParsingTest, self).setUp()

    # Records every results_list passed to the (patched) merger.
    self.merge_test_results_args = []

    def mock_merge_test_results(results_list):
      self.merge_test_results_args.append(results_list)
      return {
        'foo': [
          'bar',
          'baz',
        ],
      }

    m = mock.patch(
        'standard_isolated_script_merge.results_merger.merge_test_results',
        side_effect=mock_merge_test_results)
    m.start()
    self.addCleanup(m.stop)

  def test_simple(self):
    # Both shard outputs must be forwarded to the merger, in shard order.
    self._stage(TWO_COMPLETED_SHARDS,
        {
          '0/output.json':
            {
              'result0': ['bar', 'baz'],
            },
          '1/output.json':
            {
              'result1': {'foo': 'bar'}
            }
        })
    output_json_file = os.path.join(self.temp_dir, 'output.json')
    exit_code = standard_isolated_script_merge.StandardIsolatedScriptMerge(
        output_json_file, self.summary, self.test_files)

    self.assertEquals(0, exit_code)
    self.assertEquals(
      [
        [
          {
            'result0': [
              'bar', 'baz',
            ],
          },
          {
            'result1': {
              'foo': 'bar',
            },
          }
        ],
      ],
      self.merge_test_results_args)

  def test_no_jsons(self):
    # Zero shards: the merger is still invoked, with an empty list.
    self._stage({
      u'shards': [],
    }, {})

    json_files = []
    output_json_file = os.path.join(self.temp_dir, 'output.json')
    exit_code = standard_isolated_script_merge.StandardIsolatedScriptMerge(
        output_json_file, self.summary, json_files)

    self.assertEquals(0, exit_code)
    self.assertEquals([[]], self.merge_test_results_args)
class CommandLineTest(common_merge_script_tests.CommandLineTest):
  """Shared command-line tests, bound to standard_isolated_script_merge."""

  def __init__(self, methodName='runTest'):
    super(CommandLineTest, self).__init__(
        methodName, standard_isolated_script_merge)
if __name__ == '__main__':
  # Runs all test cases defined in this module.
  unittest.main()
......@@ -9,13 +9,8 @@ for more details about the presubmit API built into depot_tools.
"""
def CommonChecks(input_api, output_api):
commands = [
input_api.Command(
name='trigger_multiple_dimensions_unittest', cmd=[
input_api.python_executable, 'trigger_multiple_dimensions_unittest.py'],
kwargs={}, message=output_api.PresubmitError),
]
return input_api.RunTests(commands)
return input_api.canned_checks.RunUnitTestsInDirectory(
input_api, output_api, '.', whitelist=['.*test.py'])
def CheckChangeOnUpload(input_api, output_api):
return CommonChecks(input_api, output_api)
......
......@@ -6,4 +6,4 @@ and distributed on the Swarming fleet.
Trigger scripts are documented here:
https://cs.chromium.org/chromium/build/scripts/slave/recipe_modules/swarming/api.py?l=1292
https://cs.chromium.org/search/?q=file:swarming/api.py+%22*+trigger_script:%22&type=cs
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment