Commit 6820d712 authored by Igor Makarov, committed by Commit Bot

Reformat python code in third_party/blink/tools/blinkpy/bindings for PEP8

- added .style.yapf file
- reformat *.py files

Bug: 1051750
Change-Id: Id52422ad244199d802c3c66afe480646e1672d37
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2127112
Reviewed-by: Kent Tamura <tkent@chromium.org>
Commit-Queue: Kent Tamura <tkent@chromium.org>
Cr-Commit-Position: refs/heads/master@{#758450}
parent ee6ab987
......@@ -44,8 +44,7 @@ from compute_interfaces_info_individual import InterfaceInfoCollector
from compute_interfaces_info_overall import (compute_interfaces_info_overall,
interfaces_info)
from generate_origin_trial_features import generate_origin_trial_features
from idl_compiler import (generate_bindings,
generate_union_type_containers,
from idl_compiler import (generate_bindings, generate_union_type_containers,
generate_dictionary_impl,
generate_callback_function_impl)
from json5_generator import Json5File
......@@ -55,7 +54,6 @@ from utilities import get_file_contents
from utilities import get_first_interface_name_from_idl
from utilities import to_snake_case
PASS_MESSAGE = 'All tests PASS!'
FAIL_MESSAGE = """Some tests FAIL!
To update the reference files, execute:
......@@ -114,8 +112,9 @@ def generate_interface_dependencies(runtime_enabled_features):
# http://bugs.python.org/issue11406
idl_paths = []
for dirpath, _, files in os.walk(directory):
idl_paths.extend(os.path.join(dirpath, filename)
for filename in fnmatch.filter(files, '*.idl'))
idl_paths.extend(
os.path.join(dirpath, filename)
for filename in fnmatch.filter(files, '*.idl'))
return idl_paths
def collect_blink_idl_paths():
......@@ -139,7 +138,8 @@ def generate_interface_dependencies(runtime_enabled_features):
# To avoid this issue, we need to clear relative_dir here.
for value in info['interfaces_info'].itervalues():
value['relative_dir'] = ''
component_info = info_collector.get_component_info_as_dict(runtime_enabled_features)
component_info = info_collector.get_component_info_as_dict(
runtime_enabled_features)
return info, component_info
# We compute interfaces info for *all* IDL files, not just test IDL
......@@ -165,15 +165,18 @@ def generate_interface_dependencies(runtime_enabled_features):
# includes are invalid), but that's brittle (would need to update this file
# for each new component) and doesn't test the code generator any better
# than using a single component.
non_test_interfaces_info, non_test_component_info = collect_interfaces_info(non_test_idl_paths)
non_test_interfaces_info, non_test_component_info = collect_interfaces_info(
non_test_idl_paths)
test_interfaces_info = {}
test_component_info = {}
for component, paths in test_idl_paths.iteritems():
test_interfaces_info[component], test_component_info[component] = collect_interfaces_info(paths)
test_interfaces_info[component], test_component_info[component] = \
collect_interfaces_info(paths)
# In order to allow test IDL files to override the production IDL files if
# they have the same interface name, process the test IDL files after the
# non-test IDL files.
info_individuals = [non_test_interfaces_info] + test_interfaces_info.values()
info_individuals = [non_test_interfaces_info] + \
test_interfaces_info.values()
compute_interfaces_info_overall(info_individuals)
# Add typedefs which are specified in the actual IDL files to the testing
# component info.
......@@ -187,8 +190,8 @@ def generate_interface_dependencies(runtime_enabled_features):
class IdlCompilerOptions(object):
def __init__(self, output_directory, cache_directory, impl_output_directory,
target_component):
def __init__(self, output_directory, cache_directory,
impl_output_directory, target_component):
self.output_directory = output_directory
self.cache_directory = cache_directory
self.impl_output_directory = impl_output_directory
......@@ -225,8 +228,10 @@ def bindings_tests(output_directory, verbose, suppress_diff):
def delete_cache_files():
# FIXME: Instead of deleting cache files, don't generate them.
cache_files = [path for path in list_files(output_directory)
if is_cache_file(os.path.basename(path))]
cache_files = [
path for path in list_files(output_directory)
if is_cache_file(os.path.basename(path))
]
for cache_file in cache_files:
os.remove(cache_file)
......@@ -253,15 +258,20 @@ def bindings_tests(output_directory, verbose, suppress_diff):
return True
def identical_output_files(output_files):
reference_files = [os.path.join(REFERENCE_DIRECTORY,
os.path.relpath(path, output_directory))
for path in output_files]
return all([identical_file(reference_filename, output_filename)
for (reference_filename, output_filename) in zip(reference_files, output_files)])
reference_files = [
os.path.join(REFERENCE_DIRECTORY,
os.path.relpath(path, output_directory))
for path in output_files
]
return all([
identical_file(reference_filename, output_filename)
for (reference_filename,
output_filename) in zip(reference_files, output_files)
])
def no_excess_files(output_files):
generated_files = set([os.path.relpath(path, output_directory)
for path in output_files])
generated_files = set(
[os.path.relpath(path, output_directory) for path in output_files])
excess_files = []
for path in list_files(REFERENCE_DIRECTORY):
relpath = os.path.relpath(path, REFERENCE_DIRECTORY)
......@@ -271,14 +281,15 @@ def bindings_tests(output_directory, verbose, suppress_diff):
if relpath not in generated_files:
excess_files.append(relpath)
if excess_files:
print ('Excess reference files! '
'(probably cruft from renaming or deleting):\n' +
'\n'.join(excess_files))
print('Excess reference files! '
'(probably cruft from renaming or deleting):\n' +
'\n'.join(excess_files))
return False
return True
def make_runtime_features_dict():
input_filename = os.path.join(TEST_INPUT_DIRECTORY, 'runtime_enabled_features.json5')
input_filename = os.path.join(TEST_INPUT_DIRECTORY,
'runtime_enabled_features.json5')
json5_file = Json5File.load_from_files([input_filename])
features_map = {}
for feature in json5_file.name_dictionaries:
......@@ -301,8 +312,8 @@ def bindings_tests(output_directory, verbose, suppress_diff):
target_component=component)
if component == 'core':
partial_interface_output_dir = os.path.join(output_directory,
'modules')
partial_interface_output_dir = \
os.path.join(output_directory, 'modules')
if not os.path.exists(partial_interface_output_dir):
os.makedirs(partial_interface_output_dir)
partial_interface_options = IdlCompilerOptions(
......@@ -325,8 +336,10 @@ def bindings_tests(output_directory, verbose, suppress_diff):
idl_filenames.append(idl_path)
idl_basename = os.path.basename(idl_path)
name_from_basename, _ = os.path.splitext(idl_basename)
definition_name = get_first_interface_name_from_idl(get_file_contents(idl_path))
is_partial_interface_idl = to_snake_case(definition_name) != name_from_basename
definition_name = get_first_interface_name_from_idl(
get_file_contents(idl_path))
is_partial_interface_idl = to_snake_case(
definition_name) != name_from_basename
if not is_partial_interface_idl:
interface_info = interfaces_info[definition_name]
if interface_info['is_dictionary']:
......@@ -336,32 +349,25 @@ def bindings_tests(output_directory, verbose, suppress_diff):
partial_interface_filenames.append(idl_path)
info_provider = component_info_providers[component]
partial_interface_info_provider = component_info_providers['modules']
partial_interface_info_provider = \
component_info_providers['modules']
generate_union_type_containers(CodeGeneratorUnionType,
info_provider, options)
generate_callback_function_impl(CodeGeneratorCallbackFunction,
info_provider, options)
generate_bindings(
CodeGeneratorV8,
info_provider,
options,
idl_filenames)
generate_bindings(
CodeGeneratorV8,
partial_interface_info_provider,
partial_interface_options,
partial_interface_filenames)
generate_dictionary_impl(
CodeGeneratorDictionaryImpl,
info_provider,
options,
dictionary_impl_filenames)
generate_origin_trial_features(
info_provider,
options,
[filename for filename in idl_filenames
if filename not in dictionary_impl_filenames])
generate_bindings(CodeGeneratorV8, info_provider, options,
idl_filenames)
generate_bindings(CodeGeneratorV8, partial_interface_info_provider,
partial_interface_options,
partial_interface_filenames)
generate_dictionary_impl(CodeGeneratorDictionaryImpl,
info_provider, options,
dictionary_impl_filenames)
generate_origin_trial_features(info_provider, options, [
filename for filename in idl_filenames
if filename not in dictionary_impl_filenames
])
finally:
delete_cache_files()
......@@ -390,6 +396,8 @@ def run_bindings_tests(reset_results, verbose, suppress_diff):
with TemporaryDirectory() as temp_dir:
# TODO(peria): Remove this hack.
# Some internal algorithms depend on the path of output directory.
temp_source_path = os.path.join(temp_dir, 'third_party', 'blink', 'renderer')
temp_output_path = os.path.join(temp_source_path, 'bindings', 'tests', 'results')
temp_source_path = os.path.join(temp_dir, 'third_party', 'blink',
'renderer')
temp_output_path = os.path.join(temp_source_path, 'bindings', 'tests',
'results')
return bindings_tests(temp_output_path, verbose, suppress_diff)
......@@ -2,7 +2,6 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Usage: collect_idls_into_json.py path_file.txt json_file.json
This script collects and organizes interface information and that information dumps into json file.
"""
......@@ -17,7 +16,6 @@ path_finder.add_bindings_scripts_dir_to_sys_path()
import utilities
from blink_idl_parser import parse_file, BlinkIDLParser
_INTERFACE = 'Interface'
_IMPLEMENT = 'Implements'
_PARTIAL = 'Partial'
......@@ -89,7 +87,8 @@ def is_partial(definition):
Return:
True if |definition| is 'partial interface' class, otherwise False.
"""
return definition.GetClass() == _INTERFACE and definition.GetProperty(_PARTIAL)
return definition.GetClass() == _INTERFACE and definition.GetProperty(
_PARTIAL)
def get_filepath(interface_node):
......@@ -149,10 +148,16 @@ def const_node_to_dict(const_node):
dictionary of const's information
"""
return {
_NAME: const_node.GetName(),
_TYPE: get_const_type(const_node),
_VALUE: get_const_value(const_node),
_EXTATTRIBUTES: [extattr_node_to_dict(extattr) for extattr in get_extattribute_node_list(const_node)],
_NAME:
const_node.GetName(),
_TYPE:
get_const_type(const_node),
_VALUE:
get_const_value(const_node),
_EXTATTRIBUTES: [
extattr_node_to_dict(extattr)
for extattr in get_extattribute_node_list(const_node)
],
}
......@@ -182,7 +187,8 @@ def get_attribute_type(attribute_node):
if type_component.GetClass() == _ARRAY:
type_list[-1] += '[]'
elif type_component.GetClass() == _SEQUENCE:
for seq_type in type_component.GetOneOf(_TYPE).GetChildren():
for seq_type in type_component.GetOneOf(
_TYPE).GetChildren():
type_list.append('<' + seq_type.GetName() + '>')
else:
type_list.append(type_component.GetName())
......@@ -190,16 +196,20 @@ def get_attribute_type(attribute_node):
elif attr_type.GetClass() == _SEQUENCE:
union_member_types = []
if attr_type.GetOneOf(_TYPE).GetChildren()[0].GetClass() == _UNIONTYPE:
for union_member in attr_type.GetOneOf(_TYPE).GetOneOf(_UNIONTYPE).GetListOf(_TYPE):
for union_member in attr_type.GetOneOf(_TYPE).GetOneOf(
_UNIONTYPE).GetListOf(_TYPE):
if len(union_member.GetChildren()) != 1:
raise Exception('Complex type in a union in a sequence is not yet supported')
raise Exception(
'Complex type in a union in a sequence is not yet supported'
)
type_component = union_member.GetChildren()[0]
union_member_types.append(type_component.GetName())
return '<' + str(union_member_types) + '>'
else:
for type_component in attr_type.GetOneOf(_TYPE).GetChildren():
if type_component.GetClass() == _SEQUENCE:
raise Exception('Sequence in another sequence is not yet supported')
raise Exception(
'Sequence in another sequence is not yet supported')
else:
if type_component.GetClass() == _ARRAY:
type_list[-1] += []
......@@ -229,11 +239,18 @@ def attribute_node_to_dict(attribute_node):
dictionary of attribute's information
"""
return {
_NAME: attribute_node.GetName(),
_TYPE: get_attribute_type(attribute_node),
_EXTATTRIBUTES: [extattr_node_to_dict(extattr) for extattr in get_extattribute_node_list(attribute_node)],
_READONLY: attribute_node.GetProperty(_PROP_READONLY, default=False),
_STATIC: attribute_node.GetProperty(_PROP_STATIC, default=False),
_NAME:
attribute_node.GetName(),
_TYPE:
get_attribute_type(attribute_node),
_EXTATTRIBUTES: [
extattr_node_to_dict(extattr)
for extattr in get_extattribute_node_list(attribute_node)
],
_READONLY:
attribute_node.GetProperty(_PROP_READONLY, default=False),
_STATIC:
attribute_node.GetProperty(_PROP_STATIC, default=False),
}
......@@ -295,12 +312,21 @@ def operation_node_to_dict(operation_node):
dictionary of operation's informantion
"""
return {
_NAME: get_operation_name(operation_node),
_ARGUMENTS: [argument_node_to_dict(argument) for argument in get_argument_node_list(operation_node)
if argument_node_to_dict(argument)],
_TYPE: get_operation_type(operation_node),
_EXTATTRIBUTES: [extattr_node_to_dict(extattr) for extattr in get_extattribute_node_list(operation_node)],
_STATIC: operation_node.GetProperty(_PROP_STATIC, default=False),
_NAME:
get_operation_name(operation_node),
_ARGUMENTS: [
argument_node_to_dict(argument)
for argument in get_argument_node_list(operation_node)
if argument_node_to_dict(argument)
],
_TYPE:
get_operation_type(operation_node),
_EXTATTRIBUTES: [
extattr_node_to_dict(extattr)
for extattr in get_extattribute_node_list(operation_node)
],
_STATIC:
operation_node.GetProperty(_PROP_STATIC, default=False),
}
......@@ -351,13 +377,29 @@ def interface_node_to_dict(interface_node):
A dictionary of the interface information.
"""
return {
_NAME: interface_node.GetName(),
_FILEPATH: get_filepath(interface_node),
_CONSTS: [const_node_to_dict(const) for const in get_const_node_list(interface_node)],
_ATTRIBUTES: [attribute_node_to_dict(attr) for attr in get_attribute_node_list(interface_node) if attr],
_OPERATIONS: [operation_node_to_dict(operation) for operation in get_operation_node_list(interface_node) if operation],
_EXTATTRIBUTES: [extattr_node_to_dict(extattr) for extattr in get_extattribute_node_list(interface_node)],
_INHERIT: inherit_node_to_dict(interface_node)
_NAME:
interface_node.GetName(),
_FILEPATH:
get_filepath(interface_node),
_CONSTS: [
const_node_to_dict(const)
for const in get_const_node_list(interface_node)
],
_ATTRIBUTES: [
attribute_node_to_dict(attr)
for attr in get_attribute_node_list(interface_node) if attr
],
_OPERATIONS: [
operation_node_to_dict(operation)
for operation in get_operation_node_list(interface_node)
if operation
],
_EXTATTRIBUTES: [
extattr_node_to_dict(extattr)
for extattr in get_extattribute_node_list(interface_node)
],
_INHERIT:
inherit_node_to_dict(interface_node)
}
......@@ -372,10 +414,13 @@ def merge_partial_dicts(interfaces_dict, partials_dict):
for interface_name, partial in partials_dict.iteritems():
interface = interfaces_dict.get(interface_name)
if not interface:
raise Exception('There is a partial interface, but the corresponding non-partial interface was not found.')
raise Exception(
'There is a partial interface, but the corresponding non-partial interface was not found.'
)
for member in _MEMBERS:
interface[member].extend(partial.get(member))
interface.setdefault(_PARTIAL_FILEPATH, []).append(partial[_FILEPATH])
interface.setdefault(_PARTIAL_FILEPATH,
[]).append(partial[_FILEPATH])
return interfaces_dict
......@@ -390,10 +435,13 @@ def merge_implement_nodes(interfaces_dict, implement_node_list):
for implement in implement_node_list:
reference = implement.GetProperty(_PROP_REFERENCE)
implement = implement.GetName()
if reference not in interfaces_dict.keys() or implement not in interfaces_dict.keys():
raise Exception('There is not corresponding implement or reference interface.')
if (reference not in interfaces_dict.keys()
or implement not in interfaces_dict.keys()):
raise Exception(
'There is not corresponding implement or reference interface.')
for member in _MEMBERS:
interfaces_dict[implement][member].extend(interfaces_dict[reference].get(member))
interfaces_dict[implement][member].extend(
interfaces_dict[reference].get(member))
return interfaces_dict
......@@ -408,7 +456,9 @@ def export_to_jsonfile(dictionary, json_file):
def usage():
sys.stdout.write('Usage: collect_idls_into_json.py <path_file.txt> <output_file.json>\n')
sys.stdout.write(
'Usage: collect_idls_into_json.py <path_file.txt> <output_file.json>\n'
)
def main(args):
......@@ -418,17 +468,22 @@ def main(args):
path_file = args[0]
json_file = args[1]
path_list = utilities.read_file_to_list(path_file)
implement_node_list = [definition
for definition in get_definitions(path_list)
if is_implements(definition)]
interfaces_dict = {definition.GetName(): interface_node_to_dict(definition)
for definition in get_definitions(path_list)
if not is_partial(definition)}
partials_dict = {definition.GetName(): interface_node_to_dict(definition)
for definition in get_definitions(path_list)
if is_partial(definition)}
implement_node_list = [
definition for definition in get_definitions(path_list)
if is_implements(definition)
]
interfaces_dict = {
definition.GetName(): interface_node_to_dict(definition)
for definition in get_definitions(path_list)
if not is_partial(definition)
}
partials_dict = {
definition.GetName(): interface_node_to_dict(definition)
for definition in get_definitions(path_list) if is_partial(definition)
}
dictionary = merge_partial_dicts(interfaces_dict, partials_dict)
interfaces_dict = merge_implement_nodes(interfaces_dict, implement_node_list)
interfaces_dict = merge_implement_nodes(interfaces_dict,
implement_node_list)
export_to_jsonfile(dictionary, json_file)
......
......@@ -11,24 +11,27 @@ from blink_idl_parser import parse_file, BlinkIDLParser
testdata_path = os.path.join(
os.path.dirname(os.path.realpath(__file__)), 'testdata')
_FILE = os.path.join(testdata_path, 'test_filepath.txt')
_KEY_SET = set(['Operations', 'Name', 'FilePath', 'Inherit', 'Consts', 'ExtAttributes', 'Attributes'])
_KEY_SET = set([
'Operations', 'Name', 'FilePath', 'Inherit', 'Consts', 'ExtAttributes',
'Attributes'
])
_PARTIAL = {
'Node': {
'Operations': [],
'Name': 'Node',
'FilePath': 'Source/core/timing/WorkerGlobalScopePerformance.idl',
'Name':
'Node',
'FilePath':
'Source/core/timing/WorkerGlobalScopePerformance.idl',
'Inherit': [],
'Consts': [],
'ExtAttributes': [],
'Attributes': [
{
'Static': False,
'Readonly': True,
'Type': 'WorkerPerformance',
'Name': 'performance',
'ExtAttributes': []
}
]
'Attributes': [{
'Static': False,
'Readonly': True,
'Type': 'WorkerPerformance',
'Name': 'performance',
'ExtAttributes': []
}]
}
}
......@@ -36,23 +39,26 @@ _PARTIAL = {
class TestFunctions(unittest.TestCase):
def setUp(self):
parser = BlinkIDLParser()
path = os.path.join(
testdata_path, utilities.read_file_to_list(_FILE)[0])
path = os.path.join(testdata_path,
utilities.read_file_to_list(_FILE)[0])
definitions = parse_file(parser, path)
self.definition = definitions.GetChildren()[0]
def test_get_definitions(self):
pathfile = utilities.read_file_to_list(_FILE)
pathfile = [os.path.join(testdata_path, filename)
for filename in pathfile]
pathfile = [
os.path.join(testdata_path, filename) for filename in pathfile
]
for actual in collect_idls_into_json.get_definitions(pathfile):
self.assertEqual(actual.GetName(), self.definition.GetName())
def test_is_partial(self):
if self.definition.GetClass() == 'Interface' and self.definition.GetProperty('Partial'):
if (self.definition.GetClass() == 'Interface'
and self.definition.GetProperty('Partial')):
self.assertTrue(collect_idls_into_json.is_partial(self.definition))
else:
self.assertFalse(collect_idls_into_json.is_partial(self.definition))
self.assertFalse(
collect_idls_into_json.is_partial(self.definition))
def test_get_filepaths(self):
filepath = collect_idls_into_json.get_filepath(self.definition)
......@@ -60,53 +66,85 @@ class TestFunctions(unittest.TestCase):
def test_const_node_to_dict(self):
const_member = set(['Name', 'Type', 'Value', 'ExtAttributes'])
for const in collect_idls_into_json.get_const_node_list(self.definition):
for const in collect_idls_into_json.get_const_node_list(
self.definition):
if const:
self.assertEqual(const.GetClass(), 'Const')
self.assertEqual(collect_idls_into_json.get_const_type(const), 'unsigned short')
self.assertEqual(collect_idls_into_json.get_const_value(const), '1')
self.assertTrue(const_member.issuperset(collect_idls_into_json.const_node_to_dict(const).keys()))
self.assertEqual(
collect_idls_into_json.get_const_type(const),
'unsigned short')
self.assertEqual(
collect_idls_into_json.get_const_value(const), '1')
self.assertTrue(
const_member.issuperset(
collect_idls_into_json.const_node_to_dict(const).
keys()))
else:
self.assertEqual(const, None)
def test_attribute_node_to_dict(self):
attribute_member = set(['Name', 'Type', 'ExtAttributes', 'Readonly', 'Static'])
for attribute in collect_idls_into_json.get_attribute_node_list(self.definition):
attribute_member = set(
['Name', 'Type', 'ExtAttributes', 'Readonly', 'Static'])
for attribute in collect_idls_into_json.get_attribute_node_list(
self.definition):
if attribute:
self.assertEqual(attribute.GetClass(), 'Attribute')
self.assertEqual(attribute.GetName(), 'parentNode')
self.assertEqual(collect_idls_into_json.get_attribute_type(attribute), 'Node')
self.assertTrue(attribute_member.issuperset(collect_idls_into_json.attribute_node_to_dict(attribute).keys()))
self.assertEqual(
collect_idls_into_json.get_attribute_type(attribute),
'Node')
self.assertTrue(
attribute_member.issuperset(
collect_idls_into_json.attribute_node_to_dict(
attribute).keys()))
else:
self.assertEqual(attribute, None)
def test_operation_node_to_dict(self):
operate_member = set(['Static', 'ExtAttributes', 'Type', 'Name', 'Arguments'])
operate_member = set(
['Static', 'ExtAttributes', 'Type', 'Name', 'Arguments'])
argument_member = set(['Name', 'Type'])
for operation in collect_idls_into_json.get_operation_node_list(self.definition):
for operation in collect_idls_into_json.get_operation_node_list(
self.definition):
if operation:
self.assertEqual(operation.GetClass(), 'Operation')
self.assertEqual(operation.GetName(), 'appendChild')
self.assertEqual(collect_idls_into_json.get_operation_type(operation), 'Node')
self.assertTrue(operate_member.issuperset(collect_idls_into_json.operation_node_to_dict(operation).keys()))
for argument in collect_idls_into_json.get_argument_node_list(operation):
self.assertEqual(
collect_idls_into_json.get_operation_type(operation),
'Node')
self.assertTrue(
operate_member.issuperset(
collect_idls_into_json.operation_node_to_dict(
operation).keys()))
for argument in collect_idls_into_json.get_argument_node_list(
operation):
if argument:
self.assertEqual(argument.GetClass(), 'Argument')
self.assertEqual(argument.GetName(), 'newChild')
self.assertEqual(collect_idls_into_json.get_argument_type(argument), 'Node')
self.assertTrue(argument_member.issuperset(collect_idls_into_json.argument_node_to_dict(argument).keys()))
self.assertEqual(
collect_idls_into_json.get_argument_type(argument),
'Node')
self.assertTrue(
argument_member.issuperset(
collect_idls_into_json.argument_node_to_dict(
argument).keys()))
else:
self.assertEqual(argument, None)
else:
self.assertEqual(operation, None)
def test_extattribute_node_to_dict(self):
for extattr in collect_idls_into_json.get_extattribute_node_list(self.definition):
for extattr in collect_idls_into_json.get_extattribute_node_list(
self.definition):
if extattr:
self.assertEqual(extattr.GetClass(), 'ExtAttribute')
self.assertEqual(extattr.GetName(), 'CustomToV8')
self.assertEqual(collect_idls_into_json.extattr_node_to_dict(extattr).keys(), ['Name'])
self.assertEqual(collect_idls_into_json.extattr_node_to_dict(extattr).values(), ['CustomToV8'])
self.assertEqual(
collect_idls_into_json.extattr_node_to_dict(extattr).
keys(), ['Name'])
self.assertEqual(
collect_idls_into_json.extattr_node_to_dict(extattr).
values(), ['CustomToV8'])
else:
self.assertEqual(extattr, None)
......@@ -119,12 +157,17 @@ class TestFunctions(unittest.TestCase):
self.assertEqual(inherit, [])
def test_interface_node_to_dict(self):
self.assertTrue(_KEY_SET.issuperset(collect_idls_into_json.interface_node_to_dict(self.definition)))
self.assertTrue(
_KEY_SET.issuperset(
collect_idls_into_json.interface_node_to_dict(
self.definition)))
def test_merge_partial_dicts(self):
key_name = self.definition.GetName()
merged = collect_idls_into_json.merge_partial_dicts(
{key_name: collect_idls_into_json.interface_node_to_dict(self.definition)}, _PARTIAL)[key_name]['Partial_FilePaths']
merged = collect_idls_into_json.merge_partial_dicts({
key_name:
collect_idls_into_json.interface_node_to_dict(self.definition)
}, _PARTIAL)[key_name]['Partial_FilePaths']
expected = [
'Source/core/timing/WorkerGlobalScopePerformance.idl',
'Source/core/timing/WorkerGlobalScopePerformance.idl',
......
......@@ -2,7 +2,6 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""generate_idl_diff.py is a script that generates a diff of two given IDL files.
Usage: generate_idl_diff.py old_file.json new_file.json diff_file.json
old_file.json: An input json file including idl data of old Chrome version
......@@ -55,8 +54,9 @@ and 'Operations'. Each item in them are called a "member".
}
"""
EXTATTRIBUTES_AND_MEMBER_TYPES = ['ExtAttributes', 'Consts', 'Attributes', 'Operations']
EXTATTRIBUTES_AND_MEMBER_TYPES = [
'ExtAttributes', 'Consts', 'Attributes', 'Operations'
]
DIFF_INSENSITIVE_FIELDS = ['Name']
DIFF_TAG = 'diff_tag'
DIFF_TAG_ADDED = 'added'
......@@ -139,7 +139,8 @@ def interfaces_diff(old_interfaces, new_interfaces):
annotated = {}
for interface_name, interface in new_interfaces.items():
if interface_name in old_interfaces:
annotated_interface, is_changed = members_diff(old_interfaces[interface_name], interface)
annotated_interface, is_changed = members_diff(
old_interfaces[interface_name], interface)
if is_changed:
annotated[interface_name] = annotated_interface
del old_interfaces[interface_name]
......@@ -166,9 +167,8 @@ def write_diff(diff, filepath):
def main(argv):
if len(argv) != 3:
sys.stdout.write(
'Usage: make_diff.py <old_file.json> <new_file.json> '
'<diff_file.json>\n')
sys.stdout.write('Usage: make_diff.py <old_file.json> <new_file.json> '
'<diff_file.json>\n')
exit(1)
old_json_file = argv[0]
new_json_file = argv[1]
......
......@@ -11,7 +11,6 @@ from blinkpy.bindings.generate_idl_diff import DIFF_TAG
from blinkpy.bindings.generate_idl_diff import DIFF_TAG_DELETED
from blinkpy.bindings.generate_idl_diff import DIFF_TAG_ADDED
testdata_path = os.path.join(
os.path.dirname(os.path.realpath(__file__)), 'testdata')
old_data_path = os.path.join(testdata_path, 'old_blink_idls.json')
......@@ -19,7 +18,6 @@ new_data_path = os.path.join(testdata_path, 'new_blink_idls.json')
class TestGenerateIDLDiff(unittest.TestCase):
def setUp(self):
old = generate_idl_diff.load_json_file(old_data_path)
new = generate_idl_diff.load_json_file(new_data_path)
......
......@@ -2,7 +2,6 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Print a diff generated by generate_idl_diff.py.
Before printing, sort the diff in the alphabetical order or the order of
diffing tags.
......@@ -24,7 +23,6 @@ from blinkpy.bindings.generate_idl_diff import DIFF_TAG
from blinkpy.bindings.generate_idl_diff import DIFF_TAG_ADDED
from blinkpy.bindings.generate_idl_diff import DIFF_TAG_DELETED
# pylint: disable=W0105
"""Refer to the explanation of generate_idl_diff.py's input files.
The deffference between the input structure of generate_idl_diff.py and
......@@ -238,8 +236,8 @@ def sort_members_in_alphabetical_order(interface):
"""
sorted_interface = OrderedDict()
for member_type in EXTATTRIBUTES_AND_MEMBER_TYPES:
sorted_members = sorted(interface[member_type],
key=lambda member: member['Name'])
sorted_members = sorted(
interface[member_type], key=lambda member: member['Name'])
sorted_interface[member_type] = sorted_members
return sorted_interface
......@@ -323,6 +321,7 @@ def print_extattributes_in_member(extattributes, out):
Args:
A list of "ExtAttributes" in the "member" object
"""
def callback(extattribute):
out.write(extattribute['Name'])
......@@ -350,6 +349,7 @@ def print_arguments(arguments, out):
"""Print arguments in a "members" object named "Operations".
Args: A list of "Arguments"
"""
def callback(argument):
out.write(argument['Name'])
......@@ -407,7 +407,8 @@ def print_diff(diff, out):
def print_usage():
"""Show usage."""
sys.stdout.write('Usage: print_diff.py <diff_file.json> <"TAG"|"ALPHABET">\n')
sys.stdout.write(
'Usage: print_diff.py <diff_file.json> <"TAG"|"ALPHABET">\n')
def main(argv):
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment