Commit 3b8f5a3b authored by Yuki Shiino, committed by Commit Bot

bind-gen: Implement bind_gen.TaskQueue

Bug: 839389
Change-Id: I4921bd4b970fdd65fd2561f22bc2832768c28063
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2228742
Reviewed-by: Hitoshi Yoshida <peria@chromium.org>
Commit-Queue: Yuki Shiino <yukishiino@chromium.org>
Cr-Commit-Position: refs/heads/master@{#774974}
parent 54c74ea7
@@ -36,7 +36,7 @@ _setup_sys_path()
 from .dictionary import generate_dictionaries
 from .enumeration import generate_enumerations
 from .interface import generate_interfaces
-from .union import generate_unions
+from .task_queue import TaskQueue


 def init(root_src_dir, root_gen_dir, component_reldirs):
@@ -2,10 +2,6 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.

-import itertools
-import multiprocessing
-import os.path
-
 import web_idl

 from . import name_style
@@ -13,7 +9,6 @@ from .blink_v8_bridge import blink_class_name
 from .blink_v8_bridge import blink_type_info
 from .blink_v8_bridge import make_default_value_expr
 from .blink_v8_bridge import make_v8_to_blink_value
-from .code_node import CodeNode
 from .code_node import Likeliness
 from .code_node import ListNode
 from .code_node import SequenceNode
@@ -39,8 +34,8 @@ from .codegen_utils import make_forward_declarations
 from .codegen_utils import make_header_include_directives
 from .codegen_utils import write_code_node_to_file
 from .mako_renderer import MakoRenderer
-from .package_initializer import package_initializer
 from .path_manager import PathManager
+from .task_queue import TaskQueue


 _DICT_MEMBER_PRESENCE_PREDICATES = {
@@ -1029,31 +1024,9 @@ def generate_dictionary(dictionary):
     write_code_node_to_file(source_node, path_manager.gen_path_to(source_path))


-def run_multiprocessing_task(args):
-    dictionary, package_initializer = args
-    package_initializer.init()
-    generate_dictionary(dictionary)
-
-
-def generate_dictionaries(web_idl_database):
-    # More processes do not mean better performance. The default size was
-    # chosen heuristically.
-    process_pool_size = 8
-    cpu_count = multiprocessing.cpu_count()
-    process_pool_size = max(1, min(cpu_count / 2, process_pool_size))
-    pool = multiprocessing.Pool(process_pool_size)
-
-    # Prior to Python3, Pool.map doesn't support user interrupts (e.g. Ctrl-C),
-    # although Pool.map_async(...).get(...) does.
-    timeout_in_sec = 3600  # Just enough long time
-    pool.map_async(
-        run_multiprocessing_task,
-        map(lambda dictionary: (dictionary, package_initializer()),
-            web_idl_database.dictionaries)).get(timeout_in_sec)
-    return
-
-    # When it is difficult to see errors in generator, use following loop
-    # instead of parallel runs above.
+def generate_dictionaries(task_queue, web_idl_database):
+    assert isinstance(task_queue, TaskQueue)
+    assert isinstance(web_idl_database, web_idl.Database)
+
     for dictionary in web_idl_database.dictionaries:
-        generate_dictionary(dictionary)
+        task_queue.post_task(generate_dictionary, dictionary)
@@ -2,6 +2,8 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.

+import web_idl
+
 from . import name_style
 from .blink_v8_bridge import blink_class_name
 from .code_node import EmptyNode
@@ -23,6 +25,7 @@ from .codegen_utils import make_header_include_directives
 from .codegen_utils import write_code_node_to_file
 from .mako_renderer import MakoRenderer
 from .path_manager import PathManager
+from .task_queue import TaskQueue


 def make_factory_methods(cg_context):
@@ -355,6 +358,9 @@ def generate_enumeration(enumeration):
     write_code_node_to_file(source_node, path_manager.gen_path_to(source_path))


-def generate_enumerations(web_idl_database):
+def generate_enumerations(task_queue, web_idl_database):
+    assert isinstance(task_queue, TaskQueue)
+    assert isinstance(web_idl_database, web_idl.Database)
+
     for enumeration in web_idl_database.enumerations:
-        generate_enumeration(enumeration)
+        task_queue.post_task(generate_enumeration, enumeration)
@@ -3,8 +3,6 @@
 # found in the LICENSE file.

 import itertools
-import multiprocessing
-import os.path

 import web_idl

@@ -47,8 +45,8 @@ from .codegen_utils import make_forward_declarations
 from .codegen_utils import make_header_include_directives
 from .codegen_utils import write_code_node_to_file
 from .mako_renderer import MakoRenderer
-from .package_initializer import package_initializer
 from .path_manager import PathManager
+from .task_queue import TaskQueue


 def _is_none_or_str(arg):
@@ -6839,42 +6837,25 @@ def generate_init_idl_interfaces(web_idl_database,
     write_code_node_to_file(source_node, path_manager.gen_path_to(source_path))


-def run_multiprocessing_task(args):
-    interface, package_initializer = args
-    package_initializer.init()
-    generate_interface(interface)
-
-
-def generate_interfaces(web_idl_database):
+def generate_interfaces(task_queue, web_idl_database):
+    assert isinstance(task_queue, TaskQueue)
     assert isinstance(web_idl_database, web_idl.Database)

-    generate_install_properties_per_feature(
-        web_idl_database, "InstallPropertiesPerFeature",
-        "properties_per_feature_installer")
-    generate_install_properties_per_feature(
-        web_idl_database,
-        "InstallPropertiesPerFeatureForTesting",
-        "properties_per_feature_installer_for_testing",
-        for_testing=True)
-    generate_init_idl_interfaces(web_idl_database, "InitIDLInterfaces",
-                                 "init_idl_interfaces")
-    generate_init_idl_interfaces(
-        web_idl_database,
-        "InitIDLInterfacesForTesting",
-        "init_idl_interfaces_for_testing",
-        for_testing=True)
-
-    # More processes do not mean better performance. The default size was
-    # chosen heuristically.
-    process_pool_size = 8
-    cpu_count = multiprocessing.cpu_count()
-    process_pool_size = max(1, min(cpu_count / 2, process_pool_size))
-    pool = multiprocessing.Pool(process_pool_size)
-
-    # Prior to Python3, Pool.map doesn't support user interrupts (e.g. Ctrl-C),
-    # although Pool.map_async(...).get(...) does.
-    timeout_in_sec = 3600  # Just enough long time
-    pool.map_async(
-        run_multiprocessing_task,
-        map(lambda interface: (interface, package_initializer()),
-            web_idl_database.interfaces)).get(timeout_in_sec)
+    for interface in web_idl_database.interfaces:
+        task_queue.post_task(generate_interface, interface)
+
+    task_queue.post_task(generate_install_properties_per_feature,
+                         web_idl_database, "InstallPropertiesPerFeature",
+                         "properties_per_feature_installer")
+    task_queue.post_task(generate_install_properties_per_feature,
+                         web_idl_database,
+                         "InstallPropertiesPerFeatureForTesting",
+                         "properties_per_feature_installer_for_testing",
+                         for_testing=True)
+    task_queue.post_task(generate_init_idl_interfaces, web_idl_database,
+                         "InitIDLInterfaces", "init_idl_interfaces")
+    task_queue.post_task(generate_init_idl_interfaces,
+                         web_idl_database,
+                         "InitIDLInterfacesForTesting",
+                         "init_idl_interfaces_for_testing",
+                         for_testing=True)
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import multiprocessing

from .package_initializer import package_initializer


class TaskQueue(object):
    """
    Represents a task queue to run tasks with using a worker pool. Scheduled
    tasks will be executed in parallel.
    """

    def __init__(self):
        # More processes do not mean better performance. The pool size was
        # chosen heuristically.
        cpu_count = multiprocessing.cpu_count()
        self._pool_size = max(2, cpu_count / 4)
        self._pool = multiprocessing.Pool(self._pool_size,
                                          package_initializer().init)
        self._requested_tasks = []  # List of (func, args, kwargs)
        self._worker_tasks = []  # List of multiprocessing.pool.AsyncResult
        self._did_run = False

    def post_task(self, func, *args, **kwargs):
        """
        Schedules a new task to be executed when |run| method is invoked. This
        method does not kick any execution, only puts a new task in the queue.
        """
        assert not self._did_run
        self._requested_tasks.append((func, args, kwargs))

    def run(self, report_progress=None):
        """
        Executes all scheduled tasks.

        Args:
            report_progress: A callable that takes two arguments, total number
                of worker tasks and number of completed worker tasks.
                Scheduled tasks are reorganized into worker tasks, so the
                number of worker tasks may be different from the number of
                scheduled tasks.
        """
        assert report_progress is None or callable(report_progress)
        assert not self._did_run
        assert not self._worker_tasks
        self._did_run = True

        num_of_requested_tasks = len(self._requested_tasks)
        chunk_size = min(20, num_of_requested_tasks / (4 * self._pool_size))
        i = 0
        while i < num_of_requested_tasks:
            tasks = self._requested_tasks[i:i + chunk_size]
            i += chunk_size
            self._worker_tasks.append(
                self._pool.apply_async(_task_queue_run_tasks, [tasks]))
        self._pool.close()

        timeout_in_sec = 2
        while True:
            self._report_worker_task_progress(report_progress)
            for worker_task in self._worker_tasks:
                if not worker_task.ready():
                    worker_task.wait(timeout_in_sec)
                    break
                if not worker_task.successful():
                    worker_task.get()  # Let |get()| raise an exception.
                    assert False
            else:
                break
        self._pool.join()

    def _report_worker_task_progress(self, report_progress):
        assert report_progress is None or callable(report_progress)

        if not report_progress:
            return

        done_count = reduce(
            lambda count, worker_task: count + bool(worker_task.ready()),
            self._worker_tasks, 0)
        report_progress(len(self._worker_tasks), done_count)


def _task_queue_run_tasks(tasks):
    for task in tasks:
        func, args, kwargs = task
        apply(func, args, kwargs)
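For orientation, here is a minimal usage sketch of the class above, modeled on the generate_bindings.py change below. It assumes Python 2 semantics (matching the codebase at the time: apply(), reduce(), and integer division), that the bind_gen package is importable and already initialized, and it uses a hypothetical generate_stub() task body; none of these names are part of this CL.

import sys

import bind_gen  # bind_gen/__init__.py re-exports TaskQueue in this CL


def generate_stub(name):
    # Task bodies must be picklable, module-level callables because they are
    # shipped to worker processes via multiprocessing.
    sys.stdout.write("generated bindings for {}\n".format(name))


def report_progress(total, done):
    # Matches the signature TaskQueue.run() expects: worker tasks total/done.
    sys.stdout.write("{} / {} worker tasks done\r".format(done, total))
    sys.stdout.flush()


# In real use, bind_gen.init(...) must already have been called (as main() in
# generate_bindings.py does below) so that package_initializer() has state to
# replay inside each worker process.
task_queue = bind_gen.TaskQueue()
for index in range(500):  # enough tasks that run() forms non-empty chunks
    # post_task() only records (func, args, kwargs); nothing executes yet.
    task_queue.post_task(generate_stub, "Interface{}".format(index))
# run() groups the recorded tasks into chunks, hands each chunk to the worker
# pool, and blocks until every chunk finishes, re-raising any worker exception.
task_queue.run(report_progress)

With the chunking in run(), for example, 500 posted tasks on a pool of 8 workers (cpu_count / 4 on a 32-core machine) give chunk_size = min(20, 500 / (4 * 8)) = 15, so the 500 scheduled tasks become 34 worker tasks; this is why report_progress() receives worker-task counts rather than scheduled-task counts.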
@@ -59,7 +59,6 @@ def main():
         'dictionary': bind_gen.generate_dictionaries,
         'enumeration': bind_gen.generate_enumerations,
         'interface': bind_gen.generate_interfaces,
-        'union': bind_gen.generate_unions,
     }

     for task in tasks:
@@ -72,14 +71,27 @@ def main():
         web_idl.Component('core'): options.output_core_reldir,
         web_idl.Component('modules'): options.output_modules_reldir,
     }
     bind_gen.init(
         root_src_dir=options.root_src_dir,
         root_gen_dir=options.root_gen_dir,
         component_reldirs=component_reldirs)

+    task_queue = bind_gen.TaskQueue()
+
     for task in tasks:
-        dispatch_table[task](web_idl_database=web_idl_database)
+        dispatch_table[task](task_queue=task_queue,
+                             web_idl_database=web_idl_database)
+
+    def report_progress(total, done):
+        out = sys.stdout
+        if not out.isatty():
+            return
+        percentage = int(float(done) / float(total) * 100)
+        message = "Blink-V8 bindings generation: {}% done\r".format(percentage)
+        out.write(message)
+        out.flush()
+
+    task_queue.run(report_progress)


 if __name__ == '__main__':
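The dictionary, enumeration, and interface generators above now share one calling convention: they take (task_queue, web_idl_database), assert both types, and post one task per IDL definition instead of running their own process pool. A hypothetical new generator kind would plug in the same way; the sketch below is illustrative only, generate_callback_function(s) are not part of this CL, and the web_idl_database.callback_functions accessor is assumed to exist alongside the .dictionaries / .enumerations / .interfaces accessors used above.

# Hypothetical sketch of another generator following the new convention.
import web_idl

from .task_queue import TaskQueue


def generate_callback_function(callback_function):
    pass  # would emit the binding files for one callback function


def generate_callback_functions(task_queue, web_idl_database):
    assert isinstance(task_queue, TaskQueue)
    assert isinstance(web_idl_database, web_idl.Database)

    # Only record the work; TaskQueue.run() executes it later in parallel.
    for callback_function in web_idl_database.callback_functions:
        task_queue.post_task(generate_callback_function, callback_function)

generate_bindings.py would then map a new task name to this function in dispatch_table, and the single task_queue.run() call at the end of main() would still execute everything through one worker pool.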
@@ -45,7 +45,7 @@ bind_gen/name_style.py
 bind_gen/package_initializer.py
 bind_gen/path_manager.py
 bind_gen/style_format.py
-bind_gen/union.py
+bind_gen/task_queue.py
 generate_bindings.py
 web_idl/__init__.py
 web_idl/argument.py