Commit b47f3d35 authored by Omer Katz, committed by Commit Bot

heap: Replacing ephemeron_callbacks_ with a worklist.

I added a worklist for registering ephemeron callbacks. ThreadHeap previously
used a HashMap for that purpose. The HashMap remains, since it is also used to
verify that callbacks are consistent.
MarkingVisitor adds entries to the worklist instead of the HashMap. ThreadHeap
then drains the worklist into the HashMap, invoking callbacks as it goes.

Bug: 986235
Change-Id: Ie716281ba216a083146bff7a78abe06621befcba
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1718371
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Kentaro Hara <haraken@chromium.org>
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#681733}
parent 20a56ff0
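Before the per-file hunks, here is a minimal, self-contained sketch of the pattern the commit message describes: the visitor only pushes (table, callback) pairs onto a worklist, and the heap drains that worklist into its deduplicating map, invoking each newly seen callback (which may in turn register further tables). FakeVisitor, FakeHeap, SimpleWorklist, and the standard-library containers below are illustrative stand-ins, not the Blink types (Worklist, MarkingVisitorBase, ThreadHeap) used in the actual change.

// Standalone sketch of the worklist -> map drain described in the message above.
#include <deque>
#include <functional>
#include <iostream>
#include <unordered_map>

struct FakeVisitor;  // Stand-in for the marking visitor.
using EphemeronCallback = std::function<void(FakeVisitor*, void*)>;

// Item recorded by the visitor: the ephemeron table plus its iteration callback.
struct WeakTableItem {
  void* table;
  EphemeronCallback callback;
};

// Stand-in for the segmented Worklist: visitors push, the heap pops.
using SimpleWorklist = std::deque<WeakTableItem>;

struct FakeVisitor {
  // RegisterWeakTable() no longer writes into the heap's map directly; it only
  // records the (table, callback) pair on the worklist.
  void RegisterWeakTable(void* table, EphemeronCallback cb) {
    worklist->push_back({table, std::move(cb)});
  }
  SimpleWorklist* worklist;
};

struct FakeHeap {
  // Drains the worklist into the map (which still guarantees one callback per
  // table) and invokes each newly discovered callback. Callbacks may register
  // further tables, so keep popping until the worklist is empty.
  void InvokeEphemeronCallbacks(FakeVisitor* visitor) {
    // Re-run callbacks already known from previous marking iterations.
    for (auto& entry : ephemeron_callbacks_)
      entry.second(visitor, entry.first);
    // Then process callbacks newly recorded by the visitor.
    while (!worklist_.empty()) {
      WeakTableItem item = worklist_.front();
      worklist_.pop_front();
      auto result = ephemeron_callbacks_.insert({item.table, item.callback});
      if (result.second)  // New entry: invoke it; duplicates are ignored.
        item.callback(visitor, item.table);
    }
  }
  SimpleWorklist worklist_;
  std::unordered_map<void*, EphemeronCallback> ephemeron_callbacks_;
};

int main() {
  FakeHeap heap;
  FakeVisitor visitor{&heap.worklist_};
  int table_a = 0, table_b = 0;
  // Iterating table A discovers table B, mimicking a callback that registers a
  // newly reachable ephemeron table during the fixed-point iteration.
  visitor.RegisterWeakTable(&table_a, [&](FakeVisitor* v, void*) {
    std::cout << "iterating table A\n";
    v->RegisterWeakTable(&table_b, [](FakeVisitor*, void*) {
      std::cout << "iterating table B\n";
    });
  });
  heap.InvokeEphemeronCallbacks(&visitor);
  return 0;
}

Running the sketch prints both tables, showing that a callback registered while draining the worklist is still picked up before the drain finishes, which is the property the real change preserves.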
@@ -99,6 +99,7 @@ ThreadHeap::ThreadHeap(ThreadState* thread_state)
       not_fully_constructed_worklist_(nullptr),
       weak_callback_worklist_(nullptr),
       movable_reference_worklist_(nullptr),
+      weak_table_worklist_(nullptr),
       vector_backing_arena_index_(BlinkGC::kVector1ArenaIndex),
       current_arena_ages_(0) {
   if (ThreadState::Current()->IsMainThread())
@@ -150,18 +151,6 @@ Address ThreadHeap::CheckAndMarkPointer(MarkingVisitor* visitor,
   return nullptr;
 }

-void ThreadHeap::RegisterWeakTable(void* table,
-                                   EphemeronCallback iteration_callback) {
-  DCHECK(thread_state_->InAtomicMarkingPause());
-#if DCHECK_IS_ON()
-  auto result = ephemeron_callbacks_.insert(table, iteration_callback);
-  DCHECK(result.is_new_entry ||
-         result.stored_value->value == iteration_callback);
-#else
-  ephemeron_callbacks_.insert(table, iteration_callback);
-#endif  // DCHECK_IS_ON()
-}
-
 void ThreadHeap::SetupWorklists() {
   marking_worklist_.reset(new MarkingWorklist());
   not_fully_constructed_worklist_.reset(new NotFullyConstructedWorklist());
@@ -169,6 +158,7 @@ void ThreadHeap::SetupWorklists() {
       new NotFullyConstructedWorklist());
   weak_callback_worklist_.reset(new WeakCallbackWorklist());
   movable_reference_worklist_.reset(new MovableReferenceWorklist());
+  weak_table_worklist_.reset(new WeakTableWorklist);
   DCHECK(ephemeron_callbacks_.IsEmpty());
 }
@@ -176,6 +166,7 @@ void ThreadHeap::DestroyMarkingWorklists(BlinkGC::StackState stack_state) {
   marking_worklist_.reset(nullptr);
   previously_not_fully_constructed_worklist_.reset(nullptr);
   weak_callback_worklist_.reset(nullptr);
+  weak_table_worklist_.reset();
   ephemeron_callbacks_.clear();

   // The fixed point iteration may have found not-fully-constructed objects.
@@ -250,28 +241,39 @@ void ThreadHeap::MarkNotFullyConstructedObjects(MarkingVisitor* visitor) {
   }
 }

-void ThreadHeap::InvokeEphemeronCallbacks(Visitor* visitor) {
+void ThreadHeap::InvokeEphemeronCallbacks(MarkingVisitor* visitor) {
   // Mark any strong pointers that have now become reachable in ephemeron maps.
   ThreadHeapStatsCollector::Scope stats_scope(
       stats_collector(),
       ThreadHeapStatsCollector::kMarkInvokeEphemeronCallbacks);

-  // Avoid supporting a subtle scheme that allows insertion while iterating
-  // by just creating temporary lists for iteration and sinking.
-  WTF::HashMap<void*, EphemeronCallback> iteration_set;
-  WTF::HashMap<void*, EphemeronCallback> final_set;
-
-  bool found_new = false;
-  do {
-    iteration_set = std::move(ephemeron_callbacks_);
-    ephemeron_callbacks_.clear();
-    for (auto& tuple : iteration_set) {
-      final_set.insert(tuple.key, tuple.value);
-      tuple.value(visitor, tuple.key);
-    }
-    found_new = !ephemeron_callbacks_.IsEmpty();
-  } while (found_new);
-  ephemeron_callbacks_ = std::move(final_set);
+  // MarkingVisitor records callbacks for weak tables in a worklist.
+  // ThreadHeap reads from the worklist before invoking ephemeron callbacks.
+  // We need to flush the callback worklist so ThreadHeap can access it.
+  //
+  // TODO(omerkatz): In concurrent marking, this should flush all visitors.
+  if (thread_state_->in_atomic_pause())
+    visitor->FlushWeakTableCallbacks();
+
+  // We first reiterate over known callbacks from previous iterations.
+  for (auto& tuple : ephemeron_callbacks_)
+    tuple.value(visitor, tuple.key);
+
+  // Then we iterate over the new callbacks found by the marking visitor.
+  while (!weak_table_worklist_->IsGlobalEmpty()) {
+    // Read ephemeron callbacks from worklist to ephemeron_callbacks_ hashmap.
+    WeakTableWorklist::View ephemerons_worklist(weak_table_worklist_.get(),
+                                                WorklistTaskId::MainThread);
+    WeakTableItem item;
+    while (ephemerons_worklist.Pop(&item)) {
+      auto result = ephemeron_callbacks_.insert(item.object, item.callback);
+      DCHECK(result.is_new_entry ||
+             result.stored_value->value == item.callback);
+      if (result.is_new_entry) {
+        item.callback(visitor, item.object);
+      }
+    }
+  }
 }

 namespace {
...
@@ -67,6 +67,7 @@ struct MarkingItem {
 using CustomCallbackItem = MarkingItem;
 using NotFullyConstructedItem = void*;
+using WeakTableItem = MarkingItem;

 // Segment size of 512 entries necessary to avoid throughput regressions. Since
 // the work list is currently a temporary object this is not a problem.
@@ -79,6 +80,7 @@ using WeakCallbackWorklist =
 // regressions.
 using MovableReferenceWorklist =
     Worklist<MovableReference*, 512 /* local entries */>;
+using WeakTableWorklist = Worklist<WeakTableItem, 16 /* local entries */>;

 class PLATFORM_EXPORT HeapAllocHooks {
   STATIC_ONLY(HeapAllocHooks);
@@ -221,6 +223,10 @@ class PLATFORM_EXPORT ThreadHeap {
     return movable_reference_worklist_.get();
   }

+  WeakTableWorklist* GetWeakTableWorklist() const {
+    return weak_table_worklist_.get();
+  }
+
   // Register an ephemeron table for fixed-point iteration.
   void RegisterWeakTable(void* container_object,
                          EphemeronCallback);
@@ -407,7 +413,7 @@ class PLATFORM_EXPORT ThreadHeap {
   void DestroyMarkingWorklists(BlinkGC::StackState);
   void DestroyCompactionWorklists();

-  void InvokeEphemeronCallbacks(Visitor*);
+  void InvokeEphemeronCallbacks(MarkingVisitor*);

   ThreadState* thread_state_;
   std::unique_ptr<ThreadHeapStatsCollector> heap_stats_collector_;
@@ -443,6 +449,10 @@ class PLATFORM_EXPORT ThreadHeap {
   // created at the atomic pause phase.
   std::unique_ptr<MovableReferenceWorklist> movable_reference_worklist_;

+  // Worklist of ephemeron callbacks. Used to pass new callbacks from
+  // MarkingVisitor to ThreadHeap.
+  std::unique_ptr<WeakTableWorklist> weak_table_worklist_;
+
   // No duplicates allowed for ephemeron callbacks. Hence, we use a hashmap
   // with the key being the HashTable.
   WTF::HashMap<void*, EphemeronCallback> ephemeron_callbacks_;
...
@@ -28,6 +28,8 @@ MarkingVisitorBase::MarkingVisitorBase(ThreadState* state,
                               WorklistTaskId::MainThread),
       movable_reference_worklist_(Heap().GetMovableReferenceWorklist(),
                                   WorklistTaskId::MainThread),
+      weak_table_worklist_(Heap().GetWeakTableWorklist(),
+                           WorklistTaskId::MainThread),
       marking_mode_(marking_mode) {
   DCHECK(state->InAtomicMarkingPause());
 #if DCHECK_IS_ON()
@@ -72,12 +74,14 @@ void MarkingVisitorBase::RegisterBackingStoreCallback(
 bool MarkingVisitorBase::RegisterWeakTable(
     const void* closure,
     EphemeronCallback iteration_callback) {
-  // TODO(mlippautz): Do not call into heap directly but rather use a Worklist
-  // as temporary storage.
-  Heap().RegisterWeakTable(const_cast<void*>(closure), iteration_callback);
+  weak_table_worklist_.Push({const_cast<void*>(closure), iteration_callback});
   return true;
 }

+void MarkingVisitorBase::FlushWeakTableCallbacks() {
+  weak_table_worklist_.FlushToGlobal();
+}
+
 void MarkingVisitor::WriteBarrierSlow(void* value) {
   if (!value || IsHashTableDeleteValue(value))
     return;
...
@@ -106,6 +106,8 @@ class PLATFORM_EXPORT MarkingVisitorBase : public Visitor {
   // Flush private segments remaining in visitor's worklists to global pools.
   void FlushCompactionWorklists();

+  void FlushWeakTableCallbacks();
+
   size_t marked_bytes() const { return marked_bytes_; }

  protected:

@@ -129,6 +131,7 @@ class PLATFORM_EXPORT MarkingVisitorBase : public Visitor {
   NotFullyConstructedWorklist::View not_fully_constructed_worklist_;
   WeakCallbackWorklist::View weak_callback_worklist_;
   MovableReferenceWorklist::View movable_reference_worklist_;
+  WeakTableWorklist::View weak_table_worklist_;

   size_t marked_bytes_ = 0;
   const MarkingMode marking_mode_;
 };
...