Commit b822df94 authored by Michael Lippautz, committed by Commit Bot

[oilpan] Port CallbackStack uses to Worklist

Bug: chromium:757440
Change-Id: Ie8a5d0ee4f311514e18603ae0ce643f0623a253f
Reviewed-on: https://chromium-review.googlesource.com/980212
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Kentaro Hara <haraken@chromium.org>
Cr-Commit-Position: refs/heads/master@{#545829}
parent dd7b5634
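
The diff below repeatedly replaces CallbackStack's allocate-entry/pop-item protocol with the Worklist pop-and-invoke pattern. As a quick orientation, here is a minimal, self-contained sketch of that consumer loop. SimpleWorklist, Visitor, kMainThread, and PrintCallback are illustrative stand-ins, not the Blink declarations; the real Worklist<EntryType, SegmentSize> is segmented and supports multiple task ids.

// --- Editor's sketch, not part of the CL: the pop-and-invoke pattern. ---
#include <cstdio>
#include <vector>

struct Visitor {};  // Stand-in for blink::Visitor.
using TraceCallback = void (*)(Visitor*, void*);

// Mirrors the MarkingItem/CustomCallbackItem layout introduced in heap.h.
struct CustomCallbackItem {
  void* object;
  TraceCallback callback;
};

template <typename EntryType>
class SimpleWorklist {
 public:
  void Push(int /* task_id */, EntryType entry) { entries_.push_back(entry); }
  bool Pop(int /* task_id */, EntryType* entry) {
    if (entries_.empty())
      return false;
    *entry = entries_.back();
    entries_.pop_back();
    return true;
  }
  bool IsGlobalEmpty() const { return entries_.empty(); }

 private:
  std::vector<EntryType> entries_;
};

constexpr int kMainThread = 0;  // Stand-in for WorklistTaskId::MainThread.

void PrintCallback(Visitor*, void* object) {
  std::printf("processed %p\n", object);
}

int main() {
  SimpleWorklist<CustomCallbackItem> worklist;
  int dummy = 0;
  worklist.Push(kMainThread, {&dummy, &PrintCallback});

  // The drain loop, as in ThreadHeap::PostMarkingProcessing below.
  CustomCallbackItem item;
  while (worklist.Pop(kMainThread, &item))
    item.callback(nullptr, item.object);
  return worklist.IsGlobalEmpty() ? 0 : 1;
}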
@@ -136,9 +136,9 @@ ThreadHeap::ThreadHeap(ThreadState* thread_state)
       heap_does_not_contain_cache_(std::make_unique<HeapDoesNotContainCache>()),
       free_page_pool_(std::make_unique<PagePool>()),
       marking_worklist_(nullptr),
-      not_fully_constructed_marking_stack_(CallbackStack::Create()),
-      post_marking_callback_stack_(CallbackStack::Create()),
-      weak_callback_stack_(CallbackStack::Create()),
+      not_fully_constructed_worklist_(nullptr),
+      post_marking_worklist_(nullptr),
+      weak_callback_worklist_(nullptr),
       vector_backing_arena_index_(BlinkGC::kVector1ArenaIndex),
       current_arena_ages_(0),
       should_flush_heap_does_not_contain_cache_(false) {
@@ -212,62 +212,6 @@ Address ThreadHeap::CheckAndMarkPointer(
 }
 #endif  // DCHECK_IS_ON()
 
-void ThreadHeap::PushNotFullyConstructedTraceCallback(void* object) {
-  DCHECK(thread_state_->IsInGC() || thread_state_->IsIncrementalMarking());
-  CallbackStack::Item* slot =
-      not_fully_constructed_marking_stack_->AllocateEntry();
-  *slot = CallbackStack::Item(object, nullptr);
-}
-
-bool ThreadHeap::PopAndInvokeNotFullyConstructedTraceCallback(Visitor* v) {
-  DCHECK(!thread_state_->IsIncrementalMarking());
-  MarkingVisitor* visitor = reinterpret_cast<MarkingVisitor*>(v);
-  CallbackStack::Item* item = not_fully_constructed_marking_stack_->Pop();
-  if (!item)
-    return false;
-  // This is called in the atomic marking phase. We mark all
-  // not-fully-constructed objects that have been found before this point
-  // conservatively. See comments on GarbageCollectedMixin for more details.
-  // - Objects that are fully constructed are safe to process.
-  // - Objects may still have uninitialized parts. This will not crash as
-  //   fields are zero initialized, and it is still correct as values are held
-  //   alive from other references, as it would otherwise be impossible to use
-  //   them for assignment.
-  BasePage* const page = PageFromObject(item->Object());
-  visitor->ConservativelyMarkAddress(
-      page, reinterpret_cast<Address>(item->Object()));
-  return true;
-}
-
-void ThreadHeap::PushPostMarkingCallback(void* object, TraceCallback callback) {
-  DCHECK(thread_state_->IsInGC());
-  CallbackStack::Item* slot = post_marking_callback_stack_->AllocateEntry();
-  *slot = CallbackStack::Item(object, callback);
-}
-
-bool ThreadHeap::PopAndInvokePostMarkingCallback(Visitor* visitor) {
-  if (CallbackStack::Item* item = post_marking_callback_stack_->Pop()) {
-    item->Call(visitor);
-    return true;
-  }
-  return false;
-}
-
-void ThreadHeap::PushWeakCallback(void* closure, WeakCallback callback) {
-  CallbackStack::Item* slot = weak_callback_stack_->AllocateEntry();
-  *slot = CallbackStack::Item(closure, callback);
-}
-
-bool ThreadHeap::PopAndInvokeWeakCallback(Visitor* visitor) {
-  if (CallbackStack::Item* item = weak_callback_stack_->Pop()) {
-    item->Call(visitor);
-    return true;
-  }
-  return false;
-}
-
 void ThreadHeap::RegisterWeakTable(void* table,
                                    EphemeronCallback iteration_callback) {
   DCHECK(thread_state_->IsInGC());
@@ -282,17 +226,17 @@ void ThreadHeap::RegisterWeakTable(void* table,
 
 void ThreadHeap::CommitCallbackStacks() {
   marking_worklist_.reset(new MarkingWorklist());
-  not_fully_constructed_marking_stack_->Commit();
-  post_marking_callback_stack_->Commit();
-  weak_callback_stack_->Commit();
+  not_fully_constructed_worklist_.reset(new NotFullyConstructedWorklist());
+  post_marking_worklist_.reset(new PostMarkingWorklist());
+  weak_callback_worklist_.reset(new WeakCallbackWorklist());
   DCHECK(ephemeron_callbacks_.IsEmpty());
 }
 
 void ThreadHeap::DecommitCallbackStacks() {
   marking_worklist_.reset(nullptr);
-  not_fully_constructed_marking_stack_->Decommit();
-  post_marking_callback_stack_->Decommit();
-  weak_callback_stack_->Decommit();
+  not_fully_constructed_worklist_.reset(nullptr);
+  post_marking_worklist_.reset(nullptr);
+  weak_callback_worklist_.reset(nullptr);
   ephemeron_callbacks_.clear();
 }
@@ -325,7 +269,13 @@ void ThreadHeap::ProcessMarkingStack(Visitor* visitor) {
 void ThreadHeap::MarkNotFullyConstructedObjects(Visitor* visitor) {
   TRACE_EVENT0("blink_gc", "ThreadHeap::MarkNotFullyConstructedObjects");
   DCHECK(!thread_state_->IsIncrementalMarking());
-  while (PopAndInvokeNotFullyConstructedTraceCallback(visitor)) {
+  NotFullyConstructedItem item;
+  while (
+      not_fully_constructed_worklist_->Pop(WorklistTaskId::MainThread, &item)) {
+    BasePage* const page = PageFromObject(item);
+    reinterpret_cast<MarkingVisitor*>(visitor)->ConservativelyMarkAddress(
+        page, reinterpret_cast<Address>(item));
   }
 }
@@ -381,23 +331,19 @@ bool ThreadHeap::AdvanceMarkingStackProcessing(Visitor* visitor,
 }
 
 void ThreadHeap::PostMarkingProcessing(Visitor* visitor) {
-  TRACE_EVENT0("blink_gc", "ThreadHeap::postMarkingProcessing");
-  // Call post-marking callbacks including:
-  // 1. the ephemeronIterationDone callbacks on weak tables to do cleanup
-  //    (specifically to clear the queued bits for weak hash tables), and
-  // 2. the markNoTracing callbacks on collection backings to mark them
-  //    if they are only reachable from their front objects.
-  while (PopAndInvokePostMarkingCallback(visitor)) {
+  TRACE_EVENT0("blink_gc", "ThreadHeap::PostMarkingProcessing");
+  // Call post-marking callbacks on collection backings to mark them if they
+  // are only reachable from their front objects.
+  CustomCallbackItem item;
+  while (post_marking_worklist_->Pop(WorklistTaskId::MainThread, &item)) {
+    item.callback(visitor, item.object);
   }
-  // Post-marking callbacks should not trace any objects and
-  // therefore the marking stack should be empty after the
-  // post-marking callbacks.
+  // Post-marking callbacks should not add any new objects for marking.
   DCHECK(marking_worklist_->IsGlobalEmpty());
 }
 
 void ThreadHeap::WeakProcessing(Visitor* visitor) {
-  TRACE_EVENT0("blink_gc", "ThreadHeap::weakProcessing");
+  TRACE_EVENT0("blink_gc", "ThreadHeap::WeakProcessing");
   double start_time = WTF::CurrentTimeTicksInMilliseconds();
 
   // Weak processing may access unmarked objects but are forbidden from
@@ -406,11 +352,11 @@ void ThreadHeap::WeakProcessing(Visitor* visitor) {
       ThreadState::Current());
 
   // Call weak callbacks on objects that may now be pointing to dead objects.
-  while (PopAndInvokeWeakCallback(visitor)) {
+  CustomCallbackItem item;
+  while (weak_callback_worklist_->Pop(WorklistTaskId::MainThread, &item)) {
+    item.callback(visitor, item.object);
   }
-  // It is not permitted to trace pointers of live objects in the weak
-  // callback phase, so the marking stack should still be empty here.
+  // Weak callbacks should not add any new objects for marking.
   DCHECK(marking_worklist_->IsGlobalEmpty());
   double time_for_weak_processing =
...
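
The CommitCallbackStacks()/DecommitCallbackStacks() changes above turn the three callback stacks into per-cycle worklist objects: freshly allocated when a GC cycle starts and dropped wholesale when it ends, rather than Commit()/Decommit() calls on long-lived CallbackStacks. A hedged sketch of that ownership shape; GcCycleState and WeakWorklist are illustrative stand-ins, not Blink types.

// --- Editor's sketch, not part of the CL: per-cycle worklist ownership. ---
#include <cassert>
#include <memory>
#include <vector>

struct WeakItem { void* object; };
using WeakWorklist = std::vector<WeakItem>;  // Stand-in for a Worklist type.

class GcCycleState {
 public:
  void Commit() {
    // Fresh, empty worklists at the start of each cycle.
    weak_worklist_ = std::make_unique<WeakWorklist>();
  }
  void Decommit() {
    // Dropping the object releases everything at once; no explicit
    // Decommit() call per stack as with the old CallbackStack.
    weak_worklist_.reset();
  }
  WeakWorklist* weak_worklist() { return weak_worklist_.get(); }

 private:
  std::unique_ptr<WeakWorklist> weak_worklist_;
};

int main() {
  GcCycleState state;
  state.Commit();
  assert(state.weak_worklist() && state.weak_worklist()->empty());
  state.Decommit();
  assert(!state.weak_worklist());
  return 0;
}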
@@ -55,23 +55,26 @@ namespace incremental_marking_test {
 class IncrementalMarkingScopeBase;
 }  // namespace incremental_marking_test
 
-class CallbackStack;
 class PagePool;
 class RegionTree;
 
 struct MarkingItem {
   void* object;
   TraceCallback callback;
+  // Only used for DCHECKs for Worklist::Contains.
+  bool operator==(const MarkingItem& other) const {
+    return object == other.object;
+  }
 };
 
+using CustomCallbackItem = MarkingItem;
+using NotFullyConstructedItem = void*;
+
 // Segment size of 512 entries necessary to avoid throughput regressions. Since
 // the work list is currently a temporary object this is not a problem.
 using MarkingWorklist = Worklist<MarkingItem, 512 /* local entries */>;
+using NotFullyConstructedWorklist =
+    Worklist<NotFullyConstructedItem, 16 /* local entries */>;
+using PostMarkingWorklist =
+    Worklist<CustomCallbackItem, 128 /* local entries */>;
+using WeakCallbackWorklist =
+    Worklist<CustomCallbackItem, 256 /* local entries */>;
 
 class PLATFORM_EXPORT HeapAllocHooks {
  public:
@@ -248,14 +251,16 @@ class PLATFORM_EXPORT ThreadHeap {
     return marking_worklist_.get();
   }
 
-  CallbackStack* NotFullyConstructedMarkingStack() const {
-    return not_fully_constructed_marking_stack_.get();
+  NotFullyConstructedWorklist* GetNotFullyConstructedWorklist() const {
+    return not_fully_constructed_worklist_.get();
   }
 
-  CallbackStack* PostMarkingCallbackStack() const {
-    return post_marking_callback_stack_.get();
+  PostMarkingWorklist* GetPostMarkingWorklist() const {
+    return post_marking_worklist_.get();
   }
 
-  CallbackStack* WeakCallbackStack() const {
-    return weak_callback_stack_.get();
+  WeakCallbackWorklist* GetWeakCallbackWorklist() const {
+    return weak_callback_worklist_.get();
   }
 
   void VisitPersistentRoots(Visitor*);
@@ -296,33 +301,6 @@ class PLATFORM_EXPORT ThreadHeap {
         page, const_cast<T*>(object_pointer));
   }
 
-  // Push a trace callback for not-fully-constructed objects.
-  void PushNotFullyConstructedTraceCallback(void* container_object);
-
-  // Push a trace callback on the post-marking callback stack. These
-  // callbacks are called after normal marking (including ephemeron
-  // iteration).
-  void PushPostMarkingCallback(void*, TraceCallback);
-
-  // Push a weak callback. The weak callback is called when the object
-  // doesn't get marked in the current GC.
-  void PushWeakCallback(void*, WeakCallback);
-
-  // Pop the top of the not-yet-fully-constructed objects marking stack and
-  // call the callback with the visitor and the object. Returns false when
-  // there is nothing more to do. Can only be called during the atomic pause.
-  bool PopAndInvokeNotFullyConstructedTraceCallback(Visitor*);
-
-  // Remove an item from the post-marking callback stack and call
-  // the callback with the visitor and the object pointer. Returns
-  // false when there is nothing more to do.
-  bool PopAndInvokePostMarkingCallback(Visitor*);
-
-  // Remove an item from the weak callback work list and call the callback
-  // with the visitor and the closure pointer. Returns false when there is
-  // nothing more to do.
-  bool PopAndInvokeWeakCallback(Visitor*);
-
   // Register an ephemeron table for fixed-point iteration.
   void RegisterWeakTable(void* container_object,
                          EphemeronCallback);
@@ -539,9 +517,9 @@ class PLATFORM_EXPORT ThreadHeap {
   std::unique_ptr<HeapDoesNotContainCache> heap_does_not_contain_cache_;
   std::unique_ptr<PagePool> free_page_pool_;
   std::unique_ptr<MarkingWorklist> marking_worklist_;
-  std::unique_ptr<CallbackStack> not_fully_constructed_marking_stack_;
-  std::unique_ptr<CallbackStack> post_marking_callback_stack_;
-  std::unique_ptr<CallbackStack> weak_callback_stack_;
+  std::unique_ptr<NotFullyConstructedWorklist> not_fully_constructed_worklist_;
+  std::unique_ptr<PostMarkingWorklist> post_marking_worklist_;
+  std::unique_ptr<WeakCallbackWorklist> weak_callback_worklist_;
 
   // No duplicates allowed for ephemeron callbacks. Hence, we use a hashmap
   // with the key being the HashTable.
   WTF::HashMap<void*, EphemeronCallback> ephemeron_callbacks_;
...
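
Note that the new MarkingItem::operator== above compares only the object pointer and ignores the callback; per its comment it exists solely so debug-only Worklist::Contains checks can ask whether an object is already queued. A small illustration of that design choice follows, with std::find standing in for Contains; all names besides MarkingItem's layout are hypothetical.

// --- Editor's sketch, not part of the CL: object-identity equality. ---
// Two items with the same object but different callbacks compare equal,
// which is good enough for a DCHECK asking "is this object already queued?".
#include <algorithm>
#include <cassert>
#include <vector>

using TraceCallback = void (*)(void*);

struct MarkingItem {
  void* object;
  TraceCallback callback;
  bool operator==(const MarkingItem& other) const {
    return object == other.object;
  }
};

void TraceA(void*) {}
void TraceB(void*) {}

int main() {
  int payload = 0;
  std::vector<MarkingItem> queued = {{&payload, &TraceA}};
  // Lookup by object succeeds even though the callback differs.
  MarkingItem probe{&payload, &TraceB};
  assert(std::find(queued.begin(), queued.end(), probe) != queued.end());
  return 0;
}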
@@ -46,10 +46,10 @@ class IncrementalMarkingScope : public IncrementalMarkingScopeBase {
       : IncrementalMarkingScopeBase(thread_state),
         gc_forbidden_scope_(thread_state),
         marking_worklist_(heap_.GetMarkingWorklist()),
-        not_fully_constructed_marking_stack_(
-            heap_.NotFullyConstructedMarkingStack()) {
+        not_fully_constructed_worklist_(
+            heap_.GetNotFullyConstructedWorklist()) {
     EXPECT_TRUE(marking_worklist_->IsGlobalEmpty());
-    EXPECT_TRUE(not_fully_constructed_marking_stack_->IsEmpty());
+    EXPECT_TRUE(not_fully_constructed_worklist_->IsGlobalEmpty());
     heap_.EnableIncrementalMarkingBarrier();
     thread_state->current_gc_data_.visitor =
         MarkingVisitor::Create(thread_state, MarkingVisitor::kGlobalMarking);
@@ -57,19 +57,23 @@ class IncrementalMarkingScope : public IncrementalMarkingScopeBase {
   ~IncrementalMarkingScope() {
     EXPECT_TRUE(marking_worklist_->IsGlobalEmpty());
-    EXPECT_TRUE(not_fully_constructed_marking_stack_->IsEmpty());
+    EXPECT_TRUE(not_fully_constructed_worklist_->IsGlobalEmpty());
     heap_.DisableIncrementalMarkingBarrier();
+    // Need to clear out unused worklists that might have been polluted
+    // during the test.
+    heap_.GetPostMarkingWorklist()->Clear();
+    heap_.GetWeakCallbackWorklist()->Clear();
   }
 
   MarkingWorklist* marking_worklist() const { return marking_worklist_; }
-  CallbackStack* not_fully_constructed_marking_stack() const {
-    return not_fully_constructed_marking_stack_;
+  NotFullyConstructedWorklist* not_fully_constructed_worklist() const {
+    return not_fully_constructed_worklist_;
   }
 
  protected:
   ThreadState::GCForbiddenScope gc_forbidden_scope_;
   MarkingWorklist* const marking_worklist_;
-  CallbackStack* const not_fully_constructed_marking_stack_;
+  NotFullyConstructedWorklist* const not_fully_constructed_worklist_;
 };
 
 // Expects that the write barrier fires for the objects passed to the
@@ -1514,11 +1518,12 @@ TEST(IncrementalMarkingTest, WriteBarrierDuringMixinConstruction) {
     scope.marking_worklist()->Pop(WorklistTaskId::MainThread, &item);
 
   // The mixin object should be on the not-fully-constructed object marking
   // stack.
-  EXPECT_FALSE(scope.not_fully_constructed_marking_stack()->IsEmpty());
-  CallbackStack::Item* item2 =
-      scope.not_fully_constructed_marking_stack()->Pop();
+  EXPECT_FALSE(scope.not_fully_constructed_worklist()->IsGlobalEmpty());
+  NotFullyConstructedItem item2;
+  EXPECT_TRUE(scope.not_fully_constructed_worklist()->Pop(
+      WorklistTaskId::MainThread, &item2));
   RegisteringObject* recorded_object2 =
-      reinterpret_cast<RegisteringObject*>(item2->Object());
+      reinterpret_cast<RegisteringObject*>(item2);
   EXPECT_EQ(object, recorded_object2);
 }
...
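
The IncrementalMarkingScope changes above preserve the scope's invariant (worklists empty on entry and exit) and add teardown that clears worklists a test may have populated without draining. A rough sketch of that RAII pattern, reduced to asserts and a plain vector; MarkingTestScope is a stand-in, not the test class.

// --- Editor's sketch, not part of the CL: empty-on-entry/exit test scope. ---
#include <cassert>
#include <vector>

using Worklist = std::vector<void*>;

class MarkingTestScope {
 public:
  explicit MarkingTestScope(Worklist* worklist) : worklist_(worklist) {
    assert(worklist_->empty());  // Like EXPECT_TRUE(...IsGlobalEmpty()).
  }
  ~MarkingTestScope() {
    // Clear anything the test pushed but never drained, so the next
    // scope's entry check holds (cf. heap_.Get*Worklist()->Clear() above).
    worklist_->clear();
  }

 private:
  Worklist* const worklist_;
};

int main() {
  Worklist worklist;
  {
    MarkingTestScope scope(&worklist);
    int object = 0;
    worklist.push_back(&object);  // Simulates a write barrier firing.
  }  // Destructor clears the leftover entry.
  assert(worklist.empty());
  return 0;
}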
@@ -18,6 +18,12 @@ MarkingVisitor::MarkingVisitor(ThreadState* state, MarkingMode marking_mode)
     : Visitor(state),
       marking_worklist_(Heap().GetMarkingWorklist(),
                         WorklistTaskId::MainThread),
+      not_fully_constructed_worklist_(Heap().GetNotFullyConstructedWorklist(),
+                                      WorklistTaskId::MainThread),
+      post_marking_worklist_(Heap().GetPostMarkingWorklist(),
+                             WorklistTaskId::MainThread),
+      weak_callback_worklist_(Heap().GetWeakCallbackWorklist(),
+                              WorklistTaskId::MainThread),
       marking_mode_(marking_mode) {
   // See ThreadState::runScheduledGC() why we need to already be in a
   // GCForbiddenScope before any safe point is entered.
@@ -104,12 +110,11 @@ void MarkingVisitor::MarkNoTracingCallback(Visitor* visitor, void* object) {
       HeapObjectHeader::FromPayload(object));
 }
 
-void MarkingVisitor::RegisterWeakCallback(void* closure,
-                                          WeakCallback callback) {
+void MarkingVisitor::RegisterWeakCallback(void* object, WeakCallback callback) {
   // We don't want to run weak processing when taking a snapshot.
   if (marking_mode_ == kSnapshotMarking)
     return;
-  Heap().PushWeakCallback(closure, callback);
+  weak_callback_worklist_.Push({object, callback});
 }
 
 void MarkingVisitor::RegisterBackingStoreReference(void* slot) {
...
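
RegisterWeakCallback now pushes {object, callback} through the visitor-owned worklist view instead of routing through ThreadHeap::PushWeakCallback, and it still skips registration during snapshot marking. A minimal stand-alone sketch of that guard-then-push shape; SketchMarkingVisitor and its types are illustrative.

// --- Editor's sketch, not part of the CL: guard-then-push producer side. ---
#include <cassert>
#include <vector>

struct Visitor {};
using WeakCallback = void (*)(Visitor*, void*);

struct WeakItem {
  void* object;
  WeakCallback callback;
};

enum MarkingMode { kGlobalMarking, kSnapshotMarking };

class SketchMarkingVisitor {
 public:
  explicit SketchMarkingVisitor(MarkingMode mode) : marking_mode_(mode) {}

  void RegisterWeakCallback(void* object, WeakCallback callback) {
    // Snapshot GCs must not run weak processing afterwards.
    if (marking_mode_ == kSnapshotMarking)
      return;
    weak_callback_worklist_.push_back({object, callback});
  }

  const std::vector<WeakItem>& worklist() const {
    return weak_callback_worklist_;
  }

 private:
  std::vector<WeakItem> weak_callback_worklist_;
  const MarkingMode marking_mode_;
};

void ClearIfDead(Visitor*, void*) {}

int main() {
  int object = 0;

  SketchMarkingVisitor snapshot(kSnapshotMarking);
  snapshot.RegisterWeakCallback(&object, &ClearIfDead);
  assert(snapshot.worklist().empty());  // Not queued during snapshots.

  SketchMarkingVisitor marking(kGlobalMarking);
  marking.RegisterWeakCallback(&object, &ClearIfDead);
  assert(marking.worklist().size() == 1);
  return 0;
}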
@@ -61,7 +61,7 @@ class PLATFORM_EXPORT MarkingVisitor final : public Visitor {
     if (desc.base_object_payload == BlinkGC::kNotFullyConstructedObject) {
       // This means that the objects are not-yet-fully-constructed. See comments
       // on GarbageCollectedMixin for how those objects are handled.
-      Heap().PushNotFullyConstructedTraceCallback(object);
+      not_fully_constructed_worklist_.Push(object);
       return;
     }
     // Default mark method of the trait just calls the two-argument mark
@@ -121,7 +121,7 @@ class PLATFORM_EXPORT MarkingVisitor final : public Visitor {
                                  void** object_slot,
                                  TraceDescriptor desc) final {
     RegisterBackingStoreReference(object_slot);
-    Heap().PushPostMarkingCallback(object, &MarkNoTracingCallback);
+    post_marking_worklist_.Push({object, &MarkNoTracingCallback});
   }
 
   void RegisterBackingStoreCallback(void* backing_store,
@@ -139,6 +139,9 @@ class PLATFORM_EXPORT MarkingVisitor final : public Visitor {
   void ConservativelyMarkHeader(HeapObjectHeader*);
 
   MarkingWorklist::View marking_worklist_;
+  NotFullyConstructedWorklist::View not_fully_constructed_worklist_;
+  PostMarkingWorklist::View post_marking_worklist_;
+  WeakCallbackWorklist::View weak_callback_worklist_;
   const MarkingMode marking_mode_;
 };
...
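
The new MarkingVisitor members above are Worklist<...>::View values. Judging from the constructor changes earlier in this CL, a view pairs a worklist with a task id fixed at construction, so producers can write worklist_.Push(item) without repeating WorklistTaskId::MainThread at every call site. A simplified sketch of such a wrapper; SimpleWorklist::View is illustrative, not Blink's implementation.

// --- Editor's sketch, not part of the CL: a task-id-binding View. ---
#include <cassert>
#include <utility>
#include <vector>

template <typename EntryType>
class SimpleWorklist {
 public:
  void Push(int /* task_id */, EntryType entry) {
    entries_.push_back(std::move(entry));
  }
  bool IsGlobalEmpty() const { return entries_.empty(); }

  class View {
   public:
    View(SimpleWorklist* worklist, int task_id)
        : worklist_(worklist), task_id_(task_id) {}
    // Call sites only name the item; the task id is baked in.
    void Push(EntryType entry) { worklist_->Push(task_id_, std::move(entry)); }

   private:
    SimpleWorklist* const worklist_;
    const int task_id_;
  };

 private:
  std::vector<EntryType> entries_;
};

constexpr int kMainThread = 0;  // Stand-in for WorklistTaskId::MainThread.

int main() {
  SimpleWorklist<void*> worklist;
  SimpleWorklist<void*>::View view(&worklist, kMainThread);
  int object = 0;
  view.Push(&object);  // Mirrors not_fully_constructed_worklist_.Push(object).
  assert(!worklist.IsGlobalEmpty());
  return 0;
}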