Commit 9655730f authored by Haruka Matsumura, committed by Commit Bot

Oilpan: Change callback registration in HeapCompaction

This CL changes the system of callback registration to slot-based as with references.
Whenever HeapLinkedHashSet rehashes and impl_.table_ changes while incremental marking is in progress, the registered callback would not be invoked for the new backing store.
So, in preparation for HeapCompaction with IncrementalMarking, we change the HashMap registration key from references (backing store addresses) to slots.

Bug: 864425
Change-Id: I5af93f1c56d0a76952e3347c4f3f1ba4f271047b
Reviewed-on: https://chromium-review.googlesource.com/1164881
Commit-Queue: Haruka Matsumura <harukamt@google.com>
Reviewed-by: Keishi Hattori <keishi@chromium.org>
Reviewed-by: Kentaro Hara <haraken@chromium.org>
Cr-Commit-Position: refs/heads/master@{#582104}
parent a6a2898f
...@@ -30,7 +30,8 @@ class PLATFORM_EXPORT ScriptWrappableVisitor : public Visitor { ...@@ -30,7 +30,8 @@ class PLATFORM_EXPORT ScriptWrappableVisitor : public Visitor {
WeakCallback, WeakCallback,
void*) final {} void*) final {}
void VisitBackingStoreOnly(void*, void**) final {} void VisitBackingStoreOnly(void*, void**) final {}
void RegisterBackingStoreCallback(void*, MovingObjectCallback, void*) final {} void RegisterBackingStoreCallback(void**, MovingObjectCallback, void*) final {
}
void RegisterWeakCallback(void*, WeakCallback) final {} void RegisterWeakCallback(void*, WeakCallback) final {}
protected: protected:
......
...@@ -199,16 +199,13 @@ HeapCompact* ThreadHeap::Compaction() { ...@@ -199,16 +199,13 @@ HeapCompact* ThreadHeap::Compaction() {
} }
void ThreadHeap::RegisterMovingObjectReference(MovableReference* slot) { void ThreadHeap::RegisterMovingObjectReference(MovableReference* slot) {
DCHECK(slot);
Compaction()->RegisterMovingObjectReference(slot); Compaction()->RegisterMovingObjectReference(slot);
} }
void ThreadHeap::RegisterMovingObjectCallback(MovableReference reference, void ThreadHeap::RegisterMovingObjectCallback(MovableReference* slot,
MovingObjectCallback callback, MovingObjectCallback callback,
void* callback_data) { void* callback_data) {
DCHECK(reference); Compaction()->RegisterMovingObjectCallback(slot, callback, callback_data);
Compaction()->RegisterMovingObjectCallback(reference, callback,
callback_data);
} }
void ThreadHeap::MarkNotFullyConstructedObjects(MarkingVisitor* visitor) { void ThreadHeap::MarkNotFullyConstructedObjects(MarkingVisitor* visitor) {
......
...@@ -262,7 +262,7 @@ class PLATFORM_EXPORT ThreadHeap { ...@@ -262,7 +262,7 @@ class PLATFORM_EXPORT ThreadHeap {
// //
// For Blink, |HeapLinkedHashSet<>| is currently the only abstraction which // For Blink, |HeapLinkedHashSet<>| is currently the only abstraction which
// relies on this feature. // relies on this feature.
void RegisterMovingObjectCallback(MovableReference, void RegisterMovingObjectCallback(MovableReference*,
MovingObjectCallback, MovingObjectCallback,
void* callback_data); void* callback_data);
......
...@@ -192,11 +192,11 @@ class PLATFORM_EXPORT HeapAllocator { ...@@ -192,11 +192,11 @@ class PLATFORM_EXPORT HeapAllocator {
template <typename T, typename VisitorDispatcher> template <typename T, typename VisitorDispatcher>
static void RegisterBackingStoreCallback(VisitorDispatcher visitor, static void RegisterBackingStoreCallback(VisitorDispatcher visitor,
T* backing_store, T** backing_store_slot,
MovingObjectCallback callback, MovingObjectCallback callback,
void* callback_data) { void* callback_data) {
visitor->RegisterBackingStoreCallback(backing_store, callback, visitor->RegisterBackingStoreCallback(
callback_data); reinterpret_cast<void**>(backing_store_slot), callback, callback_data);
} }
static void EnterGCForbiddenScope() { static void EnterGCForbiddenScope() {
......
...@@ -115,12 +115,12 @@ class HeapCompact::MovableObjectFixups final { ...@@ -115,12 +115,12 @@ class HeapCompact::MovableObjectFixups final {
AddInteriorFixup(slot); AddInteriorFixup(slot);
} }
void AddFixupCallback(MovableReference reference, void AddFixupCallback(MovableReference* slot,
MovingObjectCallback callback, MovingObjectCallback callback,
void* callback_data) { void* callback_data) {
DCHECK(!fixup_callbacks_.Contains(reference)); DCHECK(!fixup_callbacks_.Contains(slot));
fixup_callbacks_.insert(reference, std::pair<void*, MovingObjectCallback>( fixup_callbacks_.insert(
callback_data, callback)); slot, std::pair<void*, MovingObjectCallback>(callback_data, callback));
} }
void RelocateInteriorFixups(Address from, Address to, size_t size) { void RelocateInteriorFixups(Address from, Address to, size_t size) {
...@@ -226,7 +226,9 @@ class HeapCompact::MovableObjectFixups final { ...@@ -226,7 +226,9 @@ class HeapCompact::MovableObjectFixups final {
*slot = to; *slot = to;
size_t size = 0; size_t size = 0;
auto callback = fixup_callbacks_.find(from); MovableReference* callback_slot =
reinterpret_cast<MovableReference*>(it->value);
auto callback = fixup_callbacks_.find(callback_slot);
if (UNLIKELY(callback != fixup_callbacks_.end())) { if (UNLIKELY(callback != fixup_callbacks_.end())) {
size = HeapObjectHeader::FromPayload(to)->PayloadSize(); size = HeapObjectHeader::FromPayload(to)->PayloadSize();
callback->value.second(callback->value.first, from, to, size); callback->value.second(callback->value.first, from, to, size);
...@@ -265,7 +267,7 @@ class HeapCompact::MovableObjectFixups final { ...@@ -265,7 +267,7 @@ class HeapCompact::MovableObjectFixups final {
// Map from movable reference to callbacks that need to be invoked // Map from movable reference to callbacks that need to be invoked
// when the object moves. // when the object moves.
HashMap<MovableReference, std::pair<void*, MovingObjectCallback>> HashMap<MovableReference*, std::pair<void*, MovingObjectCallback>>
fixup_callbacks_; fixup_callbacks_;
// Slot => relocated slot/final location. // Slot => relocated slot/final location.
...@@ -382,13 +384,13 @@ void HeapCompact::RegisterMovingObjectReference(MovableReference* slot) { ...@@ -382,13 +384,13 @@ void HeapCompact::RegisterMovingObjectReference(MovableReference* slot) {
traced_slots_.insert(slot); traced_slots_.insert(slot);
} }
void HeapCompact::RegisterMovingObjectCallback(MovableReference reference, void HeapCompact::RegisterMovingObjectCallback(MovableReference* slot,
MovingObjectCallback callback, MovingObjectCallback callback,
void* callback_data) { void* callback_data) {
if (!do_compact_) if (!do_compact_)
return; return;
Fixups().AddFixupCallback(reference, callback, callback_data); Fixups().AddFixupCallback(slot, callback, callback_data);
} }
void HeapCompact::UpdateHeapResidency() { void HeapCompact::UpdateHeapResidency() {
......
...@@ -81,7 +81,7 @@ class PLATFORM_EXPORT HeapCompact final { ...@@ -81,7 +81,7 @@ class PLATFORM_EXPORT HeapCompact final {
void RegisterMovingObjectReference(MovableReference* slot); void RegisterMovingObjectReference(MovableReference* slot);
// See |Heap::registerMovingObjectCallback()| documentation. // See |Heap::registerMovingObjectCallback()| documentation.
void RegisterMovingObjectCallback(MovableReference, void RegisterMovingObjectCallback(MovableReference*,
MovingObjectCallback, MovingObjectCallback,
void* callback_data); void* callback_data);
...@@ -165,7 +165,7 @@ class PLATFORM_EXPORT HeapCompact final { ...@@ -165,7 +165,7 @@ class PLATFORM_EXPORT HeapCompact final {
// the range of BlinkGC::ArenaIndices. // the range of BlinkGC::ArenaIndices.
unsigned compactable_arenas_; unsigned compactable_arenas_;
// The set is to remember slots traced during the incremental and atomic // The set is to remember slots that traced during
// marking phases. The mapping between the slots and the backing stores are // marking phases. The mapping between the slots and the backing stores are
// created at the atomic pause phase. // created at the atomic pause phase.
HashSet<MovableReference*> traced_slots_; HashSet<MovableReference*> traced_slots_;
......
...@@ -65,7 +65,7 @@ class BackingVisitor : public Visitor { ...@@ -65,7 +65,7 @@ class BackingVisitor : public Visitor {
WeakCallback, WeakCallback,
void*) final {} void*) final {}
void VisitBackingStoreOnly(void*, void**) final {} void VisitBackingStoreOnly(void*, void**) final {}
void RegisterBackingStoreCallback(void* backing_store, void RegisterBackingStoreCallback(void** slot,
MovingObjectCallback, MovingObjectCallback,
void* callback_data) final {} void* callback_data) final {}
void RegisterWeakCallback(void* closure, WeakCallback) final {} void RegisterWeakCallback(void* closure, WeakCallback) final {}
......
...@@ -53,7 +53,8 @@ class MarkingVerifier final : public Visitor { ...@@ -53,7 +53,8 @@ class MarkingVerifier final : public Visitor {
WeakCallback, WeakCallback,
void*) final {} void*) final {}
void VisitBackingStoreOnly(void*, void**) final {} void VisitBackingStoreOnly(void*, void**) final {}
void RegisterBackingStoreCallback(void*, MovingObjectCallback, void*) final {} void RegisterBackingStoreCallback(void**, MovingObjectCallback, void*) final {
}
void RegisterWeakCallback(void*, WeakCallback) final {} void RegisterWeakCallback(void*, WeakCallback) final {}
void Visit(const TraceWrapperV8Reference<v8::Value>&) final {} void Visit(const TraceWrapperV8Reference<v8::Value>&) final {}
void Visit(DOMWrapperMap<ScriptWrappable>*, void Visit(DOMWrapperMap<ScriptWrappable>*,
......
...@@ -117,21 +117,20 @@ void MarkingVisitor::RegisterWeakCallback(void* object, WeakCallback callback) { ...@@ -117,21 +117,20 @@ void MarkingVisitor::RegisterWeakCallback(void* object, WeakCallback callback) {
weak_callback_worklist_.Push({object, callback}); weak_callback_worklist_.Push({object, callback});
} }
void MarkingVisitor::RegisterBackingStoreReference(void* slot) { void MarkingVisitor::RegisterBackingStoreReference(void** slot) {
if (marking_mode_ != kGlobalMarkingWithCompaction) if (marking_mode_ != kGlobalMarkingWithCompaction)
return; return;
Heap().RegisterMovingObjectReference( Heap().RegisterMovingObjectReference(
reinterpret_cast<MovableReference*>(slot)); reinterpret_cast<MovableReference*>(slot));
} }
void MarkingVisitor::RegisterBackingStoreCallback(void* backing_store, void MarkingVisitor::RegisterBackingStoreCallback(void** slot,
MovingObjectCallback callback, MovingObjectCallback callback,
void* callback_data) { void* callback_data) {
if (marking_mode_ != kGlobalMarkingWithCompaction) if (marking_mode_ != kGlobalMarkingWithCompaction)
return; return;
Heap().RegisterMovingObjectCallback( Heap().RegisterMovingObjectCallback(reinterpret_cast<MovableReference*>(slot),
reinterpret_cast<MovableReference>(backing_store), callback, callback, callback_data);
callback_data);
} }
bool MarkingVisitor::RegisterWeakTable(const void* closure, bool MarkingVisitor::RegisterWeakTable(const void* closure,
......
...@@ -148,7 +148,7 @@ class PLATFORM_EXPORT MarkingVisitor final : public Visitor { ...@@ -148,7 +148,7 @@ class PLATFORM_EXPORT MarkingVisitor final : public Visitor {
MarkHeaderNoTracing(HeapObjectHeader::FromPayload(object)); MarkHeaderNoTracing(HeapObjectHeader::FromPayload(object));
} }
void RegisterBackingStoreCallback(void* backing_store, void RegisterBackingStoreCallback(void** slot,
MovingObjectCallback, MovingObjectCallback,
void* callback_data) final; void* callback_data) final;
bool RegisterWeakTable(const void* closure, bool RegisterWeakTable(const void* closure,
...@@ -165,7 +165,7 @@ class PLATFORM_EXPORT MarkingVisitor final : public Visitor { ...@@ -165,7 +165,7 @@ class PLATFORM_EXPORT MarkingVisitor final : public Visitor {
static void WriteBarrierSlow(void*); static void WriteBarrierSlow(void*);
static void TraceMarkedBackingStoreSlow(void*); static void TraceMarkedBackingStoreSlow(void*);
void RegisterBackingStoreReference(void* slot); void RegisterBackingStoreReference(void** slot);
void ConservativelyMarkHeader(HeapObjectHeader*); void ConservativelyMarkHeader(HeapObjectHeader*);
......
...@@ -266,7 +266,7 @@ class PLATFORM_EXPORT Visitor { ...@@ -266,7 +266,7 @@ class PLATFORM_EXPORT Visitor {
// Registers backing store pointers so that they can be moved and properly // Registers backing store pointers so that they can be moved and properly
// updated. // updated.
virtual void RegisterBackingStoreCallback(void* backing_store, virtual void RegisterBackingStoreCallback(void** slot,
MovingObjectCallback, MovingObjectCallback,
void* callback_data) = 0; void* callback_data) = 0;
......
...@@ -307,11 +307,9 @@ class LinkedHashSet { ...@@ -307,11 +307,9 @@ class LinkedHashSet {
impl_.Trace(visitor); impl_.Trace(visitor);
// Should the underlying table be moved by GC, register a callback // Should the underlying table be moved by GC, register a callback
// that fixes up the interior pointers that the (Heap)LinkedHashSet keeps. // that fixes up the interior pointers that the (Heap)LinkedHashSet keeps.
if (impl_.table_) { Allocator::RegisterBackingStoreCallback(visitor, &impl_.table_,
Allocator::RegisterBackingStoreCallback( MoveBackingCallback,
visitor, impl_.table_, MoveBackingCallback, reinterpret_cast<void*>(&anchor_));
reinterpret_cast<void*>(&anchor_));
}
} }
int64_t Modifications() const { return impl_.Modifications(); } int64_t Modifications() const { return impl_.Modifications(); }
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment