Commit 9655730f authored by Haruka Matsumura, committed by Commit Bot

Oilpan: Change callback registration in HeapCompaction

This CL changes callback registration in HeapCompaction to be slot-based, matching how references are already registered.

Whenever HeapLinkedHashSet rehashes while incremental marking is in progress, impl_.table_ changes and a callback registered under the old buffer address is never invoked. To prepare HeapCompaction for IncrementalMarking, this CL therefore changes the HashMap registration key from references (buffer addresses) to slots.
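
A minimal standalone sketch (hypothetical names and a toy registry, not Blink code) of why keying by slot survives a rehash: the registry stores the address of the field holding the buffer pointer, so the compaction pass can always read the current backing store through the slot, even if the buffer was reallocated after registration.

// Sketch only; Container and g_fixups are illustrative stand-ins.
#include <cstdio>
#include <map>

struct Container {
  void* table_ = nullptr;  // backing store; replaced on rehash
};

// Slot-based registry: keyed by &container.table_, which is stable
// across rehashes, unlike the buffer address itself.
static std::map<void**, void (*)(void* from, void* to)> g_fixups;

int main() {
  Container set;
  set.table_ = new char[16];
  g_fixups[&set.table_] = [](void* from, void* to) {
    std::printf("fixup callback: %p -> %p\n", from, to);
  };

  // Rehash while marking is in progress: the buffer address changes,
  // but the slot does not, so the registration above remains valid.
  delete[] static_cast<char*>(set.table_);
  set.table_ = new char[32];

  // Compaction: read the *current* buffer through the slot, move it,
  // update the slot, and invoke the registered callback.
  for (auto& [slot, callback] : g_fixups) {
    void* from = *slot;
    void* to = new char[32];
    *slot = to;
    callback(from, to);
    delete[] static_cast<char*>(from);
  }
  delete[] static_cast<char*>(set.table_);
  return 0;
}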

Bug: 864425
Change-Id: I5af93f1c56d0a76952e3347c4f3f1ba4f271047b
Reviewed-on: https://chromium-review.googlesource.com/1164881
Commit-Queue: Haruka Matsumura <harukamt@google.com>
Reviewed-by: Keishi Hattori <keishi@chromium.org>
Reviewed-by: Kentaro Hara <haraken@chromium.org>
Cr-Commit-Position: refs/heads/master@{#582104}
parent a6a2898f
@@ -30,7 +30,8 @@ class PLATFORM_EXPORT ScriptWrappableVisitor : public Visitor {
                     WeakCallback,
                     void*) final {}
   void VisitBackingStoreOnly(void*, void**) final {}
-  void RegisterBackingStoreCallback(void*, MovingObjectCallback, void*) final {}
+  void RegisterBackingStoreCallback(void**, MovingObjectCallback, void*) final {
+  }
   void RegisterWeakCallback(void*, WeakCallback) final {}

 protected:
......
@@ -199,16 +199,13 @@ HeapCompact* ThreadHeap::Compaction() {
 }

 void ThreadHeap::RegisterMovingObjectReference(MovableReference* slot) {
   DCHECK(slot);
   Compaction()->RegisterMovingObjectReference(slot);
 }

-void ThreadHeap::RegisterMovingObjectCallback(MovableReference reference,
+void ThreadHeap::RegisterMovingObjectCallback(MovableReference* slot,
                                               MovingObjectCallback callback,
                                               void* callback_data) {
-  DCHECK(reference);
-  Compaction()->RegisterMovingObjectCallback(reference, callback,
-                                             callback_data);
+  Compaction()->RegisterMovingObjectCallback(slot, callback, callback_data);
 }

 void ThreadHeap::MarkNotFullyConstructedObjects(MarkingVisitor* visitor) {
......
@@ -262,7 +262,7 @@ class PLATFORM_EXPORT ThreadHeap {
   //
   // For Blink, |HeapLinkedHashSet<>| is currently the only abstraction which
   // relies on this feature.
-  void RegisterMovingObjectCallback(MovableReference,
+  void RegisterMovingObjectCallback(MovableReference*,
                                     MovingObjectCallback,
                                     void* callback_data);
......
@@ -192,11 +192,11 @@ class PLATFORM_EXPORT HeapAllocator {
   template <typename T, typename VisitorDispatcher>
   static void RegisterBackingStoreCallback(VisitorDispatcher visitor,
-                                           T* backing_store,
+                                           T** backing_store_slot,
                                            MovingObjectCallback callback,
                                            void* callback_data) {
-    visitor->RegisterBackingStoreCallback(backing_store, callback,
-                                          callback_data);
+    visitor->RegisterBackingStoreCallback(
+        reinterpret_cast<void**>(backing_store_slot), callback, callback_data);
   }

   static void EnterGCForbiddenScope() {
......
@@ -115,12 +115,12 @@ class HeapCompact::MovableObjectFixups final {
     AddInteriorFixup(slot);
   }

-  void AddFixupCallback(MovableReference reference,
+  void AddFixupCallback(MovableReference* slot,
                         MovingObjectCallback callback,
                         void* callback_data) {
-    DCHECK(!fixup_callbacks_.Contains(reference));
-    fixup_callbacks_.insert(reference, std::pair<void*, MovingObjectCallback>(
-                                           callback_data, callback));
+    DCHECK(!fixup_callbacks_.Contains(slot));
+    fixup_callbacks_.insert(
+        slot, std::pair<void*, MovingObjectCallback>(callback_data, callback));
   }

   void RelocateInteriorFixups(Address from, Address to, size_t size) {
@@ -226,7 +226,9 @@ class HeapCompact::MovableObjectFixups final {
     *slot = to;
     size_t size = 0;
-    auto callback = fixup_callbacks_.find(from);
+    MovableReference* callback_slot =
+        reinterpret_cast<MovableReference*>(it->value);
+    auto callback = fixup_callbacks_.find(callback_slot);
     if (UNLIKELY(callback != fixup_callbacks_.end())) {
       size = HeapObjectHeader::FromPayload(to)->PayloadSize();
       callback->value.second(callback->value.first, from, to, size);
@@ -265,7 +267,7 @@ class HeapCompact::MovableObjectFixups final {
   // Map from movable reference to callbacks that need to be invoked
   // when the object moves.
-  HashMap<MovableReference, std::pair<void*, MovingObjectCallback>>
+  HashMap<MovableReference*, std::pair<void*, MovingObjectCallback>>
       fixup_callbacks_;

   // Slot => relocated slot/final location.
@@ -382,13 +384,13 @@ void HeapCompact::RegisterMovingObjectReference(MovableReference* slot) {
   traced_slots_.insert(slot);
 }

-void HeapCompact::RegisterMovingObjectCallback(MovableReference reference,
+void HeapCompact::RegisterMovingObjectCallback(MovableReference* slot,
                                                MovingObjectCallback callback,
                                                void* callback_data) {
   if (!do_compact_)
     return;

-  Fixups().AddFixupCallback(reference, callback, callback_data);
+  Fixups().AddFixupCallback(slot, callback, callback_data);
 }

 void HeapCompact::UpdateHeapResidency() {
......
@@ -81,7 +81,7 @@ class PLATFORM_EXPORT HeapCompact final {
   void RegisterMovingObjectReference(MovableReference* slot);

   // See |Heap::registerMovingObjectCallback()| documentation.
-  void RegisterMovingObjectCallback(MovableReference,
+  void RegisterMovingObjectCallback(MovableReference*,
                                     MovingObjectCallback,
                                     void* callback_data);
@@ -165,7 +165,7 @@ class PLATFORM_EXPORT HeapCompact final {
   // the range of BlinkGC::ArenaIndices.
   unsigned compactable_arenas_;

-  // The set is to remember slots traced during the incremental and atomic
+  // The set is to remember slots that traced during
   // marking phases. The mapping between the slots and the backing stores are
   // created at the atomic pause phase.
   HashSet<MovableReference*> traced_slots_;
......
@@ -65,7 +65,7 @@ class BackingVisitor : public Visitor {
                     WeakCallback,
                     void*) final {}
   void VisitBackingStoreOnly(void*, void**) final {}
-  void RegisterBackingStoreCallback(void* backing_store,
+  void RegisterBackingStoreCallback(void** slot,
                                     MovingObjectCallback,
                                     void* callback_data) final {}
   void RegisterWeakCallback(void* closure, WeakCallback) final {}
......
@@ -53,7 +53,8 @@ class MarkingVerifier final : public Visitor {
                     WeakCallback,
                     void*) final {}
   void VisitBackingStoreOnly(void*, void**) final {}
-  void RegisterBackingStoreCallback(void*, MovingObjectCallback, void*) final {}
+  void RegisterBackingStoreCallback(void**, MovingObjectCallback, void*) final {
+  }
   void RegisterWeakCallback(void*, WeakCallback) final {}
   void Visit(const TraceWrapperV8Reference<v8::Value>&) final {}
   void Visit(DOMWrapperMap<ScriptWrappable>*,
......
@@ -117,21 +117,20 @@ void MarkingVisitor::RegisterWeakCallback(void* object, WeakCallback callback) {
   weak_callback_worklist_.Push({object, callback});
 }

-void MarkingVisitor::RegisterBackingStoreReference(void* slot) {
+void MarkingVisitor::RegisterBackingStoreReference(void** slot) {
   if (marking_mode_ != kGlobalMarkingWithCompaction)
     return;
   Heap().RegisterMovingObjectReference(
       reinterpret_cast<MovableReference*>(slot));
 }

-void MarkingVisitor::RegisterBackingStoreCallback(void* backing_store,
+void MarkingVisitor::RegisterBackingStoreCallback(void** slot,
                                                   MovingObjectCallback callback,
                                                   void* callback_data) {
   if (marking_mode_ != kGlobalMarkingWithCompaction)
     return;
-  Heap().RegisterMovingObjectCallback(
-      reinterpret_cast<MovableReference>(backing_store), callback,
-      callback_data);
+  Heap().RegisterMovingObjectCallback(reinterpret_cast<MovableReference*>(slot),
+                                      callback, callback_data);
 }

 bool MarkingVisitor::RegisterWeakTable(const void* closure,
......
@@ -148,7 +148,7 @@ class PLATFORM_EXPORT MarkingVisitor final : public Visitor {
     MarkHeaderNoTracing(HeapObjectHeader::FromPayload(object));
   }

-  void RegisterBackingStoreCallback(void* backing_store,
+  void RegisterBackingStoreCallback(void** slot,
                                     MovingObjectCallback,
                                     void* callback_data) final;
   bool RegisterWeakTable(const void* closure,
@@ -165,7 +165,7 @@ class PLATFORM_EXPORT MarkingVisitor final : public Visitor {
   static void WriteBarrierSlow(void*);
   static void TraceMarkedBackingStoreSlow(void*);

-  void RegisterBackingStoreReference(void* slot);
+  void RegisterBackingStoreReference(void** slot);

   void ConservativelyMarkHeader(HeapObjectHeader*);
......
@@ -266,7 +266,7 @@ class PLATFORM_EXPORT Visitor {
   // Registers backing store pointers so that they can be moved and properly
   // updated.
-  virtual void RegisterBackingStoreCallback(void* backing_store,
+  virtual void RegisterBackingStoreCallback(void** slot,
                                             MovingObjectCallback,
                                             void* callback_data) = 0;
......
@@ -307,11 +307,9 @@ class LinkedHashSet {
     impl_.Trace(visitor);
     // Should the underlying table be moved by GC, register a callback
     // that fixes up the interior pointers that the (Heap)LinkedHashSet keeps.
-    if (impl_.table_) {
-      Allocator::RegisterBackingStoreCallback(
-          visitor, impl_.table_, MoveBackingCallback,
-          reinterpret_cast<void*>(&anchor_));
-    }
+    Allocator::RegisterBackingStoreCallback(visitor, &impl_.table_,
+                                            MoveBackingCallback,
+                                            reinterpret_cast<void*>(&anchor_));
   }

   int64_t Modifications() const { return impl_.Modifications(); }
......