Commit 0e8266ea authored by Michael Lippautz's avatar Michael Lippautz Committed by Commit Bot

heap: Move virtual methods to cc file

- Virtual Visit methods are always called through base class Visitor
  using Trace methods. There's no need to keep them in the header file.
- Drive-by: Move inlined definitions out-of-line to allow skimming the
  class header more easily.
- Only mark definition as inline as this is an implementation detail.

Change-Id: I44993d515eb18498105b0e592e282fcd536c82ff
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1884731
Reviewed-by: Anton Bikineev <bikineev@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#710688}
parent 3f22e50b
...@@ -65,6 +65,57 @@ void MarkingVisitorCommon::RegisterBackingStoreCallback( ...@@ -65,6 +65,57 @@ void MarkingVisitorCommon::RegisterBackingStoreCallback(
} }
} }
// Visits a weak reference: instead of marking |object|, registers |callback|
// so that the weak reference can be cleared (or kept) during weak processing.
// |object_weak_ref| is the slot holding the weak reference; |desc| describes
// the pointed-to object.
void MarkingVisitorCommon::VisitWeak(void* object,
void* object_weak_ref,
TraceDescriptor desc,
WeakCallback callback) {
// Filter out already marked values. The write barrier for WeakMember
// ensures that any newly set value after this point is kept alive and does
// not require the callback.
if (desc.base_object_payload != BlinkGC::kNotFullyConstructedObject &&
HeapObjectHeader::FromPayload(desc.base_object_payload)
->IsMarked<HeapObjectHeader::AccessMode::kAtomic>())
return;
// Not marked yet (or not fully constructed): defer the liveness decision to
// weak processing via the registered callback.
RegisterWeakCallback(object_weak_ref, callback);
}
// Strongly visits a collection backing store: the slot is recorded for
// potential compaction and the backing object itself is traced like any other
// strong reference.
void MarkingVisitorCommon::VisitBackingStoreStrongly(void* object,
                                                     void** object_slot,
                                                     TraceDescriptor desc) {
  RegisterBackingStoreReference(object_slot);
  if (object)
    Visit(object, desc);
}
// Visits a backing store that participates in weak processing. All work is
// registered through RegisterWeakCallback; the backing store is not marked
// strongly here. Note that |strong_desc| is not used by this implementation.
void MarkingVisitorCommon::VisitBackingStoreWeakly(void* object,
void** object_slot,
TraceDescriptor strong_desc,
TraceDescriptor weak_desc,
WeakCallback callback,
void* parameter) {
// Record the slot so compaction can fix it up if the backing moves.
RegisterBackingStoreReference(object_slot);
if (!object)
return;
RegisterWeakCallback(parameter, callback);
// If the backing has a weak-table trace callback, queue it for ephemeron-
// style processing on the weak-table worklist.
if (weak_desc.callback) {
weak_table_worklist_.Push(
{weak_desc.base_object_payload, weak_desc.callback});
}
}
void MarkingVisitorCommon::VisitBackingStoreOnly(void* object,
void** object_slot) {
RegisterBackingStoreReference(object_slot);
if (!object)
return;
HeapObjectHeader* header = HeapObjectHeader::FromPayload(object);
MarkHeaderNoTracing(header);
AccountMarkedBytes(header);
}
// static // static
bool MarkingVisitor::WriteBarrierSlow(void* value) { bool MarkingVisitor::WriteBarrierSlow(void* value) {
if (!value || IsHashTableDeleteValue(value)) if (!value || IsHashTableDeleteValue(value))
......
...@@ -18,74 +18,25 @@ class BasePage; ...@@ -18,74 +18,25 @@ class BasePage;
class PLATFORM_EXPORT MarkingVisitorCommon : public Visitor { class PLATFORM_EXPORT MarkingVisitorCommon : public Visitor {
public: public:
enum MarkingMode { enum MarkingMode {
// This is a default visitor. This is used for MarkingType=kAtomicMarking // Default visitor mode used for regular marking.
// and MarkingType=kIncrementalMarking.
kGlobalMarking, kGlobalMarking,
// Perform global marking along with preparing for additional sweep // Visitor mode recording slots for compaction during marking.
// compaction of heap arenas afterwards. Compared to the GlobalMarking
// visitor, this visitor will also register references to objects
// that might be moved during arena compaction -- the compaction
// pass will then fix up those references when the object move goes
// ahead.
kGlobalMarkingWithCompaction, kGlobalMarkingWithCompaction,
}; };
// void VisitWeak(void*, void*, TraceDescriptor, WeakCallback) final;
// Implementation of the visitor interface. void VisitBackingStoreStrongly(void*, void**, TraceDescriptor) final;
// void VisitBackingStoreWeakly(void*,
void**,
void VisitWeak(void* object, TraceDescriptor,
void* object_weak_ref, TraceDescriptor,
TraceDescriptor desc, WeakCallback,
WeakCallback callback) final { void*) final;
// Filter out already marked values. The write barrier for WeakMember
// ensures that any newly set value after this point is kept alive and does
// not require the callback.
if (desc.base_object_payload != BlinkGC::kNotFullyConstructedObject &&
HeapObjectHeader::FromPayload(desc.base_object_payload)
->IsMarked<HeapObjectHeader::AccessMode::kAtomic>())
return;
RegisterWeakCallback(object_weak_ref, callback);
}
void VisitBackingStoreStrongly(void* object,
void** object_slot,
TraceDescriptor desc) final {
RegisterBackingStoreReference(object_slot);
if (!object)
return;
Visit(object, desc);
}
// All work is registered through RegisterWeakCallback.
void VisitBackingStoreWeakly(void* object,
void** object_slot,
TraceDescriptor strong_desc,
TraceDescriptor weak_desc,
WeakCallback callback,
void* parameter) final {
RegisterBackingStoreReference(object_slot);
if (!object)
return;
RegisterWeakCallback(parameter, callback);
if (weak_desc.callback) {
weak_table_worklist_.Push(
{weak_desc.base_object_payload, weak_desc.callback});
}
}
// Used to only mark the backing store when it has been registered for weak // Used to only mark the backing store when it has been registered for weak
// processing. In this case, the contents are processed separately using // processing. In this case, the contents are processed separately using
// the corresponding traits but the backing store requires marking. // the corresponding traits but the backing store requires marking.
void VisitBackingStoreOnly(void* object, void** object_slot) final { void VisitBackingStoreOnly(void*, void**) final;
RegisterBackingStoreReference(object_slot);
if (!object)
return;
HeapObjectHeader* header = HeapObjectHeader::FromPayload(object);
MarkHeaderNoTracing(header);
AccountMarkedBytes(header);
}
// This callback mechanism is needed to account for backing store objects // This callback mechanism is needed to account for backing store objects
// containing intra-object pointers, all of which must be relocated/rebased // containing intra-object pointers, all of which must be relocated/rebased
...@@ -93,8 +44,8 @@ class PLATFORM_EXPORT MarkingVisitorCommon : public Visitor { ...@@ -93,8 +44,8 @@ class PLATFORM_EXPORT MarkingVisitorCommon : public Visitor {
// //
// For Blink, |HeapLinkedHashSet<>| is currently the only abstraction which // For Blink, |HeapLinkedHashSet<>| is currently the only abstraction which
// relies on this feature. // relies on this feature.
void RegisterBackingStoreCallback(void* backing, MovingObjectCallback) final; void RegisterBackingStoreCallback(void*, MovingObjectCallback) final;
void RegisterWeakCallback(void* closure, WeakCallback) final; void RegisterWeakCallback(void*, WeakCallback) final;
// Flush private segments remaining in visitor's worklists to global pools. // Flush private segments remaining in visitor's worklists to global pools.
void FlushCompactionWorklists(); void FlushCompactionWorklists();
...@@ -114,7 +65,7 @@ class PLATFORM_EXPORT MarkingVisitorCommon : public Visitor { ...@@ -114,7 +65,7 @@ class PLATFORM_EXPORT MarkingVisitorCommon : public Visitor {
// Try to mark an object without tracing. Returns true when the object was not // Try to mark an object without tracing. Returns true when the object was not
// marked upon calling. // marked upon calling.
inline bool MarkHeaderNoTracing(HeapObjectHeader*); bool MarkHeaderNoTracing(HeapObjectHeader*);
void RegisterBackingStoreReference(void** slot); void RegisterBackingStoreReference(void** slot);
...@@ -137,7 +88,7 @@ ALWAYS_INLINE void MarkingVisitorCommon::AccountMarkedBytes( ...@@ -137,7 +88,7 @@ ALWAYS_INLINE void MarkingVisitorCommon::AccountMarkedBytes(
: header->size(); : header->size();
} }
inline bool MarkingVisitorCommon::MarkHeaderNoTracing( ALWAYS_INLINE bool MarkingVisitorCommon::MarkHeaderNoTracing(
HeapObjectHeader* header) { HeapObjectHeader* header) {
DCHECK(header); DCHECK(header);
DCHECK(State()->InAtomicMarkingPause() || State()->IsIncrementalMarking()); DCHECK(State()->InAtomicMarkingPause() || State()->IsIncrementalMarking());
...@@ -155,17 +106,7 @@ inline bool MarkingVisitorCommon::MarkHeaderNoTracing( ...@@ -155,17 +106,7 @@ inline bool MarkingVisitorCommon::MarkHeaderNoTracing(
template <class Specialized> template <class Specialized>
class PLATFORM_EXPORT MarkingVisitorBase : public MarkingVisitorCommon { class PLATFORM_EXPORT MarkingVisitorBase : public MarkingVisitorCommon {
public: public:
void Visit(void* object, TraceDescriptor desc) final { void Visit(void* object, TraceDescriptor desc) final;
DCHECK(object);
if (desc.base_object_payload == BlinkGC::kNotFullyConstructedObject) {
// This means that the objects are not-yet-fully-constructed. See comments
// on GarbageCollectedMixin for how those objects are handled.
not_fully_constructed_worklist_.Push(object);
return;
}
MarkHeader(HeapObjectHeader::FromPayload(desc.base_object_payload),
desc.callback);
}
// Unused cross-component visit methods. // Unused cross-component visit methods.
void Visit(const TraceWrapperV8Reference<v8::Value>&) override {} void Visit(const TraceWrapperV8Reference<v8::Value>&) override {}
...@@ -176,36 +117,59 @@ class PLATFORM_EXPORT MarkingVisitorBase : public MarkingVisitorCommon { ...@@ -176,36 +117,59 @@ class PLATFORM_EXPORT MarkingVisitorBase : public MarkingVisitorCommon {
~MarkingVisitorBase() override = default; ~MarkingVisitorBase() override = default;
// Marks an object and adds a tracing callback for processing of the object. // Marks an object and adds a tracing callback for processing of the object.
inline void MarkHeader(HeapObjectHeader* header, TraceCallback callback) { void MarkHeader(HeapObjectHeader*, TraceCallback);
DCHECK(header);
DCHECK(callback);
if (Specialized::IsInConstruction(header)) {
not_fully_constructed_worklist_.Push(header->Payload());
} else if (MarkHeaderNoTracing(header)) {
marking_worklist_.Push(
{reinterpret_cast<void*>(header->Payload()), callback});
}
}
}; };
// Entry point for strong visitation: either marks the object and schedules
// its trace callback, or defers not-yet-fully-constructed objects.
template <class Specialized>
inline void MarkingVisitorBase<Specialized>::Visit(void* object,
                                                   TraceDescriptor desc) {
  DCHECK(object);
  if (desc.base_object_payload != BlinkGC::kNotFullyConstructedObject) {
    MarkHeader(HeapObjectHeader::FromPayload(desc.base_object_payload),
               desc.callback);
    return;
  }
  // The object is not-yet-fully-constructed. See comments on
  // GarbageCollectedMixin for how such objects are handled.
  not_fully_constructed_worklist_.Push(object);
}
// Marks an object and adds a tracing callback for processing of the object.
// Objects still under construction are pushed to a dedicated worklist instead
// of being marked; Specialized::IsInConstruction determines the access mode
// (atomic for concurrent markers, non-atomic on the mutator thread).
template <class Specialized>
ALWAYS_INLINE void MarkingVisitorBase<Specialized>::MarkHeader(
HeapObjectHeader* header,
TraceCallback callback) {
DCHECK(header);
DCHECK(callback);
if (Specialized::IsInConstruction(header)) {
not_fully_constructed_worklist_.Push(header->Payload());
} else if (MarkHeaderNoTracing(header)) {
// Only the caller that actually flipped the mark bit enqueues the trace,
// so each object is traced at most once.
marking_worklist_.Push(
{reinterpret_cast<void*>(header->Payload()), callback});
}
}
// Visitor used to mark Oilpan objects on the main thread. Also implements // Visitor used to mark Oilpan objects on the main thread. Also implements
// various sorts of write barriers that should only be called from the main // various sorts of write barriers that should only be called from the main
// thread. // thread.
class PLATFORM_EXPORT MarkingVisitor class PLATFORM_EXPORT MarkingVisitor
: public MarkingVisitorBase<MarkingVisitor> { : public MarkingVisitorBase<MarkingVisitor> {
public: public:
// Returns whether an object is in construction.
static bool IsInConstruction(HeapObjectHeader* header);
// Write barrier that adds |value| to the set of marked objects. The barrier // Write barrier that adds |value| to the set of marked objects. The barrier
// bails out if marking is off or the object is not yet marked. Returns true // bails out if marking is off or the object is not yet marked. Returns true
// if the object was marked on this call. // if the object was marked on this call.
ALWAYS_INLINE static bool WriteBarrier(void* value); static bool WriteBarrier(void* value);
// Eagerly traces an already marked backing store ensuring that all its // Eagerly traces an already marked backing store ensuring that all its
// children are discovered by the marker. The barrier bails out if marking // children are discovered by the marker. The barrier bails out if marking
// is off and on individual objects reachable if they are already marked. The // is off and on individual objects reachable if they are already marked. The
// barrier uses the callback function through GcInfo, so it will not inline // barrier uses the callback function through GcInfo, so it will not inline
// any templated type-specific code. // any templated type-specific code.
ALWAYS_INLINE static void TraceMarkedBackingStore(void* value); static void TraceMarkedBackingStore(void* value);
MarkingVisitor(ThreadState*, MarkingMode); MarkingVisitor(ThreadState*, MarkingMode);
~MarkingVisitor() override = default; ~MarkingVisitor() override = default;
...@@ -222,12 +186,6 @@ class PLATFORM_EXPORT MarkingVisitor ...@@ -222,12 +186,6 @@ class PLATFORM_EXPORT MarkingVisitor
void FlushMarkingWorklist(); void FlushMarkingWorklist();
static bool IsInConstruction(HeapObjectHeader* header) {
// No need for atomics when operating on the mutator thread where
// construction happens.
return header->IsInConstruction<HeapObjectHeader::AccessMode::kNonAtomic>();
}
private: private:
// Exact version of the marking write barriers. // Exact version of the marking write barriers.
static bool WriteBarrierSlow(void*); static bool WriteBarrierSlow(void*);
...@@ -236,6 +194,14 @@ class PLATFORM_EXPORT MarkingVisitor ...@@ -236,6 +194,14 @@ class PLATFORM_EXPORT MarkingVisitor
WriteBarrierWorklist::View write_barrier_worklist_; WriteBarrierWorklist::View write_barrier_worklist_;
}; };
// static
ALWAYS_INLINE bool MarkingVisitor::IsInConstruction(HeapObjectHeader* header) {
  // Construction only ever happens on the mutator thread itself, so a
  // non-atomic read of the construction bit is sufficient here.
  constexpr auto kMode = HeapObjectHeader::AccessMode::kNonAtomic;
  return header->IsInConstruction<kMode>();
}
// static
ALWAYS_INLINE bool MarkingVisitor::WriteBarrier(void* value) { ALWAYS_INLINE bool MarkingVisitor::WriteBarrier(void* value) {
if (!ThreadState::IsAnyIncrementalMarking()) if (!ThreadState::IsAnyIncrementalMarking())
return false; return false;
...@@ -245,6 +211,7 @@ ALWAYS_INLINE bool MarkingVisitor::WriteBarrier(void* value) { ...@@ -245,6 +211,7 @@ ALWAYS_INLINE bool MarkingVisitor::WriteBarrier(void* value) {
return WriteBarrierSlow(value); return WriteBarrierSlow(value);
} }
// static
ALWAYS_INLINE void MarkingVisitor::TraceMarkedBackingStore(void* value) { ALWAYS_INLINE void MarkingVisitor::TraceMarkedBackingStore(void* value) {
if (!ThreadState::IsAnyIncrementalMarking()) if (!ThreadState::IsAnyIncrementalMarking())
return; return;
...@@ -258,16 +225,21 @@ ALWAYS_INLINE void MarkingVisitor::TraceMarkedBackingStore(void* value) { ...@@ -258,16 +225,21 @@ ALWAYS_INLINE void MarkingVisitor::TraceMarkedBackingStore(void* value) {
class PLATFORM_EXPORT ConcurrentMarkingVisitor class PLATFORM_EXPORT ConcurrentMarkingVisitor
: public MarkingVisitorBase<ConcurrentMarkingVisitor> { : public MarkingVisitorBase<ConcurrentMarkingVisitor> {
public: public:
// Returns whether an object is in construction.
static bool IsInConstruction(HeapObjectHeader* header);
ConcurrentMarkingVisitor(ThreadState*, MarkingMode, int); ConcurrentMarkingVisitor(ThreadState*, MarkingMode, int);
~ConcurrentMarkingVisitor() override = default; ~ConcurrentMarkingVisitor() override = default;
virtual void FlushWorklists(); virtual void FlushWorklists();
static bool IsInConstruction(HeapObjectHeader* header) {
return header->IsInConstruction<HeapObjectHeader::AccessMode::kAtomic>();
}
}; };
// static
ALWAYS_INLINE bool ConcurrentMarkingVisitor::IsInConstruction(
    HeapObjectHeader* header) {
  // Concurrent markers may race with the mutator, so the construction bit
  // must be read atomically.
  constexpr auto kMode = HeapObjectHeader::AccessMode::kAtomic;
  return header->IsInConstruction<kMode>();
}
} // namespace blink } // namespace blink
#endif // THIRD_PARTY_BLINK_RENDERER_PLATFORM_HEAP_MARKING_VISITOR_H_ #endif // THIRD_PARTY_BLINK_RENDERER_PLATFORM_HEAP_MARKING_VISITOR_H_
...@@ -52,6 +52,7 @@ UnifiedHeapMarkingVisitor::UnifiedHeapMarkingVisitor(ThreadState* thread_state, ...@@ -52,6 +52,7 @@ UnifiedHeapMarkingVisitor::UnifiedHeapMarkingVisitor(ThreadState* thread_state,
isolate, isolate,
WorklistTaskId::MutatorThread) {} WorklistTaskId::MutatorThread) {}
// static
void UnifiedHeapMarkingVisitor::WriteBarrier( void UnifiedHeapMarkingVisitor::WriteBarrier(
const TraceWrapperV8Reference<v8::Value>& object) { const TraceWrapperV8Reference<v8::Value>& object) {
if (object.IsEmpty() || !ThreadState::IsAnyIncrementalMarking()) if (object.IsEmpty() || !ThreadState::IsAnyIncrementalMarking())
...@@ -64,6 +65,7 @@ void UnifiedHeapMarkingVisitor::WriteBarrier( ...@@ -64,6 +65,7 @@ void UnifiedHeapMarkingVisitor::WriteBarrier(
thread_state->CurrentVisitor()->Trace(object); thread_state->CurrentVisitor()->Trace(object);
} }
// static
void UnifiedHeapMarkingVisitor::WriteBarrier( void UnifiedHeapMarkingVisitor::WriteBarrier(
v8::Isolate* isolate, v8::Isolate* isolate,
const WrapperTypeInfo* wrapper_type_info, const WrapperTypeInfo* wrapper_type_info,
...@@ -80,6 +82,11 @@ void UnifiedHeapMarkingVisitor::WriteBarrier( ...@@ -80,6 +82,11 @@ void UnifiedHeapMarkingVisitor::WriteBarrier(
wrapper_type_info->Trace(thread_state->CurrentVisitor(), object); wrapper_type_info->Trace(thread_state->CurrentVisitor(), object);
} }
void UnifiedHeapMarkingVisitor::Visit(
const TraceWrapperV8Reference<v8::Value>& v) {
VisitImpl(v);
}
ConcurrentUnifiedHeapMarkingVisitor::ConcurrentUnifiedHeapMarkingVisitor( ConcurrentUnifiedHeapMarkingVisitor::ConcurrentUnifiedHeapMarkingVisitor(
ThreadState* thread_state, ThreadState* thread_state,
MarkingMode mode, MarkingMode mode,
...@@ -93,4 +100,9 @@ void ConcurrentUnifiedHeapMarkingVisitor::FlushWorklists() { ...@@ -93,4 +100,9 @@ void ConcurrentUnifiedHeapMarkingVisitor::FlushWorklists() {
v8_references_worklist_.FlushToGlobal(); v8_references_worklist_.FlushToGlobal();
} }
void ConcurrentUnifiedHeapMarkingVisitor::Visit(
const TraceWrapperV8Reference<v8::Value>& v) {
VisitImpl(v);
}
} // namespace blink } // namespace blink
...@@ -47,16 +47,14 @@ class PLATFORM_EXPORT UnifiedHeapMarkingVisitor ...@@ -47,16 +47,14 @@ class PLATFORM_EXPORT UnifiedHeapMarkingVisitor
: public MarkingVisitor, : public MarkingVisitor,
public UnifiedHeapMarkingVisitorBase { public UnifiedHeapMarkingVisitorBase {
public: public:
UnifiedHeapMarkingVisitor(ThreadState*, MarkingMode, v8::Isolate*);
~UnifiedHeapMarkingVisitor() override = default;
// Write barriers for annotating a write during incremental marking. // Write barriers for annotating a write during incremental marking.
static void WriteBarrier(const TraceWrapperV8Reference<v8::Value>&); static void WriteBarrier(const TraceWrapperV8Reference<v8::Value>&);
static void WriteBarrier(v8::Isolate*, const WrapperTypeInfo*, void*); static void WriteBarrier(v8::Isolate*, const WrapperTypeInfo*, void*);
void Visit(const TraceWrapperV8Reference<v8::Value>& v) final { UnifiedHeapMarkingVisitor(ThreadState*, MarkingMode, v8::Isolate*);
VisitImpl(v); ~UnifiedHeapMarkingVisitor() override = default;
}
void Visit(const TraceWrapperV8Reference<v8::Value>&) final;
private: private:
DISALLOW_COPY_AND_ASSIGN(UnifiedHeapMarkingVisitor); DISALLOW_COPY_AND_ASSIGN(UnifiedHeapMarkingVisitor);
...@@ -74,9 +72,7 @@ class PLATFORM_EXPORT ConcurrentUnifiedHeapMarkingVisitor ...@@ -74,9 +72,7 @@ class PLATFORM_EXPORT ConcurrentUnifiedHeapMarkingVisitor
int task_id); int task_id);
~ConcurrentUnifiedHeapMarkingVisitor() override = default; ~ConcurrentUnifiedHeapMarkingVisitor() override = default;
void Visit(const TraceWrapperV8Reference<v8::Value>& v) final { void Visit(const TraceWrapperV8Reference<v8::Value>&) final;
VisitImpl(v);
}
void FlushWorklists() override; void FlushWorklists() override;
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment