Commit 0e8266ea authored by Michael Lippautz and committed by Commit Bot

heap: Move virtual methods to cc file

- Virtual Visit methods are always called through base class Visitor
  using Trace methods. There's no need to keep them in the header file.
- Drive-by: Move inlined definitions out-of-line to allow skimming the
  class header more easily.
- Only mark definition as inline as this is an implementation detail.

Change-Id: I44993d515eb18498105b0e592e282fcd536c82ff
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1884731
Reviewed-by: Anton Bikineev <bikineev@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#710688}
parent 3f22e50b
......@@ -65,6 +65,57 @@ void MarkingVisitorCommon::RegisterBackingStoreCallback(
}
}
void MarkingVisitorCommon::VisitWeak(void* object,
                                     void* object_weak_ref,
                                     TraceDescriptor desc,
                                     WeakCallback callback) {
  // Already-marked values are filtered out here: the write barrier for
  // WeakMember guarantees that any value newly assigned after this point is
  // kept alive, so such entries never need the weak callback.
  const bool is_fully_constructed =
      desc.base_object_payload != BlinkGC::kNotFullyConstructedObject;
  if (is_fully_constructed &&
      HeapObjectHeader::FromPayload(desc.base_object_payload)
          ->IsMarked<HeapObjectHeader::AccessMode::kAtomic>()) {
    return;
  }
  RegisterWeakCallback(object_weak_ref, callback);
}
void MarkingVisitorCommon::VisitBackingStoreStrongly(void* object,
                                                     void** object_slot,
                                                     TraceDescriptor desc) {
  // The slot is recorded unconditionally, even for null backing stores, so
  // that compaction can later fix it up.
  RegisterBackingStoreReference(object_slot);
  // A live backing store is then visited strongly like any other object.
  if (object) {
    Visit(object, desc);
  }
}
// All work is registered through RegisterWeakCallback.
void MarkingVisitorCommon::VisitBackingStoreWeakly(void* object,
                                                   void** object_slot,
                                                   TraceDescriptor strong_desc,
                                                   TraceDescriptor weak_desc,
                                                   WeakCallback callback,
                                                   void* parameter) {
  // Record the slot first, independent of liveness, so compaction can fix
  // it up.
  RegisterBackingStoreReference(object_slot);
  if (!object)
    return;
  // Weak processing of the backing store's contents happens through the
  // registered callback, not by tracing here.
  RegisterWeakCallback(parameter, callback);
  // Backing stores that provide a weak trace callback are additionally
  // queued on the weak-table worklist for separate processing.
  if (weak_desc.callback) {
    weak_table_worklist_.Push(
        {weak_desc.base_object_payload, weak_desc.callback});
  }
}
void MarkingVisitorCommon::VisitBackingStoreOnly(void* object,
void** object_slot) {
RegisterBackingStoreReference(object_slot);
if (!object)
return;
HeapObjectHeader* header = HeapObjectHeader::FromPayload(object);
MarkHeaderNoTracing(header);
AccountMarkedBytes(header);
}
// static
bool MarkingVisitor::WriteBarrierSlow(void* value) {
if (!value || IsHashTableDeleteValue(value))
......
......@@ -18,74 +18,25 @@ class BasePage;
class PLATFORM_EXPORT MarkingVisitorCommon : public Visitor {
public:
enum MarkingMode {
// This is a default visitor. This is used for MarkingType=kAtomicMarking
// and MarkingType=kIncrementalMarking.
// Default visitor mode used for regular marking.
kGlobalMarking,
// Perform global marking along with preparing for additional sweep
// compaction of heap arenas afterwards. Compared to the GlobalMarking
// visitor, this visitor will also register references to objects
// that might be moved during arena compaction -- the compaction
// pass will then fix up those references when the object move goes
// ahead.
// Visitor mode recording slots for compaction during marking.
kGlobalMarkingWithCompaction,
};
//
// Implementation of the visitor interface.
//
void VisitWeak(void* object,
void* object_weak_ref,
TraceDescriptor desc,
WeakCallback callback) final {
// Filter out already marked values. The write barrier for WeakMember
// ensures that any newly set value after this point is kept alive and does
// not require the callback.
if (desc.base_object_payload != BlinkGC::kNotFullyConstructedObject &&
HeapObjectHeader::FromPayload(desc.base_object_payload)
->IsMarked<HeapObjectHeader::AccessMode::kAtomic>())
return;
RegisterWeakCallback(object_weak_ref, callback);
}
void VisitBackingStoreStrongly(void* object,
void** object_slot,
TraceDescriptor desc) final {
RegisterBackingStoreReference(object_slot);
if (!object)
return;
Visit(object, desc);
}
// All work is registered through RegisterWeakCallback.
void VisitBackingStoreWeakly(void* object,
void** object_slot,
TraceDescriptor strong_desc,
TraceDescriptor weak_desc,
WeakCallback callback,
void* parameter) final {
RegisterBackingStoreReference(object_slot);
if (!object)
return;
RegisterWeakCallback(parameter, callback);
if (weak_desc.callback) {
weak_table_worklist_.Push(
{weak_desc.base_object_payload, weak_desc.callback});
}
}
void VisitWeak(void*, void*, TraceDescriptor, WeakCallback) final;
void VisitBackingStoreStrongly(void*, void**, TraceDescriptor) final;
void VisitBackingStoreWeakly(void*,
void**,
TraceDescriptor,
TraceDescriptor,
WeakCallback,
void*) final;
// Used to only mark the backing store when it has been registered for weak
// processing. In this case, the contents are processed separately using
// the corresponding traits but the backing store requires marking.
void VisitBackingStoreOnly(void* object, void** object_slot) final {
RegisterBackingStoreReference(object_slot);
if (!object)
return;
HeapObjectHeader* header = HeapObjectHeader::FromPayload(object);
MarkHeaderNoTracing(header);
AccountMarkedBytes(header);
}
void VisitBackingStoreOnly(void*, void**) final;
// This callback mechanism is needed to account for backing store objects
// containing intra-object pointers, all of which must be relocated/rebased
......@@ -93,8 +44,8 @@ class PLATFORM_EXPORT MarkingVisitorCommon : public Visitor {
//
// For Blink, |HeapLinkedHashSet<>| is currently the only abstraction which
// relies on this feature.
void RegisterBackingStoreCallback(void* backing, MovingObjectCallback) final;
void RegisterWeakCallback(void* closure, WeakCallback) final;
void RegisterBackingStoreCallback(void*, MovingObjectCallback) final;
void RegisterWeakCallback(void*, WeakCallback) final;
// Flush private segments remaining in visitor's worklists to global pools.
void FlushCompactionWorklists();
......@@ -114,7 +65,7 @@ class PLATFORM_EXPORT MarkingVisitorCommon : public Visitor {
// Try to mark an object without tracing. Returns true when the object was not
// marked upon calling.
inline bool MarkHeaderNoTracing(HeapObjectHeader*);
bool MarkHeaderNoTracing(HeapObjectHeader*);
void RegisterBackingStoreReference(void** slot);
......@@ -137,7 +88,7 @@ ALWAYS_INLINE void MarkingVisitorCommon::AccountMarkedBytes(
: header->size();
}
inline bool MarkingVisitorCommon::MarkHeaderNoTracing(
ALWAYS_INLINE bool MarkingVisitorCommon::MarkHeaderNoTracing(
HeapObjectHeader* header) {
DCHECK(header);
DCHECK(State()->InAtomicMarkingPause() || State()->IsIncrementalMarking());
......@@ -155,17 +106,7 @@ inline bool MarkingVisitorCommon::MarkHeaderNoTracing(
template <class Specialized>
class PLATFORM_EXPORT MarkingVisitorBase : public MarkingVisitorCommon {
public:
void Visit(void* object, TraceDescriptor desc) final {
DCHECK(object);
if (desc.base_object_payload == BlinkGC::kNotFullyConstructedObject) {
// This means that the objects are not-yet-fully-constructed. See comments
// on GarbageCollectedMixin for how those objects are handled.
not_fully_constructed_worklist_.Push(object);
return;
}
MarkHeader(HeapObjectHeader::FromPayload(desc.base_object_payload),
desc.callback);
}
void Visit(void* object, TraceDescriptor desc) final;
// Unused cross-component visit methods.
void Visit(const TraceWrapperV8Reference<v8::Value>&) override {}
......@@ -176,36 +117,59 @@ class PLATFORM_EXPORT MarkingVisitorBase : public MarkingVisitorCommon {
~MarkingVisitorBase() override = default;
// Marks an object and adds a tracing callback for processing of the object.
inline void MarkHeader(HeapObjectHeader* header, TraceCallback callback) {
DCHECK(header);
DCHECK(callback);
if (Specialized::IsInConstruction(header)) {
not_fully_constructed_worklist_.Push(header->Payload());
} else if (MarkHeaderNoTracing(header)) {
marking_worklist_.Push(
{reinterpret_cast<void*>(header->Payload()), callback});
}
}
void MarkHeader(HeapObjectHeader*, TraceCallback);
};
// Visits |object| as described by |desc|: marks it and queues it for tracing,
// or defers it when it is not yet fully constructed.
template <class Specialized>
inline void MarkingVisitorBase<Specialized>::Visit(void* object,
                                                   TraceDescriptor desc) {
  DCHECK(object);
  if (desc.base_object_payload == BlinkGC::kNotFullyConstructedObject) {
    // This means that the objects are not-yet-fully-constructed. See comments
    // on GarbageCollectedMixin for how those objects are handled.
    not_fully_constructed_worklist_.Push(object);
    return;
  }
  MarkHeader(HeapObjectHeader::FromPayload(desc.base_object_payload),
             desc.callback);
}
// Marks an object and adds a tracing callback for processing of the object.
template <class Specialized>
ALWAYS_INLINE void MarkingVisitorBase<Specialized>::MarkHeader(
    HeapObjectHeader* header,
    TraceCallback callback) {
  DCHECK(header);
  DCHECK(callback);
  if (Specialized::IsInConstruction(header)) {
    // Objects still under construction cannot be traced through their regular
    // callback yet; defer them to the dedicated worklist instead.
    not_fully_constructed_worklist_.Push(header->Payload());
  } else if (MarkHeaderNoTracing(header)) {
    // Freshly marked object: enqueue it together with its trace callback so
    // its fields get processed.
    marking_worklist_.Push(
        {reinterpret_cast<void*>(header->Payload()), callback});
  }
}
// Visitor used to mark Oilpan objects on the main thread. Also implements
// various sorts of write barriers that should only be called from the main
// thread.
class PLATFORM_EXPORT MarkingVisitor
: public MarkingVisitorBase<MarkingVisitor> {
public:
// Returns whether an object is in construction.
static bool IsInConstruction(HeapObjectHeader* header);
// Write barrier that adds |value| to the set of marked objects. The barrier
// bails out if marking is off or the object is not yet marked. Returns true
// if the object was marked on this call.
ALWAYS_INLINE static bool WriteBarrier(void* value);
static bool WriteBarrier(void* value);
// Eagerly traces an already marked backing store ensuring that all its
// children are discovered by the marker. The barrier bails out if marking
// is off and on individual objects reachable if they are already marked. The
// barrier uses the callback function through GcInfo, so it will not inline
// any templated type-specific code.
ALWAYS_INLINE static void TraceMarkedBackingStore(void* value);
static void TraceMarkedBackingStore(void* value);
MarkingVisitor(ThreadState*, MarkingMode);
~MarkingVisitor() override = default;
......@@ -222,12 +186,6 @@ class PLATFORM_EXPORT MarkingVisitor
void FlushMarkingWorklist();
static bool IsInConstruction(HeapObjectHeader* header) {
// No need for atomics when operating on the mutator thread where
// construction happens.
return header->IsInConstruction<HeapObjectHeader::AccessMode::kNonAtomic>();
}
private:
// Exact version of the marking write barriers.
static bool WriteBarrierSlow(void*);
......@@ -236,6 +194,14 @@ class PLATFORM_EXPORT MarkingVisitor
WriteBarrierWorklist::View write_barrier_worklist_;
};
// static
// Returns whether |header|'s object is still under construction.
ALWAYS_INLINE bool MarkingVisitor::IsInConstruction(HeapObjectHeader* header) {
  // No need for atomics when operating on the mutator thread where
  // construction happens.
  return header->IsInConstruction<HeapObjectHeader::AccessMode::kNonAtomic>();
}
// static
ALWAYS_INLINE bool MarkingVisitor::WriteBarrier(void* value) {
if (!ThreadState::IsAnyIncrementalMarking())
return false;
......@@ -245,6 +211,7 @@ ALWAYS_INLINE bool MarkingVisitor::WriteBarrier(void* value) {
return WriteBarrierSlow(value);
}
// static
ALWAYS_INLINE void MarkingVisitor::TraceMarkedBackingStore(void* value) {
if (!ThreadState::IsAnyIncrementalMarking())
return;
......@@ -258,16 +225,21 @@ ALWAYS_INLINE void MarkingVisitor::TraceMarkedBackingStore(void* value) {
class PLATFORM_EXPORT ConcurrentMarkingVisitor
: public MarkingVisitorBase<ConcurrentMarkingVisitor> {
public:
// Returns whether an object is in construction.
static bool IsInConstruction(HeapObjectHeader* header);
ConcurrentMarkingVisitor(ThreadState*, MarkingMode, int);
~ConcurrentMarkingVisitor() override = default;
virtual void FlushWorklists();
static bool IsInConstruction(HeapObjectHeader* header) {
return header->IsInConstruction<HeapObjectHeader::AccessMode::kAtomic>();
}
};
// static
// Returns whether |header|'s object is still under construction. Uses an
// atomic read because this visitor runs concurrently with the mutator thread.
ALWAYS_INLINE bool ConcurrentMarkingVisitor::IsInConstruction(
    HeapObjectHeader* header) {
  return header->IsInConstruction<HeapObjectHeader::AccessMode::kAtomic>();
}
} // namespace blink
#endif // THIRD_PARTY_BLINK_RENDERER_PLATFORM_HEAP_MARKING_VISITOR_H_
......@@ -52,6 +52,7 @@ UnifiedHeapMarkingVisitor::UnifiedHeapMarkingVisitor(ThreadState* thread_state,
isolate,
WorklistTaskId::MutatorThread) {}
// static
void UnifiedHeapMarkingVisitor::WriteBarrier(
const TraceWrapperV8Reference<v8::Value>& object) {
if (object.IsEmpty() || !ThreadState::IsAnyIncrementalMarking())
......@@ -64,6 +65,7 @@ void UnifiedHeapMarkingVisitor::WriteBarrier(
thread_state->CurrentVisitor()->Trace(object);
}
// static
void UnifiedHeapMarkingVisitor::WriteBarrier(
v8::Isolate* isolate,
const WrapperTypeInfo* wrapper_type_info,
......@@ -80,6 +82,11 @@ void UnifiedHeapMarkingVisitor::WriteBarrier(
wrapper_type_info->Trace(thread_state->CurrentVisitor(), object);
}
// Visits a V8 reference by forwarding to the shared VisitImpl() helper.
void UnifiedHeapMarkingVisitor::Visit(
    const TraceWrapperV8Reference<v8::Value>& v) {
  VisitImpl(v);
}
ConcurrentUnifiedHeapMarkingVisitor::ConcurrentUnifiedHeapMarkingVisitor(
ThreadState* thread_state,
MarkingMode mode,
......@@ -93,4 +100,9 @@ void ConcurrentUnifiedHeapMarkingVisitor::FlushWorklists() {
v8_references_worklist_.FlushToGlobal();
}
// Visits a V8 reference by forwarding to the shared VisitImpl() helper.
void ConcurrentUnifiedHeapMarkingVisitor::Visit(
    const TraceWrapperV8Reference<v8::Value>& v) {
  VisitImpl(v);
}
} // namespace blink
......@@ -47,16 +47,14 @@ class PLATFORM_EXPORT UnifiedHeapMarkingVisitor
: public MarkingVisitor,
public UnifiedHeapMarkingVisitorBase {
public:
UnifiedHeapMarkingVisitor(ThreadState*, MarkingMode, v8::Isolate*);
~UnifiedHeapMarkingVisitor() override = default;
// Write barriers for annotating a write during incremental marking.
static void WriteBarrier(const TraceWrapperV8Reference<v8::Value>&);
static void WriteBarrier(v8::Isolate*, const WrapperTypeInfo*, void*);
void Visit(const TraceWrapperV8Reference<v8::Value>& v) final {
VisitImpl(v);
}
UnifiedHeapMarkingVisitor(ThreadState*, MarkingMode, v8::Isolate*);
~UnifiedHeapMarkingVisitor() override = default;
void Visit(const TraceWrapperV8Reference<v8::Value>&) final;
private:
DISALLOW_COPY_AND_ASSIGN(UnifiedHeapMarkingVisitor);
......@@ -74,9 +72,7 @@ class PLATFORM_EXPORT ConcurrentUnifiedHeapMarkingVisitor
int task_id);
~ConcurrentUnifiedHeapMarkingVisitor() override = default;
void Visit(const TraceWrapperV8Reference<v8::Value>& v) final {
VisitImpl(v);
}
void Visit(const TraceWrapperV8Reference<v8::Value>&) final;
void FlushWorklists() override;
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.