Commit 174cc9f5 authored by Michael Lippautz's avatar Michael Lippautz Committed by Commit Bot

[oilpan] Explicitly trace out-of-line vector backings

- Avoid tracing elements directly from WTF::Vector and WTF::Deque
  when the backing store is allocated out-of-line.
- Avoid MarkNoTrace optimization breaking the tracing/marking
  abstraction.

Bug: chromium:802273
Change-Id: Icfb5deed3479d4a7dec85c82930b464bdfe16b0e
Reviewed-on: https://chromium-review.googlesource.com/886484
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Kentaro Hara <haraken@chromium.org>
Cr-Commit-Position: refs/heads/master@{#532806}
parent 3ff965d2
...@@ -89,13 +89,21 @@ void swap(HeapVector<TraceWrapperMember<T>>& a, ...@@ -89,13 +89,21 @@ void swap(HeapVector<TraceWrapperMember<T>>& a,
} }
} }
// HeapVectorBacking<TraceWrapperMember<T>> needs to map to
// HeapVectorBacking<Member<T>> so that the swap() overload below can
// reinterpret_cast a HeapVector<TraceWrapperMember<T>> into a
// HeapVector<Member<T>> and swap their backing stores: both backings
// must report the same GCInfo or the heap would treat the swapped
// store as a different object type.
// NOTE(review): this assumes TraceWrapperMember<T> and Member<T> are
// layout-compatible in vector backings — confirm against their
// definitions before relying on it elsewhere.
template <typename T, typename Traits>
struct GCInfoTrait<HeapVectorBacking<TraceWrapperMember<T>, Traits>>
: public GCInfoTrait<
HeapVectorBacking<Member<T>, WTF::VectorTraits<Member<T>>>> {};
// Swaps two HeapVectors, one containing TraceWrapperMember and one with // Swaps two HeapVectors, one containing TraceWrapperMember and one with
// regular Members. The custom swap function is required as TraceWrapperMember // regular Members. The custom swap function is required as TraceWrapperMember
// potentially requires emitting a write barrier. // potentially requires emitting a write barrier.
template <typename T> template <typename T>
void swap(HeapVector<TraceWrapperMember<T>>& a, HeapVector<Member<T>>& b) { void swap(HeapVector<TraceWrapperMember<T>>& a, HeapVector<Member<T>>& b) {
// HeapVector<Member<T>> and HeapVector<TraceWrapperMember<T>> have the // HeapVector<Member<T>> and HeapVector<TraceWrapperMember<T>> have the
// same size and semantics. // same size and semantics. This cast and swap assumes that GCInfo for both
// TraceWrapperMember and Member match in vector backings.
HeapVector<Member<T>>& a_ = reinterpret_cast<HeapVector<Member<T>>&>(a); HeapVector<Member<T>>& a_ = reinterpret_cast<HeapVector<Member<T>>&>(a);
a_.swap(b); a_.swap(b);
if (ThreadState::Current()->WrapperTracingInProgress()) { if (ThreadState::Current()->WrapperTracingInProgress()) {
......
...@@ -279,6 +279,16 @@ class PLATFORM_EXPORT HeapAllocator { ...@@ -279,6 +279,16 @@ class PLATFORM_EXPORT HeapAllocator {
#endif // BUILDFLAG(BLINK_HEAP_INCREMENTAL_MARKING) #endif // BUILDFLAG(BLINK_HEAP_INCREMENTAL_MARKING)
} }
// Traces an out-of-line HeapVector/HeapDeque backing store as a single
// HeapVectorBacking<T> object instead of letting the collection trace
// its elements directly. This keeps element tracing behind the
// visitor's tracing/marking abstraction (see crbug.com/802273).
//
// |backing|      raw pointer to the first element of the out-of-line
//                buffer; reinterpreted as the HeapVectorBacking<T>
//                header object the heap allocated for it.
// |backing_slot| address of the field holding |backing|, registered so
//                the GC can update the slot if the backing moves.
// NOTE(review): the slot is registered before the backing is traced —
// presumably the visitor requires the reference to be known prior to
// marking; confirm against the VisitorDispatcher contract.
template <typename T, typename VisitorDispatcher>
static void TraceVectorBacking(VisitorDispatcher visitor,
T* backing,
T** backing_slot) {
HeapVectorBacking<T>* vector_backing =
reinterpret_cast<HeapVectorBacking<T>*>(backing);
visitor->RegisterBackingStoreReference(backing_slot);
visitor->Trace(vector_backing);
}
private: private:
static void BackingFree(void*); static void BackingFree(void*);
static bool BackingExpand(void*, size_t); static bool BackingExpand(void*, size_t);
......
...@@ -676,29 +676,33 @@ std::enable_if_t<A::kIsGarbageCollected> ...@@ -676,29 +676,33 @@ std::enable_if_t<A::kIsGarbageCollected>
Deque<T, inlineCapacity, Allocator>::Trace(VisitorDispatcher visitor) { Deque<T, inlineCapacity, Allocator>::Trace(VisitorDispatcher visitor) {
static_assert(Allocator::kIsGarbageCollected, static_assert(Allocator::kIsGarbageCollected,
"Garbage collector must be enabled."); "Garbage collector must be enabled.");
const T* buffer_begin = buffer_.Buffer();
const T* end = buffer_begin + end_;
if (IsTraceableInCollectionTrait<VectorTraits<T>>::value) {
if (start_ <= end_) {
for (const T* buffer_entry = buffer_begin + start_; buffer_entry != end;
buffer_entry++)
Allocator::template Trace<VisitorDispatcher, T, VectorTraits<T>>(
visitor, *const_cast<T*>(buffer_entry));
} else {
for (const T* buffer_entry = buffer_begin; buffer_entry != end;
buffer_entry++)
Allocator::template Trace<VisitorDispatcher, T, VectorTraits<T>>(
visitor, *const_cast<T*>(buffer_entry));
const T* buffer_end = buffer_.Buffer() + buffer_.capacity();
for (const T* buffer_entry = buffer_begin + start_;
buffer_entry != buffer_end; buffer_entry++)
Allocator::template Trace<VisitorDispatcher, T, VectorTraits<T>>(
visitor, *const_cast<T*>(buffer_entry));
}
}
if (buffer_.HasOutOfLineBuffer()) { if (buffer_.HasOutOfLineBuffer()) {
Allocator::MarkNoTracing(visitor, buffer_.Buffer()); Allocator::TraceVectorBacking(visitor, buffer_.Buffer(),
Allocator::RegisterBackingStoreReference(visitor, buffer_.BufferSlot()); buffer_.BufferSlot());
} else {
const T* buffer_begin = buffer_.Buffer();
const T* end = buffer_begin + end_;
if (IsTraceableInCollectionTrait<VectorTraits<T>>::value) {
if (start_ <= end_) {
for (const T* buffer_entry = buffer_begin + start_; buffer_entry != end;
buffer_entry++) {
Allocator::template Trace<VisitorDispatcher, T, VectorTraits<T>>(
visitor, *const_cast<T*>(buffer_entry));
}
} else {
for (const T* buffer_entry = buffer_begin; buffer_entry != end;
buffer_entry++) {
Allocator::template Trace<VisitorDispatcher, T, VectorTraits<T>>(
visitor, *const_cast<T*>(buffer_entry));
}
const T* buffer_end = buffer_.Buffer() + buffer_.capacity();
for (const T* buffer_entry = buffer_begin + start_;
buffer_entry != buffer_end; buffer_entry++) {
Allocator::template Trace<VisitorDispatcher, T, VectorTraits<T>>(
visitor, *const_cast<T*>(buffer_entry));
}
}
}
} }
} }
......
...@@ -1959,8 +1959,7 @@ inline bool operator!=(const Vector<T, inlineCapacityA, Allocator>& a, ...@@ -1959,8 +1959,7 @@ inline bool operator!=(const Vector<T, inlineCapacityA, Allocator>& a,
return !(a == b); return !(a == b);
} }
// This is only defined if the allocator is a HeapAllocator. It is used when // Only defined for HeapAllocator. Used when visiting vector object.
// visiting during a tracing GC.
template <typename T, size_t inlineCapacity, typename Allocator> template <typename T, size_t inlineCapacity, typename Allocator>
template <typename VisitorDispatcher, typename A> template <typename VisitorDispatcher, typename A>
std::enable_if_t<A::kIsGarbageCollected> std::enable_if_t<A::kIsGarbageCollected>
...@@ -1970,23 +1969,19 @@ Vector<T, inlineCapacity, Allocator>::Trace(VisitorDispatcher visitor) { ...@@ -1970,23 +1969,19 @@ Vector<T, inlineCapacity, Allocator>::Trace(VisitorDispatcher visitor) {
if (!Buffer()) if (!Buffer())
return; return;
if (this->HasOutOfLineBuffer()) { if (this->HasOutOfLineBuffer()) {
// This is a performance optimization for a case where the buffer has Allocator::TraceVectorBacking(visitor, Buffer(), Base::BufferSlot());
// been already traced by somewhere. This can happen if the conservative } else {
// scanning traced an on-stack (false-positive or real) pointer to the // Inline buffer requires tracing immediately.
// HeapVector, and then visitor->trace() traces the HeapVector. const T* buffer_begin = Buffer();
if (Allocator::IsHeapObjectAlive(Buffer())) const T* buffer_end = Buffer() + size();
return; if (IsTraceableInCollectionTrait<VectorTraits<T>>::value) {
Allocator::MarkNoTracing(visitor, Buffer()); for (const T* buffer_entry = buffer_begin; buffer_entry != buffer_end;
Allocator::RegisterBackingStoreReference(visitor, Base::BufferSlot()); buffer_entry++) {
} Allocator::template Trace<VisitorDispatcher, T, VectorTraits<T>>(
const T* buffer_begin = Buffer(); visitor, *const_cast<T*>(buffer_entry));
const T* buffer_end = Buffer() + size(); }
if (IsTraceableInCollectionTrait<VectorTraits<T>>::value) { CheckUnusedSlots(Buffer() + size(), Buffer() + capacity());
for (const T* buffer_entry = buffer_begin; buffer_entry != buffer_end; }
buffer_entry++)
Allocator::template Trace<VisitorDispatcher, T, VectorTraits<T>>(
visitor, *const_cast<T*>(buffer_entry));
CheckUnusedSlots(Buffer() + size(), Buffer() + capacity());
} }
} }
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment