Commit 226485b0 authored by Michael Lippautz, committed by Commit Bot

heap: Re-structure write barrier

Re-structure write barrier to allow delegating to the Oilpan library.

Bug: 1056170
Change-Id: I872fdb902a51d16d985f4e88ffd7e9368bf46623
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2551115
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#829646}
parent 261a750d
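Context for the diff below: the trait-templated barriers formerly inlined in HeapAllocator (NotifyNewObject, NotifyNewObjects, TraceBackingStoreIfMarked, BackingWriteBarrier) become thin forwarders into MarkingVisitor, which now exposes a type-erased write barrier that walks an element range through a TraceCallback and a RetraceObject helper for already-marked backing stores. The following is a minimal, self-contained sketch of that delegation pattern; every name in it is a hypothetical stand-in, not Blink API, and the real entry points are the MarkingVisitor::WriteBarrier overloads introduced in the diff.

// Illustrative sketch only -- stand-in names, not Blink code.
#include <cstddef>

struct Visitor;  // stand-in for the marking visitor handed to Trace methods
using TraceCallback = void (*)(Visitor*, const void*);

struct Marker {  // stand-in for MarkingVisitor
  // Type-erased range barrier: re-traces |number_of_elements| elements of
  // |element_size| bytes starting at |first_element| via |callback|.
  static void WriteBarrier(Visitor* visitor,
                           void* first_element,
                           size_t element_size,
                           size_t number_of_elements,
                           TraceCallback callback) {
    char* element = static_cast<char*>(first_element);
    while (number_of_elements-- > 0) {
      callback(visitor, element);
      element += element_size;
    }
  }
};

// Allocator-side helper reduced to a thin forwarder that erases the type.
// T is expected to provide `void Trace(Visitor*) const`.
template <typename T>
void NotifyNewObjects(Visitor* visitor, T* array, size_t len) {
  Marker::WriteBarrier(visitor, array, sizeof(T), len,
                       [](Visitor* v, const void* element) {
                         static_cast<const T*>(element)->Trace(v);
                       });
}

The point of the shape is that type-specific tracing stays behind a plain function pointer, so the barrier itself no longer needs to be a template in the allocator and can be delegated to the Oilpan library, as the commit message states.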
@@ -103,22 +103,6 @@ class PLATFORM_EXPORT HeapAllocator {
   static void FreeHashTableBacking(void* address);
   static bool ExpandHashTableBacking(void*, size_t);
 
-  static void TraceBackingStoreIfMarked(const void* address) {
-    // Trace backing store elements only if backing store was marked. The
-    // sweeper may be active on the backing store which requires atomic mark bit
-    // access. A precise filter is performed in
-    // MarkingVisitor::TraceMarkedBackingStore.
-    if (HeapObjectHeader::FromPayload(address)
-            ->IsMarked<HeapObjectHeader::AccessMode::kAtomic>()) {
-      MarkingVisitor::TraceMarkedBackingStore(address);
-    }
-  }
-
-  template <typename T>
-  static void BackingWriteBarrier(T** slot) {
-    MarkingVisitor::WriteBarrier(slot);
-  }
-
   static bool IsAllocationAllowed() {
     return ThreadState::Current()->IsAllocationAllowed();
   }
@@ -128,12 +112,6 @@ class PLATFORM_EXPORT HeapAllocator {
     ThreadState::Current()->IsIncrementalMarking();
   }
 
-  template <typename T, typename Traits>
-  static void Trace(Visitor* visitor, const T& t) {
-    TraceCollectionIfEnabled<WTF::WeakHandlingTrait<T>::value, T,
-                             Traits>::Trace(visitor, &t);
-  }
-
   static void EnterGCForbiddenScope() {
     ThreadState::Current()->EnterGCForbiddenScope();
   }
@@ -143,65 +121,9 @@ class PLATFORM_EXPORT HeapAllocator {
   }
 
   template <typename T, typename Traits>
-  static void NotifyNewObject(T* object) {
-#if BUILDFLAG(BLINK_HEAP_YOUNG_GENERATION)
-    ThreadState* const thread_state = ThreadState::Current();
-    if (!thread_state->IsIncrementalMarking()) {
-      MarkingVisitor::GenerationalBarrier(reinterpret_cast<Address>(object),
-                                          thread_state);
-      return;
-    }
-#else
-    if (!ThreadState::IsAnyIncrementalMarking())
-      return;
-    // The object may have been in-place constructed as part of a large object.
-    // It is not safe to retrieve the page from the object here.
-    ThreadState* const thread_state = ThreadState::Current();
-    if (!thread_state->IsIncrementalMarking()) {
-      return;
-    }
-#endif  // BLINK_HEAP_YOUNG_GENERATION
-    // Eagerly trace the object ensuring that the object and all its children
-    // are discovered by the marker.
-    ThreadState::NoAllocationScope no_allocation_scope(thread_state);
-    DCHECK(thread_state->CurrentVisitor());
-    // No weak handling for write barriers. Modifying weakly reachable objects
-    // strongifies them for the current cycle.
-    DCHECK(!Traits::kCanHaveDeletedValue || !Traits::IsDeletedValue(*object));
-    TraceCollectionIfEnabled<WTF::kNoWeakHandling, T, Traits>::Trace(
-        thread_state->CurrentVisitor(), object);
-  }
-
-  template <typename T, typename Traits>
-  static void NotifyNewObjects(T* array, size_t len) {
-#if BUILDFLAG(BLINK_HEAP_YOUNG_GENERATION)
-    ThreadState* const thread_state = ThreadState::Current();
-    if (!thread_state->IsIncrementalMarking()) {
-      MarkingVisitor::GenerationalBarrier(reinterpret_cast<Address>(array),
-                                          thread_state);
-      return;
-    }
-#else
-    if (!ThreadState::IsAnyIncrementalMarking())
-      return;
-    // The object may have been in-place constructed as part of a large object.
-    // It is not safe to retrieve the page from the object here.
-    ThreadState* const thread_state = ThreadState::Current();
-    if (!thread_state->IsIncrementalMarking()) {
-      return;
-    }
-#endif  // BLINK_HEAP_YOUNG_GENERATION
-    // See |NotifyNewObject| for details.
-    ThreadState::NoAllocationScope no_allocation_scope(thread_state);
-    DCHECK(thread_state->CurrentVisitor());
-    // No weak handling for write barriers. Modifying weakly reachable objects
-    // strongifies them for the current cycle.
-    while (len-- > 0) {
-      DCHECK(!Traits::kCanHaveDeletedValue || !Traits::IsDeletedValue(*array));
-      TraceCollectionIfEnabled<WTF::kNoWeakHandling, T, Traits>::Trace(
-          thread_state->CurrentVisitor(), array);
-      array++;
-    }
+  static void Trace(Visitor* visitor, const T& t) {
+    TraceCollectionIfEnabled<WTF::WeakHandlingTrait<T>::value, T,
+                             Traits>::Trace(visitor, &t);
   }
 
   template <typename T>
@@ -239,6 +161,29 @@ class PLATFORM_EXPORT HeapAllocator {
         callback, parameter);
   }
 
+  template <typename T>
+  static void BackingWriteBarrier(T** slot) {
+    MarkingVisitor::WriteBarrier(reinterpret_cast<void**>(slot));
+  }
+
+  static void TraceBackingStoreIfMarked(const void* object) {
+    MarkingVisitor::RetraceObject(object);
+  }
+
+  template <typename T, typename Traits>
+  static void NotifyNewObject(T* object) {
+    MarkingVisitor::WriteBarrier(
+        []() { return ThreadState::Current(); }, object, sizeof(T), 1,
+        TraceCollectionIfEnabled<WTF::kNoWeakHandling, T, Traits>::Trace);
+  }
+
+  template <typename T, typename Traits>
+  static void NotifyNewObjects(T* array, size_t len) {
+    MarkingVisitor::WriteBarrier(
+        []() { return ThreadState::Current(); }, array, sizeof(T), len,
+        TraceCollectionIfEnabled<WTF::kNoWeakHandling, T, Traits>::Trace);
+  }
+
  private:
   static void BackingFree(void*);
   static bool BackingExpand(void*, size_t);
......
@@ -171,7 +171,7 @@ bool MarkingVisitor::MarkValue(void* value,
 
 // static
 bool MarkingVisitor::WriteBarrierSlow(void* value) {
-  if (!value || IsHashTableDeleteValue(value))
+  if (!value || internal::IsHashTableDeleteValue(value))
     return false;
 
   // It is guaranteed that managed references point to either GarbageCollected
@@ -209,23 +209,31 @@ void MarkingVisitor::GenerationalBarrierSlow(Address slot,
   }
 }
 
-void MarkingVisitor::TraceMarkedBackingStoreSlow(const void* value) {
-  if (!value)
+void MarkingVisitor::RetraceObjectSlow(const void* object) {
+  if (!object)
     return;
 
+  // Trace object only if it is marked and thus has been traced before. The
+  // marker may be active on the backing store which requires atomic mark bit
+  // access.
+  if (!HeapObjectHeader::FromPayload(object)
+           ->IsMarked<HeapObjectHeader::AccessMode::kAtomic>()) {
+    return;
+  }
+
   ThreadState* const thread_state = ThreadState::Current();
   if (!thread_state->IsIncrementalMarking())
     return;
 
   // |value| is pointing to the start of a backing store.
-  HeapObjectHeader* header = HeapObjectHeader::FromPayload(value);
+  HeapObjectHeader* header = HeapObjectHeader::FromPayload(object);
   CHECK(header->IsMarked());
   DCHECK(thread_state->CurrentVisitor());
   // No weak handling for write barriers. Modifying weakly reachable objects
   // strongifies them for the current cycle.
   GCInfo::From(header->GcInfoIndex())
-      .trace(thread_state->CurrentVisitor(), value);
+      .trace(thread_state->CurrentVisitor(), object);
 }
 
 constexpr size_t MarkingVisitor::RecentlyRetracedWeakContainers::kMaxCacheSize;
......
@@ -8,20 +8,20 @@
 #include "third_party/blink/renderer/platform/heap/heap.h"
 #include "third_party/blink/renderer/platform/heap/heap_buildflags.h"
 #include "third_party/blink/renderer/platform/heap/impl/heap_page.h"
+#include "third_party/blink/renderer/platform/heap/thread_state_scopes.h"
 #include "third_party/blink/renderer/platform/heap/visitor.h"
 
 namespace blink {
 
-namespace {
+namespace internal {
 
 ALWAYS_INLINE bool IsHashTableDeleteValue(const void* value) {
   return value == reinterpret_cast<void*>(-1);
 }
 
-}  // namespace
+}  // namespace internal
 
 class BasePage;
-class HeapAllocator;
 enum class TracenessMemberConfiguration;
 template <typename T, TracenessMemberConfiguration tracenessConfiguration>
 class MemberBase;
@@ -142,14 +142,29 @@ ALWAYS_INLINE void MarkingVisitorBase::MarkHeader(HeapObjectHeader* header,
 // thread.
 class PLATFORM_EXPORT MarkingVisitor : public MarkingVisitorBase {
  public:
-  static void GenerationalBarrier(Address slot, ThreadState* state);
-
-  // Eagerly traces an already marked backing store ensuring that all its
-  // children are discovered by the marker. The barrier bails out if marking
-  // is off and on individual objects reachable if they are already marked. The
-  // barrier uses the callback function through GcInfo, so it will not inline
-  // any templated type-specific code.
-  static void TraceMarkedBackingStore(const void* value);
+  // Write barrier that adds the value the |slot| refers to to the set of
+  // marked objects. The barrier bails out if marking is off or the object is
+  // not yet marked. Returns true if the value has been marked on this call.
+  ALWAYS_INLINE static bool WriteBarrier(void** slot);
+
+  using ThreadStateCallback = ThreadState*();
+  // Write barrier for a range of |number_of_elements| elements of size
+  // |element_size| starting at |first_element|. The |callback| will be invoked
+  // for each element if necessary.
+  ALWAYS_INLINE static void WriteBarrier(
+      ThreadStateCallback thread_state_callback,
+      void* first_element,
+      size_t element_size,
+      size_t number_of_elements,
+      TraceCallback callback);
+
+  // Eagerly traces an already marked |object| ensuring that all its children
+  // are discovered by the marker. The barrier bails out if marking is off and
+  // on individual objects reachable if they are already marked. The barrier
+  // uses the callback function through GcInfo.
+  //
+  // Note: |object| must point to the beginning of the heap object.
+  ALWAYS_INLINE static void RetraceObject(const void* object);
 
   MarkingVisitor(ThreadState*, MarkingMode);
   ~MarkingVisitor() override = default;
@@ -162,17 +177,14 @@ class PLATFORM_EXPORT MarkingVisitor : public MarkingVisitorBase {
   void FlushMarkingWorklists();
 
  private:
-  // Write barrier that adds a value the |slot| refers to to the set of marked
-  // objects. The barrier bails out if marking is off or the object is not yet
-  // marked. Returns true if the value has been marked on this call.
-  template <typename T>
-  static bool WriteBarrier(T** slot);
+  ALWAYS_INLINE static void GenerationalBarrier(Address slot,
+                                                ThreadState* state);
 
   // Exact version of the marking and generational write barriers.
   static bool WriteBarrierSlow(void*);
   static void GenerationalBarrierSlow(Address, ThreadState*);
   static bool MarkValue(void*, BasePage*, ThreadState*);
-  static void TraceMarkedBackingStoreSlow(const void*);
+  static void RetraceObjectSlow(const void*);
 
   // Weak containers are strongly retraced during conservative stack scanning.
   // Stack scanning happens once per GC at the start of the atomic pause.
@@ -190,17 +202,15 @@ class PLATFORM_EXPORT MarkingVisitor : public MarkingVisitorBase {
     size_t last_used_index_ = -1;
   } recently_retraced_weak_containers_;
 
-  friend class HeapAllocator;
   template <typename T, TracenessMemberConfiguration tracenessConfiguration>
   friend class MemberBase;
 };
 
 // static
-template <typename T>
-ALWAYS_INLINE bool MarkingVisitor::WriteBarrier(T** slot) {
+bool MarkingVisitor::WriteBarrier(void** slot) {
 #if BUILDFLAG(BLINK_HEAP_YOUNG_GENERATION)
   void* value = *slot;
-  if (!value || IsHashTableDeleteValue(value))
+  if (!value || internal::IsHashTableDeleteValue(value))
     return false;
 
   // Dijkstra barrier if concurrent marking is in progress.
@@ -223,8 +233,41 @@ ALWAYS_INLINE bool MarkingVisitor::WriteBarrier(T** slot) {
 }
 
 // static
-ALWAYS_INLINE void MarkingVisitor::GenerationalBarrier(Address slot,
-                                                       ThreadState* state) {
+void MarkingVisitor::WriteBarrier(ThreadStateCallback thread_state_callback,
+                                  void* first_element,
+                                  size_t element_size,
+                                  size_t number_of_elements,
+                                  TraceCallback callback) {
+#if BUILDFLAG(BLINK_HEAP_YOUNG_GENERATION)
+  ThreadState* const thread_state = thread_state_callback();
+  if (!thread_state->IsIncrementalMarking()) {
+    MarkingVisitor::GenerationalBarrier(
+        reinterpret_cast<Address>(first_element), thread_state);
+    return;
+  }
+#else   // !BLINK_HEAP_YOUNG_GENERATION
+  if (!ThreadState::IsAnyIncrementalMarking())
+    return;
+  // The object may have been in-place constructed as part of a large object.
+  // It is not safe to retrieve the page from the object here.
+  ThreadState* const thread_state = thread_state_callback();
+  if (!thread_state->IsIncrementalMarking()) {
+    return;
+  }
+#endif  // !BLINK_HEAP_YOUNG_GENERATION
+  ThreadState::NoAllocationScope no_allocation_scope(thread_state);
+  DCHECK(thread_state->CurrentVisitor());
+  // No weak handling for write barriers. Modifying weakly reachable objects
+  // strongifies them for the current cycle.
+  char* array = static_cast<char*>(first_element);
+  while (number_of_elements-- > 0) {
+    callback(thread_state->CurrentVisitor(), array);
+    array += element_size;
+  }
+}
+
+// static
+void MarkingVisitor::GenerationalBarrier(Address slot, ThreadState* state) {
   // First, check if the source object is in the last allocated region of heap.
   if (LIKELY(state->Heap().IsInLastAllocatedRegion(slot)))
     return;
@@ -234,13 +277,11 @@ ALWAYS_INLINE void MarkingVisitor::GenerationalBarrier(Address slot,
 }
 
 // static
-ALWAYS_INLINE void MarkingVisitor::TraceMarkedBackingStore(const void* value) {
+void MarkingVisitor::RetraceObject(const void* object) {
   if (!ThreadState::IsAnyIncrementalMarking())
     return;
 
-  // Avoid any further checks and dispatch to a call at this point. Aggressive
-  // inlining otherwise pollutes the regular execution paths.
-  TraceMarkedBackingStoreSlow(value);
+  RetraceObjectSlow(object);
 }
 
 // Visitor used to mark Oilpan objects on concurrent threads.
......
@@ -225,7 +225,8 @@ class MemberBase {
   }
 
   void WriteBarrier() const {
-    MarkingVisitor::WriteBarrier(const_cast<std::remove_const_t<T>**>(&raw_));
+    MarkingVisitor::WriteBarrier(
+        reinterpret_cast<void**>(const_cast<std::remove_const_t<T>**>(&raw_)));
   }
 
   void CheckPointer() {
......