Commit 50577e88 authored by Michael Lippautz, committed by Commit Bot

heap: Refactor PersistentRegion and CrossThreadPersistentRegion

Refactoring from composition to using CRTP to simple inheritance.

Drive-by: Rework TraceNodes to allow inlining and constant folding
ShouldTracePersistentNode in the case of regular Persistents (in
PersistentRegion).

Change-Id: I1306218f71ea0fea11783b6b8832be84cc3c8480
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1789588
Reviewed-by: Kentaro Hara <haraken@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#695748}
parent 3ec1c93e
......@@ -637,7 +637,7 @@ void V8EmbedderGraphBuilder::VisitBlinkRoots() {
std::unique_ptr<Graph::Node>(new EmbedderRootNode("Blink roots"))));
EnsureRootState(root);
ParentScope parent(this, root);
ThreadState::Current()->GetPersistentRegion()->TracePersistentNodes(this);
ThreadState::Current()->GetPersistentRegion()->TraceNodes(this);
}
{
EmbedderNode* root =
......@@ -646,7 +646,7 @@ void V8EmbedderGraphBuilder::VisitBlinkRoots() {
EnsureRootState(root);
ParentScope parent(this, root);
MutexLocker persistent_lock(ProcessHeap::CrossThreadPersistentMutex());
ProcessHeap::GetCrossThreadPersistentRegion().TracePersistentNodes(this);
ProcessHeap::GetCrossThreadPersistentRegion().TraceNodes(this);
}
}
......
......@@ -64,7 +64,10 @@ class PersistentBase {
return result;
}
void Clear() { AssignSafe(nullptr); }
void Clear() {
// Note that this also frees up related data in the backend.
AssignSafe(nullptr);
}
T* Get() const {
CheckPointer();
......
......@@ -19,59 +19,44 @@ class DummyGCBase final : public GarbageCollected<DummyGCBase> {
};
}
PersistentRegion::~PersistentRegion() {
PersistentRegionBase::~PersistentRegionBase() {
PersistentNodeSlots* slots = slots_;
while (slots) {
PersistentNodeSlots* dead_slots = slots;
slots = slots->next_;
slots = slots->next;
delete dead_slots;
}
}
int PersistentRegion::NumberOfPersistents() {
int PersistentRegionBase::NodesInUse() const {
size_t persistent_count = 0;
for (PersistentNodeSlots* slots = slots_; slots; slots = slots->next_) {
for (PersistentNodeSlots* slots = slots_; slots; slots = slots->next) {
for (int i = 0; i < PersistentNodeSlots::kSlotCount; ++i) {
if (!slots->slot_[i].IsUnused())
if (!slots->slot[i].IsUnused())
++persistent_count;
}
}
#if DCHECK_IS_ON()
DCHECK_EQ(persistent_count, persistent_count_);
DCHECK_EQ(persistent_count, used_node_count_);
#endif
return persistent_count;
}
void PersistentRegion::EnsurePersistentNodeSlots(void* self,
TraceCallback trace) {
void PersistentRegionBase::EnsureNodeSlots() {
DCHECK(!free_list_head_);
PersistentNodeSlots* slots = new PersistentNodeSlots;
for (int i = 0; i < PersistentNodeSlots::kSlotCount; ++i) {
PersistentNode* node = &slots->slot_[i];
PersistentNode* node = &slots->slot[i];
node->SetFreeListNext(free_list_head_);
free_list_head_ = node;
DCHECK(node->IsUnused());
}
slots->next_ = slots_;
slots->next = slots_;
slots_ = slots;
}
// Pre-refactor spelling of PersistentRegion::ReleaseNode(): clears the
// owning Persistent<> wrapper stored in the node, which leaves the node
// unused again (DCHECKed below).
void PersistentRegion::ReleasePersistentNode(PersistentNode* persistent_node) {
  DCHECK(!persistent_node->IsUnused());
  // 'self' is in use, containing the persistent wrapper object.
  void* self = persistent_node->Self();
  // The cast is safe: nodes in this region always wrap a Persistent<>,
  // and Persistent<DummyGCBase> shares the layout of any Persistent<T>.
  Persistent<DummyGCBase>* persistent =
      reinterpret_cast<Persistent<DummyGCBase>*>(self);
  persistent->Clear();
  DCHECK(persistent_node->IsUnused());
}
// This function traces all PersistentNodes. If we encounter
// a PersistentNodeSlot that contains only freed PersistentNodes,
// we delete the PersistentNodeSlot. This function rebuilds the free
// list of PersistentNodes.
void PersistentRegion::TracePersistentNodes(Visitor* visitor,
ShouldTraceCallback should_trace) {
void PersistentRegionBase::TraceNodesImpl(Visitor* visitor,
ShouldTraceCallback should_trace) {
free_list_head_ = nullptr;
size_t persistent_count = 0;
PersistentNodeSlots** prev_next = &slots_;
......@@ -81,7 +66,7 @@ void PersistentRegion::TracePersistentNodes(Visitor* visitor,
PersistentNode* free_list_last = nullptr;
int free_count = 0;
for (int i = 0; i < PersistentNodeSlots::kSlotCount; ++i) {
PersistentNode* node = &slots->slot_[i];
PersistentNode* node = &slots->slot[i];
if (node->IsUnused()) {
if (!free_list_next)
free_list_last = node;
......@@ -97,8 +82,8 @@ void PersistentRegion::TracePersistentNodes(Visitor* visitor,
}
if (free_count == PersistentNodeSlots::kSlotCount) {
PersistentNodeSlots* dead_slots = slots;
*prev_next = slots->next_;
slots = slots->next_;
*prev_next = slots->next;
slots = slots->next;
delete dead_slots;
} else {
if (free_list_last) {
......@@ -107,21 +92,32 @@ void PersistentRegion::TracePersistentNodes(Visitor* visitor,
free_list_last->SetFreeListNext(free_list_head_);
free_list_head_ = free_list_next;
}
prev_next = &slots->next_;
slots = slots->next_;
prev_next = &slots->next;
slots = slots->next;
}
}
#if DCHECK_IS_ON()
DCHECK_EQ(persistent_count, persistent_count_);
DCHECK_EQ(persistent_count, used_node_count_);
#endif
}
void PersistentRegion::PrepareForThreadStateTermination() {
void PersistentRegion::ReleaseNode(PersistentNode* persistent_node) {
DCHECK(!persistent_node->IsUnused());
// 'self' is in use, containing the persistent wrapper object.
void* self = persistent_node->Self();
Persistent<DummyGCBase>* persistent =
reinterpret_cast<Persistent<DummyGCBase>*>(self);
persistent->Clear();
DCHECK(persistent_node->IsUnused());
}
void PersistentRegion::PrepareForThreadStateTermination(ThreadState* state) {
DCHECK_EQ(state, ThreadState::Current());
DCHECK(!IsMainThread());
PersistentNodeSlots* slots = slots_;
while (slots) {
for (int i = 0; i < PersistentNodeSlots::kSlotCount; ++i) {
PersistentNode* node = &slots->slot_[i];
PersistentNode* node = &slots->slot[i];
if (node->IsUnused())
continue;
// It is safe to cast to Persistent<DummyGCBase> because persistent heap
......@@ -132,10 +128,10 @@ void PersistentRegion::PrepareForThreadStateTermination() {
persistent->Clear();
DCHECK(node->IsUnused());
}
slots = slots->next_;
slots = slots->next;
}
#if DCHECK_IS_ON()
DCHECK_EQ(persistent_count_, 0u);
DCHECK_EQ(used_node_count_, 0u);
#endif
}
......@@ -159,17 +155,17 @@ void CrossThreadPersistentRegion::PrepareForThreadStateTermination(
// out the underlying heap reference.
MutexLocker lock(ProcessHeap::CrossThreadPersistentMutex());
PersistentNodeSlots* slots = persistent_region_.slots_;
PersistentNodeSlots* slots = slots_;
while (slots) {
for (int i = 0; i < PersistentNodeSlots::kSlotCount; ++i) {
if (slots->slot_[i].IsUnused())
if (slots->slot[i].IsUnused())
continue;
// 'self' is in use, containing the cross-thread persistent wrapper
// object.
CrossThreadPersistent<DummyGCBase>* persistent =
reinterpret_cast<CrossThreadPersistent<DummyGCBase>*>(
slots->slot_[i].Self());
slots->slot[i].Self());
DCHECK(persistent);
void* raw_object = persistent->Get();
if (!raw_object)
......@@ -178,10 +174,10 @@ void CrossThreadPersistentRegion::PrepareForThreadStateTermination(
DCHECK(page);
if (page->Arena()->GetThreadState() == thread_state) {
persistent->ClearWithLockHeld();
DCHECK(slots->slot_[i].IsUnused());
DCHECK(slots->slot[i].IsUnused());
}
}
slots = slots->next_;
slots = slots->next;
}
}
......@@ -191,10 +187,9 @@ void CrossThreadPersistentRegion::UnpoisonCrossThreadPersistents() {
ProcessHeap::CrossThreadPersistentMutex().AssertAcquired();
#endif
size_t persistent_count = 0;
for (PersistentNodeSlots* slots = persistent_region_.slots_; slots;
slots = slots->next_) {
for (PersistentNodeSlots* slots = slots_; slots; slots = slots->next) {
for (int i = 0; i < PersistentNodeSlots::kSlotCount; ++i) {
const PersistentNode& node = slots->slot_[i];
const PersistentNode& node = slots->slot[i];
if (!node.IsUnused()) {
ASAN_UNPOISON_MEMORY_REGION(node.Self(),
sizeof(CrossThreadPersistent<void*>));
......@@ -203,7 +198,7 @@ void CrossThreadPersistentRegion::UnpoisonCrossThreadPersistents() {
}
}
#if DCHECK_IS_ON()
DCHECK_EQ(persistent_count, persistent_region_.persistent_count_);
DCHECK_EQ(persistent_count, used_node_count_);
#endif
}
#endif
......
......@@ -123,12 +123,11 @@ class PersistentNode final {
// A fixed-size chunk of PersistentNodes. Chunks form a singly-linked list
// (|next|) owned by the region; individual nodes are handed out through a
// free list maintained by the region, not by this struct.
struct PersistentNodeSlots final {
  USING_FAST_MALLOC(PersistentNodeSlots);

 public:
  static constexpr int kSlotCount = 256;
  PersistentNodeSlots* next;
  PersistentNode slot[kSlotCount];
};
// Used by PersistentBase to manage a pointer to a thread heap persistent node.
......@@ -196,118 +195,111 @@ class CrossThreadPersistentNodePtr {
std::atomic<PersistentNode*> ptr_{nullptr};
};
// PersistentRegion provides a region of PersistentNodes. PersistentRegion
// holds a linked list of PersistentNodeSlots, each of which stores
// a predefined number of PersistentNodes. You can call allocatePersistentNode/
// freePersistentNode to allocate/free a PersistentNode on the region.
class PLATFORM_EXPORT PersistentRegion final {
USING_FAST_MALLOC(PersistentRegion);
class PLATFORM_EXPORT PersistentRegionBase {
public:
PersistentRegion() = default;
~PersistentRegion();
~PersistentRegionBase();
inline PersistentNode* AllocateNode(void* self, TraceCallback trace);
inline void FreeNode(PersistentNode* persistent_node);
int NodesInUse() const;
protected:
using ShouldTraceCallback = bool (*)(Visitor*, PersistentNode*);
PersistentNode* AllocatePersistentNode(void* self, TraceCallback trace) {
void TraceNodesImpl(Visitor*, ShouldTraceCallback);
void EnsureNodeSlots();
PersistentNode* free_list_head_ = nullptr;
PersistentNodeSlots* slots_ = nullptr;
#if DCHECK_IS_ON()
++persistent_count_;
size_t used_node_count_ = 0;
#endif
if (UNLIKELY(!free_list_head_))
EnsurePersistentNodeSlots(self, trace);
DCHECK(free_list_head_);
PersistentNode* node = free_list_head_;
free_list_head_ = free_list_head_->FreeListNext();
node->Initialize(self, trace);
DCHECK(!node->IsUnused());
return node;
}
};
void FreePersistentNode(PersistentNode* persistent_node) {
inline PersistentNode* PersistentRegionBase::AllocateNode(void* self,
TraceCallback trace) {
#if DCHECK_IS_ON()
DCHECK_GT(persistent_count_, 0u);
++used_node_count_;
#endif
persistent_node->SetFreeListNext(free_list_head_);
free_list_head_ = persistent_node;
if (UNLIKELY(!free_list_head_))
EnsureNodeSlots();
DCHECK(free_list_head_);
PersistentNode* node = free_list_head_;
free_list_head_ = free_list_head_->FreeListNext();
node->Initialize(self, trace);
DCHECK(!node->IsUnused());
return node;
}
void PersistentRegionBase::FreeNode(PersistentNode* persistent_node) {
#if DCHECK_IS_ON()
--persistent_count_;
DCHECK_GT(used_node_count_, 0u);
#endif
}
persistent_node->SetFreeListNext(free_list_head_);
free_list_head_ = persistent_node;
#if DCHECK_IS_ON()
--used_node_count_;
#endif
}
static bool ShouldTracePersistentNode(Visitor*, PersistentNode*) {
return true;
}
class PLATFORM_EXPORT PersistentRegion final : public PersistentRegionBase {
USING_FAST_MALLOC(PersistentRegion);
void ReleasePersistentNode(PersistentNode*);
using ShouldTraceCallback = bool (*)(Visitor*, PersistentNode*);
void TracePersistentNodes(
Visitor*,
ShouldTraceCallback = PersistentRegion::ShouldTracePersistentNode);
int NumberOfPersistents();
void PrepareForThreadStateTermination();
public:
inline void TraceNodes(Visitor*);
private:
friend CrossThreadPersistentRegion;
// Clears the Persistent and then frees the node.
void ReleaseNode(PersistentNode*);
void EnsurePersistentNodeSlots(void*, TraceCallback);
void PrepareForThreadStateTermination(ThreadState*);
PersistentNode* free_list_head_ = nullptr;
PersistentNodeSlots* slots_ = nullptr;
#if DCHECK_IS_ON()
size_t persistent_count_ = 0;
#endif
private:
static constexpr bool ShouldTracePersistentNode(Visitor*, PersistentNode*) {
return true;
}
};
// Protected by ProcessHeap::CrossThreadPersistentMutex.
class PLATFORM_EXPORT CrossThreadPersistentRegion final {
inline void PersistentRegion::TraceNodes(Visitor* visitor) {
PersistentRegionBase::TraceNodesImpl(visitor, ShouldTracePersistentNode);
}
// Region for CrossThreadPersistent<> handles.
// Protected by ProcessHeap::CrossThreadPersistentMutex; every public entry
// point asserts the mutex is held.
class PLATFORM_EXPORT CrossThreadPersistentRegion final
    : public PersistentRegionBase {
  USING_FAST_MALLOC(CrossThreadPersistentRegion);

 public:
  inline PersistentNode* AllocateNode(void* self, TraceCallback trace);
  inline void FreeNode(PersistentNode*);
  inline void TraceNodes(Visitor*);

  // Clears out all nodes referring to objects owned by |thread_state| when
  // that thread shuts down.
  void PrepareForThreadStateTermination(ThreadState*);

#if defined(ADDRESS_SANITIZER)
  void UnpoisonCrossThreadPersistents();
#endif

 private:
  NO_SANITIZE_ADDRESS
  static bool ShouldTracePersistentNode(Visitor*, PersistentNode*);
};

inline PersistentNode* CrossThreadPersistentRegion::AllocateNode(
    void* self,
    TraceCallback trace) {
  PersistentMutexTraits<kCrossThreadPersistentConfiguration>::AssertAcquired();
  return PersistentRegionBase::AllocateNode(self, trace);
}

inline void CrossThreadPersistentRegion::FreeNode(PersistentNode* node) {
  PersistentMutexTraits<kCrossThreadPersistentConfiguration>::AssertAcquired();
  // When the thread that holds the heap object that the cross-thread
  // persistent refers to shuts down, PrepareForThreadStateTermination() will
  // clear out the associated CrossThreadPersistent<> and PersistentNode so
  // as to avoid unsafe access. This can overlap with a holder of the
  // CrossThreadPersistent<> also clearing the persistent and freeing the
  // PersistentNode.
  //
  // The lock ensures the updating is ordered, but by the time the lock has
  // been acquired the PersistentNode reference may have been cleared out
  // already; check for this.
  if (!node)
    return;
  PersistentRegionBase::FreeNode(node);
}

inline void CrossThreadPersistentRegion::TraceNodes(Visitor* visitor) {
  PersistentRegionBase::TraceNodesImpl(visitor, ShouldTracePersistentNode);
}
template <ThreadAffinity affinity,
WeaknessPersistentConfiguration weakness_configuration>
void PersistentNodePtr<affinity, weakness_configuration>::Initialize(
......@@ -319,7 +311,7 @@ void PersistentNodePtr<affinity, weakness_configuration>::Initialize(
weakness_configuration == kWeakPersistentConfiguration
? state->GetWeakPersistentRegion()
: state->GetPersistentRegion();
ptr_ = region->AllocatePersistentNode(owner, trace_callback);
ptr_ = region->AllocateNode(owner, trace_callback);
#if DCHECK_IS_ON()
state_ = state;
#endif
......@@ -354,7 +346,7 @@ void CrossThreadPersistentNodePtr<weakness_configuration>::Initialize(
weakness_configuration == kWeakPersistentConfiguration
? ProcessHeap::GetCrossThreadWeakPersistentRegion()
: ProcessHeap::GetCrossThreadPersistentRegion();
PersistentNode* node = region.AllocatePersistentNode(owner, trace_callback);
PersistentNode* node = region.AllocateNode(owner, trace_callback);
ptr_.store(node, std::memory_order_release);
}
......@@ -365,7 +357,7 @@ void CrossThreadPersistentNodePtr<weakness_configuration>::Uninitialize() {
weakness_configuration == kWeakPersistentConfiguration
? ProcessHeap::GetCrossThreadWeakPersistentRegion()
: ProcessHeap::GetCrossThreadPersistentRegion();
region.FreePersistentNode(ptr_.load(std::memory_order_relaxed));
region.FreeNode(ptr_.load(std::memory_order_relaxed));
ptr_.store(nullptr, std::memory_order_release);
}
......@@ -376,7 +368,7 @@ void CrossThreadPersistentNodePtr<weakness_configuration>::ClearWithLockHeld() {
weakness_configuration == kWeakPersistentConfiguration
? ProcessHeap::GetCrossThreadWeakPersistentRegion()
: ProcessHeap::GetCrossThreadPersistentRegion();
region.FreePersistentNode(ptr_.load(std::memory_order_relaxed));
region.FreeNode(ptr_.load(std::memory_order_relaxed));
ptr_.store(nullptr, std::memory_order_release);
}
......
......@@ -217,7 +217,7 @@ void ThreadState::RunTerminationGC() {
// Do thread local GC's as long as the count of thread local Persistents
// changes and is above zero.
int old_count = -1;
int current_count = GetPersistentRegion()->NumberOfPersistents();
int current_count = GetPersistentRegion()->NodesInUse();
DCHECK_GE(current_count, 0);
while (current_count != old_count) {
CollectGarbage(BlinkGC::kNoHeapPointersOnStack, BlinkGC::kAtomicMarking,
......@@ -227,7 +227,7 @@ void ThreadState::RunTerminationGC() {
// instantiated while running the termination GC.
ReleaseStaticPersistentNodes();
old_count = current_count;
current_count = GetPersistentRegion()->NumberOfPersistents();
current_count = GetPersistentRegion()->NodesInUse();
}
// We should not have any persistents left when getting to this point,
......@@ -235,17 +235,17 @@ void ThreadState::RunTerminationGC() {
// RegisterAsStaticReference. Clearing out all the Persistents will avoid
// stale pointers and gets them reported as nullptr dereferences.
if (current_count) {
for (size_t i = 0; i < kMaxTerminationGCLoops &&
GetPersistentRegion()->NumberOfPersistents();
for (size_t i = 0;
i < kMaxTerminationGCLoops && GetPersistentRegion()->NodesInUse();
i++) {
GetPersistentRegion()->PrepareForThreadStateTermination();
GetPersistentRegion()->PrepareForThreadStateTermination(this);
CollectGarbage(BlinkGC::kNoHeapPointersOnStack, BlinkGC::kAtomicMarking,
BlinkGC::kEagerSweeping,
BlinkGC::GCReason::kThreadTerminationGC);
}
}
CHECK(!GetPersistentRegion()->NumberOfPersistents());
CHECK(!GetPersistentRegion()->NodesInUse());
// All of pre-finalizers should be consumed.
DCHECK(ordered_pre_finalizers_.empty());
......@@ -326,19 +326,18 @@ void ThreadState::VisitPersistents(Visitor* visitor) {
Heap().stats_collector(),
ThreadHeapStatsCollector::kVisitCrossThreadPersistents);
ProcessHeap::CrossThreadPersistentMutex().AssertAcquired();
ProcessHeap::GetCrossThreadPersistentRegion().TracePersistentNodes(visitor);
ProcessHeap::GetCrossThreadPersistentRegion().TraceNodes(visitor);
}
{
ThreadHeapStatsCollector::Scope inner_stats_scope(
Heap().stats_collector(), ThreadHeapStatsCollector::kVisitPersistents);
persistent_region_->TracePersistentNodes(visitor);
persistent_region_->TraceNodes(visitor);
}
}
void ThreadState::VisitWeakPersistents(Visitor* visitor) {
ProcessHeap::GetCrossThreadWeakPersistentRegion().TracePersistentNodes(
visitor);
weak_persistent_region_->TracePersistentNodes(visitor);
ProcessHeap::GetCrossThreadWeakPersistentRegion().TraceNodes(visitor);
weak_persistent_region_->TraceNodes(visitor);
}
ThreadState::GCSnapshotInfo::GCSnapshotInfo(wtf_size_t num_object_types)
......@@ -981,12 +980,12 @@ void ThreadState::ReleaseStaticPersistentNodes() {
PersistentRegion* persistent_region = GetPersistentRegion();
for (PersistentNode* it : static_persistents)
persistent_region->ReleasePersistentNode(it);
persistent_region->ReleaseNode(it);
}
void ThreadState::FreePersistentNode(PersistentRegion* persistent_region,
PersistentNode* persistent_node) {
persistent_region->FreePersistentNode(persistent_node);
persistent_region->FreeNode(persistent_node);
// Do not allow static persistents to be freed before
// they're all released in releaseStaticPersistentNodes().
//
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.