Commit 73e7ae28 authored by Michael Lippautz, committed by Commit Bot

[oilpan] Merge post marking processing into weak processing

Post marking processing assumed that the backing store for weak
HashTables remains unchanged during marking, which is not correct in the
presence of incremental marking.

Since post marking just marked the header of the backing store, it
can be merged into the weak processing steps.
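
To make the hazard concrete, here is a minimal standalone sketch (plain
C++; all names are invented for the sketch, this is not Blink code) of a
backing pointer going stale between incremental marking steps:

```cpp
// Minimal standalone model of the hazard: the mutator may reallocate a
// weak table's backing store between incremental marking steps, so a
// backing pointer recorded early for a separate post-marking pass can
// go stale.
#include <cstdio>

struct Backing {
  int capacity;
};

struct WeakTable {
  Backing* backing = new Backing{8};
  void Rehash() {
    // Reallocate on growth; the old backing is intentionally leaked here
    // so the comparison below stays well-defined.
    backing = new Backing{backing->capacity * 2};
  }
};

int main() {
  WeakTable table;
  // Incremental marking step: the old scheme recorded the backing pointer
  // for post-marking work that would run much later.
  Backing* recorded = table.backing;
  // The mutator runs between marking steps and grows the table.
  table.Rehash();
  // The recorded pointer no longer matches the live backing. Merging the
  // work into weak processing avoids this, because the weak callback
  // re-reads the table's current backing pointer when it finally runs.
  std::printf("recorded backing %s the current one\n",
              recorded == table.backing ? "is" : "is not");
}
```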

This CL:
- Removes Heap::post_marking_worklist_ completely, avoiding any memory
  consumption it might require.
- Merges WeakProcessing and PostMarkingProcessing, as the post marking
  steps were only used for weak HashTables.

Bug: chromium:828537, chromium:757440
Change-Id: I0d0b7552786659becf039419bff3701e7bb72aee
Reviewed-on: https://chromium-review.googlesource.com/999485
Reviewed-by: Kentaro Hara <haraken@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#548854}
parent 23f0c082
@@ -136,7 +136,6 @@ ThreadHeap::ThreadHeap(ThreadState* thread_state)
       free_page_pool_(std::make_unique<PagePool>()),
       marking_worklist_(nullptr),
       not_fully_constructed_worklist_(nullptr),
-      post_marking_worklist_(nullptr),
       weak_callback_worklist_(nullptr),
       vector_backing_arena_index_(BlinkGC::kVector1ArenaIndex),
       current_arena_ages_(0),
@@ -226,7 +225,6 @@ void ThreadHeap::RegisterWeakTable(void* table,
 void ThreadHeap::CommitCallbackStacks() {
   marking_worklist_.reset(new MarkingWorklist());
   not_fully_constructed_worklist_.reset(new NotFullyConstructedWorklist());
-  post_marking_worklist_.reset(new PostMarkingWorklist());
   weak_callback_worklist_.reset(new WeakCallbackWorklist());
   DCHECK(ephemeron_callbacks_.IsEmpty());
 }
@@ -234,7 +232,6 @@ void ThreadHeap::CommitCallbackStacks() {
 void ThreadHeap::DecommitCallbackStacks() {
   marking_worklist_.reset(nullptr);
   not_fully_constructed_worklist_.reset(nullptr);
-  post_marking_worklist_.reset(nullptr);
   weak_callback_worklist_.reset(nullptr);
   ephemeron_callbacks_.clear();
 }
@@ -329,18 +326,6 @@ bool ThreadHeap::AdvanceMarkingStackProcessing(Visitor* visitor,
   return true;
 }
 
-void ThreadHeap::PostMarkingProcessing(Visitor* visitor) {
-  TRACE_EVENT0("blink_gc", "ThreadHeap::PostMarkingProcessing");
-  // Call post marking callbacks on collection backings to mark them if they are
-  // only reachable from their front objects.
-  CustomCallbackItem item;
-  while (post_marking_worklist_->Pop(WorklistTaskId::MainThread, &item)) {
-    item.callback(visitor, item.object);
-  }
-  // Post marking callbacks should not add any new objects for marking.
-  DCHECK(marking_worklist_->IsGlobalEmpty());
-}
-
 void ThreadHeap::WeakProcessing(Visitor* visitor) {
   TRACE_EVENT0("blink_gc", "ThreadHeap::WeakProcessing");
   double start_time = WTF::CurrentTimeTicksInMilliseconds();
......
@@ -71,8 +71,6 @@ using NotFullyConstructedItem = void*;
 using MarkingWorklist = Worklist<MarkingItem, 512 /* local entries */>;
 using NotFullyConstructedWorklist =
     Worklist<NotFullyConstructedItem, 16 /* local entries */>;
-using PostMarkingWorklist =
-    Worklist<CustomCallbackItem, 128 /* local entries */>;
 using WeakCallbackWorklist =
     Worklist<CustomCallbackItem, 256 /* local entries */>;
@@ -255,10 +253,6 @@ class PLATFORM_EXPORT ThreadHeap {
     return not_fully_constructed_worklist_.get();
   }
 
-  PostMarkingWorklist* GetPostMarkingWorklist() const {
-    return post_marking_worklist_.get();
-  }
-
   WeakCallbackWorklist* GetWeakCallbackWorklist() const {
     return weak_callback_worklist_.get();
   }
@@ -348,7 +342,6 @@ class PLATFORM_EXPORT ThreadHeap {
   static Address Reallocate(void* previous, size_t);
 
   void ProcessMarkingStack(Visitor*);
-  void PostMarkingProcessing(Visitor*);
   void WeakProcessing(Visitor*);
   void MarkNotFullyConstructedObjects(Visitor*);
   bool AdvanceMarkingStackProcessing(Visitor*, double deadline_seconds);
@@ -513,7 +506,6 @@ class PLATFORM_EXPORT ThreadHeap {
   std::unique_ptr<PagePool> free_page_pool_;
   std::unique_ptr<MarkingWorklist> marking_worklist_;
   std::unique_ptr<NotFullyConstructedWorklist> not_fully_constructed_worklist_;
-  std::unique_ptr<PostMarkingWorklist> post_marking_worklist_;
   std::unique_ptr<WeakCallbackWorklist> weak_callback_worklist_;
   // No duplicates allowed for ephemeron callbacks. Hence, we use a hashmap
   // with the key being the HashTable.
......
@@ -180,13 +180,6 @@ class PLATFORM_EXPORT HeapAllocator {
                                                                   visitor, t);
   }
 
-  template <typename VisitorDispatcher>
-  static void RegisterWeakMembers(VisitorDispatcher visitor,
-                                  const void* closure,
-                                  WeakCallback callback) {
-    visitor->RegisterWeakCallback(const_cast<void*>(closure), callback);
-  }
-
   template <typename VisitorDispatcher>
   static bool RegisterWeakTable(VisitorDispatcher visitor,
                                 const void* closure,
@@ -288,8 +281,20 @@ class PLATFORM_EXPORT HeapAllocator {
   template <typename T, typename HashTable>
   static void TraceHashTableBackingWeakly(Visitor* visitor,
                                           T* backing,
-                                          T** backing_slot) {
+                                          T** backing_slot,
+                                          WeakCallback callback,
+                                          void* parameter) {
     visitor->TraceBackingStoreWeakly(
         reinterpret_cast<HeapHashTableBacking<HashTable>*>(backing),
-        reinterpret_cast<HeapHashTableBacking<HashTable>**>(backing_slot));
+        reinterpret_cast<HeapHashTableBacking<HashTable>**>(backing_slot),
+        callback, parameter);
+  }
+
+  template <typename T, typename HashTable>
+  static void TraceHashTableBackingOnly(Visitor* visitor,
+                                        T* backing,
+                                        T** backing_slot) {
+    visitor->TraceBackingStoreOnly(
+        reinterpret_cast<HeapHashTableBacking<HashTable>*>(backing),
+        reinterpret_cast<HeapHashTableBacking<HashTable>**>(backing_slot));
   }
......
@@ -58,9 +58,12 @@ class BackingVisitor : public Visitor {
   void VisitBackingStoreStrongly(void* object,
                                  void** object_slot,
                                  TraceDescriptor desc) final {}
-  void VisitBackingStoreWeakly(void* object,
-                               void** object_slot,
-                               TraceDescriptor desc) final {}
+  void VisitBackingStoreWeakly(void*,
+                               void**,
+                               TraceDescriptor,
+                               WeakCallback,
+                               void*) final {}
+  void VisitBackingStoreOnly(void*, void**) final {}
   void RegisterBackingStoreCallback(void* backing_store,
                                     MovingObjectCallback,
                                     void* callback_data) final {}
@@ -110,7 +113,6 @@ class IncrementalMarkingScope : public IncrementalMarkingScopeBase {
     thread_state_->DisableIncrementalMarkingBarrier();
     // Need to clear out unused worklists that might have been polluted during
     // test.
-    heap_.GetPostMarkingWorklist()->Clear();
     heap_.GetWeakCallbackWorklist()->Clear();
     thread_state_->SetGCPhase(ThreadState::GCPhase::kSweeping);
     thread_state_->SetGCPhase(ThreadState::GCPhase::kNone);
@@ -1581,6 +1583,81 @@ TEST(IncrementalMarkingTest, OverrideAfterMixinConstruction) {
   EXPECT_NE(uninitialized_value, header);
 }
 
+// =============================================================================
+// Tests that execute complete incremental garbage collections. ================
+// =============================================================================
+
+// Test driver for incremental marking. Assumes that no stack handling is
+// required.
+class IncrementalMarkingTestDriver {
+ public:
+  explicit IncrementalMarkingTestDriver(ThreadState* thread_state)
+      : thread_state_(thread_state) {}
+
+  ~IncrementalMarkingTestDriver() {
+    if (thread_state_->IsIncrementalMarking())
+      FinishGC();
+  }
+
+  void Start() {
+    thread_state_->ScheduleIncrementalMarkingStart();
+    thread_state_->RunScheduledGC(BlinkGC::kNoHeapPointersOnStack);
+  }
+
+  bool SingleStep() {
+    CHECK(thread_state_->IsIncrementalMarking());
+    if (thread_state_->GcState() ==
+        ThreadState::kIncrementalMarkingStepScheduled) {
+      thread_state_->RunScheduledGC(BlinkGC::kNoHeapPointersOnStack);
+      return true;
+    }
+    return false;
+  }
+
+  void FinishSteps() {
+    CHECK(thread_state_->IsIncrementalMarking());
+    while (SingleStep()) {
+    }
+  }
+
+  void FinishGC() {
+    CHECK(thread_state_->IsIncrementalMarking());
+    FinishSteps();
+    CHECK_EQ(ThreadState::kIncrementalMarkingFinalizeScheduled,
+             thread_state_->GcState());
+    thread_state_->RunScheduledGC(BlinkGC::kNoHeapPointersOnStack);
+    CHECK(!thread_state_->IsIncrementalMarking());
+    thread_state_->CompleteSweep();
+  }
+
+ private:
+  ThreadState* const thread_state_;
+};
+
+TEST(IncrementalMarkingTest, TestDriver) {
+  IncrementalMarkingTestDriver driver(ThreadState::Current());
+  driver.Start();
+  EXPECT_TRUE(ThreadState::Current()->IsIncrementalMarking());
+  driver.SingleStep();
+  EXPECT_TRUE(ThreadState::Current()->IsIncrementalMarking());
+  driver.FinishGC();
+  EXPECT_FALSE(ThreadState::Current()->IsIncrementalMarking());
+}
+
+TEST(IncrementalMarkingTest, DropBackingStore) {
+  // Regression test: crbug.com/828537
+  using WeakStore = HeapHashCountedSet<WeakMember<Object>>;
+
+  Persistent<WeakStore> persistent(new WeakStore);
+  persistent->insert(Object::Create());
+  IncrementalMarkingTestDriver driver(ThreadState::Current());
+  driver.Start();
+  driver.FinishSteps();
+  persistent->clear();
+  // Marking verifier should not crash on a black backing store with all
+  // black->white edges.
+  driver.FinishGC();
+}
+
 }  // namespace incremental_marking_test
 }  // namespace blink
......
@@ -45,16 +45,15 @@ class MarkingVerifier final : public Visitor {
   }
 
   // Unused overrides.
-  void VisitBackingStoreStrongly(void* object,
-                                 void** object_slot,
-                                 TraceDescriptor desc) final {}
-  void VisitBackingStoreWeakly(void* object,
-                               void** object_slot,
-                               TraceDescriptor desc) final {}
-  void RegisterBackingStoreCallback(void* backing_store,
-                                    MovingObjectCallback,
-                                    void* callback_data) final {}
-  void RegisterWeakCallback(void* closure, WeakCallback) final {}
+  void VisitBackingStoreStrongly(void*, void**, TraceDescriptor) final {}
+  void VisitBackingStoreWeakly(void*,
+                               void**,
+                               TraceDescriptor,
+                               WeakCallback,
+                               void*) final {}
+  void VisitBackingStoreOnly(void*, void**) final {}
+  void RegisterBackingStoreCallback(void*, MovingObjectCallback, void*) final {}
+  void RegisterWeakCallback(void*, WeakCallback) final {}
 
  private:
   void VerifyChild(void* base_object_payload) {
......
@@ -20,8 +20,6 @@ MarkingVisitor::MarkingVisitor(ThreadState* state, MarkingMode marking_mode)
                                WorklistTaskId::MainThread),
       not_fully_constructed_worklist_(Heap().GetNotFullyConstructedWorklist(),
                                       WorklistTaskId::MainThread),
-      post_marking_worklist_(Heap().GetPostMarkingWorklist(),
-                             WorklistTaskId::MainThread),
       weak_callback_worklist_(Heap().GetWeakCallbackWorklist(),
                               WorklistTaskId::MainThread),
       marking_mode_(marking_mode) {
@@ -103,11 +101,6 @@ void MarkingVisitor::ConservativelyMarkHeader(HeapObjectHeader* header) {
   }
 }
 
-void MarkingVisitor::MarkNoTracingCallback(Visitor* visitor, void* object) {
-  reinterpret_cast<MarkingVisitor*>(visitor)->MarkHeaderNoTracing(
-      HeapObjectHeader::FromPayload(object));
-}
-
 void MarkingVisitor::RegisterWeakCallback(void* object, WeakCallback callback) {
   // We don't want to run weak processings when taking a snapshot.
   if (marking_mode_ == kSnapshotMarking)
......
@@ -111,17 +111,21 @@ class PLATFORM_EXPORT MarkingVisitor final : public Visitor {
     Visit(object, desc);
   }
 
   // Used to delay the marking of objects until the usual marking including
   // ephemeron iteration is done. This is used to delay the marking of
   // collection backing stores until we know if they are reachable from
   // locations other than the collection front object. If collection backings
   // are reachable from other locations we strongify them to avoid issues with
   // iterators and weak processing.
+  // All work is registered through RegisterWeakCallback.
   void VisitBackingStoreWeakly(void* object,
                                void** object_slot,
-                               TraceDescriptor desc) final {
+                               TraceDescriptor desc,
+                               WeakCallback callback,
+                               void* parameter) final {
+    RegisterWeakCallback(parameter, callback);
+  }
+
+  // Used to only mark the backing store when it has been registered for weak
+  // processing. In this case, the contents are processed separately using
+  // the corresponding traits but the backing store requires marking.
+  void VisitBackingStoreOnly(void* object, void** object_slot) final {
     MarkHeaderNoTracing(HeapObjectHeader::FromPayload(object));
     RegisterBackingStoreReference(object_slot);
-    post_marking_worklist_.Push({object, &MarkNoTracingCallback});
   }
 
   void RegisterBackingStoreCallback(void* backing_store,
@@ -132,15 +136,12 @@ class PLATFORM_EXPORT MarkingVisitor final : public Visitor {
   void RegisterWeakCallback(void* closure, WeakCallback) final;
 
  private:
-  static void MarkNoTracingCallback(Visitor*, void*);
-
   void RegisterBackingStoreReference(void* slot);
 
   void ConservativelyMarkHeader(HeapObjectHeader*);
 
   MarkingWorklist::View marking_worklist_;
   NotFullyConstructedWorklist::View not_fully_constructed_worklist_;
-  PostMarkingWorklist::View post_marking_worklist_;
   WeakCallbackWorklist::View weak_callback_worklist_;
   const MarkingMode marking_mode_;
 };
......
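
To summarize the visitor contract sketched in the hunk above: a weak visit
only registers a callback, and that callback later marks the backing via the
"only" visit without tracing its contents. A small standalone model of that
shape (toy types invented for the sketch; this is not the Blink API):

```cpp
// Toy model of the visitor split: VisitBackingStoreWeakly defers all work
// by registering a weak callback, while VisitBackingStoreOnly marks the
// backing store itself without tracing its contents.
#include <cstdio>
#include <set>
#include <utility>
#include <vector>

class ToyVisitor;
using WeakCallback = void (*)(ToyVisitor&, void*);

class ToyVisitor {
 public:
  // Marks the backing itself; its fields are handled elsewhere.
  void VisitBackingStoreOnly(void* backing) { marked_.insert(backing); }
  // Weak visit: defer everything until weak processing runs.
  void VisitBackingStoreWeakly(void* parameter, WeakCallback callback) {
    weak_callbacks_.emplace_back(callback, parameter);
  }
  // Runs at the end of the atomic pause, after regular marking.
  void RunWeakProcessing() {
    for (auto& [callback, parameter] : weak_callbacks_)
      callback(*this, parameter);
  }
  bool IsMarked(void* backing) const { return marked_.count(backing) > 0; }

 private:
  std::set<void*> marked_;
  std::vector<std::pair<WeakCallback, void*>> weak_callbacks_;
};

struct ToyTable {
  int backing = 0;  // Stands in for the heap-allocated backing store.
  static void Process(ToyVisitor& visitor, void* closure) {
    auto* table = static_cast<ToyTable*>(closure);
    // Merged step: mark the backing; dead weak entries would be cleared
    // here as well.
    visitor.VisitBackingStoreOnly(&table->backing);
  }
};

int main() {
  ToyVisitor visitor;
  ToyTable table;
  visitor.VisitBackingStoreWeakly(&table, &ToyTable::Process);
  // Nothing is marked until weak processing runs.
  visitor.RunWeakProcessing();
  std::printf("backing marked: %d\n", visitor.IsMarked(&table.backing));
}
```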
@@ -1463,7 +1463,6 @@ void ThreadState::MarkPhaseEpilogue(BlinkGC::MarkingType marking_type) {
       visitor, std::numeric_limits<double>::infinity()));
   VisitWeakPersistents(visitor);
-  Heap().PostMarkingProcessing(visitor);
   Heap().WeakProcessing(visitor);
   Heap().DecommitCallbackStacks();
......
@@ -59,6 +59,7 @@ namespace blink {
 namespace incremental_marking_test {
 class IncrementalMarkingScope;
+class IncrementalMarkingTestDriver;
 }  // namespace incremental_marking_test
 
 class GarbageCollectedMixinConstructorMarkerBase;
@@ -584,6 +585,7 @@ class PLATFORM_EXPORT ThreadState {
  private:
   // Needs to set up visitor for testing purposes.
   friend class incremental_marking_test::IncrementalMarkingScope;
+  friend class incremental_marking_test::IncrementalMarkingTestDriver;
   template <typename T>
   friend class PrefinalizerRegistration;
......
@@ -129,7 +129,10 @@ class PLATFORM_EXPORT Visitor {
   }
 
   template <typename T>
-  void TraceBackingStoreWeakly(T* backing_store, T** backing_store_slot) {
+  void TraceBackingStoreWeakly(T* backing_store,
+                               T** backing_store_slot,
+                               WeakCallback callback,
+                               void* parameter) {
     static_assert(sizeof(T), "T must be fully defined");
     static_assert(IsGarbageCollectedType<T>::value,
                   "T needs to be a garbage collected object");
@@ -139,7 +142,20 @@ class PLATFORM_EXPORT Visitor {
     VisitBackingStoreWeakly(reinterpret_cast<void*>(backing_store),
                             reinterpret_cast<void**>(backing_store_slot),
                             TraceTrait<T>::GetTraceDescriptor(
-                                reinterpret_cast<void*>(backing_store)));
+                                reinterpret_cast<void*>(backing_store)),
+                            callback, parameter);
+  }
+
+  template <typename T>
+  void TraceBackingStoreOnly(T* backing_store, T** backing_store_slot) {
+    static_assert(sizeof(T), "T must be fully defined");
+    static_assert(IsGarbageCollectedType<T>::value,
+                  "T needs to be a garbage collected object");
+    if (!backing_store)
+      return;
+    VisitBackingStoreOnly(reinterpret_cast<void*>(backing_store),
+                          reinterpret_cast<void**>(backing_store_slot));
+  }
 
   // WeakMember version of the templated trace method. It doesn't keep
@@ -202,7 +218,12 @@ class PLATFORM_EXPORT Visitor {
   // Visitors for collection backing stores.
   virtual void VisitBackingStoreStrongly(void*, void**, TraceDescriptor) = 0;
-  virtual void VisitBackingStoreWeakly(void*, void**, TraceDescriptor) = 0;
+  virtual void VisitBackingStoreWeakly(void*,
+                                       void**,
+                                       TraceDescriptor,
+                                       WeakCallback,
+                                       void*) = 0;
+  virtual void VisitBackingStoreOnly(void*, void**) = 0;
 
   // Registers backing store pointers so that they can be moved and properly
   // updated.
......
@@ -2023,8 +2023,14 @@ struct WeakProcessingHashTableHelper<kWeakHandling,
   // Used for purely weak and for weak-and-strong tables (ephemerons).
   static void Process(typename Allocator::Visitor* visitor, void* closure) {
     HashTableType* table = reinterpret_cast<HashTableType*>(closure);
+    // During incremental marking, the table may be freed after the callback
+    // has been registered.
     if (!table->table_)
       return;
+
+    // Only trace the backing store. Its fields will be processed below.
+    Allocator::template TraceHashTableBackingOnly<ValueType, HashTableType>(
+        visitor, table->table_, &(table->table_));
     // Now perform weak processing (this is a no-op if the backing was
     // accessible through an iterator and was already marked strongly).
     for (ValueType* element = table->table_ + table->table_size_ - 1;
@@ -2091,18 +2097,15 @@ HashTable<Key, Value, Extractor, HashFunctions, Traits, KeyTraits, Allocator>::
     // Weak HashTable. The HashTable may be held alive strongly from somewhere
     // else, e.g., an iterator.
 
-    // Marking of the table is delayed because the backing store is potentially
-    // held alive strongly by other objects. Delayed marking happens after
-    // regular marking.
+    // Trace the table weakly. For marking this will result in delaying the
+    // processing until the end of the atomic pause. It is safe to trace
+    // weakly multiple times.
     Allocator::template TraceHashTableBackingWeakly<ValueType, HashTable>(
-        visitor, table_, &table_);
-    // It is safe to register the table multiple times.
-    Allocator::RegisterWeakMembers(
-        visitor, this,
+        visitor, table_, &table_,
         WeakProcessingHashTableHelper<Traits::kWeakHandlingFlag, Key, Value,
                                       Extractor, HashFunctions, Traits,
-                                      KeyTraits, Allocator>::Process);
+                                      KeyTraits, Allocator>::Process,
+        this);
     if (IsTraceableInCollectionTrait<Traits>::value) {
       // Mix of strong and weak fields. We use an approach similar to ephemeron