Commit 7d097c74 authored by Omer Katz, committed by Commit Bot

heap: Retrace weak container found through stack scanning

Weak containers are marked by the MarkingVisitor when reached through
regular tracing. However, weak containers might not be traced (e.g. in
case of a weak-to-weak HashMap). Ephemerons are traced, but only values
belonging to live keys are marked. At the end of marking, all unmarked
buckets in the container are removed.
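
(Illustration, not part of the CL: a weak-to-weak container of the kind
mentioned above could be declared as follows, where SomeType stands for an
arbitrary GarbageCollected class:)

  using WeakToWeakMap =
      HeapHashMap<WeakMember<SomeType>, WeakMember<SomeType>>;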

If the container is reachable from stack (e.g. via an iterator), all
buckets become reachable, meaning all buckets should be marked as live.
However, conservative GC does not re-trace a previously marked/traced
container, resulting in dangling references (to deleted buckets) and
invalid iterators.
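
(Sketch of the failure mode, with hypothetical helper names; the regression
test added at the end of this CL exercises the same pattern:)

  void WalkMap(HeapHashMap<WeakMember<SomeType>, int>* map) {
    for (auto it = map->begin(); it != map->end(); ++it) {
      // A conservative GC triggered here sees |map| only through the
      // on-stack iterator. Without a strong retrace, unmarked buckets are
      // swept and |it| is left dangling.
      UseValue(it->value);
    }
  }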

This CL records every marked weak container in a dedicated worklist so that
conservative GC knows to retrace a container that is found again through
stack scanning.
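
(Conceptually, the bookkeeping reduces to the following std-only sketch; the
actual implementation is the WeakContainersWorklist class in the diff below:)

  #include <unordered_set>

  // Remember every weak container that was marked but only weakly traced; a
  // conservative stack hit on such a container forces one strong retrace.
  class RetraceBookkeepingSketch {
   public:
    void OnWeakContainerMarked(const void* header) {
      weak_containers_.insert(header);
    }
    bool ShouldRetraceOnStackHit(const void* header) {
      // erase() returns 1 iff |header| was present, so several iterators to
      // the same container trigger only a single retrace.
      return weak_containers_.erase(header) != 0;
    }

   private:
    std::unordered_set<const void*> weak_containers_;
  };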

Bug: 1108676
Change-Id: I8790c4af2dcd70513b77c4a5fced4ce85852a9c7
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2489905
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#819493}
parent 4260cede
@@ -59,6 +59,24 @@
namespace blink {

void WeakContainersWorklist::Push(const HeapObjectHeader* object) {
  DCHECK(object);
  WTF::MutexLocker locker(lock_);
  objects_.insert(object);
}

void WeakContainersWorklist::Erase(const HeapObjectHeader* object) {
  // This method is called only during atomic pause, so lock is not needed.
  DCHECK(object);
  objects_.erase(object);
}

bool WeakContainersWorklist::Contains(const HeapObjectHeader* object) {
  // This method is called only during atomic pause, so lock is not needed.
  DCHECK(object);
  return objects_.find(object) != objects_.end();
}

HeapAllocHooks::AllocationHook* HeapAllocHooks::allocation_hook_ = nullptr;
HeapAllocHooks::FreeHook* HeapAllocHooks::free_hook_ = nullptr;
@@ -180,6 +198,7 @@ void ThreadHeap::SetupWorklists(bool should_initialize_compaction_worklists) {
  v8_references_worklist_ = std::make_unique<V8ReferencesWorklist>();
  not_safe_to_concurrently_trace_worklist_ =
      std::make_unique<NotSafeToConcurrentlyTraceWorklist>();
  weak_containers_worklist_ = std::make_unique<WeakContainersWorklist>();
  if (should_initialize_compaction_worklists) {
    movable_reference_worklist_ = std::make_unique<MovableReferenceWorklist>();
  }
@@ -193,6 +212,7 @@ void ThreadHeap::DestroyMarkingWorklists(BlinkGC::StackState stack_state) {
  ephemeron_pairs_to_process_worklist_.reset();
  v8_references_worklist_.reset();
  not_safe_to_concurrently_trace_worklist_.reset();
  weak_containers_worklist_.reset();
  // The fixed point iteration may have found not-fully-constructed objects.
  // Such objects should have already been found through the stack scan though
  // and should thus already be marked.
@@ -33,6 +33,7 @@
#include <limits>
#include <memory>
#include <unordered_set>

#include "base/macros.h"
#include "build/build_config.h"
@@ -49,6 +50,7 @@
#include "third_party/blink/renderer/platform/wtf/assertions.h"
#include "third_party/blink/renderer/platform/wtf/forward.h"
#include "third_party/blink/renderer/platform/wtf/sanitizers.h"
#include "third_party/blink/renderer/platform/wtf/threading_primitives.h"
namespace blink {
@@ -102,6 +104,17 @@ using V8ReferencesWorklist = Worklist<V8Reference, 16 /* local entries */>;
using NotSafeToConcurrentlyTraceWorklist =
    Worklist<NotSafeToConcurrentlyTraceItem, 64 /* local entries */>;

class WeakContainersWorklist {
 public:
  void Push(const HeapObjectHeader*);
  void Erase(const HeapObjectHeader*);
  bool Contains(const HeapObjectHeader*);

 private:
  WTF::Mutex lock_;
  std::unordered_set<const HeapObjectHeader*> objects_;
};

class PLATFORM_EXPORT HeapAllocHooks {
  STATIC_ONLY(HeapAllocHooks);
@@ -242,6 +255,11 @@ class PLATFORM_EXPORT ThreadHeap {
      const {
    return not_safe_to_concurrently_trace_worklist_.get();
  }

  WeakContainersWorklist* GetWeakContainersWorklist() const {
    return weak_containers_worklist_.get();
  }

  // Register an ephemeron table for fixed-point iteration.
  void RegisterWeakTable(void* container_object, EphemeronCallback);
@@ -446,6 +464,8 @@ class PLATFORM_EXPORT ThreadHeap {
  std::unique_ptr<NotSafeToConcurrentlyTraceWorklist>
      not_safe_to_concurrently_trace_worklist_;
  std::unique_ptr<WeakContainersWorklist> weak_containers_worklist_;

  std::unique_ptr<HeapCompact> compaction_;
  LastAllocatedRegion last_allocated_region_;
@@ -28,6 +28,7 @@ MarkingVisitorBase::MarkingVisitorBase(ThreadState* state,
      ephemeron_pairs_to_process_worklist_(
          Heap().GetEphemeronPairsToProcessWorklist(),
          task_id),
      weak_containers_worklist_(Heap().GetWeakContainersWorklist()),
      marking_mode_(marking_mode),
      task_id_(task_id) {}
@@ -115,6 +116,7 @@ void MarkingVisitorBase::VisitWeakContainer(
  // non-empty/deleted buckets have been moved to the new backing store.
  MarkHeaderNoTracing(header);
  AccountMarkedBytes(header);
  weak_containers_worklist_->Push(header);

  // Register final weak processing of the backing store.
  RegisterWeakCallback(weak_callback, weak_callback_parameter);
@@ -242,8 +244,24 @@ void MarkingVisitor::ConservativelyMarkAddress(BasePage* page,
          ? static_cast<LargeObjectPage*>(page)->ObjectHeader()
          : static_cast<NormalPage*>(page)->ConservativelyFindHeaderFromAddress(
                address);
  if (!header)
    return;

  if (header->IsMarked()) {
    // Weak containers found through conservative GC need to be strongified.
    // In case the container was previously marked and weakly traced, it
    // should be retraced strongly now. Previously marked/traced weak
    // containers are recorded in |weak_containers_worklist_|. Other marked
    // objects can be skipped.
    if (weak_containers_worklist_->Contains(header)) {
      DCHECK(!header->IsInConstruction());
      // Remove from the set so multiple iterators don't cause multiple
      // retracings of the backing store.
      weak_containers_worklist_->Erase(header);
      marking_worklist_.Push(
          {header->Payload(), GCInfo::From(header->GcInfoIndex()).trace});
    }
    return;
  }

  // Simple case for fully constructed objects. This just adds the object to
  // the regular marking worklist.
@@ -86,6 +86,7 @@ class PLATFORM_EXPORT MarkingVisitorBase : public Visitor {
  MovableReferenceWorklist::View movable_reference_worklist_;
  EphemeronPairsWorklist::View discovered_ephemeron_pairs_worklist_;
  EphemeronPairsWorklist::View ephemeron_pairs_to_process_worklist_;
  WeakContainersWorklist* const weak_containers_worklist_;
  size_t marked_bytes_ = 0;
  const MarkingMode marking_mode_;
  int task_id_;
@@ -1826,5 +1826,53 @@ TEST_F(IncrementalMarkingTest, LinkedHashSetMovingCallback) {
  EXPECT_EQ(10u, Destructed::n_destructed);
}

class DestructedAndTraced final : public GarbageCollected<DestructedAndTraced> {
 public:
  ~DestructedAndTraced() { n_destructed++; }

  void Trace(Visitor*) const { n_traced++; }

  static size_t n_destructed;
  static size_t n_traced;
};

size_t DestructedAndTraced::n_destructed = 0;
size_t DestructedAndTraced::n_traced = 0;

TEST_F(IncrementalMarkingTest, ConservativeGCOfWeakContainer) {
  // Regression test: https://crbug.com/1108676
  //
  // Test ensures that on-stack references to weak containers (e.g. iterators)
  // force re-tracing of the entire container. Otherwise, if the container was
  // previously traced and is not re-traced, some buckets might be deleted,
  // which would invalidate existing iterators.
  using WeakContainer = HeapHashMap<WeakMember<DestructedAndTraced>, size_t>;
  Persistent<WeakContainer> map = MakeGarbageCollected<WeakContainer>();
  static constexpr size_t kNumObjects = 10u;
  for (size_t i = 0; i < kNumObjects; ++i) {
    map->insert(MakeGarbageCollected<DestructedAndTraced>(), i);
  }
  DestructedAndTraced::n_destructed = 0;

  for (auto it = map->begin(); it != map->end(); ++it) {
    size_t value = it->value;
    DestructedAndTraced::n_traced = 0;
    IncrementalMarkingTestDriver driver(ThreadState::Current());
    driver.Start();
    driver.FinishSteps();
    // map should now be marked, but has not been traced since it's weak.
    EXPECT_EQ(0u, DestructedAndTraced::n_traced);
    ConservativelyCollectGarbage();
    // All map buckets should have been traced (at least once each).
    EXPECT_LE(kNumObjects, DestructedAndTraced::n_traced);
    // Check that iterator is still valid.
    EXPECT_EQ(value, it->value);
  }

  // All buckets were kept alive.
  EXPECT_EQ(0u, DestructedAndTraced::n_destructed);
}
} // namespace incremental_marking_test
} // namespace blink