Commit b9efe58a authored by Anton Bikineev, committed by Commit Bot

heap: Apply some fixes to the generational barrier

1) Fix the IsInLastAllocatedRegion() semi-interval check (see the sketch after this list);
2) Simplify GenerationalBarrier() a bit.
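
A semi-interval here means the half-open range [start, start + length): the one-past-the-end address belongs to the next region, so the upper bound must be checked with < rather than <=. Below is a minimal standalone sketch of the corrected predicate, with illustrative types; it is not the Blink code itself.

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    using Address = uint8_t*;

    struct Region {
      Address start;
      size_t length;
    };

    // Half-open membership test over [start, start + length).
    bool IsInRegion(const Region& r, Address address) {
      return r.start <= address && address < r.start + r.length;
    }

    int main() {
      uint8_t buffer[64];
      Region region{buffer, sizeof(buffer)};
      assert(IsInRegion(region, buffer));        // First byte: inside.
      assert(!IsInRegion(region, buffer + 64));  // One-past-the-end: outside.
      // With the old <= bound, buffer + 64 counted as inside, so a slot at
      // the start of an adjacent region could wrongly skip the barrier.
    }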

Bug: 1029379
Change-Id: If2c23ffc2c9addb7a996b6094e8666b226173466
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2066729
Commit-Queue: Anton Bikineev <bikineev@chromium.org>
Reviewed-by: Kentaro Hara <haraken@chromium.org>
Cr-Commit-Position: refs/heads/master@{#743678}
parent c5524dd6
@@ -641,8 +641,8 @@ Address ThreadHeap::Allocate(size_t size) {
 
 inline bool ThreadHeap::IsInLastAllocatedRegion(Address address) const {
   return last_allocated_region_.start <= address &&
-         address <=
-             last_allocated_region_.start + last_allocated_region_.length;
+         address <
+             (last_allocated_region_.start + last_allocated_region_.length);
 }
 
 inline void ThreadHeap::SetLastAllocatedRegion(Address start, size_t length) {
@@ -184,9 +184,9 @@ void MarkingVisitor::GenerationalBarrierSlow(Address slot,
 
   if (UNLIKELY(slot_page->IsLargeObjectPage())) {
     auto* large_page = static_cast<LargeObjectPage*>(slot_page);
-    if (LIKELY(!large_page->ObjectHeader()->IsMarked()))
-      return;
-    large_page->SetRemembered(true);
+    if (UNLIKELY(large_page->ObjectHeader()->IsMarked())) {
+      large_page->SetRemembered(true);
+    }
     return;
   }
 
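
For a large-object page the remembered bit is kept per page. After the rewrite above, the bit is set only when the source object is already marked, i.e. an old object may now point at a young one; an unmarked (young) source needs no record because the minor GC scans it anyway. A hedged sketch of that intent, using invented stand-in names rather than Blink's LargeObjectPage API:

    // Invented stand-in type; Blink's LargeObjectPage API differs.
    struct LargePageSketch {
      bool object_marked = false;  // Stand-in for ObjectHeader()->IsMarked().
      bool remembered = false;

      void GenerationalBarrierSketch() {
        if (object_marked) {
          // Old (marked) object storing a pointer: remember the page so the
          // next minor GC revisits it for old-to-young edges.
          remembered = true;
        }
        // An unmarked source is young and will be traced by the minor GC;
        // the barrier returns without recording anything.
      }
    };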
@@ -194,6 +194,7 @@ void MarkingVisitor::GenerationalBarrierSlow(Address slot,
   const HeapObjectHeader* source_header = reinterpret_cast<HeapObjectHeader*>(
       normal_page->object_start_bit_map()->FindHeader(slot));
   DCHECK_LT(0u, source_header->GcInfoIndex());
+  DCHECK_GT(source_header->PayloadEnd(), slot);
   if (UNLIKELY(source_header->IsMarked())) {
     normal_page->MarkCard(slot);
   }
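
On a normal page the remembered set is finer-grained: MarkCard flags the card containing the slot, again only when the source object is already marked, and the new DCHECK additionally asserts that the slot really lies within the source object's payload. A generic card-table sketch follows; the sizes and names are assumptions for illustration, not Blink's implementation.

    #include <bitset>
    #include <cstddef>
    #include <cstdint>

    // Assumed illustrative geometry; the real page/card sizes differ.
    constexpr size_t kPageSize = size_t{1} << 17;  // 128 KiB page.
    constexpr size_t kCardSize = size_t{1} << 9;   // 512-byte cards.

    struct CardTableSketch {
      std::bitset<kPageSize / kCardSize> dirty;

      // O(1) remembered-set insert: flag the card covering the written slot.
      void MarkCard(uintptr_t page_start, uintptr_t slot) {
        dirty.set((slot - page_start) / kCardSize);
      }
    };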
@@ -177,7 +177,7 @@ class PLATFORM_EXPORT MarkingVisitor
 
   template <typename T>
   static bool WriteBarrier(T** slot);
-  static bool GenerationalBarrier(Address slot, ThreadState* state);
+  static void GenerationalBarrier(Address slot, ThreadState* state);
 
   // Eagerly traces an already marked backing store ensuring that all its
   // children are discovered by the marker. The barrier bails out if marking
@@ -227,6 +227,7 @@ ALWAYS_INLINE bool MarkingVisitor::WriteBarrier(T** slot) {
 
   // Dijkstra barrier if concurrent marking is in progress.
   BasePage* value_page = PageFromObject(value);
   ThreadState* thread_state = value_page->thread_state();
+
   if (UNLIKELY(thread_state->IsIncrementalMarking()))
     return MarkValue(value, value_page, thread_state);
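
The surrounding fast path dispatches between two barriers: while incremental marking runs, a Dijkstra-style barrier marks the stored value so concurrent marking cannot lose it; otherwise only the generational barrier records potential old-to-young edges. A condensed sketch of that dispatch, with simplified, assumed signatures:

    // Simplified stand-ins for ThreadState, MarkValue and GenerationalBarrier.
    struct ThreadStateSketch {
      bool incremental_marking = false;
    };

    bool MarkValueSketch(void* value) { return value != nullptr; }
    void GenerationalBarrierSketch(void**, ThreadStateSketch*) {}

    // Returns true iff the value was handled by the marking barrier.
    bool WriteBarrierSketch(void** slot, ThreadStateSketch* state) {
      if (state->incremental_marking) {
        // Dijkstra barrier: mark the newly stored value to preserve the
        // marking invariant under concurrent mutation.
        return MarkValueSketch(*slot);
      }
      // No marking in progress: only track old-to-young pointers.
      GenerationalBarrierSketch(slot, state);
      return false;
    }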
@@ -243,14 +244,14 @@ ALWAYS_INLINE bool MarkingVisitor::WriteBarrier(T** slot) {
 }
 // static
-ALWAYS_INLINE bool MarkingVisitor::GenerationalBarrier(Address slot,
+ALWAYS_INLINE void MarkingVisitor::GenerationalBarrier(Address slot,
                                                        ThreadState* state) {
   // First, check if the source object is in the last allocated region of heap.
   if (LIKELY(state->Heap().IsInLastAllocatedRegion(slot)))
-    return false;
+    return;
+
   if (UNLIKELY(state->IsOnStack(slot)))
-    return false;
+    return;
   GenerationalBarrierSlow(slot, state);
-  return false;
 }
 
 // static
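
Every path through GenerationalBarrier returned the constant false, so the bool result carried no information; making it void pushes the constant to the caller. A hypothetical condensed call-site comparison:

    // Hypothetical condensed call site in WriteBarrier.
    // Before: the meaningless bool was forwarded.
    //   return GenerationalBarrier(reinterpret_cast<Address>(slot), state);
    // After: the barrier is fire-and-forget; the caller states its own result.
    //   GenerationalBarrier(reinterpret_cast<Address>(slot), state);
    //   return false;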