Commit 4024aad3 authored by Michael Lippautz, committed by Commit Bot

[oilpan] Split write barrier in fast and slow parts

Split write barrier into fast and slow parts:
- The fast part only checks whether any Oilpan heap is currently marking. The
  check is approximate, as it does not consider the current ThreadState; in the
  common case it is enough to bail out of the barrier early.
- The slow part checks whether the current ThreadState is marking and whether
  the value actually requires a write barrier.

This way we emit only a short instruction sequence for the fast case and avoid
polluting the regular instruction sequences with the slow path.
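
In essence, the split looks like the following sketch (illustrative only, not
the Blink code: g_any_thread_marking, t_this_thread_marking, MarkValue, and
Barrier are hypothetical stand-ins for incremental_marking_counter_,
ThreadState::IsIncrementalMarking(), the heap's WriteBarrier(), and
MarkingVisitor::WriteBarrier()):

  #include <atomic>
  #include <cstdio>

  // Hypothetical global counter: non-zero while any heap is marking.
  std::atomic<int> g_any_thread_marking{0};
  // Hypothetical per-thread flag standing in for the ThreadState check.
  thread_local bool t_this_thread_marking = false;

  void MarkValue(void* value) { std::printf("marking %p\n", value); }

  // Slow part, kept out of line (GCC/Clang attribute) so that the inlined
  // fast part stays a handful of instructions.
  __attribute__((noinline)) void BarrierSlow(void* value) {
    // Exact checks: null/sentinel values and the current thread's state.
    if (!value || !t_this_thread_marking)
      return;
    MarkValue(value);
  }

  // Fast part, inlined at every store: one relaxed load and a predictable
  // branch when no heap is marking, which is the common case.
  inline void Barrier(void* value) {
    if (g_any_thread_marking.load(std::memory_order_relaxed) == 0)
      return;  // approximate: does not consult the current thread's state
    BarrierSlow(value);
  }

  int main() {
    int object = 0;
    Barrier(&object);                    // fast bail-out: nothing is marking
    g_any_thread_marking.fetch_add(1);   // some heap starts marking
    t_this_thread_marking = true;
    Barrier(&object);                    // reaches the slow part and marks
  }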

Verified locally on the microbenchmark blink_perf.parser query-selector-deep,
which had shown a 42% regression. Scores (higher is better):
- ToT: 8932
- Without barrier: 15188
- With this CL: 13352

Bug: chromium:844576, chromium:757440
Change-Id: Ie8ebbf95fef0ff59ad8f1a111dd5daecfabc4109
Reviewed-on: https://chromium-review.googlesource.com/1071272
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Kentaro Hara <haraken@chromium.org>
Reviewed-by: Keishi Hattori <keishi@chromium.org>
Cr-Commit-Position: refs/heads/master@{#561424}
parent 79573b37
@@ -9,6 +9,14 @@
 namespace blink {
 
+namespace {
+
+ALWAYS_INLINE bool IsHashTableDeleteValue(const void* value) {
+  return value == reinterpret_cast<void*>(-1);
+}
+
+}  // namespace
+
 std::unique_ptr<MarkingVisitor> MarkingVisitor::Create(ThreadState* state,
                                                        MarkingMode mode) {
   return std::make_unique<MarkingVisitor>(state, mode);
@@ -132,4 +140,38 @@ bool MarkingVisitor::RegisterWeakTable(const void* closure,
   return true;
 }
 
+void MarkingVisitor::WriteBarrierSlow(void* value) {
+  if (!value || IsHashTableDeleteValue(value))
+    return;
+
+  ThreadState* const thread_state = ThreadState::Current();
+  if (!thread_state->IsIncrementalMarking())
+    return;
+
+  thread_state->Heap().WriteBarrier(value);
+}
+
+void MarkingVisitor::TraceMarkedBackingStoreSlow(void* value) {
+  if (!value)
+    return;
+
+  ThreadState* const thread_state = ThreadState::Current();
+  if (!thread_state->IsIncrementalMarking())
+    return;
+
+  // |value| is pointing to the start of a backing store.
+  HeapObjectHeader* header = HeapObjectHeader::FromPayload(value);
+  CHECK(header->IsMarked());
+  DCHECK(thread_state->CurrentVisitor());
+  // This check ensures that the visitor will not eagerly recurse into children
+  // but rather push all blink::GarbageCollected objects and only eagerly trace
+  // non-managed objects.
+  DCHECK(!thread_state->Heap().GetStackFrameDepth().IsEnabled());
+  // No weak handling for write barriers. Modifying weakly reachable objects
+  // strongifies them for the current cycle.
+  GCInfoTable::Get()
+      .GCInfoFromIndex(header->GcInfoIndex())
+      ->trace_(thread_state->CurrentVisitor(), value);
+}
+
 }  // namespace blink
@@ -37,14 +37,14 @@ class PLATFORM_EXPORT MarkingVisitor final : public Visitor {
 
   // Write barrier that adds |value| to the set of marked objects. The barrier
   // bails out if marking is off or the object is not yet marked.
-  inline static void WriteBarrier(void* value);
+  ALWAYS_INLINE static void WriteBarrier(void* value);
 
   // Eagerly traces an already marked backing store ensuring that all its
   // children are discovered by the marker. The barrier bails out if marking
   // is off and on individual objects reachable if they are already marked. The
   // barrier uses the callback function through GcInfo, so it will not inline
   // any templated type-specific code.
-  inline static void TraceMarkedBackingStore(void* value);
+  ALWAYS_INLINE static void TraceMarkedBackingStore(void* value);
 
   MarkingVisitor(ThreadState*, MarkingMode);
   ~MarkingVisitor() override;
@@ -157,6 +157,10 @@ class PLATFORM_EXPORT MarkingVisitor final : public Visitor {
                                  const ScriptWrappable* key) final {}
 
  private:
+  // Exact version of the marking write barriers.
+  static void WriteBarrierSlow(void*);
+  static void TraceMarkedBackingStoreSlow(void*);
+
   void RegisterBackingStoreReference(void* slot);
   void ConservativelyMarkHeader(HeapObjectHeader*);
@@ -191,41 +195,25 @@ inline void MarkingVisitor::MarkHeader(HeapObjectHeader* header,
   }
 }
 
-inline void MarkingVisitor::WriteBarrier(void* value) {
+ALWAYS_INLINE void MarkingVisitor::WriteBarrier(void* value) {
 #if BUILDFLAG(BLINK_HEAP_INCREMENTAL_MARKING)
-  if (!ThreadState::IsAnyIncrementalMarking() || !value)
-    return;
-
-  ThreadState* const thread_state = ThreadState::Current();
-  if (!thread_state->IsIncrementalMarking())
+  if (!ThreadState::IsAnyIncrementalMarking())
     return;
 
-  thread_state->Heap().WriteBarrier(value);
+  // Avoid any further checks and dispatch to a call at this point. Aggressive
+  // inlining otherwise pollutes the regular execution paths.
+  WriteBarrierSlow(value);
 #endif  // BUILDFLAG(BLINK_HEAP_INCREMENTAL_MARKING)
 }
 
-inline void MarkingVisitor::TraceMarkedBackingStore(void* value) {
+ALWAYS_INLINE void MarkingVisitor::TraceMarkedBackingStore(void* value) {
 #if BUILDFLAG(BLINK_HEAP_INCREMENTAL_MARKING)
-  if (!ThreadState::IsAnyIncrementalMarking() || !value)
-    return;
-
-  ThreadState* const thread_state = ThreadState::Current();
-  if (!thread_state->IsIncrementalMarking())
+  if (!ThreadState::IsAnyIncrementalMarking())
     return;
 
-  // |value| is pointing to the start of a backing store.
-  HeapObjectHeader* header = HeapObjectHeader::FromPayload(value);
-  CHECK(header->IsMarked());
-  DCHECK(thread_state->CurrentVisitor());
-  // This check ensures that the visitor will not eagerly recurse into children
-  // but rather push all blink::GarbageCollected objects and only eagerly trace
-  // non-managed objects.
-  DCHECK(!thread_state->Heap().GetStackFrameDepth().IsEnabled());
-  // No weak handling for write barriers. Modifying weakly reachable objects
-  // strongifies them for the current cycle.
-  GCInfoTable::Get()
-      .GCInfoFromIndex(header->GcInfoIndex())
-      ->trace_(thread_state->CurrentVisitor(), value);
+  // Avoid any further checks and dispatch to a call at this point. Aggressive
+  // inlining otherwise pollutes the regular execution paths.
+  TraceMarkedBackingStoreSlow(value);
 #endif  // BUILDFLAG(BLINK_HEAP_INCREMENTAL_MARKING)
 }
@@ -216,47 +216,47 @@ class Member : public MemberBase<T, TracenessMemberConfiguration::kTraced> {
   }
 
   Member(WTF::HashTableDeletedValueType x) : Parent(x) {}
 
-  Member(const Member& other) : Parent(other) { WriteBarrier(this->raw_); }
+  Member(const Member& other) : Parent(other) { WriteBarrier(); }
 
   template <typename U>
   Member(const Member<U>& other) : Parent(other) {
-    WriteBarrier(this->raw_);
+    WriteBarrier();
   }
 
   template <typename U>
   Member(const Persistent<U>& other) : Parent(other) {
-    WriteBarrier(this->raw_);
+    WriteBarrier();
   }
 
   template <typename U>
   Member& operator=(const Persistent<U>& other) {
     Parent::operator=(other);
-    WriteBarrier(this->raw_);
+    WriteBarrier();
     return *this;
   }
 
   Member& operator=(const Member& other) {
     Parent::operator=(other);
-    WriteBarrier(this->raw_);
+    WriteBarrier();
     return *this;
   }
 
   template <typename U>
   Member& operator=(const Member<U>& other) {
     Parent::operator=(other);
-    WriteBarrier(this->raw_);
+    WriteBarrier();
     return *this;
   }
 
   template <typename U>
   Member& operator=(const WeakMember<U>& other) {
     Parent::operator=(other);
-    WriteBarrier(this->raw_);
+    WriteBarrier();
     return *this;
   }
 
   template <typename U>
   Member& operator=(U* other) {
     Parent::operator=(other);
-    WriteBarrier(this->raw_);
+    WriteBarrier();
     return *this;
   }
@@ -271,12 +271,10 @@ class Member : public MemberBase<T, TracenessMemberConfiguration::kTraced> {
   }
 
  protected:
-  ALWAYS_INLINE void WriteBarrier(T* value) const {
+  ALWAYS_INLINE void WriteBarrier() const {
 #if BUILDFLAG(BLINK_HEAP_INCREMENTAL_MARKING)
-    if (LIKELY(!this->IsHashTableDeletedValue())) {
-      MarkingVisitor::WriteBarrier(
-          const_cast<typename std::remove_const<T>::type*>(value));
-    }
+    MarkingVisitor::WriteBarrier(
+        const_cast<typename std::remove_const<T>::type*>(this->raw_));
 #endif  // BUILDFLAG(BLINK_HEAP_INCREMENTAL_MARKING)
   }
@@ -604,13 +602,13 @@ class ConstructTraits<blink::Member<T>, Traits, Allocator> {
                                             Args&&... args) {
     blink::Member<T>* object =
         new (NotNull, location) blink::Member<T>(std::forward<Args>(args)...);
-    object->WriteBarrier(object->raw_);
+    object->WriteBarrier();
     return object;
   }
 
   static void NotifyNewElements(blink::Member<T>* array, size_t len) {
     while (len-- > 0) {
-      array->WriteBarrier(array->raw_);
+      array->WriteBarrier();
       array++;
     }
   }
@@ -220,7 +220,7 @@ class PLATFORM_EXPORT ThreadState {
   // Returns true if any thread is currently incremental marking its heap and
   // false otherwise. For an exact check use
   // ThreadState::IsIncrementalMarking().
-  static bool IsAnyIncrementalMarking() {
+  ALWAYS_INLINE static bool IsAnyIncrementalMarking() {
     // Stores use full barrier to allow using the simplest relaxed load here.
     return base::subtle::NoBarrier_Load(&incremental_marking_counter_) > 0;
   }
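
For reference, the store/load pairing that the comment above relies on can be
sketched with std::atomic (hypothetical names; Blink itself uses base::subtle
atomics on a private incremental_marking_counter_):

  #include <atomic>

  // Hypothetical stand-in for ThreadState's marking counter.
  std::atomic<int> g_marking_count{0};

  // Writers are rare (marking starts/stops) and use sequentially consistent
  // read-modify-writes, i.e. full barriers.
  void OnMarkingStarted() { g_marking_count.fetch_add(1); }
  void OnMarkingFinished() { g_marking_count.fetch_sub(1); }

  // The hot read can therefore be relaxed: a slightly stale answer is safe
  // because the slow path re-checks the exact per-thread state anyway.
  bool IsAnyIncrementalMarking() {
    return g_marking_count.load(std::memory_order_relaxed) > 0;
  }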