Commit 0f1d1346 authored by Vlad Tsyrklevich, committed by Commit Bot

SafeStack: Make oilpan unsafe stack aware

SafeStack [1] introduces a secondary thread stack called the unsafe
stack that also needs to be scanned for object references. Introduce
matching logic in oilpan to scan the unsafe stack for heap references.

[1] https://clang.llvm.org/docs/SafeStack.html
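
For context, here is a minimal sketch (not part of this change) of how the live
portion of the unsafe stack can be located and conservatively scanned with
Clang's SafeStack builtins, assuming a -fsanitize=safe-stack build. The
ForEachUnsafeStackWord helper and its visit_word callback are illustrative
names only; the __builtin___get_unsafe_stack_* intrinsics are the ones this
change relies on.

  // Hypothetical sketch, not taken from this CL; requires -fsanitize=safe-stack.
  #include <cstdint>

  #ifndef __has_feature
  #define __has_feature(x) 0  // Portability shim for non-Clang compilers.
  #endif

  #if __has_feature(safe_stack)
  // Walk every word of the live region of the current thread's unsafe stack
  // and hand it to a caller-supplied callback, mirroring the conservative
  // scan that oilpan performs in ThreadState::VisitStack below.
  void ForEachUnsafeStackWord(void (*visit_word)(uintptr_t)) {
    // The unsafe stack grows downwards: __builtin___get_unsafe_stack_top()
    // returns its highest address, and the current unsafe stack pointer
    // marks the lowest slot that is still live.
    auto* top = static_cast<uintptr_t*>(__builtin___get_unsafe_stack_top());
    auto* cur = static_cast<uintptr_t*>(__builtin___get_unsafe_stack_ptr());
    for (; cur < top; ++cur)
      visit_word(*cur);  // Treat every word as a potential heap pointer.
  }
  #endif

The change below does the same walk in ThreadState::VisitStack and, when a
thread parks at a safepoint, records __builtin___get_unsafe_stack_ptr() as the
end of the unsafe stack and copies the region in CopyStackUntilSafePointScope.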

Bug: 864705
Change-Id: I376077bd985e2077aa3771101c1822e1570c7807
Reviewed-on: https://chromium-review.googlesource.com/1169772
Reviewed-by: Kentaro Hara <haraken@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Vlad Tsyrklevich <vtsyrklevich@chromium.org>
Cr-Commit-Position: refs/heads/master@{#583056}
parent 54e2583f
@@ -169,6 +169,13 @@ ThreadState::ThreadState()
start_of_stack_(reinterpret_cast<intptr_t*>(WTF::GetStackStart())),
end_of_stack_(reinterpret_cast<intptr_t*>(WTF::GetStackStart())),
safe_point_scope_marker_(nullptr),
#if HAS_FEATURE(safe_stack)
start_of_unsafe_stack_(
reinterpret_cast<intptr_t*>(__builtin___get_unsafe_stack_top())),
end_of_unsafe_stack_(
reinterpret_cast<intptr_t*>(__builtin___get_unsafe_stack_bottom())),
safe_point_scope_unsafe_marker_(nullptr),
#endif
sweep_forbidden_(false),
no_allocation_count_(0),
gc_forbidden_count_(0),
@@ -357,6 +364,21 @@ void ThreadState::VisitStack(MarkingVisitor* visitor) {
VisitAsanFakeStackForPointer(visitor, ptr);
}
#if HAS_FEATURE(safe_stack)
start = reinterpret_cast<Address*>(start_of_unsafe_stack_);
end = reinterpret_cast<Address*>(end_of_unsafe_stack_);
safe_point_scope_marker =
reinterpret_cast<Address*>(safe_point_scope_unsafe_marker_);
current = safe_point_scope_marker ? safe_point_scope_marker : end;
for (; current < start; ++current) {
Address ptr = *current;
// SafeStack and MSan are not compatible.
heap_->CheckAndMarkPointer(visitor, ptr);
VisitAsanFakeStackForPointer(visitor, ptr);
}
#endif
for (Address ptr : safe_point_stack_copy_) {
#if defined(MEMORY_SANITIZER)
// See the comment above.
@@ -1268,6 +1290,10 @@ static void EnterSafePointAfterPushRegisters(void*,
ThreadState* state,
intptr_t* stack_end) {
state->RecordStackEnd(stack_end);
#if HAS_FEATURE(safe_stack)
state->RecordUnsafeStackEnd(
reinterpret_cast<intptr_t*>(__builtin___get_unsafe_stack_ptr()));
#endif
state->CopyStackUntilSafePointScope();
}
@@ -1281,7 +1307,12 @@ void ThreadState::EnterSafePoint(BlinkGC::StackState stack_state,
DCHECK(stack_state == BlinkGC::kNoHeapPointersOnStack || scope_marker);
DCHECK(IsGCForbidden());
stack_state_ = stack_state;
#if HAS_FEATURE(safe_stack)
safe_point_scope_marker_ = __builtin_frame_address(0);
safe_point_scope_unsafe_marker_ = scope_marker;
#else
safe_point_scope_marker_ = scope_marker;
#endif
PushAllRegisters(nullptr, this, EnterSafePointAfterPushRegisters);
}
@@ -1325,20 +1356,35 @@ void ThreadState::CopyStackUntilSafePointScope() {
CHECK_LT(from, to);
CHECK_LE(to, reinterpret_cast<Address*>(start_of_stack_));
size_t slot_count = static_cast<size_t>(to - from);
#if HAS_FEATURE(safe_stack)
Address* unsafe_to =
reinterpret_cast<Address*>(safe_point_scope_unsafe_marker_);
Address* unsafe_from = reinterpret_cast<Address*>(end_of_unsafe_stack_);
CHECK_LE(unsafe_from, unsafe_to);
CHECK_LE(unsafe_to, reinterpret_cast<Address*>(start_of_unsafe_stack_));
size_t unsafe_slot_count = static_cast<size_t>(unsafe_to - unsafe_from);
#else
constexpr size_t unsafe_slot_count = 0;
#endif
// Catch potential performance issues.
#if defined(LEAK_SANITIZER) || defined(ADDRESS_SANITIZER)
// ASan/LSan use more space on the stack and we therefore
// increase the allowed stack copying for those builds.
-  DCHECK_LT(slot_count, 2048u);
DCHECK_LT(slot_count + unsafe_slot_count, 2048u);
#else
-  DCHECK_LT(slot_count, 1024u);
DCHECK_LT(slot_count + unsafe_slot_count, 1024u);
#endif
DCHECK(!safe_point_stack_copy_.size());
-  safe_point_stack_copy_.resize(slot_count);
-  for (size_t i = 0; i < slot_count; ++i) {
safe_point_stack_copy_.resize(slot_count + unsafe_slot_count);
for (size_t i = 0; i < slot_count; ++i)
safe_point_stack_copy_[i] = from[i];
-  }
#if HAS_FEATURE(safe_stack)
for (size_t i = 0; i < unsafe_slot_count; ++i)
safe_point_stack_copy_[slot_count + i] = unsafe_from[i];
#endif
}
void ThreadState::RegisterStaticPersistentNode(
@@ -438,6 +438,11 @@ class PLATFORM_EXPORT ThreadState final
void LeaveSafePoint();
void RecordStackEnd(intptr_t* end_of_stack) { end_of_stack_ = end_of_stack; }
#if HAS_FEATURE(safe_stack)
void RecordUnsafeStackEnd(intptr_t* end_of_unsafe_stack) {
end_of_unsafe_stack_ = end_of_unsafe_stack;
}
#endif
NO_SANITIZE_ADDRESS void CopyStackUntilSafePointScope();
// A region of non-weak PersistentNodes allocated on the given thread.
@@ -621,6 +626,9 @@ class PLATFORM_EXPORT ThreadState final
void ClearSafePointScopeMarker() {
safe_point_stack_copy_.clear();
safe_point_scope_marker_ = nullptr;
#if HAS_FEATURE(safe_stack)
safe_point_scope_unsafe_marker_ = nullptr;
#endif
}
bool ShouldVerifyMarking() const;
@@ -707,8 +715,14 @@ class PLATFORM_EXPORT ThreadState final
BlinkGC::StackState stack_state_;
intptr_t* start_of_stack_;
intptr_t* end_of_stack_;
void* safe_point_scope_marker_;
#if HAS_FEATURE(safe_stack)
intptr_t* start_of_unsafe_stack_;
intptr_t* end_of_unsafe_stack_;
void* safe_point_scope_unsafe_marker_;
#endif
Vector<Address> safe_point_stack_copy_;
bool sweep_forbidden_;
size_t no_allocation_count_;