Commit 66501e94 authored by Omer Katz's avatar Omer Katz Committed by Commit Bot

heap: Resolve data races in LinkedHashSetNode with prev_ and next_

This is a followup to CL 2012961:
https://chromium-review.googlesource.com/c/chromium/src/+/2012961

Loading whole keys atomically resulted in data races involving the
fields prev_ and next_ (which were not accessed concurrently before).

This CL takes the same approach as was used for ListHashSetNode
(https://chromium-review.googlesource.com/c/chromium/src/+/1984308)
and wraps prev_ and next_ with atomic writes.
This CL already includes the optimizations added to ListHashSetNode in
https://chromium-review.googlesource.com/c/chromium/src/+/1995175.

Bug: 986235
Change-Id: I4ef881ce181af25e53ccb51169b199c3315c7132
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2012345
Reviewed-by: Kentaro Hara <haraken@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#739240}
parent 616862e8
......@@ -181,28 +181,6 @@ static_assert(
namespace internal {
// This is needed due to asan complaining deep from std::atomic<>::load/store
// stacktraces.
// RAII helper that temporarily lifts ASan poisoning from a memory region so
// it can be accessed, and restores the poisoned state on scope exit if (and
// only if) the region was poisoned on entry.
class AsanUnpoisonScope {
public:
// addr/size describe the region to make accessible for the scope's lifetime.
AsanUnpoisonScope(const void* addr, size_t size)
: addr_(addr), size_(size), was_poisoned_(false) {
// Nothing to do (or to restore later) if the region is not poisoned.
if (!ASAN_REGION_IS_POISONED(const_cast<void*>(addr_), size_))
return;
ASAN_UNPOISON_MEMORY_REGION(addr_, size_);
was_poisoned_ = true;
}
~AsanUnpoisonScope() {
// Re-establish the original poisoned state.
if (was_poisoned_)
ASAN_POISON_MEMORY_REGION(addr_, size_);
}
private:
const void* addr_;
size_t size_;
bool was_poisoned_;
};
NO_SANITIZE_ADDRESS constexpr uint16_t EncodeSize(size_t size) {
// Essentially, gets optimized to >> 1.
return static_cast<uint16_t>((size << kHeaderSizeShift) /
......@@ -1158,8 +1136,8 @@ NO_SANITIZE_ADDRESS inline size_t HeapObjectHeader::size() const {
// mode == AccessMode::kAtomic
// Relaxed load as size is immutable after construction while either
// marking or sweeping is running
internal::AsanUnpoisonScope unpoison_scope(
static_cast<const void*>(&encoded_low_), sizeof(encoded_low_));
AsanUnpoisonScope unpoison_scope(static_cast<const void*>(&encoded_low_),
sizeof(encoded_low_));
encoded_low_value =
WTF::AsAtomicPtr(&encoded_low_)->load(std::memory_order_relaxed);
}
......@@ -1184,8 +1162,8 @@ NO_SANITIZE_ADDRESS inline bool HeapObjectHeader::IsLargeObject() const {
if (mode == AccessMode::kNonAtomic) {
encoded_low_value = encoded_low_;
} else {
internal::AsanUnpoisonScope unpoison_scope(
static_cast<const void*>(&encoded_low_), sizeof(encoded_low_));
AsanUnpoisonScope unpoison_scope(static_cast<const void*>(&encoded_low_),
sizeof(encoded_low_));
encoded_low_value =
WTF::AsAtomicPtr(&encoded_low_)->load(std::memory_order_relaxed);
}
......@@ -1254,8 +1232,8 @@ NO_SANITIZE_ADDRESS inline bool HeapObjectHeader::TryMark() {
encoded_low_ |= kHeaderMarkBitMask;
return true;
}
internal::AsanUnpoisonScope unpoison_scope(
static_cast<const void*>(&encoded_low_), sizeof(encoded_low_));
AsanUnpoisonScope unpoison_scope(static_cast<const void*>(&encoded_low_),
sizeof(encoded_low_));
auto* atomic_encoded = WTF::AsAtomicPtr(&encoded_low_);
uint16_t old_value = atomic_encoded->load(std::memory_order_relaxed);
if (old_value & kHeaderMarkBitMask)
......@@ -1394,8 +1372,8 @@ template <HeapObjectHeader::AccessMode mode, HeapObjectHeader::EncodedHalf part>
NO_SANITIZE_ADDRESS inline uint16_t HeapObjectHeader::LoadEncoded() const {
const uint16_t& half =
(part == EncodedHalf::kLow ? encoded_low_ : encoded_high_);
internal::AsanUnpoisonScope unpoison_scope(static_cast<const void*>(&half),
sizeof(half));
AsanUnpoisonScope unpoison_scope(static_cast<const void*>(&half),
sizeof(half));
if (mode == AccessMode::kNonAtomic)
return half;
return WTF::AsAtomicPtr(&half)->load(std::memory_order_acquire);
......@@ -1408,8 +1386,7 @@ NO_SANITIZE_ADDRESS inline void HeapObjectHeader::StoreEncoded(uint16_t bits,
uint16_t mask) {
DCHECK_EQ(static_cast<uint16_t>(0u), bits & ~mask);
uint16_t* half = (part == EncodedHalf::kLow ? &encoded_low_ : &encoded_high_);
internal::AsanUnpoisonScope unpoison_scope(static_cast<void*>(half),
sizeof(&half));
AsanUnpoisonScope unpoison_scope(static_cast<void*>(half), sizeof(&half));
if (mode == AccessMode::kNonAtomic) {
*half = (*half & ~mask) | bits;
return;
......
......@@ -57,12 +57,47 @@ class LinkedHashSetReverseIterator;
template <typename LinkedHashSet>
class LinkedHashSetConstReverseIterator;
template <typename Value, typename HashFunctions, typename Allocator>
template <typename Value, typename HashFunctions>
struct LinkedHashSetTranslator;
template <typename Value, typename Allocator>
template <typename Value>
struct LinkedHashSetExtractor;
template <typename Value, typename ValueTraits, typename Allocator>
struct LinkedHashSetTraits;
class LinkedHashSetNodeBase;
class LinkedHashSetNodeBasePointer {
public:
LinkedHashSetNodeBasePointer(LinkedHashSetNodeBase* node) : node_(node) {}
LinkedHashSetNodeBasePointer& operator=(
const LinkedHashSetNodeBasePointer& other) {
SetSafe(other);
return *this;
}
LinkedHashSetNodeBasePointer& operator=(LinkedHashSetNodeBase* other) {
SetSafe(other);
return *this;
}
LinkedHashSetNodeBasePointer& operator=(std::nullptr_t) {
SetSafe(nullptr);
return *this;
}
LinkedHashSetNodeBase* Get() const { return node_; }
explicit operator bool() const { return Get(); }
operator LinkedHashSetNodeBase*() const { return Get(); }
LinkedHashSetNodeBase* operator->() const { return Get(); }
LinkedHashSetNodeBase& operator*() const { return *Get(); }
private:
void SetSafe(LinkedHashSetNodeBase* node) {
AsAtomicPtr(&node_)->store(node, std::memory_order_relaxed);
}
LinkedHashSetNodeBase* node_ = nullptr;
};
class LinkedHashSetNodeBase {
DISALLOW_NEW();
......@@ -75,10 +110,16 @@ class LinkedHashSetNodeBase {
if (!next_)
return;
DCHECK(prev_);
DCHECK(next_->prev_ == this);
DCHECK(prev_->next_ == this);
next_->prev_ = prev_;
prev_->next_ = next_;
{
AsanUnpoisonScope unpoison_scope(next_, sizeof(LinkedHashSetNodeBase));
DCHECK(next_->prev_ == this);
next_->prev_ = prev_;
}
{
AsanUnpoisonScope unpoison_scope(prev_, sizeof(LinkedHashSetNodeBase));
DCHECK(prev_->next_ == this);
prev_->next_ = next_;
}
}
~LinkedHashSetNodeBase() { Unlink(); }
......@@ -111,8 +152,8 @@ class LinkedHashSetNodeBase {
DCHECK((prev && next) || (!prev && !next));
}
LinkedHashSetNodeBase* prev_;
LinkedHashSetNodeBase* next_;
LinkedHashSetNodeBasePointer prev_;
LinkedHashSetNodeBasePointer next_;
protected:
// If we take a copy of a node we can't copy the next and prev pointers,
......@@ -177,8 +218,7 @@ class LinkedHashSet {
typedef TraitsArg Traits;
typedef LinkedHashSetNode<Value> Node;
typedef LinkedHashSetNodeBase NodeBase;
typedef LinkedHashSetTranslator<Value, HashFunctions, Allocator>
NodeHashFunctions;
typedef LinkedHashSetTranslator<Value, HashFunctions> NodeHashFunctions;
typedef LinkedHashSetTraits<Value, Traits, Allocator> NodeHashTraits;
typedef HashTable<Node,
......@@ -345,13 +385,13 @@ class LinkedHashSet {
private:
Node* Anchor() { return reinterpret_cast<Node*>(&anchor_); }
const Node* Anchor() const { return reinterpret_cast<const Node*>(&anchor_); }
Node* FirstNode() { return reinterpret_cast<Node*>(anchor_.next_); }
Node* FirstNode() { return reinterpret_cast<Node*>(anchor_.next_.Get()); }
const Node* FirstNode() const {
return reinterpret_cast<const Node*>(anchor_.next_);
return reinterpret_cast<const Node*>(anchor_.next_.Get());
}
Node* LastNode() { return reinterpret_cast<Node*>(anchor_.prev_); }
Node* LastNode() { return reinterpret_cast<Node*>(anchor_.prev_.Get()); }
const Node* LastNode() const {
return reinterpret_cast<const Node*>(anchor_.prev_);
return reinterpret_cast<const Node*>(anchor_.prev_.Get());
}
iterator MakeIterator(const Node* position) {
......@@ -371,7 +411,7 @@ class LinkedHashSet {
NodeBase anchor_;
};
template <typename Value, typename HashFunctions, typename Allocator>
template <typename Value, typename HashFunctions>
struct LinkedHashSetTranslator {
STATIC_ONLY(LinkedHashSetTranslator);
typedef LinkedHashSetNode<Value> Node;
......@@ -404,7 +444,7 @@ struct LinkedHashSetTranslator {
static const bool safe_to_compare_to_empty_or_deleted = false;
};
template <typename Value, typename Allocator>
template <typename Value>
struct LinkedHashSetExtractor {
STATIC_ONLY(LinkedHashSetExtractor);
static const Value& Extract(const LinkedHashSetNode<Value>& node) {
......@@ -473,7 +513,7 @@ struct LinkedHashSetTraits
if (HashTable::IsEmptyOrDeletedBucket(node))
continue;
if (node.next_ >= from_start && node.next_ < from_end) {
const size_t diff = reinterpret_cast<uintptr_t>(node.next_) -
const size_t diff = reinterpret_cast<uintptr_t>(node.next_.Get()) -
reinterpret_cast<uintptr_t>(from);
node.next_ =
reinterpret_cast<NodeBase*>(reinterpret_cast<uintptr_t>(to) + diff);
......@@ -482,7 +522,7 @@ struct LinkedHashSetTraits
anchor_node = node.next_;
}
if (node.prev_ >= from_start && node.prev_ < from_end) {
const size_t diff = reinterpret_cast<uintptr_t>(node.prev_) -
const size_t diff = reinterpret_cast<uintptr_t>(node.prev_.Get()) -
reinterpret_cast<uintptr_t>(from);
node.prev_ =
reinterpret_cast<NodeBase*>(reinterpret_cast<uintptr_t>(to) + diff);
......@@ -499,15 +539,17 @@ struct LinkedHashSetTraits
}
{
DCHECK(anchor_node->prev_ >= from_start && anchor_node->prev_ < from_end);
const size_t diff = reinterpret_cast<uintptr_t>(anchor_node->prev_) -
reinterpret_cast<uintptr_t>(from);
const size_t diff =
reinterpret_cast<uintptr_t>(anchor_node->prev_.Get()) -
reinterpret_cast<uintptr_t>(from);
anchor_node->prev_ =
reinterpret_cast<NodeBase*>(reinterpret_cast<uintptr_t>(to) + diff);
}
{
DCHECK(anchor_node->next_ >= from_start && anchor_node->next_ < from_end);
const size_t diff = reinterpret_cast<uintptr_t>(anchor_node->next_) -
reinterpret_cast<uintptr_t>(from);
const size_t diff =
reinterpret_cast<uintptr_t>(anchor_node->next_.Get()) -
reinterpret_cast<uintptr_t>(from);
anchor_node->next_ =
reinterpret_cast<NodeBase*>(reinterpret_cast<uintptr_t>(to) + diff);
}
......@@ -806,7 +848,7 @@ inline const T& LinkedHashSet<T, U, V, W>::front() const {
template <typename T, typename U, typename V, typename W>
inline void LinkedHashSet<T, U, V, W>::RemoveFirst() {
DCHECK(!IsEmpty());
impl_.erase(static_cast<Node*>(anchor_.next_));
impl_.erase(static_cast<Node*>(anchor_.next_.Get()));
}
template <typename T, typename U, typename V, typename W>
......@@ -824,7 +866,7 @@ inline const T& LinkedHashSet<T, U, V, W>::back() const {
template <typename T, typename U, typename V, typename W>
inline void LinkedHashSet<T, U, V, W>::pop_back() {
DCHECK(!IsEmpty());
impl_.erase(static_cast<Node*>(anchor_.prev_));
impl_.erase(static_cast<Node*>(anchor_.prev_.Get()));
}
template <typename T, typename U, typename V, typename W>
......@@ -973,14 +1015,14 @@ inline void LinkedHashSet<T, U, V, W>::erase(ValuePeekInType value) {
template <typename T, typename Allocator>
inline void swap(LinkedHashSetNode<T>& a, LinkedHashSetNode<T>& b) {
typedef LinkedHashSetNodeBase Base;
// The key and value cannot be swapped atomically, and it would be
// wrong to have a GC when only one was swapped and the other still
// contained garbage (eg. from a previous use of the same slot).
// Therefore we forbid a GC until both the key and the value are
// swapped.
Allocator::EnterGCForbiddenScope();
swap(static_cast<Base&>(a), static_cast<Base&>(b));
swap(static_cast<LinkedHashSetNodeBase&>(a),
static_cast<LinkedHashSetNodeBase&>(b));
swap(a.value_, b.value_);
Allocator::LeaveGCForbiddenScope();
}
......
......@@ -11,12 +11,36 @@
#define ASAN_REGION_IS_POISONED(addr, size) \
__asan_region_is_poisoned(addr, size)
#define NO_SANITIZE_ADDRESS __attribute__((no_sanitize_address))
// Scoped helper that lifts ASan poisoning from [addr, addr + size) for the
// duration of the scope. If the region was poisoned on entry, the destructor
// poisons it again; otherwise the destructor is a no-op.
class AsanUnpoisonScope {
 public:
  AsanUnpoisonScope(const void* addr, size_t size)
      : addr_(addr), size_(size), was_poisoned_(false) {
    if (ASAN_REGION_IS_POISONED(const_cast<void*>(addr_), size_)) {
      ASAN_UNPOISON_MEMORY_REGION(addr_, size_);
      was_poisoned_ = true;
    }
  }
  ~AsanUnpoisonScope() {
    // Restore the region's original (poisoned) state.
    if (was_poisoned_)
      ASAN_POISON_MEMORY_REGION(addr_, size_);
  }

 private:
  const void* addr_;
  size_t size_;
  bool was_poisoned_;
};
#else
#define ASAN_POISON_MEMORY_REGION(addr, size) ((void)(addr), (void)(size))
#define ASAN_UNPOISON_MEMORY_REGION(addr, size) ((void)(addr), (void)(size))
#define ASAN_REGION_IS_POISONED(addr, size) \
((void)(addr), (void)(size), (void*)nullptr)
#define NO_SANITIZE_ADDRESS
// In non-ASan builds this scope is a no-op; it exists only so call sites
// compile identically with and without AddressSanitizer enabled.
class AsanUnpoisonScope {
 public:
  AsanUnpoisonScope(const void* /*addr*/, size_t /*size*/) {}
  ~AsanUnpoisonScope() = default;
};
#endif
#if defined(LEAK_SANITIZER)
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment