Commit e191013b authored by Omer Katz's avatar Omer Katz Committed by Commit Bot

heap: Resolve data race when clearing values in hash table

When deleting an entry in a hash table which is garbage collected, the
entry is zeroed out (so that stale values won't be traced). However, an
entry might be deleted while it is being traced. To avoid a data race
in such cases, during tracing garbage-collected entries should be
zeroed atomically, one size_t word at a time (if the size of the entry
is not a multiple of sizeof(size_t), the remainder of the entry can be
zeroed using memset as it is guaranteed not to hold a pointer).

Bug: 986235
Change-Id: Id1a6d2fd82fcc31a18caf13764a3488e57b23177
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1949466
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Reviewed-by: default avatarAnton Bikineev <bikineev@chromium.org>
Reviewed-by: default avatarUlan Degenbaev <ulan@chromium.org>
Reviewed-by: default avatarKentaro Hara <haraken@chromium.org>
Cr-Commit-Position: refs/heads/master@{#722494}
parent 87d33b5b
...@@ -147,6 +147,11 @@ class __thisIsHereToForceASemicolonAfterThisMacro; ...@@ -147,6 +147,11 @@ class __thisIsHereToForceASemicolonAfterThisMacro;
#define USING_FAST_MALLOC_WITH_TYPE_NAME(type) \ #define USING_FAST_MALLOC_WITH_TYPE_NAME(type) \
USING_FAST_MALLOC_INTERNAL(type, #type) USING_FAST_MALLOC_INTERNAL(type, #type)
// Views a plain object as a std::atomic of the same type so callers can
// issue atomic loads/stores on a field without changing its declared type.
// NOTE(review): assumes std::atomic<T> is layout-compatible with T; the
// standard does not guarantee this in general — relies on toolchain
// behavior, confirm against supported compilers.
template <typename T>
ALWAYS_INLINE std::atomic<T>* AsAtomicPtr(T* t) {
  auto* atomic_view = reinterpret_cast<std::atomic<T>*>(t);
  return atomic_view;
}
} // namespace WTF } // namespace WTF
// This version of placement new omits a 0 check. // This version of placement new omits a 0 check.
......
...@@ -92,13 +92,6 @@ ...@@ -92,13 +92,6 @@
#endif #endif
#endif #endif
namespace {
template <typename T>
ALWAYS_INLINE std::atomic<T>& AsAtomic(T& t) {
return reinterpret_cast<std::atomic<T>&>(t);
}
} // namespace
namespace WTF { namespace WTF {
// This is for tracing inside collections that have special support for weak // This is for tracing inside collections that have special support for weak
...@@ -1793,7 +1786,7 @@ HashTable<Key, Value, Extractor, HashFunctions, Traits, KeyTraits, Allocator>:: ...@@ -1793,7 +1786,7 @@ HashTable<Key, Value, Extractor, HashFunctions, Traits, KeyTraits, Allocator>::
stats_->numRehashes.fetch_add(1, std::memory_order_relaxed); stats_->numRehashes.fetch_add(1, std::memory_order_relaxed);
#endif #endif
AsAtomic<ValueType*>(table_).store(new_table, std::memory_order_relaxed); AsAtomicPtr(&table_)->store(new_table, std::memory_order_relaxed);
Allocator::template BackingWriteBarrierForHashTable<HashTable>(new_table); Allocator::template BackingWriteBarrierForHashTable<HashTable>(new_table);
table_size_ = new_table_size; table_size_ = new_table_size;
...@@ -1977,8 +1970,8 @@ void HashTable<Key, ...@@ -1977,8 +1970,8 @@ void HashTable<Key,
// on the mutator thread, which is also the only one that writes to them, so // on the mutator thread, which is also the only one that writes to them, so
// there is *no* risk of data races when reading. // there is *no* risk of data races when reading.
Value* tmp_table = other.table_; Value* tmp_table = other.table_;
AsAtomic<Value*>(other.table_).store(table_, std::memory_order_relaxed); AsAtomicPtr(&other.table_)->store(table_, std::memory_order_relaxed);
AsAtomic<Value*>(table_).store(tmp_table, std::memory_order_relaxed); AsAtomicPtr(&table_)->store(tmp_table, std::memory_order_relaxed);
Allocator::template BackingWriteBarrierForHashTable<HashTable>(table_); Allocator::template BackingWriteBarrierForHashTable<HashTable>(table_);
Allocator::template BackingWriteBarrierForHashTable<HashTable>(other.table_); Allocator::template BackingWriteBarrierForHashTable<HashTable>(other.table_);
std::swap(table_size_, other.table_size_); std::swap(table_size_, other.table_size_);
...@@ -2109,8 +2102,7 @@ HashTable<Key, Value, Extractor, HashFunctions, Traits, KeyTraits, Allocator>:: ...@@ -2109,8 +2102,7 @@ HashTable<Key, Value, Extractor, HashFunctions, Traits, KeyTraits, Allocator>::
static_assert(WTF::IsWeak<ValueType>::value || static_assert(WTF::IsWeak<ValueType>::value ||
IsTraceableInCollectionTrait<Traits>::value, IsTraceableInCollectionTrait<Traits>::value,
"Value should not be traced"); "Value should not be traced");
ValueType* table = ValueType* table = AsAtomicPtr(&table_)->load(std::memory_order_relaxed);
AsAtomic<ValueType*>(table_).load(std::memory_order_relaxed);
if (!WTF::IsWeak<ValueType>::value) { if (!WTF::IsWeak<ValueType>::value) {
// Strong HashTable. // Strong HashTable.
Allocator::template TraceHashTableBackingStrongly<ValueType, HashTable>( Allocator::template TraceHashTableBackingStrongly<ValueType, HashTable>(
......
...@@ -22,7 +22,6 @@ ...@@ -22,7 +22,6 @@
#ifndef THIRD_PARTY_BLINK_RENDERER_PLATFORM_WTF_HASH_TRAITS_H_ #ifndef THIRD_PARTY_BLINK_RENDERER_PLATFORM_WTF_HASH_TRAITS_H_
#define THIRD_PARTY_BLINK_RENDERER_PLATFORM_WTF_HASH_TRAITS_H_ #define THIRD_PARTY_BLINK_RENDERER_PLATFORM_WTF_HASH_TRAITS_H_
#include <string.h> // For memset.
#include <limits> #include <limits>
#include <memory> #include <memory>
#include <type_traits> #include <type_traits>
...@@ -43,6 +42,27 @@ struct EnumOrGenericHashTraits; ...@@ -43,6 +42,27 @@ struct EnumOrGenericHashTraits;
template <typename T> template <typename T>
struct HashTraits; struct HashTraits;
namespace {

// NOTE(review): an anonymous namespace in a header gives this helper
// internal linkage in every including translation unit; style guides
// generally forbid unnamed namespaces in headers — consider a named
// internal namespace instead. Confirm against project convention.

// Zeroes |size| bytes starting at |slot|. When T is traceable
// (use_atomic_writes == true), the leading size_t-sized words are cleared
// with relaxed atomic stores so a concurrent Oilpan marker reading the
// entry does not race with the clearing; the tail bytes (fewer than
// sizeof(size_t)) cannot hold a pointer and are cleared with plain memset.
// NOTE(review): reinterpret_cast below assumes |slot| is size_t-aligned —
// presumably true for hash table backing-store entries; confirm.
template <typename T, bool use_atomic_writes = IsTraceable<T>::value>
void ClearMemoryAtomically(T* slot, size_t size) {
size_t* address = reinterpret_cast<size_t*>(slot);
// This method is called for clearing hash table entries that are removed. In
// case Oilpan concurrent marking is tracing the hash table at the same time,
// there might be a data race between the marker reading the entry and zeroing
// the entry. Using atomic reads here resolves any possible races.
// Note that sizeof(T) might not be a multiple of sizeof(size_t). The last
// sizeof(T)%sizeof(size_t) bytes don't require atomic write as it cannot hold
// a pointer (i.e. it will not be traceable).
if (use_atomic_writes) {
for (; size >= sizeof(size_t); size -= sizeof(size_t), ++address) {
WTF::AsAtomicPtr(address)->store(0, std::memory_order_relaxed);
}
}
// After the word-wise loop, at most sizeof(size_t)-1 bytes remain.
DCHECK(!use_atomic_writes || (size < sizeof(size_t)));
memset(address, 0, size);
}
} // namespace
template <typename T> template <typename T>
struct GenericHashTraitsBase<false, T> { struct GenericHashTraitsBase<false, T> {
// The emptyValueIsZero flag is used to optimize allocation of empty hash // The emptyValueIsZero flag is used to optimize allocation of empty hash
...@@ -374,8 +394,9 @@ struct PairHashTraits ...@@ -374,8 +394,9 @@ struct PairHashTraits
// at a later point, the same assumptions around memory zeroing must // at a later point, the same assumptions around memory zeroing must
// hold as they did at the initial allocation. Therefore we zero the // hold as they did at the initial allocation. Therefore we zero the
// value part of the slot here for GC collections. // value part of the slot here for GC collections.
if (zero_value) if (zero_value) {
memset(reinterpret_cast<void*>(&slot.second), 0, sizeof(slot.second)); ClearMemoryAtomically(&slot.second, sizeof(slot.second));
}
} }
static bool IsDeletedValue(const TraitType& value) { static bool IsDeletedValue(const TraitType& value) {
return FirstTraits::IsDeletedValue(value.first); return FirstTraits::IsDeletedValue(value.first);
...@@ -446,8 +467,9 @@ struct KeyValuePairHashTraits ...@@ -446,8 +467,9 @@ struct KeyValuePairHashTraits
static void ConstructDeletedValue(TraitType& slot, bool zero_value) { static void ConstructDeletedValue(TraitType& slot, bool zero_value) {
KeyTraits::ConstructDeletedValue(slot.key, zero_value); KeyTraits::ConstructDeletedValue(slot.key, zero_value);
// See similar code in this file for why we need to do this. // See similar code in this file for why we need to do this.
if (zero_value) if (zero_value) {
memset(reinterpret_cast<void*>(&slot.value), 0, sizeof(slot.value)); ClearMemoryAtomically(&slot.value, sizeof(slot.value));
}
} }
static bool IsDeletedValue(const TraitType& value) { static bool IsDeletedValue(const TraitType& value) {
return KeyTraits::IsDeletedValue(value.key); return KeyTraits::IsDeletedValue(value.key);
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment