Commit db2775f1 authored by Michael Lippautz's avatar Michael Lippautz Committed by Commit Bot

wtf,heap: Refactor ListHashSet split between PA and Oilpan

Instead of relying on a non-GCed type for Oilpan, use a different type
in the garbage-collected world and split the traits based on the
allocator.

All PA relevant types are provided in list_hash_set.h, whereas the
garbage collected specializations are provided by heap_allocator.h.

Switching to a regular GCed type for Oilpan allows for removing
special handling in various traits.

The following are removed:
- HeapAllocator::Malloc;
- Custom support for TraceListHashSetValue;
- Custom support for trace traits;

Bug: 1056170
Change-Id: I74e913a2b63e60e62018bb1303dd3345058b1ddf
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2547240
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: default avatarOmer Katz <omerkatz@chromium.org>
Reviewed-by: default avatarKentaro Hara <haraken@chromium.org>
Cr-Commit-Position: refs/heads/master@{#829218}
parent 83867f85
...@@ -31,6 +31,17 @@ ...@@ -31,6 +31,17 @@
namespace blink { namespace blink {
class HeapListHashSetAllocator;
template <typename ValueArg>
class HeapListHashSetNode;
namespace internal {
template <typename T>
constexpr bool IsMember = WTF::IsSubclassOfTemplate<T, Member>::value;
} // namespace internal
#define DISALLOW_IN_CONTAINER() \ #define DISALLOW_IN_CONTAINER() \
public: \ public: \
using IsDisallowedInContainerMarker = int; \ using IsDisallowedInContainerMarker = int; \
...@@ -108,12 +119,6 @@ class PLATFORM_EXPORT HeapAllocator { ...@@ -108,12 +119,6 @@ class PLATFORM_EXPORT HeapAllocator {
MarkingVisitor::WriteBarrier(slot); MarkingVisitor::WriteBarrier(slot);
} }
template <typename Return, typename Metadata>
static Return Malloc(size_t size, const char* type_name) {
return reinterpret_cast<Return>(
MarkAsConstructed(ThreadHeap::Allocate<Metadata>(size)));
}
static bool IsAllocationAllowed() { static bool IsAllocationAllowed() {
return ThreadState::Current()->IsAllocationAllowed(); return ThreadState::Current()->IsAllocationAllowed();
} }
...@@ -260,78 +265,6 @@ class PLATFORM_EXPORT HeapAllocator { ...@@ -260,78 +265,6 @@ class PLATFORM_EXPORT HeapAllocator {
friend class WTF::HashMap; friend class WTF::HashMap;
}; };
template <typename VisitorDispatcher, typename Value>
static void TraceListHashSetValue(VisitorDispatcher visitor,
const Value& value) {
// We use the default hash traits for the value in the node, because
// ListHashSet does not let you specify any specific ones.
// We don't allow ListHashSet of WeakMember, so we set that one false
// (there's an assert elsewhere), but we have to specify some value for the
// strongify template argument, so we specify WTF::WeakPointersActWeak,
// arbitrarily.
TraceCollectionIfEnabled<WTF::kNoWeakHandling, Value,
WTF::HashTraits<Value>>::Trace(visitor, &value);
}
// The inline capacity is just a dummy template argument to match the off-heap
// allocator.
// This inherits from the static-only HeapAllocator trait class, but we do
// declare pointers to instances. These pointers are always null, and no
// objects are instantiated.
template <typename ValueArg, wtf_size_t inlineCapacity>
class HeapListHashSetAllocator : public HeapAllocator {
DISALLOW_NEW();
public:
using TableAllocator = HeapAllocator;
using Node = WTF::ListHashSetNode<ValueArg, HeapListHashSetAllocator>;
class AllocatorProvider {
DISALLOW_NEW();
public:
// For the heap allocation we don't need an actual allocator object, so
// we just return null.
HeapListHashSetAllocator* Get() const { return nullptr; }
// No allocator object is needed.
void CreateAllocatorIfNeeded() {}
void ReleaseAllocator() {}
// There is no allocator object in the HeapListHashSet (unlike in the
// regular ListHashSet) so there is nothing to swap.
void Swap(AllocatorProvider& other) {}
};
void Deallocate(void* dummy) {}
// This is not a static method even though it could be, because it needs to
// match the one that the (off-heap) ListHashSetAllocator has. The 'this'
// pointer will always be null.
void* AllocateNode() {
// Consider using a LinkedHashSet instead if this compile-time assert fails:
static_assert(!WTF::IsWeak<ValueArg>::value,
"weak pointers in a ListHashSet will result in null entries "
"in the set");
return Malloc<void*, Node>(
sizeof(Node),
nullptr /* Oilpan does not use the heap profiler at the moment. */);
}
template <typename VisitorDispatcher>
static void TraceValue(VisitorDispatcher visitor, const Node* node) {
TraceListHashSetValue(visitor, node->value_);
}
};
namespace internal {
template <typename T>
constexpr bool IsMember = WTF::IsSubclassOfTemplate<T, Member>::value;
} // namespace internal
template <typename KeyArg, template <typename KeyArg,
typename MappedArg, typename MappedArg,
typename HashArg = typename DefaultHash<KeyArg>::Hash, typename HashArg = typename DefaultHash<KeyArg>::Hash,
...@@ -452,6 +385,77 @@ class HeapLinkedHashSet ...@@ -452,6 +385,77 @@ class HeapLinkedHashSet
HeapLinkedHashSet() { CheckType(); } HeapLinkedHashSet() { CheckType(); }
}; };
} // namespace blink
namespace WTF {
template <typename Value, wtf_size_t inlineCapacity>
struct ListHashSetTraits<Value, inlineCapacity, blink::HeapListHashSetAllocator>
: public HashTraits<blink::Member<blink::HeapListHashSetNode<Value>>> {
using Allocator = blink::HeapListHashSetAllocator;
using Node = blink::HeapListHashSetNode<Value>;
static constexpr bool kCanTraceConcurrently =
HashTraits<Value>::kCanTraceConcurrently;
};
} // namespace WTF
namespace blink {
template <typename ValueArg>
class HeapListHashSetNode final
: public GarbageCollected<HeapListHashSetNode<ValueArg>> {
public:
using NodeAllocator = HeapListHashSetAllocator;
using PointerType = Member<HeapListHashSetNode>;
using Value = ValueArg;
template <typename U>
static HeapListHashSetNode* Create(NodeAllocator* allocator, U&& value) {
return MakeGarbageCollected<HeapListHashSetNode>(std::forward<U>(value));
}
template <typename U>
explicit HeapListHashSetNode(U&& value) : value_(std::forward<U>(value)) {
static_assert(std::is_trivially_destructible<Value>::value,
"Garbage collected types used in ListHashSet must be "
"trivially destructible");
}
void Destroy(NodeAllocator* allocator) {}
HeapListHashSetNode* Next() const { return next_; }
HeapListHashSetNode* Prev() const { return prev_; }
void Trace(Visitor* visitor) const {
visitor->Trace(prev_);
visitor->Trace(next_);
visitor->Trace(value_);
}
ValueArg value_;
PointerType prev_;
PointerType next_;
};
// Empty allocator as HeapListHashSetNode directly allocates using
// MakeGarbageCollected().
class HeapListHashSetAllocator {
DISALLOW_NEW();
public:
using TableAllocator = HeapAllocator;
static constexpr bool kIsGarbageCollected = true;
struct AllocatorProvider final {
void CreateAllocatorIfNeeded() {}
HeapListHashSetAllocator* Get() { return nullptr; }
void Swap(AllocatorProvider& other) {}
};
};
template <typename T, typename U> template <typename T, typename U>
struct GCInfoTrait<HeapLinkedHashSet<T, U>> struct GCInfoTrait<HeapLinkedHashSet<T, U>>
: public GCInfoTrait<LinkedHashSet<T, U, HeapAllocator>> {}; : public GCInfoTrait<LinkedHashSet<T, U, HeapAllocator>> {};
...@@ -460,11 +464,10 @@ template <typename ValueArg, ...@@ -460,11 +464,10 @@ template <typename ValueArg,
wtf_size_t inlineCapacity = 0, // The inlineCapacity is just a dummy wtf_size_t inlineCapacity = 0, // The inlineCapacity is just a dummy
// to match ListHashSet (off-heap). // to match ListHashSet (off-heap).
typename HashArg = typename DefaultHash<ValueArg>::Hash> typename HashArg = typename DefaultHash<ValueArg>::Hash>
class HeapListHashSet class HeapListHashSet : public ListHashSet<ValueArg,
: public ListHashSet<ValueArg,
inlineCapacity, inlineCapacity,
HashArg, HashArg,
HeapListHashSetAllocator<ValueArg, inlineCapacity>> { HeapListHashSetAllocator> {
IS_GARBAGE_COLLECTED_CONTAINER_TYPE(); IS_GARBAGE_COLLECTED_CONTAINER_TYPE();
DISALLOW_NEW(); DISALLOW_NEW();
...@@ -494,10 +497,7 @@ class HeapListHashSet ...@@ -494,10 +497,7 @@ class HeapListHashSet
template <typename T, wtf_size_t inlineCapacity, typename U> template <typename T, wtf_size_t inlineCapacity, typename U>
struct GCInfoTrait<HeapListHashSet<T, inlineCapacity, U>> struct GCInfoTrait<HeapListHashSet<T, inlineCapacity, U>>
: public GCInfoTrait< : public GCInfoTrait<
ListHashSet<T, ListHashSet<T, inlineCapacity, U, HeapListHashSetAllocator>> {};
inlineCapacity,
U,
HeapListHashSetAllocator<T, inlineCapacity>>> {};
template <typename Value, template <typename Value,
typename HashFunctions = typename DefaultHash<Value>::Hash, typename HashFunctions = typename DefaultHash<Value>::Hash,
...@@ -815,43 +815,6 @@ struct HashTraits<blink::UntracedMember<T>> ...@@ -815,43 +815,6 @@ struct HashTraits<blink::UntracedMember<T>>
} }
}; };
template <typename T, wtf_size_t inlineCapacity>
struct IsTraceable<
ListHashSetNode<T, blink::HeapListHashSetAllocator<T, inlineCapacity>>*> {
STATIC_ONLY(IsTraceable);
static_assert(sizeof(T), "T must be fully defined");
// All heap allocated node pointers need visiting to keep the nodes alive,
// regardless of whether they contain pointers to other heap allocated
// objects.
static const bool value = true;
};
template <typename T, wtf_size_t inlineCapacity>
struct IsGarbageCollectedType<
ListHashSetNode<T, blink::HeapListHashSetAllocator<T, inlineCapacity>>> {
static const bool value = true;
};
template <typename Set>
struct IsGarbageCollectedType<ListHashSetIterator<Set>> {
static const bool value = IsGarbageCollectedType<Set>::value;
};
template <typename Set>
struct IsGarbageCollectedType<ListHashSetConstIterator<Set>> {
static const bool value = IsGarbageCollectedType<Set>::value;
};
template <typename Set>
struct IsGarbageCollectedType<ListHashSetReverseIterator<Set>> {
static const bool value = IsGarbageCollectedType<Set>::value;
};
template <typename Set>
struct IsGarbageCollectedType<ListHashSetConstReverseIterator<Set>> {
static const bool value = IsGarbageCollectedType<Set>::value;
};
template <typename T, typename H> template <typename T, typename H>
struct HandleHashTraits : SimpleClassHashTraits<H> { struct HandleHashTraits : SimpleClassHashTraits<H> {
STATIC_ONLY(HandleHashTraits); STATIC_ONLY(HandleHashTraits);
......
...@@ -22,10 +22,6 @@ ...@@ -22,10 +22,6 @@
namespace blink { namespace blink {
template <typename Table>
class HeapHashTableBacking;
template <typename ValueArg, wtf_size_t inlineCapacity>
class HeapListHashSetAllocator;
template <typename T> template <typename T>
struct TraceTrait; struct TraceTrait;
template <typename T> template <typename T>
...@@ -300,82 +296,6 @@ struct TraceInCollectionTrait<kWeakHandling, blink::WeakMember<T>, Traits> { ...@@ -300,82 +296,6 @@ struct TraceInCollectionTrait<kWeakHandling, blink::WeakMember<T>, Traits> {
} }
}; };
// This specialization of TraceInCollectionTrait is for the backing of
// HeapListHashSet. This is for the case that we find a reference to the
// backing from the stack. That probably means we have a GC while we are in a
// ListHashSet method since normal API use does not put pointers to the backing
// on the stack.
template <typename NodeContents,
size_t inlineCapacity,
typename T,
typename U,
typename V,
typename W,
typename X,
typename Y>
struct TraceInCollectionTrait<
kNoWeakHandling,
blink::HeapHashTableBacking<HashTable<
ListHashSetNode<NodeContents,
blink::HeapListHashSetAllocator<T, inlineCapacity>>*,
U,
V,
W,
X,
Y,
blink::HeapAllocator>>,
void> {
using Node =
ListHashSetNode<NodeContents,
blink::HeapListHashSetAllocator<T, inlineCapacity>>;
using Table = HashTable<Node*, U, V, W, X, Y, blink::HeapAllocator>;
static void Trace(blink::Visitor* visitor, const void* self) {
const Node* const* array = reinterpret_cast<const Node* const*>(self);
blink::HeapObjectHeader* header =
blink::HeapObjectHeader::FromPayload(self);
size_t length = header->PayloadSize() / sizeof(Node*);
const bool is_concurrent = visitor->IsConcurrent();
for (size_t i = 0; i < length; ++i) {
const Node* node;
if (is_concurrent) {
// If tracing concurrently, IsEmptyOrDeletedBucket can cause data
// races. Loading array[i] atomically prevents possible data races.
// array[i] is of type Node* so can directly loaded atomically.
node = AsAtomicPtr(&array[i])->load(std::memory_order_relaxed);
} else {
node = array[i];
}
if (!HashTableHelper<
const Node*, typename Table::ExtractorType,
typename Table::KeyTraitsType>::IsEmptyOrDeletedBucket(node)) {
visitor->Trace(node);
}
}
}
};
// ListHashSetNode pointers (a ListHashSet is implemented as a hash table of
// these pointers).
template <typename Value, size_t inlineCapacity, typename Traits>
struct TraceInCollectionTrait<
kNoWeakHandling,
ListHashSetNode<Value,
blink::HeapListHashSetAllocator<Value, inlineCapacity>>*,
Traits> {
using Node =
ListHashSetNode<Value,
blink::HeapListHashSetAllocator<Value, inlineCapacity>>;
static void Trace(blink::Visitor* visitor, const Node* node) {
static_assert(!IsWeak<Node>::value,
"ListHashSet does not support weakness");
static_assert(IsTraceableInCollectionTrait<Traits>::value,
"T should be traceable");
visitor->Trace(node);
}
};
} // namespace WTF } // namespace WTF
#endif // THIRD_PARTY_BLINK_RENDERER_PLATFORM_HEAP_IMPL_TRACE_TRAITS_H_ #endif // THIRD_PARTY_BLINK_RENDERER_PLATFORM_HEAP_IMPL_TRACE_TRAITS_H_
...@@ -59,30 +59,167 @@ class ListHashSetReverseIterator; ...@@ -59,30 +59,167 @@ class ListHashSetReverseIterator;
template <typename Set> template <typename Set>
class ListHashSetConstReverseIterator; class ListHashSetConstReverseIterator;
template <typename ValueArg>
class ListHashSetNodeBase;
template <typename ValueArg, typename Allocator> template <typename ValueArg, typename Allocator>
class ListHashSetNode; class MallocedListHashSetNode;
template <typename ValueArg, size_t inlineCapacity> template <typename ValueArg, wtf_size_t inlineCapacity>
struct ListHashSetAllocator; struct MallocedListHashSetAllocator;
template <typename HashArg> template <typename HashArg>
struct ListHashSetNodeHashFunctions; struct ListHashSetNodeHashFunctions;
template <typename HashArg> template <typename HashArg, typename NodeArg>
struct ListHashSetTranslator; struct ListHashSetTranslator;
template <typename Value, wtf_size_t inlineCapacity, typename Allocator>
struct ListHashSetTraits;
template <typename Value, wtf_size_t inlineCapacity>
struct ListHashSetTraits<Value,
inlineCapacity,
MallocedListHashSetAllocator<Value, inlineCapacity>>
: public HashTraits<MallocedListHashSetNode<
Value,
MallocedListHashSetAllocator<Value, inlineCapacity>>*> {
using Allocator = MallocedListHashSetAllocator<Value, inlineCapacity>;
using Node = MallocedListHashSetNode<Value, Allocator>;
};
template <typename ValueArg, typename AllocatorArg>
class MallocedListHashSetNode {
DISALLOW_NEW();
public:
using NodeAllocator = AllocatorArg;
using PointerType = MallocedListHashSetNode*;
using Value = ValueArg;
template <typename U>
static MallocedListHashSetNode* Create(NodeAllocator* allocator, U&& value) {
return new (allocator->AllocateNode())
MallocedListHashSetNode(std::forward<U>(value));
}
template <typename U>
explicit MallocedListHashSetNode(U&& value)
: value_(std::forward<U>(value)) {}
void Destroy(NodeAllocator* allocator) {
this->~MallocedListHashSetNode();
allocator->Deallocate(this);
}
MallocedListHashSetNode* Next() const { return next_; }
MallocedListHashSetNode* Prev() const { return prev_; }
ValueArg value_;
PointerType prev_ = nullptr;
PointerType next_ = nullptr;
#if DCHECK_IS_ON()
bool is_allocated_ = true;
#endif
};
// This allocator is only used for non-Heap ListHashSets.
template <typename ValueArg, wtf_size_t inlineCapacity>
struct MallocedListHashSetAllocator : public PartitionAllocator {
using TableAllocator = PartitionAllocator;
using Node = MallocedListHashSetNode<ValueArg, MallocedListHashSetAllocator>;
class AllocatorProvider {
DISALLOW_NEW();
public:
AllocatorProvider() = default;
void CreateAllocatorIfNeeded() {
if (!allocator_)
allocator_ = new MallocedListHashSetAllocator;
}
void ReleaseAllocator() {
delete allocator_;
allocator_ = nullptr;
}
void Swap(AllocatorProvider& other) {
std::swap(allocator_, other.allocator_);
}
MallocedListHashSetAllocator* Get() const {
DCHECK(allocator_);
return allocator_;
}
private:
// Not using std::unique_ptr as this pointer should be deleted at
// releaseAllocator() method rather than at destructor.
MallocedListHashSetAllocator* allocator_ = nullptr;
};
MallocedListHashSetAllocator() : free_list_(Pool()) {
memset(pool_, 0, sizeof(pool_));
}
Node* AllocateNode() {
Node* result = free_list_;
if (!result) {
return static_cast<Node*>(WTF::Partitions::FastMalloc(
sizeof(Node), WTF_HEAP_PROFILER_TYPE_NAME(Node)));
}
#if DCHECK_IS_ON()
DCHECK(!result->is_allocated_);
#endif
template <typename Value, typename Allocator> Node* next = result->Next();
struct ListHashSetTraits #if DCHECK_IS_ON()
: public HashTraits<ListHashSetNode<Value, Allocator>*> { DCHECK(!next || !next->is_allocated_);
using Node = ListHashSetNode<Value, Allocator>; #endif
if (!next && !is_done_with_initial_free_list_) {
next = result + 1;
if (next == PastPool()) {
is_done_with_initial_free_list_ = true;
next = nullptr;
} else {
DCHECK(InPool(next));
#if DCHECK_IS_ON()
DCHECK(!next->is_allocated_);
#endif
}
}
free_list_ = next;
return result;
}
void Deallocate(Node* node) {
if (InPool(node)) {
#if DCHECK_IS_ON()
node->is_allocated_ = false;
#endif
node->next_ = free_list_;
free_list_ = node;
return;
}
static void ConstructDeletedValue(Node*& slot, bool) { WTF::Partitions::FastFree(node);
AsAtomicPtr(&slot)->store(reinterpret_cast<Node*>(-1),
std::memory_order_relaxed);
} }
static constexpr bool kCanTraceConcurrently = bool InPool(Node* node) { return node >= Pool() && node < PastPool(); }
HashTraits<Value>::kCanTraceConcurrently;
private:
Node* Pool() { return reinterpret_cast_ptr<Node*>(pool_); }
Node* PastPool() { return Pool() + kPoolSize; }
Node* free_list_;
bool is_done_with_initial_free_list_ = false;
#if defined(MEMORY_TOOL_REPLACES_ALLOCATOR)
// The allocation pool for nodes is one big chunk that ASAN has no insight
// into, so it can cloak errors. Make it as small as possible to force nodes
// to be allocated individually where ASAN can see them.
static const size_t kPoolSize = 1;
#else
static const size_t kPoolSize = inlineCapacity;
#endif
alignas(Node) char pool_[sizeof(Node) * kPoolSize];
}; };
// Note that for a ListHashSet you cannot specify the HashTraits as a template // Note that for a ListHashSet you cannot specify the HashTraits as a template
...@@ -91,37 +228,37 @@ template <typename ValueArg, ...@@ -91,37 +228,37 @@ template <typename ValueArg,
size_t inlineCapacity = 256, size_t inlineCapacity = 256,
typename HashArg = typename DefaultHash<ValueArg>::Hash, typename HashArg = typename DefaultHash<ValueArg>::Hash,
typename AllocatorArg = typename AllocatorArg =
ListHashSetAllocator<ValueArg, inlineCapacity>> MallocedListHashSetAllocator<ValueArg, inlineCapacity>>
class ListHashSet class ListHashSet
: public ConditionalDestructor< : public ConditionalDestructor<
ListHashSet<ValueArg, inlineCapacity, HashArg, AllocatorArg>, ListHashSet<ValueArg, inlineCapacity, HashArg, AllocatorArg>,
AllocatorArg::kIsGarbageCollected> { AllocatorArg::kIsGarbageCollected> {
typedef AllocatorArg Allocator; using Allocator = AllocatorArg;
USE_ALLOCATOR(ListHashSet, Allocator); USE_ALLOCATOR(ListHashSet, Allocator);
typedef ListHashSetNode<ValueArg, Allocator> Node; using NodeTraits = ListHashSetTraits<ValueArg, inlineCapacity, Allocator>;
typedef ListHashSetTraits<ValueArg, Allocator> NodeTraits; using Node = typename NodeTraits::Node;
typedef ListHashSetNodeHashFunctions<HashArg> NodeHash; using NodeHash = ListHashSetNodeHashFunctions<HashArg>;
typedef ListHashSetTranslator<HashArg> BaseTranslator; using BaseTranslator = ListHashSetTranslator<HashArg, Node>;
typedef HashTable<Node*, typedef HashTable<typename Node::PointerType,
Node*, typename Node::PointerType,
IdentityExtractor, IdentityExtractor,
NodeHash, NodeHash,
NodeTraits, NodeTraits,
NodeTraits, NodeTraits,
typename Allocator::TableAllocator> typename Allocator::TableAllocator>
ImplType; ImplType;
typedef HashTableIterator<Node*, typedef HashTableIterator<typename Node::PointerType,
Node*, typename Node::PointerType,
IdentityExtractor, IdentityExtractor,
NodeHash, NodeHash,
NodeTraits, NodeTraits,
NodeTraits, NodeTraits,
typename Allocator::TableAllocator> typename Allocator::TableAllocator>
ImplTypeIterator; ImplTypeIterator;
typedef HashTableConstIterator<Node*, typedef HashTableConstIterator<typename Node::PointerType,
Node*, typename Node::PointerType,
IdentityExtractor, IdentityExtractor,
NodeHash, NodeHash,
NodeTraits, NodeTraits,
...@@ -254,7 +391,7 @@ class ListHashSet ...@@ -254,7 +391,7 @@ class ListHashSet
void PrependNode(Node*); void PrependNode(Node*);
void InsertNodeBefore(Node* before_node, Node* new_node); void InsertNodeBefore(Node* before_node, Node* new_node);
void DeleteAllNodes(); void DeleteAllNodes();
Allocator* GetAllocator() const { return allocator_provider_.Get(); } Allocator* GetAllocator() { return allocator_provider_.Get(); }
void CreateAllocatorIfNeeded() { void CreateAllocatorIfNeeded() {
allocator_provider_.CreateAllocatorIfNeeded(); allocator_provider_.CreateAllocatorIfNeeded();
} }
...@@ -271,190 +408,11 @@ class ListHashSet ...@@ -271,190 +408,11 @@ class ListHashSet
} }
ImplType impl_; ImplType impl_;
Node* head_; typename Node::PointerType head_;
Node* tail_; typename Node::PointerType tail_;
typename Allocator::AllocatorProvider allocator_provider_; typename Allocator::AllocatorProvider allocator_provider_;
}; };
// ListHashSetNode has this base class to hold the members because the MSVC
// compiler otherwise gets into circular template dependencies when trying to do
// sizeof on a node.
template <typename ValueArg>
class ListHashSetNodeBase {
DISALLOW_NEW();
protected:
template <typename U>
explicit ListHashSetNodeBase(U&& value) : value_(std::forward<U>(value)) {}
public:
ValueArg value_;
ListHashSetNodeBase* prev_ = nullptr;
ListHashSetNodeBase* next_ = nullptr;
#if DCHECK_IS_ON()
bool is_allocated_ = true;
#endif
};
// This allocator is only used for non-Heap ListHashSets.
template <typename ValueArg, size_t inlineCapacity>
struct ListHashSetAllocator : public PartitionAllocator {
typedef PartitionAllocator TableAllocator;
typedef ListHashSetNode<ValueArg, ListHashSetAllocator> Node;
typedef ListHashSetNodeBase<ValueArg> NodeBase;
class AllocatorProvider {
DISALLOW_NEW();
public:
AllocatorProvider() : allocator_(nullptr) {}
void CreateAllocatorIfNeeded() {
if (!allocator_)
allocator_ = new ListHashSetAllocator;
}
void ReleaseAllocator() {
delete allocator_;
allocator_ = nullptr;
}
void Swap(AllocatorProvider& other) {
std::swap(allocator_, other.allocator_);
}
ListHashSetAllocator* Get() const {
DCHECK(allocator_);
return allocator_;
}
private:
// Not using std::unique_ptr as this pointer should be deleted at
// releaseAllocator() method rather than at destructor.
ListHashSetAllocator* allocator_;
};
ListHashSetAllocator()
: free_list_(Pool()), is_done_with_initial_free_list_(false) {
memset(pool_, 0, sizeof(pool_));
}
Node* AllocateNode() {
Node* result = free_list_;
if (!result)
return static_cast<Node*>(WTF::Partitions::FastMalloc(
sizeof(NodeBase), WTF_HEAP_PROFILER_TYPE_NAME(Node)));
#if DCHECK_IS_ON()
DCHECK(!result->is_allocated_);
#endif
Node* next = result->Next();
#if DCHECK_IS_ON()
DCHECK(!next || !next->is_allocated_);
#endif
if (!next && !is_done_with_initial_free_list_) {
next = result + 1;
if (next == PastPool()) {
is_done_with_initial_free_list_ = true;
next = nullptr;
} else {
DCHECK(InPool(next));
#if DCHECK_IS_ON()
DCHECK(!next->is_allocated_);
#endif
}
}
free_list_ = next;
return result;
}
void Deallocate(Node* node) {
if (InPool(node)) {
#if DCHECK_IS_ON()
node->is_allocated_ = false;
#endif
node->next_ = free_list_;
free_list_ = node;
return;
}
WTF::Partitions::FastFree(node);
}
bool InPool(Node* node) { return node >= Pool() && node < PastPool(); }
template <typename VisitorDispatcher>
static void TraceValue(VisitorDispatcher, const Node*) {}
private:
Node* Pool() { return reinterpret_cast_ptr<Node*>(pool_); }
Node* PastPool() { return Pool() + kPoolSize; }
Node* free_list_;
bool is_done_with_initial_free_list_;
#if defined(MEMORY_TOOL_REPLACES_ALLOCATOR)
// The allocation pool for nodes is one big chunk that ASAN has no insight
// into, so it can cloak errors. Make it as small as possible to force nodes
// to be allocated individually where ASAN can see them.
static const size_t kPoolSize = 1;
#else
static const size_t kPoolSize = inlineCapacity;
#endif
alignas(NodeBase) char pool_[sizeof(NodeBase) * kPoolSize];
};
template <typename ValueArg, typename AllocatorArg>
class ListHashSetNode : public ListHashSetNodeBase<ValueArg> {
public:
typedef AllocatorArg NodeAllocator;
typedef ValueArg Value;
template <typename U>
explicit ListHashSetNode(U&& value)
: ListHashSetNodeBase<ValueArg>(std::forward<U>(value)) {
static_assert(!NodeAllocator::kIsGarbageCollected ||
std::is_trivially_destructible<Value>::value,
"Garbage collected types used in ListHashSet should be "
"trivially destructible");
}
void* operator new(size_t, NodeAllocator* allocator) {
static_assert(
sizeof(ListHashSetNode) == sizeof(ListHashSetNodeBase<ValueArg>),
"please add any fields to the base");
return allocator->AllocateNode();
}
void Destroy(NodeAllocator* allocator) {
if (NodeAllocator::kIsGarbageCollected)
return;
this->~ListHashSetNode();
allocator->Deallocate(this);
}
template <typename VisitorDispatcher, typename A = NodeAllocator>
std::enable_if_t<A::kIsGarbageCollected> Trace(
VisitorDispatcher visitor) const {
NodeAllocator::TraceValue(visitor, this);
}
ListHashSetNode* Next() const {
return reinterpret_cast<ListHashSetNode*>(this->next_);
}
ListHashSetNode* Prev() const {
return reinterpret_cast<ListHashSetNode*>(this->prev_);
}
// Don't add fields here, the ListHashSetNodeBase and this should have the
// same size.
static constexpr ListHashSetNode* UnlinkedNodePointer() {
return reinterpret_cast<ListHashSetNode*>(-1);
}
};
template <typename HashArg> template <typename HashArg>
struct ListHashSetNodeHashFunctions { struct ListHashSetNodeHashFunctions {
STATIC_ONLY(ListHashSetNodeHashFunctions); STATIC_ONLY(ListHashSetNodeHashFunctions);
...@@ -462,8 +420,8 @@ struct ListHashSetNodeHashFunctions { ...@@ -462,8 +420,8 @@ struct ListHashSetNodeHashFunctions {
static unsigned GetHash(const T& key) { static unsigned GetHash(const T& key) {
return HashArg::GetHash(key->value_); return HashArg::GetHash(key->value_);
} }
template <typename T> template <typename U, typename V>
static bool Equal(const T& a, const T& b) { static bool Equal(const U& a, const V& b) {
return HashArg::Equal(a->value_, b->value_); return HashArg::Equal(a->value_, b->value_);
} }
static const bool safe_to_compare_to_empty_or_deleted = false; static const bool safe_to_compare_to_empty_or_deleted = false;
...@@ -513,11 +471,6 @@ class ListHashSetIterator { ...@@ -513,11 +471,6 @@ class ListHashSetIterator {
operator const_iterator() const { return iterator_; } operator const_iterator() const { return iterator_; }
template <typename VisitorDispatcher>
void Trace(VisitorDispatcher visitor) const {
iterator_.Trace(visitor);
}
private: private:
Node* GetNode() { return iterator_.GetNode(); } Node* GetNode() { return iterator_.GetNode(); }
...@@ -575,12 +528,6 @@ class ListHashSetConstIterator { ...@@ -575,12 +528,6 @@ class ListHashSetConstIterator {
return position_ != other.position_; return position_ != other.position_;
} }
template <typename VisitorDispatcher>
void Trace(VisitorDispatcher visitor) const {
visitor->Trace(*set_);
visitor->Trace(position_);
}
private: private:
Node* GetNode() { return position_; } Node* GetNode() { return position_; }
...@@ -635,11 +582,6 @@ class ListHashSetReverseIterator { ...@@ -635,11 +582,6 @@ class ListHashSetReverseIterator {
operator const_reverse_iterator() const { return iterator_; } operator const_reverse_iterator() const { return iterator_; }
template <typename VisitorDispatcher>
void Trace(VisitorDispatcher visitor) const {
iterator_.trace(visitor);
}
private: private:
Node* GetNode() { return iterator_.node(); } Node* GetNode() { return iterator_.node(); }
...@@ -697,12 +639,6 @@ class ListHashSetConstReverseIterator { ...@@ -697,12 +639,6 @@ class ListHashSetConstReverseIterator {
return position_ != other.position_; return position_ != other.position_;
} }
template <typename VisitorDispatcher>
void Trace(VisitorDispatcher visitor) const {
visitor->Trace(*set_);
visitor->Trace(position_);
}
private: private:
Node* GetNode() { return position_; } Node* GetNode() { return position_; }
...@@ -713,9 +649,12 @@ class ListHashSetConstReverseIterator { ...@@ -713,9 +649,12 @@ class ListHashSetConstReverseIterator {
friend class ListHashSet; friend class ListHashSet;
}; };
template <typename HashFunctions> template <typename HashFunctions, typename NodeArg>
struct ListHashSetTranslator { struct ListHashSetTranslator {
STATIC_ONLY(ListHashSetTranslator); STATIC_ONLY(ListHashSetTranslator);
using Node = NodeArg;
template <typename T> template <typename T>
static unsigned GetHash(const T& key) { static unsigned GetHash(const T& key) {
return HashFunctions::GetHash(key); return HashFunctions::GetHash(key);
...@@ -724,11 +663,15 @@ struct ListHashSetTranslator { ...@@ -724,11 +663,15 @@ struct ListHashSetTranslator {
static bool Equal(const T& a, const U& b) { static bool Equal(const T& a, const U& b) {
return HashFunctions::Equal(a->value_, b); return HashFunctions::Equal(a->value_, b);
} }
template <typename T, typename U, typename V> template <typename Key, typename Allocator>
static void Translate(T*& location, U&& key, const V& allocator) { static void Translate(typename Node::PointerType& location,
AsAtomicPtr(&location)->store(new (const_cast<V*>(&allocator)) Key&& key,
T(std::forward<U>(key)), Allocator& allocator) {
std::memory_order_relaxed); // PointerType is
// - Member<Node> for the Heap version, supporting concurrency using
// atomics;
// - Node* for the PA version;
location = Node::Create(&allocator, std::forward<Key>(key));
} }
}; };
...@@ -1095,10 +1038,9 @@ ListHashSet<T, inlineCapacity, U, V>::Trace(VisitorDispatcher visitor) const { ...@@ -1095,10 +1038,9 @@ ListHashSet<T, inlineCapacity, U, V>::Trace(VisitorDispatcher visitor) const {
static_assert(!IsWeak<T>::value, static_assert(!IsWeak<T>::value,
"HeapListHashSet does not support weakness, consider using " "HeapListHashSet does not support weakness, consider using "
"HeapLinkedHashSet instead."); "HeapLinkedHashSet instead.");
// This marks all the nodes and their contents live that can be accessed
// through the HashTable. That includes m_head and m_tail so we do not have
// to explicitly trace them here.
impl_.Trace(visitor); impl_.Trace(visitor);
visitor->Trace(head_);
visitor->Trace(tail_);
} }
} // namespace WTF } // namespace WTF
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment