Commit db2775f1 authored by Michael Lippautz, committed by Commit Bot

wtf,heap: Refactor ListHashSet split between PA and Oilpan

Instead of relying on a non-GCed type for Oilpan, use a different type
in the garbage-collected world and split the traits based on the
allocator.

All PA-relevant types are provided in list_hash_set.h, whereas the
garbage-collected specializations are provided by heap_allocator.h.

Switching to a regular GCed type for Oilpan allows for removing
special handling in various traits.

The following are removed:
- HeapAllocator::Malloc;
- Custom support for TraceListHashSetValue;
- Custom support for trace traits;

Bug: 1056170
Change-Id: I74e913a2b63e60e62018bb1303dd3345058b1ddf
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2547240
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Kentaro Hara <haraken@chromium.org>
Cr-Commit-Position: refs/heads/master@{#829218}
parent 83867f85
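To illustrate the idea behind the split, here is a minimal standalone sketch (all names, such as ListTraits, HeapAllocatorTag, PlainNode and GcNode, are invented for illustration and are not the Blink types): the traits select the node representation based on the allocator type, so the PartitionAlloc-backed world and the garbage-collected world can use different node types without special-casing.

// Standalone sketch of allocator-based trait dispatch; all names below are
// illustrative, not the actual Blink types.
#include <iostream>
#include <utility>

struct DefaultAllocator {};  // stand-in for the PartitionAlloc-backed world
struct HeapAllocatorTag {};  // stand-in for the Oilpan (garbage-collected) world

// Node used with the default allocator: owns its value and is destroyed by the
// container.
template <typename Value>
struct PlainNode {
  explicit PlainNode(Value v) : value(std::move(v)) {}
  Value value;
};

// Node used with the heap allocator: in Oilpan this would be a
// GarbageCollected class with Member<> links and a Trace() method.
template <typename Value>
struct GcNode {
  explicit GcNode(Value v) : value(std::move(v)) {}
  Value value;
};

// Primary traits: non-GC node by default.
template <typename Value, typename Allocator>
struct ListTraits {
  using Node = PlainNode<Value>;
  static constexpr bool kIsGarbageCollected = false;
};

// Specializing on the allocator swaps in the GC node.
template <typename Value>
struct ListTraits<Value, HeapAllocatorTag> {
  using Node = GcNode<Value>;
  static constexpr bool kIsGarbageCollected = true;
};

int main() {
  std::cout << ListTraits<int, DefaultAllocator>::kIsGarbageCollected << "\n";  // prints 0
  std::cout << ListTraits<int, HeapAllocatorTag>::kIsGarbageCollected << "\n";  // prints 1
}

In the actual change below, WTF::ListHashSetTraits plays the role of ListTraits and is specialized on blink::HeapListHashSetAllocator.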
@@ -31,6 +31,17 @@
namespace blink {
class HeapListHashSetAllocator;
template <typename ValueArg>
class HeapListHashSetNode;
namespace internal {
template <typename T>
constexpr bool IsMember = WTF::IsSubclassOfTemplate<T, Member>::value;
} // namespace internal
#define DISALLOW_IN_CONTAINER() \
public: \
using IsDisallowedInContainerMarker = int; \
@@ -108,12 +119,6 @@ class PLATFORM_EXPORT HeapAllocator {
MarkingVisitor::WriteBarrier(slot);
}
template <typename Return, typename Metadata>
static Return Malloc(size_t size, const char* type_name) {
return reinterpret_cast<Return>(
MarkAsConstructed(ThreadHeap::Allocate<Metadata>(size)));
}
static bool IsAllocationAllowed() {
return ThreadState::Current()->IsAllocationAllowed();
}
@@ -260,78 +265,6 @@ class PLATFORM_EXPORT HeapAllocator {
friend class WTF::HashMap;
};
template <typename VisitorDispatcher, typename Value>
static void TraceListHashSetValue(VisitorDispatcher visitor,
const Value& value) {
// We use the default hash traits for the value in the node, because
// ListHashSet does not let you specify any specific ones.
// We don't allow ListHashSet of WeakMember, so we set that one false
// (there's an assert elsewhere), but we have to specify some value for the
// strongify template argument, so we specify WTF::WeakPointersActWeak,
// arbitrarily.
TraceCollectionIfEnabled<WTF::kNoWeakHandling, Value,
WTF::HashTraits<Value>>::Trace(visitor, &value);
}
// The inline capacity is just a dummy template argument to match the off-heap
// allocator.
// This inherits from the static-only HeapAllocator trait class, but we do
// declare pointers to instances. These pointers are always null, and no
// objects are instantiated.
template <typename ValueArg, wtf_size_t inlineCapacity>
class HeapListHashSetAllocator : public HeapAllocator {
DISALLOW_NEW();
public:
using TableAllocator = HeapAllocator;
using Node = WTF::ListHashSetNode<ValueArg, HeapListHashSetAllocator>;
class AllocatorProvider {
DISALLOW_NEW();
public:
// For the heap allocation we don't need an actual allocator object, so
// we just return null.
HeapListHashSetAllocator* Get() const { return nullptr; }
// No allocator object is needed.
void CreateAllocatorIfNeeded() {}
void ReleaseAllocator() {}
// There is no allocator object in the HeapListHashSet (unlike in the
// regular ListHashSet) so there is nothing to swap.
void Swap(AllocatorProvider& other) {}
};
void Deallocate(void* dummy) {}
// This is not a static method even though it could be, because it needs to
// match the one that the (off-heap) ListHashSetAllocator has. The 'this'
// pointer will always be null.
void* AllocateNode() {
// Consider using a LinkedHashSet instead if this compile-time assert fails:
static_assert(!WTF::IsWeak<ValueArg>::value,
"weak pointers in a ListHashSet will result in null entries "
"in the set");
return Malloc<void*, Node>(
sizeof(Node),
nullptr /* Oilpan does not use the heap profiler at the moment. */);
}
template <typename VisitorDispatcher>
static void TraceValue(VisitorDispatcher visitor, const Node* node) {
TraceListHashSetValue(visitor, node->value_);
}
};
namespace internal {
template <typename T>
constexpr bool IsMember = WTF::IsSubclassOfTemplate<T, Member>::value;
} // namespace internal
template <typename KeyArg,
typename MappedArg,
typename HashArg = typename DefaultHash<KeyArg>::Hash,
@@ -452,6 +385,77 @@ class HeapLinkedHashSet
HeapLinkedHashSet() { CheckType(); }
};
} // namespace blink
namespace WTF {
template <typename Value, wtf_size_t inlineCapacity>
struct ListHashSetTraits<Value, inlineCapacity, blink::HeapListHashSetAllocator>
: public HashTraits<blink::Member<blink::HeapListHashSetNode<Value>>> {
using Allocator = blink::HeapListHashSetAllocator;
using Node = blink::HeapListHashSetNode<Value>;
static constexpr bool kCanTraceConcurrently =
HashTraits<Value>::kCanTraceConcurrently;
};
} // namespace WTF
namespace blink {
template <typename ValueArg>
class HeapListHashSetNode final
: public GarbageCollected<HeapListHashSetNode<ValueArg>> {
public:
using NodeAllocator = HeapListHashSetAllocator;
using PointerType = Member<HeapListHashSetNode>;
using Value = ValueArg;
template <typename U>
static HeapListHashSetNode* Create(NodeAllocator* allocator, U&& value) {
return MakeGarbageCollected<HeapListHashSetNode>(std::forward<U>(value));
}
template <typename U>
explicit HeapListHashSetNode(U&& value) : value_(std::forward<U>(value)) {
static_assert(std::is_trivially_destructible<Value>::value,
"Garbage collected types used in ListHashSet must be "
"trivially destructible");
}
void Destroy(NodeAllocator* allocator) {}
HeapListHashSetNode* Next() const { return next_; }
HeapListHashSetNode* Prev() const { return prev_; }
void Trace(Visitor* visitor) const {
visitor->Trace(prev_);
visitor->Trace(next_);
visitor->Trace(value_);
}
ValueArg value_;
PointerType prev_;
PointerType next_;
};
// Empty allocator as HeapListHashSetNode directly allocates using
// MakeGarbageCollected().
class HeapListHashSetAllocator {
DISALLOW_NEW();
public:
using TableAllocator = HeapAllocator;
static constexpr bool kIsGarbageCollected = true;
struct AllocatorProvider final {
void CreateAllocatorIfNeeded() {}
HeapListHashSetAllocator* Get() { return nullptr; }
void Swap(AllocatorProvider& other) {}
};
};
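As a hedged usage sketch (assuming the usual Blink Oilpan headers; Item and Owner are invented names, not part of this change), a HeapListHashSet holding Members is now traced like any other heap collection, since the underlying nodes are regular garbage-collected objects:

// Illustrative usage only, assuming the standard Blink GC machinery.
class Item final : public GarbageCollected<Item> {
 public:
  void Trace(Visitor* visitor) const {}
};

class Owner final : public GarbageCollected<Owner> {
 public:
  // The set is traced like any other heap collection; no custom trait
  // plumbing is required anymore.
  void Trace(Visitor* visitor) const { visitor->Trace(items_); }

 private:
  HeapListHashSet<Member<Item>> items_;
};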
template <typename T, typename U>
struct GCInfoTrait<HeapLinkedHashSet<T, U>>
: public GCInfoTrait<LinkedHashSet<T, U, HeapAllocator>> {};
@@ -460,11 +464,10 @@ template <typename ValueArg,
wtf_size_t inlineCapacity = 0, // The inlineCapacity is just a dummy
// to match ListHashSet (off-heap).
typename HashArg = typename DefaultHash<ValueArg>::Hash>
class HeapListHashSet
    : public ListHashSet<ValueArg,
                         inlineCapacity,
                         HashArg,
                         HeapListHashSetAllocator<ValueArg, inlineCapacity>> {
class HeapListHashSet : public ListHashSet<ValueArg,
                                           inlineCapacity,
                                           HashArg,
                                           HeapListHashSetAllocator> {
IS_GARBAGE_COLLECTED_CONTAINER_TYPE();
DISALLOW_NEW();
@@ -494,10 +497,7 @@ class HeapListHashSet
template <typename T, wtf_size_t inlineCapacity, typename U>
struct GCInfoTrait<HeapListHashSet<T, inlineCapacity, U>>
    : public GCInfoTrait<
          ListHashSet<T,
                      inlineCapacity,
                      U,
                      HeapListHashSetAllocator<T, inlineCapacity>>> {};
          ListHashSet<T, inlineCapacity, U, HeapListHashSetAllocator>> {};
template <typename Value,
typename HashFunctions = typename DefaultHash<Value>::Hash,
@@ -815,43 +815,6 @@ struct HashTraits<blink::UntracedMember<T>>
}
};
template <typename T, wtf_size_t inlineCapacity>
struct IsTraceable<
ListHashSetNode<T, blink::HeapListHashSetAllocator<T, inlineCapacity>>*> {
STATIC_ONLY(IsTraceable);
static_assert(sizeof(T), "T must be fully defined");
// All heap allocated node pointers need visiting to keep the nodes alive,
// regardless of whether they contain pointers to other heap allocated
// objects.
static const bool value = true;
};
template <typename T, wtf_size_t inlineCapacity>
struct IsGarbageCollectedType<
ListHashSetNode<T, blink::HeapListHashSetAllocator<T, inlineCapacity>>> {
static const bool value = true;
};
template <typename Set>
struct IsGarbageCollectedType<ListHashSetIterator<Set>> {
static const bool value = IsGarbageCollectedType<Set>::value;
};
template <typename Set>
struct IsGarbageCollectedType<ListHashSetConstIterator<Set>> {
static const bool value = IsGarbageCollectedType<Set>::value;
};
template <typename Set>
struct IsGarbageCollectedType<ListHashSetReverseIterator<Set>> {
static const bool value = IsGarbageCollectedType<Set>::value;
};
template <typename Set>
struct IsGarbageCollectedType<ListHashSetConstReverseIterator<Set>> {
static const bool value = IsGarbageCollectedType<Set>::value;
};
template <typename T, typename H> template <typename T, typename H>
struct HandleHashTraits : SimpleClassHashTraits<H> { struct HandleHashTraits : SimpleClassHashTraits<H> {
STATIC_ONLY(HandleHashTraits); STATIC_ONLY(HandleHashTraits);
...
@@ -22,10 +22,6 @@
namespace blink {
template <typename Table>
class HeapHashTableBacking;
template <typename ValueArg, wtf_size_t inlineCapacity>
class HeapListHashSetAllocator;
template <typename T>
struct TraceTrait;
template <typename T>
@@ -300,82 +296,6 @@ struct TraceInCollectionTrait<kWeakHandling, blink::WeakMember<T>, Traits> {
}
};
// This specialization of TraceInCollectionTrait is for the backing of
// HeapListHashSet. This is for the case that we find a reference to the
// backing from the stack. That probably means we have a GC while we are in a
// ListHashSet method since normal API use does not put pointers to the backing
// on the stack.
template <typename NodeContents,
size_t inlineCapacity,
typename T,
typename U,
typename V,
typename W,
typename X,
typename Y>
struct TraceInCollectionTrait<
kNoWeakHandling,
blink::HeapHashTableBacking<HashTable<
ListHashSetNode<NodeContents,
blink::HeapListHashSetAllocator<T, inlineCapacity>>*,
U,
V,
W,
X,
Y,
blink::HeapAllocator>>,
void> {
using Node =
ListHashSetNode<NodeContents,
blink::HeapListHashSetAllocator<T, inlineCapacity>>;
using Table = HashTable<Node*, U, V, W, X, Y, blink::HeapAllocator>;
static void Trace(blink::Visitor* visitor, const void* self) {
const Node* const* array = reinterpret_cast<const Node* const*>(self);
blink::HeapObjectHeader* header =
blink::HeapObjectHeader::FromPayload(self);
size_t length = header->PayloadSize() / sizeof(Node*);
const bool is_concurrent = visitor->IsConcurrent();
for (size_t i = 0; i < length; ++i) {
const Node* node;
if (is_concurrent) {
// If tracing concurrently, IsEmptyOrDeletedBucket can cause data
// races. Loading array[i] atomically prevents possible data races.
// array[i] is of type Node* so can directly loaded atomically.
node = AsAtomicPtr(&array[i])->load(std::memory_order_relaxed);
} else {
node = array[i];
}
if (!HashTableHelper<
const Node*, typename Table::ExtractorType,
typename Table::KeyTraitsType>::IsEmptyOrDeletedBucket(node)) {
visitor->Trace(node);
}
}
}
};
// ListHashSetNode pointers (a ListHashSet is implemented as a hash table of
// these pointers).
template <typename Value, size_t inlineCapacity, typename Traits>
struct TraceInCollectionTrait<
kNoWeakHandling,
ListHashSetNode<Value,
blink::HeapListHashSetAllocator<Value, inlineCapacity>>*,
Traits> {
using Node =
ListHashSetNode<Value,
blink::HeapListHashSetAllocator<Value, inlineCapacity>>;
static void Trace(blink::Visitor* visitor, const Node* node) {
static_assert(!IsWeak<Node>::value,
"ListHashSet does not support weakness");
static_assert(IsTraceableInCollectionTrait<Traits>::value,
"T should be traceable");
visitor->Trace(node);
}
};
} // namespace WTF
#endif // THIRD_PARTY_BLINK_RENDERER_PLATFORM_HEAP_IMPL_TRACE_TRAITS_H_
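A rough conceptual sketch of why no replacement is needed for the removed specializations (a simplification, not actual Blink code): the backing of a HeapListHashSet now stores Member<HeapListHashSetNode<Value>> slots, so tracing it reduces to ordinary strong Member tracing plus the node's own Trace() method.

// Conceptual sketch only; the empty/deleted-bucket checks and the
// concurrent-marking atomics from the removed code are omitted.
void TraceBackingSketch(
    blink::Visitor* visitor,
    const blink::Member<blink::HeapListHashSetNode<int>>* buckets,
    size_t length) {
  for (size_t i = 0; i < length; ++i)
    visitor->Trace(buckets[i]);  // standard strong Member tracing
}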