Commit 47af09d1 authored by Michael Lippautz's avatar Michael Lippautz Committed by Chromium LUCI CQ

heap: Split off collection backings for the Oilpan library

Backings require re-implementing traits. Split the files off instead
of relying on the macro to split the implementation of each trait.

Bug: 1056170
Change-Id: I43a005119986ec51c05a34f76dca25614a6f1196
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2642340
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: default avatarOmer Katz <omerkatz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#846189}
parent a6e94cb4
......@@ -99,6 +99,8 @@ blink_platform_sources("heap") {
sources += [
"v8_wrapper/blink_gc.h",
"v8_wrapper/blink_gc_memory_dump_provider.h",
"v8_wrapper/collection_support/heap_hash_table_backing.h",
"v8_wrapper/collection_support/heap_vector_backing.h",
"v8_wrapper/disallow_new_wrapper.h",
"v8_wrapper/garbage_collected.h",
"v8_wrapper/gc_task_runner.h",
......@@ -126,6 +128,8 @@ blink_platform_sources("heap") {
"impl/blink_gc.h",
"impl/blink_gc_memory_dump_provider.cc",
"impl/blink_gc_memory_dump_provider.h",
"impl/collection_support/heap_hash_table_backing.h",
"impl/collection_support/heap_vector_backing.h",
"impl/finalizer_traits.h",
"impl/garbage_collected.h",
"impl/gc_info.cc",
......
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef THIRD_PARTY_BLINK_RENDERER_PLATFORM_HEAP_COLLECTION_SUPPORT_HEAP_VECTOR_BACKING_H_
#define THIRD_PARTY_BLINK_RENDERER_PLATFORM_HEAP_COLLECTION_SUPPORT_HEAP_VECTOR_BACKING_H_
#include "base/check_op.h"
#include "third_party/blink/renderer/platform/heap/heap.h"
#include "third_party/blink/renderer/platform/heap/impl/finalizer_traits.h"
#include "third_party/blink/renderer/platform/heap/impl/gc_info.h"
#include "third_party/blink/renderer/platform/heap/impl/threading_traits.h"
#include "third_party/blink/renderer/platform/heap/impl/trace_traits.h"
#include "third_party/blink/renderer/platform/heap/thread_state.h"
#include "third_party/blink/renderer/platform/wtf/conditional_destructor.h"
#include "third_party/blink/renderer/platform/wtf/vector.h"
#include "third_party/blink/renderer/platform/wtf/buildflags.h"
namespace blink {
// Backing store for HeapVector/HeapDeque. The backing is itself a
// garbage-collected object. Destruction is conditional: when
// Traits::kNeedsDestruction is false, WTF::ConditionalDestructor suppresses
// the destructor entirely so trivially-destructible payloads need no
// finalizer.
template <typename T, typename Traits = WTF::VectorTraits<T>>
class HeapVectorBacking final
    : public GarbageCollected<HeapVectorBacking<T, Traits>>,
      public WTF::ConditionalDestructor<HeapVectorBacking<T, Traits>,
                                        !Traits::kNeedsDestruction> {
 public:
  // Allocates |size| bytes on the heap's vector arena; see definition below.
  template <typename Backing>
  static void* AllocateObject(size_t);

  // Conditionally invoked via destructor.
  void Finalize();
};
// static
// Allocates |size| bytes of backing storage on the dedicated vector arena of
// the heap for the thread T has affinity with. The GCInfo index and heap
// profiler type name are derived from |Backing| (the concrete
// HeapVectorBacking instantiation).
template <typename T, typename Traits>
template <typename Backing>
void* HeapVectorBacking<T, Traits>::AllocateObject(size_t size) {
  ThreadState* state = ThreadStateFor<ThreadingTrait<T>::kAffinity>::GetState();
  DCHECK(state->IsAllocationAllowed());
  return state->Heap().AllocateOnArenaIndex(
      state, size, BlinkGC::kVectorArenaIndex, GCInfoTrait<Backing>::Index(),
      WTF_HEAP_PROFILER_TYPE_NAME(Backing));
}
// Runs destructors for every element slot in the backing store. Only
// instantiated when Traits::kNeedsDestruction is true (enforced below);
// invoked via ConditionalDestructor.
template <typename T, typename Traits>
void HeapVectorBacking<T, Traits>::Finalize() {
  static_assert(Traits::kNeedsDestruction,
                "Only vector buffers with items requiring destruction should "
                "be finalized");
  static_assert(
      Traits::kCanClearUnusedSlotsWithMemset || std::is_polymorphic<T>::value,
      "HeapVectorBacking doesn't support objects that cannot be cleared as "
      "unused with memset or don't have a vtable");
  static_assert(
      !std::is_trivially_destructible<T>::value,
      "Finalization of trivially destructible classes should not happen.");
  HeapObjectHeader* header = HeapObjectHeader::FromPayload(this);
  // Use the payload size as recorded by the heap to determine how many
  // elements to finalize. The backing only knows its capacity, not the
  // vector's size, so unused (zeroed) slots are visited as well.
  size_t length = header->PayloadSize() / sizeof(T);
  Address payload = header->Payload();
#ifdef ANNOTATE_CONTIGUOUS_CONTAINER
  ANNOTATE_CHANGE_SIZE(payload, length * sizeof(T), 0, length * sizeof(T));
#endif
  // As commented above, HeapVectorBacking calls finalizers for unused slots
  // (which are already zeroed out).
  // NOTE: the loop indices are size_t (not unsigned) to match the width of
  // |length|; a narrower counter could truncate/spin on huge backings.
  if (std::is_polymorphic<T>::value) {
    for (size_t i = 0; i < length; ++i) {
      Address element = payload + i * sizeof(T);
      // A zeroed vtable pointer marks an unused slot; skip it.
      if (blink::VTableInitialized(element))
        reinterpret_cast<T*>(element)->~T();
    }
  } else {
    // Non-polymorphic Ts must be memset-clearable (asserted above), so
    // running destructors on zeroed unused slots is safe.
    T* buffer = reinterpret_cast<T*>(payload);
    for (size_t i = 0; i < length; ++i)
      buffer[i].~T();
  }
}
// Custom construction path for vector backings: allocates raw storage for
// |num_elements| items on the vector arena, placement-news the (empty)
// backing object into it, and marks the header fully constructed so that
// concurrent markers may process the object.
template <typename T>
struct MakeGarbageCollectedTrait<HeapVectorBacking<T>> {
  static HeapVectorBacking<T>* Call(size_t num_elements) {
    CHECK_GT(num_elements, 0u);
    void* memory =
        HeapVectorBacking<T>::template AllocateObject<HeapVectorBacking<T>>(
            num_elements * sizeof(T));
    HeapObjectHeader* header = HeapObjectHeader::FromPayload(memory);
    // Placement new as regular operator new() is deleted.
    HeapVectorBacking<T>* object = ::new (memory) HeapVectorBacking<T>();
    header->MarkFullyConstructed<HeapObjectHeader::AccessMode::kAtomic>();
    return object;
  }
};
// The backing inherits the thread affinity of its element type T.
// NOTE(review): this reads ThreadingTrait<T>::Affinity while AllocateObject
// above reads ThreadingTrait<T>::kAffinity — confirm both names exist on the
// primary trait; otherwise this is a latent inconsistency.
template <typename T, typename Traits>
struct ThreadingTrait<HeapVectorBacking<T, Traits>> {
  STATIC_ONLY(ThreadingTrait);
  static const ThreadAffinity kAffinity = ThreadingTrait<T>::Affinity;
};
// Trace support for vector backings, including the concurrent-marking path.
template <typename T, typename Traits>
struct TraceTrait<HeapVectorBacking<T, Traits>> {
  STATIC_ONLY(TraceTrait);
  using Backing = HeapVectorBacking<T, Traits>;

 public:
  // Descriptor used when a backing is found by conservative stack scanning.
  static TraceDescriptor GetTraceDescriptor(const void* self) {
    return {self, TraceTrait<Backing>::Trace};
  }

  static void Trace(Visitor* visitor, const void* self) {
    // Element types that cannot be traced concurrently are deferred back to
    // the main thread when encountered by a concurrent marker.
    if (!Traits::kCanTraceConcurrently && self) {
      if (visitor->DeferredTraceIfConcurrent({self, &Trace},
                                             GetBackingStoreSize(self)))
        return;
    }
    static_assert(!WTF::IsWeak<T>::value,
                  "Weakness is not supported in HeapVector and HeapDeque");
    if (WTF::IsTraceableInCollectionTrait<Traits>::value) {
      WTF::TraceInCollectionTrait<WTF::kNoWeakHandling,
                                  HeapVectorBacking<T, Traits>,
                                  void>::Trace(visitor, self);
    }
  }

 private:
  // Size of the backing allocation in bytes; for large objects the size is
  // recorded on the page rather than in the object header.
  static size_t GetBackingStoreSize(const void* backing_store) {
    const HeapObjectHeader* header =
        HeapObjectHeader::FromPayload(backing_store);
    return header->IsLargeObject<HeapObjectHeader::AccessMode::kAtomic>()
               ? static_cast<LargeObjectPage*>(PageFromObject(header))
                     ->ObjectSize()
               : header->size<HeapObjectHeader::AccessMode::kAtomic>();
  }
};
} // namespace blink
namespace WTF {
// This trace method is used only for on-stack HeapVectors found in
// conservative scanning. On-heap HeapVectors are traced by Vector::trace.
// Traces every slot of a vector backing (capacity, not size, is known).
// Fix: both element loops now use size_t indices; previously the polymorphic
// branch used `unsigned` against a size_t bound (truncation hazard on 64-bit,
// and inconsistent with the non-polymorphic branch).
template <typename T, typename Traits>
struct TraceInCollectionTrait<kNoWeakHandling,
                              blink::HeapVectorBacking<T, Traits>,
                              void> {
  static void Trace(blink::Visitor* visitor, const void* self) {
    // HeapVectorBacking does not know the exact size of the vector
    // and just knows the capacity of the vector. Due to the constraint,
    // HeapVectorBacking can support only the following objects:
    //
    // - An object that has a vtable. In this case, HeapVectorBacking
    //   traces only slots that are not zeroed out. This is because if
    //   the object has a vtable, the zeroed slot means that it is
    //   an unused slot (Remember that the unused slots are guaranteed
    //   to be zeroed out by VectorUnusedSlotClearer).
    //
    // - An object that can be initialized with memset. In this case,
    //   HeapVectorBacking traces all slots including unused slots.
    //   This is fine because the fact that the object can be initialized
    //   with memset indicates that it is safe to treat the zeroed slot
    //   as a valid object.
    static_assert(!IsTraceableInCollectionTrait<Traits>::value ||
                      Traits::kCanClearUnusedSlotsWithMemset ||
                      std::is_polymorphic<T>::value,
                  "HeapVectorBacking doesn't support objects that cannot be "
                  "cleared as unused with memset.");
    // This trace method is instantiated for vectors where
    // IsTraceableInCollectionTrait<Traits>::value is false, but the trace
    // method should not be called. Thus we cannot static-assert
    // IsTraceableInCollectionTrait<Traits>::value but should runtime-assert it.
    DCHECK(IsTraceableInCollectionTrait<Traits>::value);
    const T* array = reinterpret_cast<const T*>(self);
    blink::HeapObjectHeader* header =
        blink::HeapObjectHeader::FromPayload(self);
    // Use the payload size as recorded by the heap to determine how many
    // elements to trace.
    size_t length = header->PayloadSize() / sizeof(T);
#ifdef ANNOTATE_CONTIGUOUS_CONTAINER
    // As commented above, HeapVectorBacking can trace unused slots
    // (which are already zeroed out).
    ANNOTATE_CHANGE_SIZE(array, length, 0, length);
#endif
    if (std::is_polymorphic<T>::value) {
      const char* pointer = reinterpret_cast<const char*>(array);
      for (size_t i = 0; i < length; ++i) {
        // A zeroed vtable pointer marks an unused slot; skip it.
        const char* element = pointer + i * sizeof(T);
        if (blink::VTableInitialized(element)) {
          blink::TraceIfNeeded<
              T, IsTraceableInCollectionTrait<Traits>::value>::Trace(visitor,
                                                                     array[i]);
        }
      }
    } else {
      for (size_t i = 0; i < length; ++i) {
        blink::TraceIfNeeded<
            T, IsTraceableInCollectionTrait<Traits>::value>::Trace(visitor,
                                                                   array[i]);
      }
    }
  }
};
} // namespace WTF
#if BUILDFLAG(USE_V8_OILPAN)
#include "third_party/blink/renderer/platform/heap/v8_wrapper/collection_support/heap_vector_backing.h"
#else // !USE_V8_OILPAN
#include "third_party/blink/renderer/platform/heap/impl/collection_support/heap_vector_backing.h"
#endif // !USE_V8_OILPAN
#endif // THIRD_PARTY_BLINK_RENDERER_PLATFORM_HEAP_COLLECTION_SUPPORT_HEAP_VECTOR_BACKING_H_
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef THIRD_PARTY_BLINK_RENDERER_PLATFORM_HEAP_IMPL_COLLECTION_SUPPORT_HEAP_VECTOR_BACKING_H_
#define THIRD_PARTY_BLINK_RENDERER_PLATFORM_HEAP_IMPL_COLLECTION_SUPPORT_HEAP_VECTOR_BACKING_H_
#include "base/check_op.h"
#include "third_party/blink/renderer/platform/heap/heap.h"
#include "third_party/blink/renderer/platform/heap/impl/finalizer_traits.h"
#include "third_party/blink/renderer/platform/heap/impl/gc_info.h"
#include "third_party/blink/renderer/platform/heap/impl/threading_traits.h"
#include "third_party/blink/renderer/platform/heap/impl/trace_traits.h"
#include "third_party/blink/renderer/platform/heap/thread_state.h"
#include "third_party/blink/renderer/platform/wtf/conditional_destructor.h"
#include "third_party/blink/renderer/platform/wtf/vector.h"
namespace blink {
// Backing store for HeapVector/HeapDeque. The backing is itself a
// garbage-collected object. Destruction is conditional: when
// Traits::kNeedsDestruction is false, WTF::ConditionalDestructor suppresses
// the destructor entirely so trivially-destructible payloads need no
// finalizer.
template <typename T, typename Traits = WTF::VectorTraits<T>>
class HeapVectorBacking final
    : public GarbageCollected<HeapVectorBacking<T, Traits>>,
      public WTF::ConditionalDestructor<HeapVectorBacking<T, Traits>,
                                        !Traits::kNeedsDestruction> {
 public:
  // Allocates |size| bytes on the heap's vector arena; see definition below.
  template <typename Backing>
  static void* AllocateObject(size_t);

  // Conditionally invoked via destructor.
  void Finalize();
};
// static
// Allocates |size| bytes of backing storage on the dedicated vector arena of
// the heap for the thread T has affinity with. The GCInfo index and heap
// profiler type name are derived from |Backing| (the concrete
// HeapVectorBacking instantiation).
template <typename T, typename Traits>
template <typename Backing>
void* HeapVectorBacking<T, Traits>::AllocateObject(size_t size) {
  ThreadState* state = ThreadStateFor<ThreadingTrait<T>::kAffinity>::GetState();
  DCHECK(state->IsAllocationAllowed());
  return state->Heap().AllocateOnArenaIndex(
      state, size, BlinkGC::kVectorArenaIndex, GCInfoTrait<Backing>::Index(),
      WTF_HEAP_PROFILER_TYPE_NAME(Backing));
}
// Runs destructors for every element slot in the backing store. Only
// instantiated when Traits::kNeedsDestruction is true (enforced below);
// invoked via ConditionalDestructor.
template <typename T, typename Traits>
void HeapVectorBacking<T, Traits>::Finalize() {
  static_assert(Traits::kNeedsDestruction,
                "Only vector buffers with items requiring destruction should "
                "be finalized");
  static_assert(
      Traits::kCanClearUnusedSlotsWithMemset || std::is_polymorphic<T>::value,
      "HeapVectorBacking doesn't support objects that cannot be cleared as "
      "unused with memset or don't have a vtable");
  static_assert(
      !std::is_trivially_destructible<T>::value,
      "Finalization of trivially destructible classes should not happen.");
  HeapObjectHeader* header = HeapObjectHeader::FromPayload(this);
  // Use the payload size as recorded by the heap to determine how many
  // elements to finalize. The backing only knows its capacity, not the
  // vector's size, so unused (zeroed) slots are visited as well.
  size_t length = header->PayloadSize() / sizeof(T);
  Address payload = header->Payload();
#ifdef ANNOTATE_CONTIGUOUS_CONTAINER
  ANNOTATE_CHANGE_SIZE(payload, length * sizeof(T), 0, length * sizeof(T));
#endif
  // As commented above, HeapVectorBacking calls finalizers for unused slots
  // (which are already zeroed out).
  // NOTE: the loop indices are size_t (not unsigned) to match the width of
  // |length|; a narrower counter could truncate/spin on huge backings.
  if (std::is_polymorphic<T>::value) {
    for (size_t i = 0; i < length; ++i) {
      Address element = payload + i * sizeof(T);
      // A zeroed vtable pointer marks an unused slot; skip it.
      if (blink::VTableInitialized(element))
        reinterpret_cast<T*>(element)->~T();
    }
  } else {
    // Non-polymorphic Ts must be memset-clearable (asserted above), so
    // running destructors on zeroed unused slots is safe.
    T* buffer = reinterpret_cast<T*>(payload);
    for (size_t i = 0; i < length; ++i)
      buffer[i].~T();
  }
}
// Custom construction path for vector backings: allocates raw storage for
// |num_elements| items on the vector arena, placement-news the (empty)
// backing object into it, and marks the header fully constructed so that
// concurrent markers may process the object.
template <typename T>
struct MakeGarbageCollectedTrait<HeapVectorBacking<T>> {
  static HeapVectorBacking<T>* Call(size_t num_elements) {
    CHECK_GT(num_elements, 0u);
    void* memory =
        HeapVectorBacking<T>::template AllocateObject<HeapVectorBacking<T>>(
            num_elements * sizeof(T));
    HeapObjectHeader* header = HeapObjectHeader::FromPayload(memory);
    // Placement new as regular operator new() is deleted.
    HeapVectorBacking<T>* object = ::new (memory) HeapVectorBacking<T>();
    header->MarkFullyConstructed<HeapObjectHeader::AccessMode::kAtomic>();
    return object;
  }
};
// The backing inherits the thread affinity of its element type T.
// NOTE(review): this reads ThreadingTrait<T>::Affinity while AllocateObject
// above reads ThreadingTrait<T>::kAffinity — confirm both names exist on the
// primary trait; otherwise this is a latent inconsistency.
template <typename T, typename Traits>
struct ThreadingTrait<HeapVectorBacking<T, Traits>> {
  STATIC_ONLY(ThreadingTrait);
  static const ThreadAffinity kAffinity = ThreadingTrait<T>::Affinity;
};
// Trace support for vector backings, including the concurrent-marking path.
template <typename T, typename Traits>
struct TraceTrait<HeapVectorBacking<T, Traits>> {
  STATIC_ONLY(TraceTrait);
  using Backing = HeapVectorBacking<T, Traits>;

 public:
  // Descriptor used when a backing is found by conservative stack scanning.
  static TraceDescriptor GetTraceDescriptor(const void* self) {
    return {self, TraceTrait<Backing>::Trace};
  }

  static void Trace(Visitor* visitor, const void* self) {
    // Element types that cannot be traced concurrently are deferred back to
    // the main thread when encountered by a concurrent marker.
    if (!Traits::kCanTraceConcurrently && self) {
      if (visitor->DeferredTraceIfConcurrent({self, &Trace},
                                             GetBackingStoreSize(self)))
        return;
    }
    static_assert(!WTF::IsWeak<T>::value,
                  "Weakness is not supported in HeapVector and HeapDeque");
    if (WTF::IsTraceableInCollectionTrait<Traits>::value) {
      WTF::TraceInCollectionTrait<WTF::kNoWeakHandling,
                                  HeapVectorBacking<T, Traits>,
                                  void>::Trace(visitor, self);
    }
  }

 private:
  // Size of the backing allocation in bytes; for large objects the size is
  // recorded on the page rather than in the object header.
  static size_t GetBackingStoreSize(const void* backing_store) {
    const HeapObjectHeader* header =
        HeapObjectHeader::FromPayload(backing_store);
    return header->IsLargeObject<HeapObjectHeader::AccessMode::kAtomic>()
               ? static_cast<LargeObjectPage*>(PageFromObject(header))
                     ->ObjectSize()
               : header->size<HeapObjectHeader::AccessMode::kAtomic>();
  }
};
} // namespace blink
namespace WTF {
// This trace method is used only for on-stack HeapVectors found in
// conservative scanning. On-heap HeapVectors are traced by Vector::trace.
// Traces every slot of a vector backing (capacity, not size, is known).
// Fix: both element loops now use size_t indices; previously the polymorphic
// branch used `unsigned` against a size_t bound (truncation hazard on 64-bit,
// and inconsistent with the non-polymorphic branch).
template <typename T, typename Traits>
struct TraceInCollectionTrait<kNoWeakHandling,
                              blink::HeapVectorBacking<T, Traits>,
                              void> {
  static void Trace(blink::Visitor* visitor, const void* self) {
    // HeapVectorBacking does not know the exact size of the vector
    // and just knows the capacity of the vector. Due to the constraint,
    // HeapVectorBacking can support only the following objects:
    //
    // - An object that has a vtable. In this case, HeapVectorBacking
    //   traces only slots that are not zeroed out. This is because if
    //   the object has a vtable, the zeroed slot means that it is
    //   an unused slot (Remember that the unused slots are guaranteed
    //   to be zeroed out by VectorUnusedSlotClearer).
    //
    // - An object that can be initialized with memset. In this case,
    //   HeapVectorBacking traces all slots including unused slots.
    //   This is fine because the fact that the object can be initialized
    //   with memset indicates that it is safe to treat the zeroed slot
    //   as a valid object.
    static_assert(!IsTraceableInCollectionTrait<Traits>::value ||
                      Traits::kCanClearUnusedSlotsWithMemset ||
                      std::is_polymorphic<T>::value,
                  "HeapVectorBacking doesn't support objects that cannot be "
                  "cleared as unused with memset.");
    // This trace method is instantiated for vectors where
    // IsTraceableInCollectionTrait<Traits>::value is false, but the trace
    // method should not be called. Thus we cannot static-assert
    // IsTraceableInCollectionTrait<Traits>::value but should runtime-assert it.
    DCHECK(IsTraceableInCollectionTrait<Traits>::value);
    const T* array = reinterpret_cast<const T*>(self);
    blink::HeapObjectHeader* header =
        blink::HeapObjectHeader::FromPayload(self);
    // Use the payload size as recorded by the heap to determine how many
    // elements to trace.
    size_t length = header->PayloadSize() / sizeof(T);
#ifdef ANNOTATE_CONTIGUOUS_CONTAINER
    // As commented above, HeapVectorBacking can trace unused slots
    // (which are already zeroed out).
    ANNOTATE_CHANGE_SIZE(array, length, 0, length);
#endif
    if (std::is_polymorphic<T>::value) {
      const char* pointer = reinterpret_cast<const char*>(array);
      for (size_t i = 0; i < length; ++i) {
        // A zeroed vtable pointer marks an unused slot; skip it.
        const char* element = pointer + i * sizeof(T);
        if (blink::VTableInitialized(element)) {
          blink::TraceIfNeeded<
              T, IsTraceableInCollectionTrait<Traits>::value>::Trace(visitor,
                                                                     array[i]);
        }
      }
    } else {
      for (size_t i = 0; i < length; ++i) {
        blink::TraceIfNeeded<
            T, IsTraceableInCollectionTrait<Traits>::value>::Trace(visitor,
                                                                   array[i]);
      }
    }
  }
};
} // namespace WTF
#endif // THIRD_PARTY_BLINK_RENDERER_PLATFORM_HEAP_IMPL_COLLECTION_SUPPORT_HEAP_VECTOR_BACKING_H_
......@@ -7,6 +7,7 @@
#include "third_party/blink/renderer/platform/platform_export.h"
#include "third_party/blink/renderer/platform/wtf/allocator/allocator.h"
#include "v8/include/cppgc/liveness-broker.h"
namespace blink {
......@@ -21,6 +22,8 @@ class PLATFORM_EXPORT BlinkGC final {
enum StackState { kNoHeapPointersOnStack, kHeapPointersOnStack };
};
using WeakCallback = void (*)(const cppgc::LivenessBroker&, const void*);
} // namespace blink
#endif // THIRD_PARTY_BLINK_RENDERER_PLATFORM_HEAP_V8_WRAPPER_BLINK_GC_H_
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef THIRD_PARTY_BLINK_RENDERER_PLATFORM_HEAP_V8_WRAPPER_COLLECTION_SUPPORT_HEAP_HASH_TABLE_BACKING_H_
#define THIRD_PARTY_BLINK_RENDERER_PLATFORM_HEAP_V8_WRAPPER_COLLECTION_SUPPORT_HEAP_HASH_TABLE_BACKING_H_
#include "third_party/blink/renderer/platform/heap/heap.h"
#include "third_party/blink/renderer/platform/wtf/conditional_destructor.h"
namespace blink {
// Backing store for heap-allocated hash tables (cppgc / V8 Oilpan flavor).
// The backing is itself garbage collected. The destructor (and with it
// Finalize()) is compiled out via ConditionalDestructor when the table's
// ValueType is trivially destructible.
template <typename Table>
class HeapHashTableBacking final
    : public GarbageCollected<HeapHashTableBacking<Table>>,
      public WTF::ConditionalDestructor<
          HeapHashTableBacking<Table>,
          std::is_trivially_destructible<typename Table::ValueType>::value> {
 public:
  // Conditionally invoked via destructor.
  void Finalize();
};
// Finalizer for hash-table backings. Only instantiated when the table's
// ValueType actually requires destruction; element destruction itself is
// still pending for the library build.
template <typename Table>
void HeapHashTableBacking<Table>::Finalize() {
  static_assert(
      !std::is_trivially_destructible<typename Table::ValueType>::value,
      "Finalization of trivially destructible classes should not happen.");
  // TODO(1056170): Implement.
}
} // namespace blink
#endif // THIRD_PARTY_BLINK_RENDERER_PLATFORM_HEAP_V8_WRAPPER_COLLECTION_SUPPORT_HEAP_HASH_TABLE_BACKING_H_
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef THIRD_PARTY_BLINK_RENDERER_PLATFORM_HEAP_V8_WRAPPER_COLLECTION_SUPPORT_HEAP_VECTOR_BACKING_H_
#define THIRD_PARTY_BLINK_RENDERER_PLATFORM_HEAP_V8_WRAPPER_COLLECTION_SUPPORT_HEAP_VECTOR_BACKING_H_
#include "third_party/blink/renderer/platform/heap/heap.h"
#include "third_party/blink/renderer/platform/wtf/conditional_destructor.h"
#include "third_party/blink/renderer/platform/wtf/vector_traits.h"
namespace blink {
// cppgc (V8 Oilpan) flavor of the vector backing store. Mirrors the impl/
// version's interface; the destructor is suppressed via ConditionalDestructor
// when Traits::kNeedsDestruction is false.
template <typename T, typename Traits = WTF::VectorTraits<T>>
class HeapVectorBacking final
    : public GarbageCollected<HeapVectorBacking<T, Traits>>,
      public WTF::ConditionalDestructor<HeapVectorBacking<T, Traits>,
                                        !Traits::kNeedsDestruction> {
 public:
  // Conditionally invoked via destructor.
  void Finalize();
};
// Finalizer stub for the cppgc vector backing. Element destruction is not
// implemented yet; the compile-time guards mirror the impl/ version.
template <typename T, typename Traits>
void HeapVectorBacking<T, Traits>::Finalize() {
  // Instantiating this for non-destructible payloads is a programming error.
  static_assert(Traits::kNeedsDestruction,
                "Only vector buffers with items requiring destruction should "
                "be finalized");
  // Unused slots are zeroed; T must either tolerate that via memset-clearing
  // or expose a vtable so zeroed slots can be detected.
  static_assert(
      std::is_polymorphic<T>::value || Traits::kCanClearUnusedSlotsWithMemset,
      "HeapVectorBacking doesn't support objects that cannot be cleared as "
      "unused with memset or don't have a vtable");
  static_assert(
      !std::is_trivially_destructible<T>::value,
      "Finalization of trivially destructible classes should not happen.");
  // TODO(1056170): Implement.
}
} // namespace blink
#endif // THIRD_PARTY_BLINK_RENDERER_PLATFORM_HEAP_V8_WRAPPER_COLLECTION_SUPPORT_HEAP_VECTOR_BACKING_H_
......@@ -5,6 +5,10 @@
#ifndef THIRD_PARTY_BLINK_RENDERER_PLATFORM_HEAP_V8_WRAPPER_HEAP_ALLOCATOR_IMPL_H_
#define THIRD_PARTY_BLINK_RENDERER_PLATFORM_HEAP_V8_WRAPPER_HEAP_ALLOCATOR_IMPL_H_
#include "third_party/blink/renderer/platform/heap/collection_support/heap_hash_table_backing.h"
#include "third_party/blink/renderer/platform/heap/collection_support/heap_vector_backing.h"
#include "third_party/blink/renderer/platform/heap/v8_wrapper/heap.h"
#include "third_party/blink/renderer/platform/heap/v8_wrapper/visitor.h"
#include "third_party/blink/renderer/platform/platform_export.h"
#include "third_party/blink/renderer/platform/wtf/allocator/allocator.h"
......@@ -14,6 +18,8 @@ class PLATFORM_EXPORT HeapAllocator {
STATIC_ONLY(HeapAllocator);
public:
using LivenessBroker = blink::LivenessBroker;
static constexpr bool kIsGarbageCollected = true;
// See wtf/size_t.h for details.
......@@ -109,6 +115,47 @@ class PLATFORM_EXPORT HeapAllocator {
static void NotifyNewObjects(T*, size_t) {
// TODO(1056170): Implement.
}
// Trace hook for collection elements. Currently a no-op stub in the
// library build; arguments are intentionally unused until implemented.
template <typename T, typename Traits>
static void Trace(Visitor* visitor, const T& t) {
  // TODO(1056170): Forward to TraceInCollectionTrait.
}
// Traces a vector backing store: registers |backing_slot| as a movable
// reference (so the slot can be updated if the backing is moved/compacted)
// and then traces the backing object itself.
template <typename T>
static void TraceVectorBacking(Visitor* visitor,
                               const T* backing,
                               const T* const* backing_slot) {
  // The raw element pointer is reinterpreted as the backing object the heap
  // actually manages.
  visitor->RegisterMovableReference(const_cast<const HeapVectorBacking<T>**>(
      reinterpret_cast<const HeapVectorBacking<T>* const*>(backing_slot)));
  visitor->Trace(reinterpret_cast<const HeapVectorBacking<T>*>(backing));
}
// Strongly traces a hash-table backing store: registers the slot as a
// movable reference for compaction, then traces the backing as a regular
// (strong) reference.
template <typename T, typename HashTable>
static void TraceHashTableBackingStrongly(Visitor* visitor,
                                          const T* backing,
                                          const T* const* backing_slot) {
  visitor->RegisterMovableReference(
      const_cast<const HeapHashTableBacking<HashTable>**>(
          reinterpret_cast<const HeapHashTableBacking<HashTable>* const*>(
              backing_slot)));
  visitor->Trace(
      reinterpret_cast<const HeapHashTableBacking<HashTable>*>(backing));
}
// Weakly traces a hash-table backing store: registers the slot as a movable
// reference for compaction, then traces the backing as a weak container.
// |callback| is invoked with |parameter| (per the WeakCallback signature,
// together with a LivenessBroker) to process weak entries.
template <typename T, typename HashTable>
static void TraceHashTableBackingWeakly(Visitor* visitor,
                                        const T* backing,
                                        const T* const* backing_slot,
                                        WeakCallback callback,
                                        const void* parameter) {
  visitor->RegisterMovableReference(
      const_cast<const HeapHashTableBacking<HashTable>**>(
          reinterpret_cast<const HeapHashTableBacking<HashTable>* const*>(
              backing_slot)));
  visitor->TraceWeakContainer(
      reinterpret_cast<const HeapHashTableBacking<HashTable>*>(backing),
      callback, parameter);
}
};
} // namespace blink
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment