Commit 251bb0c0 authored by Anton Bikineev, committed by Commit Bot

PartitionAlloc: PCScan: Globalize PCScan

Currently, inter-partition dangling pointers are not covered by PCScan.
This CL fixes that by making PCScan a singleton that all scannable roots
register with. The CL also ensures that access to the global object stays
fast: the instance is constant-initialized, so no double-checked locking
(e.g. __cxa_guard_acquire/release) is introduced by the compiler.

Bug: 1129751
Change-Id: I07500a974607a30f2ad79c96f8a1e0d2c52bbbd5
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2552397
Commit-Queue: Anton Bikineev <bikineev@chromium.org>
Reviewed-by: Bartek Nowierski <bartekn@chromium.org>
Reviewed-by: Kentaro Hara <haraken@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#831099}
parent fe087c3a
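The key implementation point is the constant-initialized singleton. Below is a minimal standalone sketch (illustrative only, not code from this CL) contrasting a function-local static, for which compilers emit a guard variable plus __cxa_guard_acquire/__cxa_guard_release calls on Itanium-ABI platforms, with a constant-initialized static member of the kind this CL uses via the require_constant_initialization attribute:

```cpp
// Sketch: why a constant-initialized static member needs no runtime guard.
#include <cstdio>

class MeyersSingleton {
 public:
  static MeyersSingleton& Instance() {
    // The compiler must construct this exactly once, thread-safely, on
    // first use; it typically emits a guard variable plus calls to
    // __cxa_guard_acquire/__cxa_guard_release, paid on every call.
    static MeyersSingleton instance;
    return instance;
  }
};

class ConstInitSingleton {
 public:
  static ConstInitSingleton& Instance() {
    // instance_ is constant-initialized at compile time, so this is a
    // plain address computation with no guard.
    return instance_;
  }

 private:
  constexpr ConstInitSingleton() = default;
  static ConstInitSingleton instance_;  // C++20 would spell this constinit.
};

// Constant initialization: a constexpr constructor, no dynamic initializer.
ConstInitSingleton ConstInitSingleton::instance_;

int main() {
  std::printf("%p %p\n", static_cast<void*>(&MeyersSingleton::Instance()),
              static_cast<void*>(&ConstInitSingleton::Instance()));
}
```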
@@ -20,6 +20,7 @@
 #include "base/allocator/partition_allocator/partition_page.h"
 #include "base/allocator/partition_allocator/partition_root.h"
 #include "base/allocator/partition_allocator/partition_stats.h"
+#include "base/allocator/partition_allocator/pcscan.h"

 namespace base {
@@ -75,6 +76,8 @@ void PartitionAllocGlobalUninitForTesting() {
 #endif  // defined(PA_HAS_64_BITS_POINTERS)
   }
 #endif  // !BUILDFLAG(USE_PARTITION_ALLOC_AS_MALLOC)
+  internal::PCScan<internal::ThreadSafe>::Instance()
+      .ClearRootsForTesting();  // IN-TEST
   internal::g_oom_handling_function = nullptr;
 }
...
@@ -265,7 +265,7 @@ ALWAYS_INLINE void* PartitionBucket<thread_safe>::AllocNewSlotSpan(
   char* quarantine_bitmaps = super_page + PartitionPageSize();
   size_t quarantine_bitmaps_reserved_size = 0;
   size_t quarantine_bitmaps_size_to_commit = 0;
-  if (root->scannable) {
+  if (root->IsScannable()) {
     quarantine_bitmaps_reserved_size = ReservedQuarantineBitmapsSize();
     quarantine_bitmaps_size_to_commit = CommittedQuarantineBitmapsSize();
   }
@@ -276,7 +276,7 @@ ALWAYS_INLINE void* PartitionBucket<thread_safe>::AllocNewSlotSpan(
   char* ret = quarantine_bitmaps + quarantine_bitmaps_reserved_size;
   root->next_partition_page = ret + slot_span_reserved_size;
   root->next_partition_page_end = root->next_super_page - PartitionPageSize();
-  PA_DCHECK(ret == SuperPagePayloadBegin(super_page, root->scannable));
+  PA_DCHECK(ret == SuperPagePayloadBegin(super_page, root->IsScannable()));
   PA_DCHECK(root->next_partition_page_end == SuperPagePayloadEnd(super_page));

   // The first slot span is accessible. The given slot_span_committed_size is
@@ -306,8 +306,8 @@ ALWAYS_INLINE void* PartitionBucket<thread_safe>::AllocNewSlotSpan(
   // unused part of partition page, if any. If PCScan isn't used, release the
   // entire reserved region (PartitionRoot::EnablePCScan will be responsible
   // for committing it when enabling PCScan).
-  if (root->pcscan.has_value()) {
-    PA_DCHECK(root->scannable);
+  if (root->IsScanEnabled()) {
+    PA_DCHECK(root->IsScannable());
     if (quarantine_bitmaps_reserved_size > quarantine_bitmaps_size_to_commit) {
       SetSystemPagesAccess(
           quarantine_bitmaps + quarantine_bitmaps_size_to_commit,
@@ -317,7 +317,7 @@ ALWAYS_INLINE void* PartitionBucket<thread_safe>::AllocNewSlotSpan(
   } else {
     // If partition isn't scannable, no quarantine bitmaps were reserved, hence
     // nothing to decommit.
-    if (root->scannable) {
+    if (root->IsScannable()) {
       PA_DCHECK(quarantine_bitmaps_reserved_size > 0);
       SetSystemPagesAccess(quarantine_bitmaps, quarantine_bitmaps_reserved_size,
                            PageInaccessible);
...
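The hunk above distinguishes reserving the quarantine bitmaps from committing them. A rough POSIX sketch of that reserve-now/commit-later pattern (sizes are assumptions for illustration; this is not PartitionAlloc's SetSystemPagesAccess implementation):

```cpp
// Sketch: reserve address space up front, commit only what is needed now.
#include <sys/mman.h>

#include <cassert>
#include <cstddef>

int main() {
  const size_t kReservedSize = size_t{1} << 20;   // reserve 1 MiB of VA space
  const size_t kCommittedSize = size_t{1} << 14;  // commit only 16 KiB of it

  // Reserve: a PROT_NONE mapping consumes address space but no memory.
  void* base = mmap(nullptr, kReservedSize, PROT_NONE,
                    MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
  assert(base != MAP_FAILED);

  // Commit the prefix needed immediately (the analogue of
  // CommittedQuarantineBitmapsSize() vs ReservedQuarantineBitmapsSize()).
  int rc = mprotect(base, kCommittedSize, PROT_READ | PROT_WRITE);
  assert(rc == 0);
  (void)rc;

  // The tail stays inaccessible until scanning is enabled later, much like
  // the SetSystemPagesAccess(..., PageInaccessible) calls in the diff.
  munmap(base, kReservedSize);
}
```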
@@ -291,8 +291,8 @@ ALWAYS_INLINE char* GetSlotStartInSuperPage(char* maybe_inner_ptr) {
       reinterpret_cast<uintptr_t>(maybe_inner_ptr) & kSuperPageBaseMask);
   auto* extent = reinterpret_cast<PartitionSuperPageExtentEntry<thread_safe>*>(
       PartitionSuperPageToMetadataArea(super_page_ptr));
-  PA_DCHECK(IsWithinSuperPagePayload(maybe_inner_ptr,
-                                     extent->root->pcscan.has_value()));
+  PA_DCHECK(
+      IsWithinSuperPagePayload(maybe_inner_ptr, extent->root->IsScanEnabled()));
   auto* slot_span = SlotSpanMetadata<thread_safe>::FromPointerNoAlignmentCheck(
       maybe_inner_ptr);
   // Check if the slot span is actually used and valid.
...
@@ -14,6 +14,22 @@
 namespace base {

+namespace {
+
+template <bool thread_safe>
+typename PartitionRoot<thread_safe>::PCScanMode PartitionOptionsToPCScanMode(
+    PartitionOptions::PCScan opt) {
+  using Root = PartitionRoot<thread_safe>;
+  switch (opt) {
+    case PartitionOptions::PCScan::kAlwaysDisabled:
+      return Root::PCScanMode::kNonScannable;
+    case PartitionOptions::PCScan::kDisabledByDefault:
+      return Root::PCScanMode::kDisabled;
+    case PartitionOptions::PCScan::kForcedEnabledForTesting:
+      return Root::PCScanMode::kEnabled;
+  }
+}
+
+}  // namespace
+
 namespace internal {

 template <bool thread_safe>
@@ -381,12 +397,13 @@ void PartitionRoot<thread_safe>::Init(PartitionOptions opts) {
     extras_size = static_cast<uint32_t>(size);
     extras_offset = static_cast<uint32_t>(offset);

-    scannable = (opts.pcscan != PartitionOptions::PCScan::kAlwaysDisabled);
-    // Concurrent freeing in PCScan can only safely work on thread-safe
-    // partitions.
-    if (thread_safe &&
-        opts.pcscan == PartitionOptions::PCScan::kForcedEnabledForTesting)
-      pcscan.emplace(this);
+    pcscan_mode = PartitionOptionsToPCScanMode<thread_safe>(opts.pcscan);
+    if (pcscan_mode == PCScanMode::kEnabled) {
+      // Concurrent freeing in PCScan can only safely work on thread-safe
+      // partitions.
+      PA_CHECK(thread_safe);
+      PCScan::Instance().RegisterRoot(this);
+    }

     // We mark the sentinel slot span as free to make sure it is skipped by our
     // logic to find a new active slot span.
@@ -618,9 +635,8 @@ void* PartitionRoot<thread_safe>::ReallocFlags(int flags,
 template <bool thread_safe>
 void PartitionRoot<thread_safe>::PurgeMemory(int flags) {
   // TODO(chromium:1129751): Change to LIKELY once PCScan is enabled by default.
-  if (UNLIKELY(pcscan) && (flags & PartitionPurgeForceAllFreed)) {
-    pcscan->PerformScanIfNeeded(
-        internal::PCScan<thread_safe>::InvocationMode::kBlocking);
+  if (UNLIKELY(IsScanEnabled()) && (flags & PartitionPurgeForceAllFreed)) {
+    PCScan::Instance().PerformScanIfNeeded(PCScan::InvocationMode::kBlocking);
   }
   {
...
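The new PartitionOptionsToPCScanMode helper collapses the old scannable/pcscan pair into one three-state mode. A compact sketch of the resulting predicate logic (the names mirror the diff, but this is an illustration, not the Chromium source):

```cpp
#include <cassert>
#include <cstdint>

// Illustrative three-state mode: kNonScannable roots never reserve
// quarantine bitmaps; kDisabled roots reserve them but do not scan yet;
// kEnabled roots are registered with the global PCScan.
enum class PCScanMode : uint8_t { kNonScannable, kDisabled, kEnabled };

bool IsScannable(PCScanMode m) { return m != PCScanMode::kNonScannable; }
bool IsScanEnabled(PCScanMode m) { return m == PCScanMode::kEnabled; }

int main() {
  // kDisabled is the interesting middle state: bitmaps are reserved, so
  // PCScan can be turned on later without re-carving the super-page
  // layout, but scanning is not yet enabled.
  assert(IsScannable(PCScanMode::kDisabled) &&
         !IsScanEnabled(PCScanMode::kDisabled));
  assert(!IsScannable(PCScanMode::kNonScannable));
  assert(IsScanEnabled(PCScanMode::kEnabled));
}
```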
@@ -129,14 +129,19 @@ struct BASE_EXPORT PartitionRoot {
       internal::PartitionSuperPageExtentEntry<thread_safe>;
   using DirectMapExtent = internal::PartitionDirectMapExtent<thread_safe>;
   using ScopedGuard = internal::ScopedGuard<thread_safe>;
-  using PCScan = base::Optional<internal::PCScan<thread_safe>>;
+  using PCScan = internal::PCScan<thread_safe>;

   internal::MaybeSpinLock<thread_safe> lock_;

+  enum class PCScanMode : uint8_t {
+    kNonScannable,
+    kDisabled,
+    kEnabled,
+  } pcscan_mode = PCScanMode::kNonScannable;
+
   // Flags accessed on fast paths.
   bool with_thread_cache = false;
   const bool is_thread_safe = thread_safe;
-  bool scannable = false;
   bool initialized = false;
   bool allow_extras;
@@ -171,7 +176,6 @@ struct BASE_EXPORT PartitionRoot {
   // Integrity check = ~reinterpret_cast<uintptr_t>(this).
   uintptr_t inverted_self = 0;
-  PCScan pcscan;

   // The bucket lookup table lets us map a size_t to a bucket quickly.
   // The trailing +1 caters for the overflow case for very large allocation
@@ -203,6 +207,8 @@ struct BASE_EXPORT PartitionRoot {
   ALWAYS_INLINE static bool IsValidSlotSpan(SlotSpan* slot_span);
   ALWAYS_INLINE static PartitionRoot* FromSlotSpan(SlotSpan* slot_span);
+  ALWAYS_INLINE static PartitionRoot* FromSuperPage(char* super_page);
+  ALWAYS_INLINE static PartitionRoot* FromPointerInNormalBucketPool(char* ptr);

   ALWAYS_INLINE void IncreaseCommittedPages(size_t len);
   ALWAYS_INLINE void DecreaseCommittedPages(size_t len);
@@ -284,14 +290,21 @@ struct BASE_EXPORT PartitionRoot {
     return features::IsPartitionAllocGigaCageEnabled() && allow_extras;
   }

+  ALWAYS_INLINE bool IsScannable() const {
+    return pcscan_mode != PCScanMode::kNonScannable;
+  }
+
+  ALWAYS_INLINE bool IsScanEnabled() const {
+    return pcscan_mode == PCScanMode::kEnabled;
+  }
+
+  // Enables PCScan for this root.
   void EnablePCScan() {
     PA_CHECK(thread_safe);
-    PA_CHECK(scannable && !pcscan.has_value());
-    // Setting |pcscan| and committing bitmaps has to be done under the lock to
-    // avoid racing with PartitionBucket::AllocNewSlotSpan and avoid racing on
-    // |pcscan| ifself during free calls.
-    internal::ScopedGuard<thread_safe> guard{lock_};
-    pcscan.emplace(this);
+    PA_CHECK(IsScannable() && !IsScanEnabled());
+    ScopedGuard guard{lock_};
+    PCScan::Instance().RegisterRoot(this);
+    pcscan_mode = PCScanMode::kEnabled;
   }

   static PAGE_ALLOCATOR_CONSTANTS_DECLARE_CONSTEXPR ALWAYS_INLINE size_t
@@ -576,9 +589,9 @@ ALWAYS_INLINE void PartitionRoot<thread_safe>::FreeNoHooks(void* ptr) {
   // TODO(bikineev): Change the first condition to LIKELY once PCScan is enabled
   // by default.
-  if (UNLIKELY(root->pcscan) &&
+  if (UNLIKELY(root->IsScanEnabled()) &&
       LIKELY(!slot_span->bucket->is_direct_mapped())) {
-    root->pcscan->MoveToQuarantine(ptr, slot_span);
+    PCScan::Instance().MoveToQuarantine(ptr, slot_span);
     return;
   }
@@ -723,6 +736,25 @@ PartitionRoot<thread_safe>::FromSlotSpan(SlotSpan* slot_span) {
   return extent_entry->root;
 }

+template <bool thread_safe>
+ALWAYS_INLINE PartitionRoot<thread_safe>*
+PartitionRoot<thread_safe>::FromSuperPage(char* super_page) {
+  auto* extent_entry = reinterpret_cast<SuperPageExtentEntry*>(
+      internal::PartitionSuperPageToMetadataArea(super_page));
+  PartitionRoot* root = extent_entry->root;
+  PA_DCHECK(root->inverted_self == ~reinterpret_cast<uintptr_t>(root));
+  return root;
+}
+
+template <bool thread_safe>
+ALWAYS_INLINE PartitionRoot<thread_safe>*
+PartitionRoot<thread_safe>::FromPointerInNormalBucketPool(char* ptr) {
+  PA_DCHECK(!IsManagedByPartitionAllocDirectMap(ptr));
+  char* super_page = reinterpret_cast<char*>(reinterpret_cast<uintptr_t>(ptr) &
+                                             kSuperPageBaseMask);
+  return FromSuperPage(super_page);
+}
+
 template <bool thread_safe>
 ALWAYS_INLINE void PartitionRoot<thread_safe>::IncreaseCommittedPages(
     size_t len) {
...
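FromPointerInNormalBucketPool relies on super pages being aligned to their own size, so masking any interior pointer with kSuperPageBaseMask recovers the super-page base, where the metadata area, and with it the owning root, lives. A minimal sketch, assuming 2 MiB super pages as in PartitionAlloc:

```cpp
#include <cassert>
#include <cstdint>

constexpr uintptr_t kSuperPageSize = uintptr_t{1} << 21;  // 2 MiB
constexpr uintptr_t kSuperPageBaseMask = ~(kSuperPageSize - 1);

// Map any pointer inside a super page back to the super-page base.
char* SuperPageBase(char* ptr) {
  return reinterpret_cast<char*>(reinterpret_cast<uintptr_t>(ptr) &
                                 kSuperPageBaseMask);
}

int main() {
  // An arbitrary 2 MiB-aligned address, standing in for a real super page.
  auto* base = reinterpret_cast<char*>(uintptr_t{0x20200000});
  char* inner = base + 0x1234;           // an allocation inside the page
  assert(SuperPageBase(inner) == base);  // masked back to the base
}
```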
@@ -14,6 +14,14 @@
 #include "base/allocator/partition_allocator/partition_page.h"
 #include "base/base_export.h"

+#if defined(__has_attribute)
+#if __has_attribute(require_constant_initialization)
+#define PA_CONSTINIT __attribute__((require_constant_initialization))
+#else
+#define PA_CONSTINIT
+#endif
+#endif
+
 namespace base {

 namespace internal {
@@ -30,9 +38,6 @@ namespace internal {
 // The driver class encapsulates the entire PCScan infrastructure. It provides
 // a single function MoveToQuarantine() that posts a concurrent task if the
 // limit is reached.
-//
-// TODO: PCScan should work for all partitions in the PartitionAlloc heap, not
-// on a per-PartitionRoot basis.
 template <bool thread_safe>
 class BASE_EXPORT PCScan final {
  public:
@@ -44,17 +49,27 @@ class BASE_EXPORT PCScan final {
     kNonBlocking,
   };

-  explicit PCScan(Root* root);
+  static PCScan& Instance() {
+    // The instance is declared as a static member, not static local. The
+    // reason is that we want to use the require_constant_initialization
+    // attribute to avoid double-checked locking which would otherwise have
+    // been introduced by the compiler for thread-safe dynamic initialization
+    // (see constinit from C++20).
+    return instance_;
+  }

   PCScan(const PCScan&) = delete;
   PCScan& operator=(const PCScan&) = delete;

-  ~PCScan();
+  // Registers a root for scanning.
+  void RegisterRoot(Root* root);

   ALWAYS_INLINE void MoveToQuarantine(void* ptr, SlotSpan* slot_span);
   void PerformScanIfNeeded(InvocationMode invocation_mode);

+  void ClearRootsForTesting();
+
  private:
   class PCScanTask;
   friend class PCScanTest;
@@ -86,10 +101,45 @@ class BASE_EXPORT PCScan final {
     size_t last_size_ = 0;
   };

+  static constexpr size_t kMaxNumberOfPartitions = 8u;
+
+  // A custom constexpr container class that avoids dynamic initialization.
+  // Constexprness is required to const-initialize the global PCScan.
+  class Roots final : private std::array<Root*, kMaxNumberOfPartitions> {
+    using Base = std::array<Root*, kMaxNumberOfPartitions>;
+
+   public:
+    using typename Base::const_iterator;
+    using typename Base::iterator;
+
+    // Explicitly value-initialize Base{} as otherwise the default
+    // (aggregate) initialization won't be considered as constexpr.
+    constexpr Roots() : Base{} {}
+
+    iterator begin() { return Base::begin(); }
+    const_iterator begin() const { return Base::begin(); }
+
+    iterator end() { return begin() + current_; }
+    const_iterator end() const { return begin() + current_; }
+
+    void Add(Root* root);
+
+    size_t size() const { return current_; }
+
+    void ClearForTesting();
+
+   private:
+    size_t current_ = 0u;
+  };
+
+  constexpr PCScan() = default;
+
   void PerformScan(InvocationMode invocation_mode);

-  Root* root_;
-  QuarantineData quarantine_data_;
+  static PCScan instance_ PA_CONSTINIT;
+
+  Roots roots_{};
+  QuarantineData quarantine_data_{};
   std::atomic<bool> in_progress_{false};
 };
...
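The Roots container exists so the global PCScan can be constant-initialized: std::array offers a constexpr-friendly constructor, while a growable container would need a dynamic initializer and thus a guard. A standalone sketch of the same idea (the StaticVector name and shape are hypothetical, not the Chromium class):

```cpp
// Sketch: a fixed-capacity, constexpr-constructible registry whose global
// instances need no dynamic initialization.
#include <array>
#include <cassert>
#include <cstddef>

template <typename T, size_t Capacity>
class StaticVector final : private std::array<T, Capacity> {
  using Base = std::array<T, Capacity>;

 public:
  // Value-initialize the base explicitly; default (aggregate)
  // initialization would not be a constant expression otherwise.
  constexpr StaticVector() : Base{} {}

  void Add(T value) {
    assert(current_ < Capacity);
    (*this)[current_++] = value;
  }

  typename Base::iterator begin() { return Base::begin(); }
  typename Base::iterator end() { return Base::begin() + current_; }
  size_t size() const { return current_; }

 private:
  size_t current_ = 0;
};

// Constant-initialized global: no __cxa_guard, no static-init-order issues.
StaticVector<int*, 8> g_registry;

int main() {
  int x = 42;
  g_registry.Add(&x);
  assert(g_registry.size() == 1);
}
```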
@@ -2,6 +2,7 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

+#include "base/allocator/partition_allocator/partition_root.h"
 #if !defined(MEMORY_TOOL_REPLACES_ALLOCATOR)

 #include "base/allocator/partition_allocator/pcscan.h"
@@ -27,13 +28,14 @@ class PCScanTest : public testing::Test {
   }

   void RunPCScan() {
-    root().pcscan->PerformScan(PCScan<ThreadSafe>::InvocationMode::kBlocking);
+    PCScan<true>::Instance().PerformScan(
+        PCScan<ThreadSafe>::InvocationMode::kBlocking);
   }

   bool IsInQuarantine(void* ptr) const {
-    return QuarantineBitmapFromPointer(QuarantineBitmapType::kMutator,
-                                       root().pcscan->quarantine_data_.epoch(),
-                                       ptr)
+    return QuarantineBitmapFromPointer(
+               QuarantineBitmapType::kMutator,
+               PCScan<true>::Instance().quarantine_data_.epoch(), ptr)
         ->CheckBit(reinterpret_cast<uintptr_t>(ptr));
   }
@@ -162,10 +164,13 @@ template <typename SourceList, typename ValueList>
 void TestDanglingReference(PCScanTest& test,
                            SourceList* source,
                            ValueList* value) {
-  auto& root = test.root();
+  auto* source_root = ThreadSafePartitionRoot::FromPointerInNormalBucketPool(
+      reinterpret_cast<char*>(source));
+  auto* value_root = ThreadSafePartitionRoot::FromPointerInNormalBucketPool(
+      reinterpret_cast<char*>(value));
   {
     // Free |value| and leave the dangling reference in |source|.
-    ValueList::Destroy(root, value);
+    ValueList::Destroy(*source_root, value);
     // Check that |value| is in the quarantine now.
     EXPECT_TRUE(test.IsInQuarantine(value));
     // Run PCScan.
@@ -182,7 +187,8 @@ void TestDanglingReference(PCScanTest& test,
     // Check that the object is no longer in the quarantine.
     EXPECT_FALSE(test.IsInQuarantine(value));
     // Check that the object is in the freelist now.
-    EXPECT_TRUE(IsInFreeList(root.AdjustPointerForExtrasSubtract(value)));
+    EXPECT_TRUE(
+        IsInFreeList(value_root->AdjustPointerForExtrasSubtract(value)));
   }
 }
@@ -300,6 +306,26 @@ TEST_F(PCScanTest, DanglingInnerReference) {
   TestDanglingReference(*this, source, value);
 }

+TEST_F(PCScanTest, DanglingInterPartitionReference) {
+  using SourceList = List<64>;
+  using ValueList = SourceList;
+
+  ThreadSafePartitionRoot source_root(
+      {PartitionOptions::Alignment::kRegular,
+       PartitionOptions::ThreadCache::kDisabled,
+       PartitionOptions::PCScan::kForcedEnabledForTesting});
+  ThreadSafePartitionRoot value_root(
+      {PartitionOptions::Alignment::kRegular,
+       PartitionOptions::ThreadCache::kDisabled,
+       PartitionOptions::PCScan::kForcedEnabledForTesting});
+
+  auto* source = SourceList::Create(source_root);
+  auto* value = ValueList::Create(value_root);
+  source->next = value;
+
+  TestDanglingReference(*this, source, value);
+}
+
 }  // namespace internal
 }  // namespace base
...