Commit 1b6baead authored by Anton Bikineev, committed by Commit Bot

PartitionAlloc: Implement object bitmap

This is a prerequisite for quarantine bitmap in PCScan.

Bug: 11297512
Change-Id: I4d99e348140fe8b4826be46c4bc0a0e88afed4c8
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2421894
Commit-Queue: Anton Bikineev <bikineev@chromium.org>
Reviewed-by: Bartek Nowierski <bartekn@chromium.org>
Reviewed-by: Benoit L <lizeb@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Kentaro Hara <haraken@chromium.org>
Cr-Commit-Position: refs/heads/master@{#810237}
parent 87987f0e
......@@ -1762,6 +1762,7 @@ component("base") {
"allocator/partition_allocator/checked_ptr_support.h",
"allocator/partition_allocator/memory_reclaimer.cc",
"allocator/partition_allocator/memory_reclaimer.h",
"allocator/partition_allocator/object_bitmap.h",
"allocator/partition_allocator/oom.h",
"allocator/partition_allocator/oom_callback.cc",
"allocator/partition_allocator/oom_callback.h",
......@@ -3232,6 +3233,7 @@ test("base_unittests") {
"allocator/partition_allocator/address_pool_manager_unittest.cc",
"allocator/partition_allocator/address_space_randomization_unittest.cc",
"allocator/partition_allocator/memory_reclaimer_unittest.cc",
"allocator/partition_allocator/object_bitmap_unittest.cc",
"allocator/partition_allocator/page_allocator_unittest.cc",
"allocator/partition_allocator/partition_alloc_unittest.cc",
"allocator/partition_allocator/partition_lock_unittest.cc",
......
// Copyright (c) 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef BASE_ALLOCATOR_PARTITION_ALLOCATOR_OBJECT_BITMAP_H_
#define BASE_ALLOCATOR_PARTITION_ALLOCATOR_OBJECT_BITMAP_H_
#include <climits>
#include <cstddef>
#include <cstdint>
#include <algorithm>
#include <array>
#include <atomic>
#include <tuple>
#include "base/allocator/partition_allocator/partition_alloc_check.h"
#include "base/bits.h"
namespace base {
namespace internal {
// Bitmap which tracks beginning of allocated objects. The bitmap can be safely
// accessed from multiple threads, but this doesn't imply visibility on the data
// (i.e. no ordering guarantees, since relaxed atomics are used underneath). The
// bitmap itself must be created inside a page, size and alignment of which are
// specified as template arguments |PageSize| and |PageAlignment|.
// |ObjectAlignment| specifies the minimal alignment of objects that are
// allocated inside a page (serves as the granularity in the bitmap).
template <size_t PageSize, size_t PageAlignment, size_t ObjectAlignment>
class ObjectBitmap final {
  // Number of bits in one bitmap cell (a single uint8_t).
  static constexpr size_t kBitsPerCell = sizeof(uint8_t) * CHAR_BIT;
  // Number of cells needed to cover |PageSize| bytes at |ObjectAlignment|
  // granularity, rounded up.
  static constexpr size_t kBitmapSize =
      (PageSize + ((kBitsPerCell * ObjectAlignment) - 1)) /
      (kBitsPerCell * ObjectAlignment);
  // Masks extracting the offset within a page / the page base from an address.
  static constexpr size_t kPageOffsetMask = PageAlignment - 1;
  static constexpr size_t kPageBaseMask = ~kPageOffsetMask;

 public:
  static constexpr size_t kPageSize = PageSize;
  static constexpr size_t kPageAlignment = PageAlignment;
  static constexpr size_t kObjectAlignment = ObjectAlignment;
  // Maximum number of object-start bits the bitmap can track.
  static constexpr size_t kMaxEntries = kBitmapSize * kBitsPerCell;
  // Returned by FindPotentialObjectBeginning() when no object is found.
  static constexpr uintptr_t kSentinel = 0u;

  inline ObjectBitmap();

  // Finds the beginning of the closest object that starts at or before
  // |address|. It may return an object from another slot if the slot where
  // |address| lies in is unallocated. The caller is responsible for range
  // checking. Returns |kSentinel| if no object was found.
  inline uintptr_t FindPotentialObjectBeginning(uintptr_t address) const;

  // Sets/clears/queries the bit for the object starting at |address|.
  // |address| must lie within the page holding this bitmap and be
  // |ObjectAlignment|-aligned. Thread-safe (relaxed atomic RMW/load).
  inline void SetBit(uintptr_t address);
  inline void ClearBit(uintptr_t address);
  inline bool CheckBit(uintptr_t address) const;

  // Iterates all objects recorded in the bitmap.
  //
  // The callback is of type
  //   void(Address)
  // and is passed the object address as parameter.
  template <typename Callback>
  inline void Iterate(Callback) const;

  // Resets all bits. Uses plain (non-atomic) stores, so it must not race
  // with concurrent SetBit/ClearBit/CheckBit calls.
  inline void Clear();

 private:
  // Reinterprets a cell as an atomic; relies on std::atomic<uint8_t> being
  // layout-compatible with uint8_t.
  std::atomic<uint8_t>& AsAtomicCell(size_t cell_index) {
    return reinterpret_cast<std::atomic<uint8_t>&>(bitmap_[cell_index]);
  }
  const std::atomic<uint8_t>& AsAtomicCell(size_t cell_index) const {
    return reinterpret_cast<const std::atomic<uint8_t>&>(bitmap_[cell_index]);
  }

  // Relaxed atomic load of a single cell.
  inline uint8_t LoadCell(size_t cell_index) const;
  // Maps |address| to its (cell index, bit index within cell) coordinate.
  inline std::pair<size_t, size_t> ObjectIndexAndBit(uintptr_t) const;

  std::array<uint8_t, kBitmapSize> bitmap_;
};
// Out-of-class definition required to ODR-use the static constexpr member
// before C++17 inline variables. The type must match the in-class
// declaration: |kSentinel| is declared as uintptr_t, not size_t (the two are
// distinct types on some ABIs, which would make a size_t definition
// ill-formed).
template <size_t PageSize, size_t PageAlignment, size_t ObjectAlignment>
constexpr uintptr_t
    ObjectBitmap<PageSize, PageAlignment, ObjectAlignment>::kSentinel;
// The constructor can be omitted, but the Chromium's clang plugin wrongly
// warns that the type is not trivially constructible.
// NOTE(review): the defaulted constructor leaves |bitmap_| uninitialized;
// presumably users construct it in freshly committed (zero-filled) page
// memory or call Clear() first — confirm at call sites.
template <size_t PageSize, size_t PageAlignment, size_t ObjectAlignment>
inline ObjectBitmap<PageSize, PageAlignment, ObjectAlignment>::ObjectBitmap() =
    default;
template <size_t PageSize, size_t PageAlignment, size_t ObjectAlignment>
uintptr_t ObjectBitmap<PageSize, PageAlignment, ObjectAlignment>::
    FindPotentialObjectBeginning(uintptr_t address) const {
  // The bitmap lives inside the page, so masking |this| with the base mask
  // recovers the base of the page |address| must belong to.
  const uintptr_t page_base = reinterpret_cast<uintptr_t>(this) & kPageBaseMask;
  PA_DCHECK(page_base <= address && address < page_base + kPageSize);
  size_t cell_index, bit;
  std::tie(cell_index, bit) = ObjectIndexAndBit(address);
  // Find the first set bit at or before |bit|: mask away bits above |bit| in
  // the starting cell, then walk toward cell 0 until a non-zero cell appears.
  uint8_t byte = LoadCell(cell_index) & ((1 << (bit + 1)) - 1);
  while (!byte && cell_index) {
    // Loop condition already guarantees this; kept as a debug-build safeguard
    // against the decrement below underflowing.
    PA_DCHECK(0u < cell_index);
    byte = LoadCell(--cell_index);
  }
  if (!byte) {
    // No object was found.
    return kSentinel;
  }
  // The most-significant set bit of |byte| marks the closest object start at
  // or before |address|; convert its bit position back to a page offset.
  const int leading_zeroes = base::bits::CountLeadingZeroBits(byte);
  const size_t object_number =
      (cell_index * kBitsPerCell) + (kBitsPerCell - 1) - leading_zeroes;
  const size_t offset_in_page = object_number * kObjectAlignment;
  return offset_in_page + page_base;
}
template <size_t PageSize, size_t PageAlignment, size_t ObjectAlignment>
void ObjectBitmap<PageSize, PageAlignment, ObjectAlignment>::SetBit(
    uintptr_t address) {
  // Atomically record that an object begins at |address|.
  const std::pair<size_t, size_t> index_and_bit = ObjectIndexAndBit(address);
  const uint8_t mask = 1 << index_and_bit.second;
  AsAtomicCell(index_and_bit.first).fetch_or(mask, std::memory_order_relaxed);
}
template <size_t PageSize, size_t PageAlignment, size_t ObjectAlignment>
void ObjectBitmap<PageSize, PageAlignment, ObjectAlignment>::ClearBit(
    uintptr_t address) {
  // Atomically drop the record for the object beginning at |address|.
  const std::pair<size_t, size_t> index_and_bit = ObjectIndexAndBit(address);
  const uint8_t mask = 1 << index_and_bit.second;
  AsAtomicCell(index_and_bit.first)
      .fetch_and(static_cast<uint8_t>(~mask), std::memory_order_relaxed);
}
template <size_t PageSize, size_t PageAlignment, size_t ObjectAlignment>
bool ObjectBitmap<PageSize, PageAlignment, ObjectAlignment>::CheckBit(
    uintptr_t address) const {
  // True iff an object is currently recorded as starting at |address|.
  const std::pair<size_t, size_t> index_and_bit = ObjectIndexAndBit(address);
  const uint8_t cell_value = LoadCell(index_and_bit.first);
  return (cell_value >> index_and_bit.second) & 1;
}
template <size_t PageSize, size_t PageAlignment, size_t ObjectAlignment>
uint8_t ObjectBitmap<PageSize, PageAlignment, ObjectAlignment>::LoadCell(
    size_t cell_index) const {
  // Relaxed load: callers need only the cell value, no ordering guarantees.
  const std::atomic<uint8_t>& cell = AsAtomicCell(cell_index);
  return cell.load(std::memory_order_relaxed);
}
template <size_t PageSize, size_t PageAlignment, size_t ObjectAlignment>
std::pair<size_t, size_t>
ObjectBitmap<PageSize, PageAlignment, ObjectAlignment>::ObjectIndexAndBit(
    uintptr_t address) const {
  // Translate |address| into its object slot number within the page, then
  // split the slot number into a (cell, bit-within-cell) coordinate.
  const uintptr_t page_offset = address & kPageOffsetMask;
  PA_DCHECK(!(page_offset % kObjectAlignment));
  const size_t slot_number = page_offset / kObjectAlignment;
  const size_t cell = slot_number / kBitsPerCell;
  PA_DCHECK(kBitmapSize > cell);
  return {cell, slot_number % kBitsPerCell};
}
template <size_t PageSize, size_t PageAlignment, size_t ObjectAlignment>
template <typename Callback>
inline void ObjectBitmap<PageSize, PageAlignment, ObjectAlignment>::Iterate(
    Callback callback) const {
  // The bitmap (|this|) is allocated inside the page with |kPageAlignment|,
  // so masking its own address yields the page base.
  const uintptr_t page_base = reinterpret_cast<uintptr_t>(this) & kPageBaseMask;
  for (size_t cell = 0; cell < kBitmapSize; ++cell) {
    // Peel off set bits from least- to most-significant; |bits & (bits - 1)|
    // clears exactly the lowest set bit, advancing the iteration.
    for (uint8_t bits = LoadCell(cell); bits; bits &= bits - 1) {
      const int bit = base::bits::CountTrailingZeroBits(bits);
      const size_t slot_number = cell * kBitsPerCell + static_cast<size_t>(bit);
      callback(page_base + slot_number * kObjectAlignment);
    }
  }
}
template <size_t PageSize, size_t PageAlignment, size_t ObjectAlignment>
void ObjectBitmap<PageSize, PageAlignment, ObjectAlignment>::Clear() {
  // Wholesale non-atomic reset; must not race with concurrent bit updates.
  bitmap_.fill(0);
}
} // namespace internal
} // namespace base
#endif // BASE_ALLOCATOR_PARTITION_ALLOCATOR_OBJECT_BITMAP_H_
// Copyright (c) 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "base/allocator/partition_allocator/object_bitmap.h"
#include "base/allocator/partition_allocator/page_allocator.h"
#include "base/allocator/partition_allocator/partition_alloc_constants.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace base {
namespace internal {
namespace {
using TestBitmap = ObjectBitmap<kSuperPageSize, kSuperPageSize, kAlignment>;
// RAII helper that reserves one super page and placement-news a TestBitmap at
// its base, mimicking how the bitmap is expected to live inside a real page.
// The mapping is released in the destructor.
class PageWithBitmap final {
 public:
  PageWithBitmap()
      : base_(base::AllocPages(nullptr,
                               kSuperPageSize,
                               kSuperPageAlignment,
                               PageReadWrite,
                               PageTag::kPartitionAlloc)),
        bitmap_(new (base_) TestBitmap) {}

  PageWithBitmap(const PageWithBitmap&) = delete;
  PageWithBitmap& operator=(const PageWithBitmap&) = delete;

  ~PageWithBitmap() { base::FreePages(base_, kSuperPageSize); }

  TestBitmap& bitmap() const { return *bitmap_; }

  void* base() const { return base_; }
  size_t size() const { return kSuperPageSize; }

 private:
  // Members were public; all access goes through the accessors above, so
  // encapsulate them.
  // Owned super-page mapping.
  void* base_;
  // Bitmap constructed in place at the beginning of |base_|.
  TestBitmap* bitmap_;
};
// Fixture owning one PageWithBitmap. Object positions are indices relative to
// the first slot after the bitmap, which occupies the start of the page.
class ObjectBitmapTest : public ::testing::Test {
 protected:
  TestBitmap& bitmap() const { return page.bitmap(); }

  // Set/clear/query the bit for the object at |object_position|.
  void SetBitForObject(size_t object_position) {
    page.bitmap().SetBit(ObjectAddress(object_position));
  }
  void ClearBitForObject(size_t object_position) {
    page.bitmap().ClearBit(ObjectAddress(object_position));
  }
  bool CheckBitForObject(size_t object_position) const {
    return page.bitmap().CheckBit(ObjectAddress(object_position));
  }

  // True iff iterating the bitmap visits no objects.
  bool IsEmpty() const {
    size_t count = 0;
    bitmap().Iterate([&count](uintptr_t) { count++; });
    return count == 0;
  }

  // Address of the |pos|-th object slot, skipping past the in-page bitmap.
  uintptr_t ObjectAddress(size_t pos) const {
    return reinterpret_cast<uintptr_t>(page.base()) + sizeof(TestBitmap) +
           pos * kAlignment;
  }

  // Highest valid object position: total entries minus the slots consumed by
  // the bitmap itself, minus one to convert a count to an index.
  uintptr_t LastIndex() const {
    return TestBitmap::kMaxEntries - (sizeof(TestBitmap) / kAlignment) - 1;
  }

 private:
  PageWithBitmap page;
};
} // namespace
TEST_F(ObjectBitmapTest, MoreThanZeroEntriesPossible) {
  // The bitmap must be able to track at least one object.
  const size_t num_entries = TestBitmap::kMaxEntries;
  EXPECT_LT(0u, num_entries);
}
// A freshly constructed bitmap reports no objects.
// NOTE(review): relies on newly allocated pages being zero-filled — confirm
// AllocPages' guarantee.
TEST_F(ObjectBitmapTest, InitialEmpty) {
  EXPECT_TRUE(IsEmpty());
}
// Setting any bit makes iteration visit at least one object.
TEST_F(ObjectBitmapTest, SetBitImpliesNonEmpty) {
  SetBitForObject(0);
  EXPECT_FALSE(IsEmpty());
}
// A set bit is observable through CheckBit.
TEST_F(ObjectBitmapTest, SetBitCheckBit) {
  SetBitForObject(0);
  EXPECT_TRUE(CheckBitForObject(0));
}
// Clearing a previously set bit makes CheckBit report it as unset.
TEST_F(ObjectBitmapTest, SetBitClearbitCheckBit) {
  SetBitForObject(0);
  ClearBitForObject(0);
  EXPECT_FALSE(CheckBitForObject(0));
}
// Set+clear of the last valid position leaves the bitmap fully empty.
TEST_F(ObjectBitmapTest, SetBitClearBitImpliesEmpty) {
  SetBitForObject(LastIndex());
  ClearBitForObject(LastIndex());
  EXPECT_TRUE(IsEmpty());
}
TEST_F(ObjectBitmapTest, AdjacentObjectsAtBegin) {
  // Mark the first two slots and verify iteration reports exactly those two,
  // in ascending address order.
  SetBitForObject(0);
  SetBitForObject(1);
  EXPECT_FALSE(CheckBitForObject(3));
  size_t num_visited = 0;
  bitmap().Iterate([&num_visited, this](uintptr_t address) {
    switch (num_visited) {
      case 0:
        EXPECT_EQ(ObjectAddress(0), address);
        break;
      case 1:
        EXPECT_EQ(ObjectAddress(1), address);
        break;
      default:
        break;
    }
    ++num_visited;
  });
  EXPECT_EQ(2u, num_visited);
}
TEST_F(ObjectBitmapTest, AdjacentObjectsAtEnd) {
  // Mark the last two valid slots and verify iteration reports exactly those
  // two, in ascending address order. Static so the lambda below can refer to
  // it without capturing.
  static const size_t last_entry_index = LastIndex();
  SetBitForObject(last_entry_index - 1);
  SetBitForObject(last_entry_index);
  EXPECT_FALSE(CheckBitForObject(last_entry_index - 2));
  size_t num_visited = 0;
  bitmap().Iterate([&num_visited, this](uintptr_t address) {
    switch (num_visited) {
      case 0:
        EXPECT_EQ(ObjectAddress(last_entry_index - 1), address);
        break;
      case 1:
        EXPECT_EQ(ObjectAddress(last_entry_index), address);
        break;
      default:
        break;
    }
    ++num_visited;
  });
  EXPECT_EQ(2u, num_visited);
}
// With no object recorded at or before the queried address, the search
// returns kSentinel.
TEST_F(ObjectBitmapTest, FindElementSentinel) {
  EXPECT_EQ(TestBitmap::kSentinel,
            bitmap().FindPotentialObjectBeginning(ObjectAddress(654)));
}
// Querying the exact recorded address returns that same address.
TEST_F(ObjectBitmapTest, FindElementExact) {
  SetBitForObject(654);
  EXPECT_EQ(ObjectAddress(654),
            bitmap().FindPotentialObjectBeginning(ObjectAddress(654)));
}
// An address past a recorded object snaps back to the closest recorded
// object start at or before it.
TEST_F(ObjectBitmapTest, FindElementApproximate) {
  static const size_t kInternalDelta = 37;
  SetBitForObject(654);
  EXPECT_EQ(ObjectAddress(654), bitmap().FindPotentialObjectBeginning(
                                    ObjectAddress(654 + kInternalDelta)));
}
// Searching from the very last position finds an object recorded at the
// beginning, exercising the backwards scan across the entire bitmap.
TEST_F(ObjectBitmapTest, FindElementIteratingWholeBitmap) {
  SetBitForObject(0);
  const uintptr_t hint_index = LastIndex();
  EXPECT_EQ(ObjectAddress(0),
            bitmap().FindPotentialObjectBeginning(ObjectAddress(hint_index)));
}
} // namespace internal
} // namespace base
......@@ -175,7 +175,8 @@ MaxSystemPagesPerSlotSpan() {
static const size_t kSuperPageShift = 21; // 2 MiB
static const size_t kSuperPageSize = 1 << kSuperPageShift;
static const size_t kSuperPageOffsetMask = kSuperPageSize - 1;
static const size_t kSuperPageAlignment = kSuperPageSize;
static const size_t kSuperPageOffsetMask = kSuperPageAlignment - 1;
static const size_t kSuperPageBaseMask = ~kSuperPageOffsetMask;
PAGE_ALLOCATOR_CONSTANTS_DECLARE_CONSTEXPR ALWAYS_INLINE size_t
NumPartitionPagesPerSuperPage() {
......
......@@ -57,8 +57,8 @@ PartitionDirectMap(PartitionRoot<thread_safe>* root, int flags, size_t raw_size)
NOTREACHED();
#endif // defined(PA_HAS_64_BITS_POINTERS)
} else {
ptr = reinterpret_cast<char*>(AllocPages(nullptr, map_size, kSuperPageSize,
PageReadWrite,
ptr = reinterpret_cast<char*>(AllocPages(nullptr, map_size,
kSuperPageAlignment, PageReadWrite,
PageTag::kPartitionAlloc));
}
if (UNLIKELY(!ptr))
......@@ -274,7 +274,7 @@ ALWAYS_INLINE void* PartitionBucket<thread_safe>::AllocNewSlotSpan(
#endif
} else {
super_page = reinterpret_cast<char*>(
AllocPages(requested_address, kSuperPageSize, kSuperPageSize,
AllocPages(requested_address, kSuperPageSize, kSuperPageAlignment,
PageReadWrite, PageTag::kPartitionAlloc));
}
if (UNLIKELY(!super_page))
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment