Commit 64c2f7f4 authored by Michael Lippautz, committed by Commit Bot

gc: Add rest of low-level backend

Migrate:
- RegionTree->PageMemoryRegionTree: Keeps track of all PageMemoryRegion
  objects in a binary search tree for lookup of arbitrary addresses
  (see the lookup sketch after this list).
- PagePool->NormalPageMemoryPool: All normal pages are kept in a page
  pool for fast retrieval. Currently, a PageMemoryRegion used for
  normal pages is never returned to the operating system; its pages
  are kept in the NormalPageMemoryPool until shutdown. This can be
  improved in the future.
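A minimal sketch of the tree lookup idea, assuming a sorted map keyed by
region base address (the production code uses base::flat_map over
PageMemoryRegion*; Region and LookupRegion below are hypothetical names):

#include <cstddef>
#include <cstdint>
#include <iterator>
#include <map>

using Address = std::uint8_t*;

struct Region {
  Address base;
  std::size_t size;
  Address end() const { return base + size; }
};

// upper_bound() yields the first region whose base lies strictly above
// |address|; its predecessor is the only region that can contain |address|.
Region* LookupRegion(const std::map<Address, Region*>& regions,
                     Address address) {
  auto it = regions.upper_bound(address);
  if (it == regions.begin()) return nullptr;  // No region starts at or below.
  Region* candidate = std::prev(it)->second;
  return address < candidate->end() ? candidate : nullptr;
}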

Introduce:
- PageBackend: Entry point for higher-level abstractions. Provides
  bottlenecks for allocating and freeing PageMemory objects (normal
  and large) as well as for looking up the writeable base of arbitrary
  addresses (see the usage sketch after this list).
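A usage sketch stitched together from the PageBackend API and the unit
tests in this change; the gc::internal nesting is assumed from the diff
context, and UsePageBackend is only an illustrative harness:

#include "base/logging.h"
#include "components/gc/core/page_memory.h"
#include "components/gc/test/base_allocator.h"

void UsePageBackend() {
  gc::test::BaseAllocator allocator;
  gc::internal::PageBackend backend(&allocator);
  // Normal pages are served from the NormalPageMemoryPool when possible.
  auto* base = backend.AllocateNormalPageMemory();
  // Any address within the writeable region maps back to its base.
  CHECK_EQ(base, backend.Lookup(base));
  // Freeing re-protects the page and returns it to the pool; the
  // underlying reservation stays with the backend until shutdown.
  backend.FreeNormalPageMemory(base);
}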

Change-Id: Ia46daef501a8bd4785885d68f3b420157bf153b6
Bug: 1056170
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2083474
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Anton Bikineev <bikineev@chromium.org>
Cr-Commit-Position: refs/heads/master@{#747681}
parent 76185863
components/gc/core/page_memory.cc
@@ -5,7 +5,6 @@
#include "components/gc/core/page_memory.h"
#include "base/bits.h"
#include "base/memory/ptr_util.h"
#include "components/gc/core/globals.h"
namespace gc {
@@ -32,14 +31,21 @@ void Unprotect(PageAllocator* allocator, const PageMemory& page_memory) {
}
}
MemoryRegion GuardMemoryRegion(const MemoryRegion overall_page_region) {
// Always add guard pages, independent of whether they are actually
// protected or not.
MemoryRegion writeable_page_region(
overall_page_region.base() + kGuardPageSize,
overall_page_region.size() - 2 * kGuardPageSize);
DCHECK(overall_page_region.Contains(writeable_page_region));
return writeable_page_region;
}
void Protect(PageAllocator* allocator, const PageMemory& page_memory) {
if (SupportsCommittingGuardPages(allocator)) {
// Swap the same region, providing the OS with a chance for fast lookup and
// change.
CHECK(allocator->SetPermissions(page_memory.writeable_region().base(),
page_memory.writeable_region().size(),
PageAllocator::Permission::kNoAccess));
} else {
// See Unprotect().
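// Guard pages cannot be committed/decommitted separately in this
// configuration, so the permission flip must cover the entire
// reservation, which therefore has to be commit-page aligned.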
CHECK_EQ(0u,
page_memory.overall_region().size() % allocator->CommitPageSize());
CHECK(allocator->SetPermissions(page_memory.overall_region().base(),
page_memory.overall_region().size(),
PageAllocator::Permission::kNoAccess));
}
}
MemoryRegion ReserveMemoryRegion(PageAllocator* allocator,
@@ -53,6 +59,11 @@ MemoryRegion ReserveMemoryRegion(PageAllocator* allocator,
return reserved_region;
}
void FreeMemoryRegion(PageAllocator* allocator,
const MemoryRegion& reserved_region) {
allocator->FreePages(reserved_region.base(), reserved_region.size());
}
} // namespace
PageMemoryRegion::PageMemoryRegion(PageAllocator* allocator,
@@ -63,9 +74,12 @@ PageMemoryRegion::PageMemoryRegion(PageAllocator* allocator,
is_large_(is_large) {}
PageMemoryRegion::~PageMemoryRegion() {
FreeMemoryRegion(allocator_, reserved_region());
}
// static
constexpr size_t NormalPageMemoryRegion::kNumPageRegions;
NormalPageMemoryRegion::NormalPageMemoryRegion(PageAllocator* allocator)
: PageMemoryRegion(
allocator,
@@ -73,21 +87,30 @@ NormalPageMemoryRegion::NormalPageMemoryRegion(PageAllocator* allocator)
base::bits::Align(kPageSize * kNumPageRegions,
allocator->AllocatePageSize())),
false) {
#ifdef DEBUG
for (size_t i = 0; i < kNumPageRegions; ++i) {
DCHECK_EQ(false, page_memories_in_use_[i]);
}
#endif  // DEBUG
}
NormalPageMemoryRegion::~NormalPageMemoryRegion() = default;
void NormalPageMemoryRegion::Allocate(Address writeable_base) {
const size_t index = GetIndex(writeable_base);
ChangeUsed(index, true);
Unprotect(allocator_, GetPageMemory(index));
}
void NormalPageMemoryRegion::Free(Address writeable_base) {
const size_t index = GetIndex(writeable_base);
ChangeUsed(index, false);
Protect(allocator_, GetPageMemory(index));
}
void NormalPageMemoryRegion::UnprotectForTesting() {
for (size_t i = 0; i < kNumPageRegions; ++i) {
Unprotect(allocator_, GetPageMemory(i));
}
}
@@ -98,16 +121,88 @@ LargePageMemoryRegion::LargePageMemoryRegion(PageAllocator* allocator,
ReserveMemoryRegion(allocator,
base::bits::Align(length + 2 * kGuardPageSize,
allocator->AllocatePageSize())),
true) {}
LargePageMemoryRegion::~LargePageMemoryRegion() = default;
void LargePageMemoryRegion::UnprotectForTesting() {
Unprotect(allocator_, GetPageMemory());
}
PageMemoryRegionTree::PageMemoryRegionTree() = default;
PageMemoryRegionTree::~PageMemoryRegionTree() = default;
void PageMemoryRegionTree::Add(PageMemoryRegion* region) {
DCHECK(region);
auto result = set_.emplace(region->reserved_region().base(), region);
DCHECK(result.second);
}
void PageMemoryRegionTree::Remove(PageMemoryRegion* region) {
DCHECK(region);
auto size = set_.erase(region->reserved_region().base());
DCHECK_EQ(1u, size);
}
NormalPageMemoryPool::NormalPageMemoryPool() = default;
NormalPageMemoryPool::~NormalPageMemoryPool() = default;
void NormalPageMemoryPool::Add(Address writeable_base) {
pool_.push_back(writeable_base);
}
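// Take() hands out the most recently added page first (LIFO), which
// tends to reuse memory that is still cache- and TLB-warm.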
Address NormalPageMemoryPool::Take() {
if (pool_.empty())
return nullptr;
Address writeable_base = pool_.back();
pool_.pop_back();
return writeable_base;
}
PageBackend::PageBackend(PageAllocator* allocator) : allocator_(allocator) {}
PageBackend::~PageBackend() = default;
Address PageBackend::AllocateNormalPageMemory() {
Address writeable_base = page_pool_.Take();
if (!writeable_base) {
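// Pool is empty: reserve a fresh NormalPageMemoryRegion, hand all of its
// page slots to the pool, and retry. The retry terminates because the
// pool is guaranteed to be non-empty afterwards.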
auto pmr = std::make_unique<NormalPageMemoryRegion>(allocator_);
for (size_t i = 0; i < NormalPageMemoryRegion::kNumPageRegions; ++i) {
page_pool_.Add(pmr->GetPageMemory(i).writeable_region().base());
}
page_memory_region_tree_.Add(pmr.get());
normal_page_memory_regions_.push_back(std::move(pmr));
return AllocateNormalPageMemory();
}
static_cast<NormalPageMemoryRegion*>(
page_memory_region_tree_.Lookup(writeable_base))
->Allocate(writeable_base);
return writeable_base;
}
void PageBackend::FreeNormalPageMemory(Address writeable_base) {
static_cast<NormalPageMemoryRegion*>(
page_memory_region_tree_.Lookup(writeable_base))
->Free(writeable_base);
page_pool_.Add(writeable_base);
}
Address PageBackend::AllocateLargePageMemory(size_t size) {
auto pmr = std::make_unique<LargePageMemoryRegion>(allocator_, size);
const PageMemory pm = pmr->GetPageMemory();
Unprotect(allocator_, pm);
page_memory_region_tree_.Add(pmr.get());
large_page_memory_regions_.insert({pmr.get(), std::move(pmr)});
return pm.writeable_region().base();
}
void PageBackend::FreeLargePageMemory(Address writeable_base) {
PageMemoryRegion* pmr = page_memory_region_tree_.Lookup(writeable_base);
page_memory_region_tree_.Remove(pmr);
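// Erasing the owning entry destroys the PageMemoryRegion, which in turn
// frees the underlying reservation (see ~PageMemoryRegion()).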
auto size = large_page_memory_regions_.erase(pmr);
DCHECK_EQ(1u, size);
}
} // namespace internal
components/gc/core/page_memory.h
@@ -7,7 +7,9 @@
#include <array>
#include <memory>
#include <unordered_map>
#include "base/containers/flat_map.h"
#include "base/logging.h"
#include "components/gc/core/gc_export.h"
#include "components/gc/core/globals.h"
@@ -51,14 +53,13 @@ class GC_EXPORT MemoryRegion final {
// PageMemory provides the backing of a single normal or large page.
class GC_EXPORT PageMemory final {
public:
PageMemory(MemoryRegion overall, MemoryRegion writeable)
: overall_(overall), writable_(writeable) {
DCHECK(overall.Contains(writeable));
}
const MemoryRegion writeable_region() const { return writable_; }
const MemoryRegion overall_region() const { return overall_; }
private:
MemoryRegion overall_;
@@ -69,11 +70,15 @@ class GC_EXPORT PageMemoryRegion {
public:
virtual ~PageMemoryRegion();
const MemoryRegion reserved_region() const { return reserved_region_; }
bool is_large() const { return is_large_; }
// Lookup writeable base for an |address| that's contained in
// PageMemoryRegion. Filters out addresses that are contained in non-writeable
// regions (e.g. guard pages).
inline Address Lookup(Address address) const;
// Disallow copy/move.
PageMemoryRegion(const PageMemoryRegion&) = delete;
PageMemoryRegion& operator=(const PageMemoryRegion&) = delete;
@@ -95,13 +100,40 @@ class GC_EXPORT NormalPageMemoryRegion final : public PageMemoryRegion {
explicit NormalPageMemoryRegion(PageAllocator*);
~NormalPageMemoryRegion() override;
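// Each of the kNumPageRegions page slots spans kPageSize bytes of the
// reservation; the writeable part excludes a guard page on each side.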
const PageMemory GetPageMemory(size_t index) const {
DCHECK_LT(index, kNumPageRegions);
return PageMemory(
MemoryRegion(reserved_region().base() + kPageSize * index, kPageSize),
MemoryRegion(
reserved_region().base() + kPageSize * index + kGuardPageSize,
kPageSize - 2 * kGuardPageSize));
}
// Allocates a normal page at |writeable_base| address. Changes page
// protection.
void Allocate(Address writeable_base);
// Frees a normal page at |writeable_base| address. Changes page
// protection.
void Free(Address);
inline Address Lookup(Address) const;
void UnprotectForTesting() final;
private:
void ChangeUsed(size_t index, bool value) {
DCHECK_LT(index, kNumPageRegions);
DCHECK_EQ(value, !page_memories_in_use_[index]);
page_memories_in_use_[index] = value;
}
size_t GetIndex(Address address) const {
return static_cast<size_t>(address - reserved_region().base()) >>
kPageSizeLog2;
}
std::array<bool, kNumPageRegions> page_memories_in_use_ = {};
};
// LargePageMemoryRegion serves a single large PageMemory object.
@@ -110,14 +142,137 @@ class GC_EXPORT LargePageMemoryRegion final : public PageMemoryRegion {
LargePageMemoryRegion(PageAllocator*, size_t);
~LargePageMemoryRegion() override;
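// The whole reservation backs a single page: a writeable payload
// surrounded by two guard pages.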
const PageMemory GetPageMemory() const {
return PageMemory(
MemoryRegion(reserved_region().base(), reserved_region().size()),
MemoryRegion(reserved_region().base() + kGuardPageSize,
reserved_region().size() - 2 * kGuardPageSize));
}
inline Address Lookup(Address) const;
void UnprotectForTesting() final;
};
// A PageMemoryRegionTree is a binary search tree of PageMemoryRegions sorted
// by reserved base addresses.
//
// The tree does not keep its elements alive but merely provides indexing
// capabilities.
class GC_EXPORT PageMemoryRegionTree final {
public:
PageMemoryRegionTree();
~PageMemoryRegionTree();
void Add(PageMemoryRegion*);
void Remove(PageMemoryRegion*);
inline PageMemoryRegion* Lookup(Address) const;
private:
// Using a flat_map provides contiguous storage, which improves locality
// and minimizes cache misses during the balanced binary search.
base::flat_map<Address, PageMemoryRegion*> set_;
};
// A pool of PageMemory objects represented by the writeable base addresses.
//
// The pool does not keep its elements alive but merely provides pooling
// capabilities.
class GC_EXPORT NormalPageMemoryPool final {
public:
NormalPageMemoryPool();
~NormalPageMemoryPool();
void Add(Address);
Address Take();
private:
std::vector<Address> pool_;
};
// A backend that is used for allocating and freeing normal and large pages.
//
// Internally maintains a set of PageMemoryRegions. The backend keeps its used
// regions alive.
class GC_EXPORT PageBackend final {
public:
explicit PageBackend(PageAllocator*);
~PageBackend();
// Allocates a normal page from the backend.
//
// Returns the writeable base of the region.
Address AllocateNormalPageMemory();
// Returns normal page memory back to the backend. Expects the
// |writeable_base| returned by |AllocateNormalPageMemory()|.
void FreeNormalPageMemory(Address writeable_base);
// Allocates a large page from the backend.
//
// Returns the writeable base of the region.
Address AllocateLargePageMemory(size_t size);
// Returns large page memory back to the backend. Expects the |writeable_base|
// returned by |AllocateLargePageMemory()|.
void FreeLargePageMemory(Address writeable_base);
// Returns the writeable base if |address| is contained in a valid page
// memory.
inline Address Lookup(Address) const;
// Disallow copy/move.
PageBackend(const PageBackend&) = delete;
PageBackend& operator=(const PageBackend&) = delete;
private:
PageAllocator* allocator_;
NormalPageMemoryPool page_pool_;
PageMemoryRegionTree page_memory_region_tree_;
std::vector<std::unique_ptr<PageMemoryRegion>> normal_page_memory_regions_;
std::unordered_map<PageMemoryRegion*, std::unique_ptr<PageMemoryRegion>>
large_page_memory_regions_;
};
Address NormalPageMemoryRegion::Lookup(Address address) const {
size_t index = GetIndex(address);
if (!page_memories_in_use_[index])
return nullptr;
const MemoryRegion writeable_region = GetPageMemory(index).writeable_region();
return writeable_region.Contains(address) ? writeable_region.base() : nullptr;
}
Address LargePageMemoryRegion::Lookup(Address address) const {
const MemoryRegion writeable_region = GetPageMemory().writeable_region();
return writeable_region.Contains(address) ? writeable_region.base() : nullptr;
}
Address PageMemoryRegion::Lookup(Address address) const {
DCHECK(reserved_region().Contains(address));
return is_large()
? static_cast<const LargePageMemoryRegion*>(this)->Lookup(address)
: static_cast<const NormalPageMemoryRegion*>(this)->Lookup(
address);
}
PageMemoryRegion* PageMemoryRegionTree::Lookup(Address address) const {
auto it = set_.upper_bound(address);
// This check also covers set_.size() > 0, since for empty vectors it is
// guaranteed that begin() == end().
if (it == set_.begin())
return nullptr;
auto* result = std::next(it, -1)->second;
if (address < result->reserved_region().end())
return result;
return nullptr;
}
Address PageBackend::Lookup(Address address) const {
PageMemoryRegion* pmr = page_memory_region_tree_.Lookup(address);
return pmr ? pmr->Lookup(address) : nullptr;
}
} // namespace internal
} // namespace gc
components/gc/core/page_memory_unittest.cc
@@ -2,9 +2,10 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "components/gc/core/page_memory.h"
#include <iterator>
#include "build/build_config.h"
#include "components/gc/core/page_memory.h"
#include "components/gc/test/base_allocator.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -15,11 +16,10 @@ namespace test {
TEST(PageMemoryRegionTest, NormalPageMemoryRegion) {
gc::test::BaseAllocator allocator;
auto pmr = std::make_unique<NormalPageMemoryRegion>(&allocator);
pmr->UnprotectForTesting();
MemoryRegion prev_overall;
for (size_t i = 0; i < NormalPageMemoryRegion::kNumPageRegions; ++i) {
const PageMemory pm = pmr->GetPageMemory(i);
// Previous PageMemory aligns with the current one.
if (prev_overall.base()) {
EXPECT_EQ(prev_overall.end(), pm.overall_region().base());
@@ -28,7 +28,8 @@ TEST(PageMemoryRegionTest, NormalPageMemoryRegion) {
MemoryRegion(pm.overall_region().base(), pm.overall_region().size());
// Writeable region is contained in overall region.
EXPECT_TRUE(pm.overall_region().Contains(pm.writeable_region()));
EXPECT_EQ(0u, pm.writeable_region().base()[0]);
EXPECT_EQ(0u, pm.writeable_region().end()[-1]);
// Front guard page.
EXPECT_EQ(pm.writeable_region().base(),
pm.overall_region().base() + kGuardPageSize);
@@ -36,18 +37,16 @@ TEST(PageMemoryRegionTest, LargePageMemoryRegion) {
EXPECT_EQ(pm.overall_region().end(),
pm.writeable_region().end() + kGuardPageSize);
}
}
TEST(PageMemoryRegionTest, LargePageMemoryRegion) {
gc::test::BaseAllocator allocator;
auto pmr = std::make_unique<LargePageMemoryRegion>(&allocator, 1024);
pmr->UnprotectForTesting();
// Only one PageMemory.
const PageMemory pm = pmr->GetPageMemory();
EXPECT_LE(1024u, pm.writeable_region().size());
EXPECT_EQ(0u, pm.writeable_region().base()[0]);
EXPECT_EQ(0u, pm.writeable_region().end()[-1]);
}
TEST(PageMemoryRegionTest, PlatformUsesGuardPages) {
@@ -81,7 +80,7 @@ TEST(PageMemoryRegionDeathTest, FrontGuardPageAccessCrashes) {
gc::test::BaseAllocator allocator;
auto pmr = std::make_unique<NormalPageMemoryRegion>(&allocator);
if (SupportsCommittingGuardPages(&allocator)) {
EXPECT_DEATH(access(pmr->GetPageMemory(0).overall_region().base()[0]), "");
}
}
@@ -89,8 +88,131 @@ TEST(PageMemoryRegionDeathTest, BackGuardPageAccessCrashes) {
gc::test::BaseAllocator allocator;
auto pmr = std::make_unique<NormalPageMemoryRegion>(&allocator);
if (SupportsCommittingGuardPages(&allocator)) {
EXPECT_DEATH(access(pmr->GetPageMemory(0).writeable_region().end()[0]), "");
}
}
TEST(PageMemoryRegionTreeTest, AddNormalLookupRemove) {
gc::test::BaseAllocator allocator;
auto pmr = std::make_unique<NormalPageMemoryRegion>(&allocator);
PageMemoryRegionTree tree;
tree.Add(pmr.get());
ASSERT_EQ(pmr.get(), tree.Lookup(pmr->reserved_region().base()));
ASSERT_EQ(pmr.get(), tree.Lookup(pmr->reserved_region().end() - 1));
ASSERT_EQ(nullptr, tree.Lookup(pmr->reserved_region().base() - 1));
ASSERT_EQ(nullptr, tree.Lookup(pmr->reserved_region().end()));
tree.Remove(pmr.get());
ASSERT_EQ(nullptr, tree.Lookup(pmr->reserved_region().base()));
ASSERT_EQ(nullptr, tree.Lookup(pmr->reserved_region().end() - 1));
}
TEST(PageMemoryRegionTreeTest, AddLargeLookupRemove) {
gc::test::BaseAllocator allocator;
constexpr size_t kLargeSize = 5012;
auto pmr = std::make_unique<LargePageMemoryRegion>(&allocator, kLargeSize);
PageMemoryRegionTree tree;
tree.Add(pmr.get());
ASSERT_EQ(pmr.get(), tree.Lookup(pmr->reserved_region().base()));
ASSERT_EQ(pmr.get(), tree.Lookup(pmr->reserved_region().end() - 1));
ASSERT_EQ(nullptr, tree.Lookup(pmr->reserved_region().base() - 1));
ASSERT_EQ(nullptr, tree.Lookup(pmr->reserved_region().end()));
tree.Remove(pmr.get());
ASSERT_EQ(nullptr, tree.Lookup(pmr->reserved_region().base()));
ASSERT_EQ(nullptr, tree.Lookup(pmr->reserved_region().end() - 1));
}
TEST(PageMemoryRegionTreeTest, AddLookupRemoveMultiple) {
gc::test::BaseAllocator allocator;
auto pmr1 = std::make_unique<NormalPageMemoryRegion>(&allocator);
constexpr size_t kLargeSize = 3127;
auto pmr2 = std::make_unique<LargePageMemoryRegion>(&allocator, kLargeSize);
PageMemoryRegionTree tree;
tree.Add(pmr1.get());
tree.Add(pmr2.get());
ASSERT_EQ(pmr1.get(), tree.Lookup(pmr1->reserved_region().base()));
ASSERT_EQ(pmr1.get(), tree.Lookup(pmr1->reserved_region().end() - 1));
ASSERT_EQ(pmr2.get(), tree.Lookup(pmr2->reserved_region().base()));
ASSERT_EQ(pmr2.get(), tree.Lookup(pmr2->reserved_region().end() - 1));
tree.Remove(pmr1.get());
ASSERT_EQ(pmr2.get(), tree.Lookup(pmr2->reserved_region().base()));
ASSERT_EQ(pmr2.get(), tree.Lookup(pmr2->reserved_region().end() - 1));
tree.Remove(pmr2.get());
ASSERT_EQ(nullptr, tree.Lookup(pmr2->reserved_region().base()));
ASSERT_EQ(nullptr, tree.Lookup(pmr2->reserved_region().end() - 1));
}
TEST(NormalPageMemoryPool, ConstructorEmpty) {
gc::test::BaseAllocator allocator;
NormalPageMemoryPool pool;
EXPECT_EQ(nullptr, pool.Take());
}
TEST(NormalPageMemoryPool, AddTakeSameBucket) {
gc::test::BaseAllocator allocator;
auto pmr = std::make_unique<NormalPageMemoryRegion>(&allocator);
const PageMemory pm = pmr->GetPageMemory(0);
NormalPageMemoryPool pool;
pool.Add(pm.writeable_region().base());
EXPECT_EQ(pm.writeable_region().base(), pool.Take());
}
TEST(PageBackendTest, AllocateNormalUsesPool) {
gc::test::BaseAllocator allocator;
PageBackend backend(&allocator);
Address writeable_base1 = backend.AllocateNormalPageMemory();
EXPECT_NE(nullptr, writeable_base1);
backend.FreeNormalPageMemory(writeable_base1);
Address writeable_base2 = backend.AllocateNormalPageMemory();
EXPECT_NE(nullptr, writeable_base2);
EXPECT_EQ(writeable_base1, writeable_base2);
}
TEST(PageBackendTest, AllocateLarge) {
gc::test::BaseAllocator allocator;
PageBackend backend(&allocator);
Address writeable_base1 = backend.AllocateLargePageMemory(13731);
EXPECT_NE(nullptr, writeable_base1);
Address writeable_base2 = backend.AllocateLargePageMemory(9478);
EXPECT_NE(nullptr, writeable_base2);
EXPECT_NE(writeable_base1, writeable_base2);
backend.FreeLargePageMemory(writeable_base1);
backend.FreeLargePageMemory(writeable_base2);
}
TEST(PageBackendTest, LookupNormal) {
gc::test::BaseAllocator allocator;
PageBackend backend(&allocator);
Address writeable_base = backend.AllocateNormalPageMemory();
EXPECT_EQ(nullptr, backend.Lookup(writeable_base - kGuardPageSize));
EXPECT_EQ(nullptr, backend.Lookup(writeable_base - 1));
EXPECT_EQ(writeable_base, backend.Lookup(writeable_base));
EXPECT_EQ(writeable_base, backend.Lookup(writeable_base + kPageSize -
2 * kGuardPageSize - 1));
EXPECT_EQ(nullptr,
backend.Lookup(writeable_base + kPageSize - 2 * kGuardPageSize));
EXPECT_EQ(nullptr,
backend.Lookup(writeable_base - kGuardPageSize + kPageSize - 1));
}
TEST(PageBackendTest, LookupLarge) {
gc::test::BaseAllocator allocator;
PageBackend backend(&allocator);
constexpr size_t kSize = 7934;
Address writeable_base = backend.AllocateLargePageMemory(kSize);
EXPECT_EQ(nullptr, backend.Lookup(writeable_base - kGuardPageSize));
EXPECT_EQ(nullptr, backend.Lookup(writeable_base - 1));
EXPECT_EQ(writeable_base, backend.Lookup(writeable_base));
EXPECT_EQ(writeable_base, backend.Lookup(writeable_base + kSize - 1));
}
TEST(PageBackendDeathTest, DestructingBackendDestroysPageMemory) {
gc::test::BaseAllocator allocator;
Address base;
{
PageBackend backend(&allocator);
base = backend.AllocateNormalPageMemory();
}
EXPECT_DEATH(access(base[0]), "");
}
} // namespace test