Commit a83bc97f authored by Michael Lippautz's avatar Michael Lippautz Committed by Commit Bot

components/gc: Remove files

Existing work found a new home in V8 as cppgc.

Bug: chromium:1056170
Change-Id: Ic7f159a6432792787d552f0fc53eca8b3fffef0a
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2151588
Reviewed-by: Kentaro Hara <haraken@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#759593}
parent f0948f33
...@@ -73,7 +73,6 @@ group("gn_all") { ...@@ -73,7 +73,6 @@ group("gn_all") {
"//chrome/installer", "//chrome/installer",
"//chrome/updater", "//chrome/updater",
"//components:components_unittests", "//components:components_unittests",
"//components/gc:gc_unittests",
"//components/gwp_asan:gwp_asan_unittests", "//components/gwp_asan:gwp_asan_unittests",
"//net:net_unittests", "//net:net_unittests",
"//services:services_unittests", "//services:services_unittests",
......
...@@ -88,7 +88,6 @@ test("components_unittests") { ...@@ -88,7 +88,6 @@ test("components_unittests") {
"//components/filename_generation:unit_tests", "//components/filename_generation:unit_tests",
"//components/flags_ui:unit_tests", "//components/flags_ui:unit_tests",
"//components/games/core:unit_tests", "//components/games/core:unit_tests",
"//components/gc:unit_tests",
"//components/gcm_driver:unit_tests", "//components/gcm_driver:unit_tests",
"//components/gcm_driver/crypto:unit_tests", "//components/gcm_driver/crypto:unit_tests",
"//components/google/core/common:unit_tests", "//components/google/core/common:unit_tests",
......
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/allocator.gni")
import("//testing/test.gni")
component("gc") {
sources = [
"core/gc_export.h",
"core/globals.h",
"core/page_memory.cc",
"core/page_memory.h",
"public/platform.h",
]
deps = [ "//base" ]
defines = [ "GC_IMPLEMENTATION=1" ]
}
source_set("test_support") {
testonly = true
sources = []
if (use_partition_alloc) {
sources += [
"test/base_allocator.cc",
"test/base_allocator.h",
]
}
deps = [
":gc",
"//base",
]
}
source_set("unit_tests") {
testonly = true
sources = [ "core/page_memory_basic_test.cc" ]
if (use_partition_alloc) {
sources += [
"core/page_memory_test.cc",
"test/base_allocator_test.cc",
]
}
deps = [
":gc",
":test_support",
"//base",
"//testing/gtest",
]
}
# Convenience target to allow just building GC-related tests for local development.
# run_all_unittests.cc provides the gtest main() entry point.
test("gc_unittests") {
testonly = true
sources = [ "test/run_all_unittests.cc" ]
deps = [
":unit_tests",
"//testing/gtest",
]
}
bikineev@chromium.org
haraken@chromium.org
mlippautz@chromium.org
omerkatz@chromium.org
# TEAM: oilpan-reviews@chromium.org
# COMPONENT: Blink>MemoryAllocator>GarbageCollection
# Oilpan - C++ Garbage Collection
Oilpan is an open-source garbage collection library for C++ in Chromium.
Directory structure:
- `public`: Users should depend on interfaces in the `gc` namespace exposed through this directory.
- `test`: Test utilities that are needed to write unit tests using the garbage collection library.
- `core`: The implementation of the garbage collection library. Blink may temporarily depend on concepts found in here until the public interface is complete.
The library is currently under construction. The Blink-specific parts can be found in third_party/blink/renderer/platform/heap.
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef COMPONENTS_GC_CORE_GC_EXPORT_H_
#define COMPONENTS_GC_CORE_GC_EXPORT_H_
// GC_EXPORT annotates the component's public symbols. In component builds the
// implementation exports them (dllexport on Windows, default visibility
// elsewhere) while clients import them; in static builds it expands to
// nothing.
#if defined(COMPONENT_BUILD)
#if defined(WIN32)
#if defined(GC_IMPLEMENTATION)
#define GC_EXPORT __declspec(dllexport)
#else
#define GC_EXPORT __declspec(dllimport)
#endif // defined(GC_IMPLEMENTATION)
#else // defined(WIN32)
#if defined(GC_IMPLEMENTATION)
#define GC_EXPORT __attribute__((visibility("default")))
#else
#define GC_EXPORT
#endif // defined(GC_IMPLEMENTATION)
#endif
#else // defined(COMPONENT_BUILD)
#define GC_EXPORT
#endif
#endif // COMPONENTS_GC_CORE_GC_EXPORT_H_
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef COMPONENTS_GC_CORE_GLOBALS_H_
#define COMPONENTS_GC_CORE_GLOBALS_H_
#include <stddef.h>
#include <stdint.h>
namespace gc {
namespace internal {
// Byte-addressable pointer type used throughout the GC internals.
using Address = uint8_t*;
// Page size of normal pages used for allocation. Actually usable area on the
// page depends on page headers and guard pages.
constexpr size_t kPageSizeLog2 = 17;
constexpr size_t kPageSize = 1 << kPageSizeLog2; // 128 KiB.
// Masks for extracting the offset within, and the base of, a page.
constexpr size_t kPageOffsetMask = kPageSize - 1;
constexpr size_t kPageBaseMask = ~kPageOffsetMask;
// Guard pages are always put into memory. Whether they are actually protected
// depends on the allocator provided to the garbage collector.
constexpr size_t kGuardPageSize = 4096;
// The mask arithmetic above relies on both sizes being powers of two.
static_assert((kPageSize & (kPageSize - 1)) == 0,
"kPageSize must be power of 2");
static_assert((kGuardPageSize & (kGuardPageSize - 1)) == 0,
"kGuardPageSize must be power of 2");
} // namespace internal
} // namespace gc
#endif // COMPONENTS_GC_CORE_GLOBALS_H_
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "components/gc/core/page_memory.h"
#include "base/bits.h"
#include "components/gc/core/globals.h"
namespace gc {
namespace internal {
namespace {
// Makes |page_memory|'s writeable region read/write-accessible. Falls back to
// committing the whole reservation when the allocator cannot change
// permissions at guard-page granularity.
void Unprotect(PageAllocator* allocator, const PageMemory& page_memory) {
if (SupportsCommittingGuardPages(allocator)) {
CHECK(allocator->SetPermissions(page_memory.writeable_region().base(),
page_memory.writeable_region().size(),
PageAllocator::Permission::kReadWrite));
} else {
// No protection in case the allocator cannot commit at the required
// granularity. Only protect if the allocator supports committing at that
// granularity.
//
// The allocator needs to support committing the overall range.
CHECK_EQ(0u,
page_memory.overall_region().size() % allocator->CommitPageSize());
CHECK(allocator->SetPermissions(page_memory.overall_region().base(),
page_memory.overall_region().size(),
PageAllocator::Permission::kReadWrite));
}
}
// Revokes access to |page_memory|. Mirrors Unprotect(): when the allocator
// cannot operate at guard-page granularity, the whole reservation is
// protected instead of just the writeable part.
void Protect(PageAllocator* allocator, const PageMemory& page_memory) {
  const bool guard_granular = SupportsCommittingGuardPages(allocator);
  // Pick the smallest region the allocator can actually change. Using the
  // same region as Unprotect() gives the OS a chance for fast lookup and
  // change.
  const MemoryRegion& region = guard_granular
                                   ? page_memory.writeable_region()
                                   : page_memory.overall_region();
  if (!guard_granular) {
    // The allocator needs to support committing the overall range.
    CHECK_EQ(0u, region.size() % allocator->CommitPageSize());
  }
  CHECK(allocator->SetPermissions(region.base(), region.size(),
                                  PageAllocator::Permission::kNoAccess));
}
// Reserves |allocation_size| bytes of inaccessible, kPageSize-aligned address
// space. Permissions are applied later, page by page.
MemoryRegion ReserveMemoryRegion(PageAllocator* allocator,
                                 size_t allocation_size) {
  Address base = static_cast<Address>(
      allocator->AllocatePages(nullptr, allocation_size, kPageSize,
                               PageAllocator::Permission::kNoAccess));
  const MemoryRegion region(base, allocation_size);
  DCHECK_EQ(region.base() + allocation_size, region.end());
  return region;
}
// Returns |reserved_region|'s address space to the allocator.
void FreeMemoryRegion(PageAllocator* allocator,
const MemoryRegion& reserved_region) {
allocator->FreePages(reserved_region.base(), reserved_region.size());
}
} // namespace
// Takes ownership of |reserved_region|; the reservation is returned to
// |allocator| on destruction.
PageMemoryRegion::PageMemoryRegion(PageAllocator* allocator,
MemoryRegion reserved_region,
bool is_large)
: allocator_(allocator),
reserved_region_(reserved_region),
is_large_(is_large) {}
PageMemoryRegion::~PageMemoryRegion() {
FreeMemoryRegion(allocator_, reserved_region());
}
// static
constexpr size_t NormalPageMemoryRegion::kNumPageRegions;
// Reserves one contiguous region large enough for kNumPageRegions normal
// pages, aligned to the allocator's allocation granularity. All page slots
// start out unused.
NormalPageMemoryRegion::NormalPageMemoryRegion(PageAllocator* allocator)
: PageMemoryRegion(
allocator,
ReserveMemoryRegion(allocator,
base::bits::Align(kPageSize * kNumPageRegions,
allocator->AllocatePageSize())),
false) {
// NOTE(review): guarded by DEBUG rather than DCHECK_IS_ON() -- confirm this
// is the intended macro; Chromium conventionally uses DCHECK_IS_ON().
#ifdef DEBUG
for (size_t i = 0; i < kNumPageRegions; ++i) {
DCHECK_EQ(false, page_memories_in_use_[i]);
}
#endif // DEBUG
}
NormalPageMemoryRegion::~NormalPageMemoryRegion() = default;
// Marks the page slot at |writeable_base| as used and makes it accessible.
void NormalPageMemoryRegion::Allocate(Address writeable_base) {
const size_t index = GetIndex(writeable_base);
ChangeUsed(index, true);
Unprotect(allocator_, GetPageMemory(index));
}
// Marks the page slot at |writeable_base| as free and revokes access to it.
void NormalPageMemoryRegion::Free(Address writeable_base) {
const size_t index = GetIndex(writeable_base);
ChangeUsed(index, false);
Protect(allocator_, GetPageMemory(index));
}
// Makes every page slot accessible regardless of used state; test-only.
void NormalPageMemoryRegion::UnprotectForTesting() {
for (size_t i = 0; i < kNumPageRegions; ++i) {
Unprotect(allocator_, GetPageMemory(i));
}
}
// Reserves |length| usable bytes plus one guard page on each side, aligned to
// the allocator's allocation granularity.
LargePageMemoryRegion::LargePageMemoryRegion(PageAllocator* allocator,
size_t length)
: PageMemoryRegion(
allocator,
ReserveMemoryRegion(allocator,
base::bits::Align(length + 2 * kGuardPageSize,
allocator->AllocatePageSize())),
true) {}
LargePageMemoryRegion::~LargePageMemoryRegion() = default;
// Makes the single large page accessible; test-only.
void LargePageMemoryRegion::UnprotectForTesting() {
Unprotect(allocator_, GetPageMemory());
}
PageMemoryRegionTree::PageMemoryRegionTree() = default;
PageMemoryRegionTree::~PageMemoryRegionTree() = default;
// Indexes |region| by its reserved base address. The region must not already
// be present.
void PageMemoryRegionTree::Add(PageMemoryRegion* region) {
DCHECK(region);
auto result = set_.emplace(region->reserved_region().base(), region);
DCHECK(result.second);
}
// Removes |region| from the index. The region must be present.
void PageMemoryRegionTree::Remove(PageMemoryRegion* region) {
DCHECK(region);
auto size = set_.erase(region->reserved_region().base());
DCHECK_EQ(1u, size);
}
NormalPageMemoryPool::NormalPageMemoryPool() = default;
NormalPageMemoryPool::~NormalPageMemoryPool() = default;

// Returns a page (identified by its writeable base) to the pool for reuse.
void NormalPageMemoryPool::Add(Address writeable_base) {
  pool_.push_back(writeable_base);
}

// Hands out the most recently added page, or nullptr when the pool is empty.
Address NormalPageMemoryPool::Take() {
  Address result = nullptr;
  if (!pool_.empty()) {
    result = pool_.back();
    pool_.pop_back();
  }
  return result;
}
// The backend borrows |allocator| (not owned) for all reservations.
PageBackend::PageBackend(PageAllocator* allocator) : allocator_(allocator) {}

PageBackend::~PageBackend() = default;

// Allocates a normal page, preferring a pooled page over reserving a fresh
// region. Returns the writeable base address of the page.
Address PageBackend::AllocateNormalPageMemory() {
  Address writeable_base = page_pool_.Take();
  if (!writeable_base) {
    // Pool is exhausted: reserve a new multi-page region, hand all of its
    // pages to the pool, and take one directly instead of recursing into
    // AllocateNormalPageMemory() again.
    auto pmr = std::make_unique<NormalPageMemoryRegion>(allocator_);
    for (size_t i = 0; i < NormalPageMemoryRegion::kNumPageRegions; ++i) {
      page_pool_.Add(pmr->GetPageMemory(i).writeable_region().base());
    }
    page_memory_region_tree_.Add(pmr.get());
    normal_page_memory_regions_.push_back(std::move(pmr));
    writeable_base = page_pool_.Take();
    DCHECK(writeable_base);
  }
  // Commit the page and mark it used in its owning region.
  static_cast<NormalPageMemoryRegion*>(
      page_memory_region_tree_.Lookup(writeable_base))
      ->Allocate(writeable_base);
  return writeable_base;
}
// Protects the page, marks it free in its owning region, and returns it to
// the pool for later reuse.
void PageBackend::FreeNormalPageMemory(Address writeable_base) {
static_cast<NormalPageMemoryRegion*>(
page_memory_region_tree_.Lookup(writeable_base))
->Free(writeable_base);
page_pool_.Add(writeable_base);
}
// Reserves and commits a large page with at least |size| usable bytes. Large
// pages are not pooled; each allocation reserves a fresh region.
Address PageBackend::AllocateLargePageMemory(size_t size) {
auto pmr = std::make_unique<LargePageMemoryRegion>(allocator_, size);
const PageMemory pm = pmr->GetPageMemory();
Unprotect(allocator_, pm);
page_memory_region_tree_.Add(pmr.get());
large_page_memory_regions_.insert({pmr.get(), std::move(pmr)});
return pm.writeable_region().base();
}
// Releases a large page. Erasing the owning map entry destroys the region,
// which frees the underlying reservation.
void PageBackend::FreeLargePageMemory(Address writeable_base) {
PageMemoryRegion* pmr = page_memory_region_tree_.Lookup(writeable_base);
page_memory_region_tree_.Remove(pmr);
auto size = large_page_memory_regions_.erase(pmr);
DCHECK_EQ(1u, size);
}
} // namespace internal
} // namespace gc
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef COMPONENTS_GC_CORE_PAGE_MEMORY_H_
#define COMPONENTS_GC_CORE_PAGE_MEMORY_H_
#include <array>
#include <memory>
#include <unordered_map>
#include <vector>
#include "base/containers/flat_map.h"
#include "base/logging.h"
#include "components/gc/core/gc_export.h"
#include "components/gc/core/globals.h"
#include "components/gc/public/platform.h"
namespace gc {
namespace internal {
// Returns true if the provided allocator supports committing at the required
// granularity, i.e. the guard page size is a whole multiple of the
// allocator's commit page size.
inline bool SupportsCommittingGuardPages(PageAllocator* allocator) {
return kGuardPageSize % allocator->CommitPageSize() == 0;
}
// A half-open span of memory [base, base + size). Copyable value type; does
// not own the memory it describes.
class GC_EXPORT MemoryRegion final {
 public:
  MemoryRegion() = default;
  MemoryRegion(Address base, size_t size) : base_(base), size_(size) {
    DCHECK(base);
    DCHECK_LT(0u, size);
  }

  Address base() const { return base_; }
  size_t size() const { return size_; }
  Address end() const { return base_ + size_; }

  // Single unsigned comparison covers both bounds: addresses below base_
  // wrap to huge offsets.
  bool Contains(Address addr) const {
    const uintptr_t offset = reinterpret_cast<uintptr_t>(addr) -
                             reinterpret_cast<uintptr_t>(base_);
    return offset < size_;
  }

  // True if |other| lies entirely within this region.
  bool Contains(const MemoryRegion& other) const {
    return base_ <= other.base() && other.end() <= end();
  }

 private:
  Address base_ = nullptr;
  size_t size_ = 0;
};
// PageMemory provides the backing of a single normal or large page. It pairs
// the overall reserved region with the writeable sub-region that excludes the
// surrounding guard pages.
class GC_EXPORT PageMemory final {
public:
PageMemory(MemoryRegion overall, MemoryRegion writeable)
: overall_(overall), writable_(writeable) {
DCHECK(overall.Contains(writeable));
}
const MemoryRegion writeable_region() const { return writable_; }
const MemoryRegion overall_region() const { return overall_; }
private:
MemoryRegion overall_;
MemoryRegion writable_;
};
// Base class for reserved regions that back one or more pages. Owns the
// reservation and returns it to the allocator on destruction.
class GC_EXPORT PageMemoryRegion {
public:
virtual ~PageMemoryRegion();
const MemoryRegion reserved_region() const { return reserved_region_; }
bool is_large() const { return is_large_; }
// Lookup writeable base for an |address| that's contained in
// PageMemoryRegion. Filters out addresses that are contained in non-writeable
// regions (e.g. guard pages).
inline Address Lookup(Address address) const;
// Disallow copy/move.
PageMemoryRegion(const PageMemoryRegion&) = delete;
PageMemoryRegion& operator=(const PageMemoryRegion&) = delete;
// Makes all backed memory accessible; test-only.
virtual void UnprotectForTesting() = 0;
protected:
PageMemoryRegion(PageAllocator*, MemoryRegion, bool);
PageAllocator* const allocator_;
const MemoryRegion reserved_region_;
const bool is_large_;
};
// NormalPageMemoryRegion serves kNumPageRegions normal-sized PageMemory
// objects from one contiguous reservation.
class GC_EXPORT NormalPageMemoryRegion final : public PageMemoryRegion {
public:
static constexpr size_t kNumPageRegions = 10;
explicit NormalPageMemoryRegion(PageAllocator*);
~NormalPageMemoryRegion() override;
// Returns the PageMemory for the slot at |index|: a kPageSize-sized overall
// region whose writeable part excludes one guard page at each end.
const PageMemory GetPageMemory(size_t index) const {
DCHECK_LT(index, kNumPageRegions);
return PageMemory(
MemoryRegion(reserved_region().base() + kPageSize * index, kPageSize),
MemoryRegion(
reserved_region().base() + kPageSize * index + kGuardPageSize,
kPageSize - 2 * kGuardPageSize));
}
// Allocates a normal page at |writeable_base| address. Changes page
// protection.
void Allocate(Address writeable_base);
// Frees a normal page at |writeable_base| address. Changes page
// protection.
void Free(Address);
inline Address Lookup(Address) const;
void UnprotectForTesting() final;
private:
// Flips the used bit for |index|; DCHECKs that this is a genuine state
// change.
void ChangeUsed(size_t index, bool value) {
DCHECK_LT(index, kNumPageRegions);
DCHECK_EQ(value, !page_memories_in_use_[index]);
page_memories_in_use_[index] = value;
}
// Maps a contained address to its page slot index.
size_t GetIndex(Address address) const {
return static_cast<size_t>(address - reserved_region().base()) >>
kPageSizeLog2;
}
std::array<bool, kNumPageRegions> page_memories_in_use_ = {};
};
// LargePageMemoryRegion serves a single large PageMemory object.
class GC_EXPORT LargePageMemoryRegion final : public PageMemoryRegion {
public:
LargePageMemoryRegion(PageAllocator*, size_t);
~LargePageMemoryRegion() override;
// Returns the single PageMemory: the whole reservation, with the writeable
// part excluding one guard page at each end.
const PageMemory GetPageMemory() const {
return PageMemory(
MemoryRegion(reserved_region().base(), reserved_region().size()),
MemoryRegion(reserved_region().base() + kGuardPageSize,
reserved_region().size() - 2 * kGuardPageSize));
}
inline Address Lookup(Address) const;
void UnprotectForTesting() final;
};
// A PageMemoryRegionTree is a binary search tree of PageMemoryRegions sorted
// by reserved base addresses.
//
// The tree does not keep its elements alive but merely provides indexing
// capabilities.
class GC_EXPORT PageMemoryRegionTree final {
public:
PageMemoryRegionTree();
~PageMemoryRegionTree();
void Add(PageMemoryRegion*);
void Remove(PageMemoryRegion*);
inline PageMemoryRegion* Lookup(Address) const;
private:
// base::flat_map is backed by a sorted vector, improving locality (fewer
// cache misses) while retaining O(log n) binary search.
base::flat_map<Address, PageMemoryRegion*> set_;
};
// A pool of PageMemory objects represented by the writeable base addresses.
//
// The pool does not keep its elements alive but merely provides pooling
// capabilities.
class GC_EXPORT NormalPageMemoryPool final {
public:
NormalPageMemoryPool();
~NormalPageMemoryPool();
// Adds a page (by writeable base address) to the pool.
void Add(Address);
// Removes and returns a pooled page, or nullptr if the pool is empty.
Address Take();
private:
std::vector<Address> pool_;
};
// A backend that is used for allocating and freeing normal and large pages.
//
// Internally maintains a set of PageMemoryRegions. The backend keeps its used
// regions alive.
class GC_EXPORT PageBackend final {
public:
explicit PageBackend(PageAllocator*);
~PageBackend();
// Allocates a normal page from the backend.
//
// Returns the writeable base of the region.
Address AllocateNormalPageMemory();
// Returns normal page memory back to the backend. Expects the
// |writeable_base| returned by |AllocateNormalPageMemory()|.
void FreeNormalPageMemory(Address writeable_base);
// Allocates a large page from the backend.
//
// Returns the writeable base of the region.
Address AllocateLargePageMemory(size_t size);
// Returns large page memory back to the backend. Expects the |writeable_base|
// returned by |AllocateLargePageMemory()|.
void FreeLargePageMemory(Address writeable_base);
// Returns the writeable base if |address| is contained in a valid page
// memory.
inline Address Lookup(Address) const;
// Disallow copy/move.
PageBackend(const PageBackend&) = delete;
PageBackend& operator=(const PageBackend&) = delete;
private:
// Not owned.
PageAllocator* allocator_;
// Pool of freed normal pages, kept protected until reallocated.
NormalPageMemoryPool page_pool_;
// Address-based index over all live regions, used by Lookup().
PageMemoryRegionTree page_memory_region_tree_;
// Owning storage for the regions below.
std::vector<std::unique_ptr<PageMemoryRegion>> normal_page_memory_regions_;
std::unordered_map<PageMemoryRegion*, std::unique_ptr<PageMemoryRegion>>
large_page_memory_regions_;
};
// Returns the writeable base of the used slot containing |address|, or
// nullptr for free slots and guard-page addresses.
Address NormalPageMemoryRegion::Lookup(Address address) const {
size_t index = GetIndex(address);
if (!page_memories_in_use_[index])
return nullptr;
const MemoryRegion writeable_region = GetPageMemory(index).writeable_region();
return writeable_region.Contains(address) ? writeable_region.base() : nullptr;
}
// Returns the writeable base if |address| lies in the writeable part,
// filtering out guard-page addresses.
Address LargePageMemoryRegion::Lookup(Address address) const {
const MemoryRegion writeable_region = GetPageMemory().writeable_region();
return writeable_region.Contains(address) ? writeable_region.base() : nullptr;
}
// Dispatches to the concrete region type based on the is_large() tag.
Address PageMemoryRegion::Lookup(Address address) const {
DCHECK(reserved_region().Contains(address));
return is_large()
? static_cast<const LargePageMemoryRegion*>(this)->Lookup(address)
: static_cast<const NormalPageMemoryRegion*>(this)->Lookup(
address);
}
// Finds the region whose reservation contains |address| via binary search on
// the sorted base addresses.
PageMemoryRegion* PageMemoryRegionTree::Lookup(Address address) const {
auto it = set_.upper_bound(address);
// This check also covers set_.size() > 0, since for empty vectors it is
// guaranteed that begin() == end().
if (it == set_.begin())
return nullptr;
auto* result = std::next(it, -1)->second;
if (address < result->reserved_region().end())
return result;
return nullptr;
}
// Combines tree and region lookup: returns nullptr if |address| is not backed
// by a writeable page.
Address PageBackend::Lookup(Address address) const {
PageMemoryRegion* pmr = page_memory_region_tree_.Lookup(address);
return pmr ? pmr->Lookup(address) : nullptr;
}
} // namespace internal
} // namespace gc
#endif // COMPONENTS_GC_CORE_PAGE_MEMORY_H_
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "components/gc/core/page_memory.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace gc {
namespace internal {
namespace test {
// Verifies accessor consistency for a freshly constructed region.
// NOTE(review): `&dummy + kSize` forms a pointer past a single object, which
// is technically UB; fine in practice for these tests.
TEST(MemoryRegionTest, Construct) {
uint8_t dummy;
constexpr size_t kSize = 17;
const MemoryRegion region(&dummy, kSize);
EXPECT_EQ(&dummy, region.base());
EXPECT_EQ(kSize, region.size());
EXPECT_EQ(&dummy + kSize, region.end());
}
// Containment of single addresses: inclusive base, exclusive end.
TEST(MemoryRegionTest, ContainsAddress) {
uint8_t dummy;
constexpr size_t kSize = 7;
const MemoryRegion region(&dummy, kSize);
EXPECT_FALSE(region.Contains(&dummy - 1));
EXPECT_TRUE(region.Contains(&dummy));
EXPECT_TRUE(region.Contains(&dummy + kSize - 1));
EXPECT_FALSE(region.Contains(&dummy + kSize));
}
// Containment of whole regions: must fit within [base, end).
TEST(MemoryRegionTest, ContainsMemoryRegion) {
uint8_t dummy;
constexpr size_t kSize = 7;
const MemoryRegion region(&dummy, kSize);
const MemoryRegion contained_region1(&dummy, kSize - 1);
EXPECT_TRUE(region.Contains(contained_region1));
const MemoryRegion contained_region2(&dummy + 1, kSize - 1);
EXPECT_TRUE(region.Contains(contained_region2));
const MemoryRegion not_contained_region1(&dummy - 1, kSize);
EXPECT_FALSE(region.Contains(not_contained_region1));
const MemoryRegion not_contained_region2(&dummy + kSize, 1);
EXPECT_FALSE(region.Contains(not_contained_region2));
}
// PageMemory exposes the overall and writeable regions it was built from.
TEST(PageMemoryTest, Construct) {
uint8_t dummy;
constexpr size_t kOverallSize = 17;
const MemoryRegion overall_region(&dummy, kOverallSize);
const MemoryRegion writeable_region(&dummy + 1, kOverallSize - 2);
const PageMemory page_memory(overall_region, writeable_region);
EXPECT_EQ(&dummy, page_memory.overall_region().base());
EXPECT_EQ(&dummy + kOverallSize, page_memory.overall_region().end());
EXPECT_EQ(&dummy + 1, page_memory.writeable_region().base());
EXPECT_EQ(&dummy + kOverallSize - 1, page_memory.writeable_region().end());
}
#if DCHECK_IS_ON()
// A writeable region escaping the overall region must trip the DCHECK.
TEST(PageMemoryDeathTest, ConstructNonContainedRegions) {
uint8_t dummy;
constexpr size_t kOverallSize = 17;
const MemoryRegion overall_region(&dummy, kOverallSize);
const MemoryRegion writeable_region(&dummy + 1, kOverallSize);
EXPECT_DEATH_IF_SUPPORTED(PageMemory(overall_region, writeable_region), "");
}
#endif // DCHECK_IS_ON()
} // namespace test
} // namespace internal
} // namespace gc
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <iterator>
#include "build/build_config.h"
#include "components/gc/core/page_memory.h"
#include "components/gc/test/base_allocator.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace gc {
namespace internal {
namespace test {
// A normal region provides kNumPageRegions adjacent, zero-initialized pages,
// each bracketed by guard pages.
TEST(PageMemoryRegionTest, NormalPageMemoryRegion) {
gc::test::BaseAllocator allocator;
auto pmr = std::make_unique<NormalPageMemoryRegion>(&allocator);
pmr->UnprotectForTesting();
MemoryRegion prev_overall;
for (size_t i = 0; i < NormalPageMemoryRegion::kNumPageRegions; ++i) {
const PageMemory pm = pmr->GetPageMemory(i);
// Previous PageMemory aligns with the current one.
if (prev_overall.base()) {
EXPECT_EQ(prev_overall.end(), pm.overall_region().base());
}
prev_overall =
MemoryRegion(pm.overall_region().base(), pm.overall_region().size());
// Writeable region is contained in overall region.
EXPECT_TRUE(pm.overall_region().Contains(pm.writeable_region()));
EXPECT_EQ(0u, pm.writeable_region().base()[0]);
EXPECT_EQ(0u, pm.writeable_region().end()[-1]);
// Front guard page.
EXPECT_EQ(pm.writeable_region().base(),
pm.overall_region().base() + kGuardPageSize);
// Back guard page.
EXPECT_EQ(pm.overall_region().end(),
pm.writeable_region().end() + kGuardPageSize);
}
}
// A large region provides one zeroed page of at least the requested size.
TEST(PageMemoryRegionTest, LargePageMemoryRegion) {
gc::test::BaseAllocator allocator;
auto pmr = std::make_unique<LargePageMemoryRegion>(&allocator, 1024);
pmr->UnprotectForTesting();
const PageMemory pm = pmr->GetPageMemory();
EXPECT_LE(1024u, pm.writeable_region().size());
EXPECT_EQ(0u, pm.writeable_region().base()[0]);
EXPECT_EQ(0u, pm.writeable_region().end()[-1]);
}
TEST(PageMemoryRegionTest, PlatformUsesGuardPages) {
// This tests that the testing allocator actually uses protected guard
// regions.
gc::test::BaseAllocator allocator;
#ifdef ARCH_CPU_PPC64
EXPECT_FALSE(SupportsCommittingGuardPages(&allocator));
#else // ARCH_CPU_PPC64
EXPECT_TRUE(SupportsCommittingGuardPages(&allocator));
#endif // ARCH_CPU_PPC64
}
namespace {
// Touches a byte; the volatile parameter keeps the read from being optimized
// away in the death tests below.
void access(volatile uint8_t) {}
} // namespace
// Accessing memory of a destroyed region must crash.
TEST(PageMemoryRegionDeathTest, ReservationIsFreed) {
gc::test::BaseAllocator allocator;
Address base;
{
auto pmr = std::make_unique<LargePageMemoryRegion>(&allocator, 1024);
base = pmr->reserved_region().base();
}
EXPECT_DEATH(access(base[0]), "");
}
// Reading the guard page preceding the writeable area must crash.
TEST(PageMemoryRegionDeathTest, FrontGuardPageAccessCrashes) {
gc::test::BaseAllocator allocator;
auto pmr = std::make_unique<NormalPageMemoryRegion>(&allocator);
if (SupportsCommittingGuardPages(&allocator)) {
EXPECT_DEATH(access(pmr->GetPageMemory(0).overall_region().base()[0]), "");
}
}
// Reading the guard page following the writeable area must crash.
TEST(PageMemoryRegionDeathTest, BackGuardPageAccessCrashes) {
gc::test::BaseAllocator allocator;
auto pmr = std::make_unique<NormalPageMemoryRegion>(&allocator);
if (SupportsCommittingGuardPages(&allocator)) {
EXPECT_DEATH(access(pmr->GetPageMemory(0).writeable_region().end()[0]), "");
}
}
// Tree lookup over a normal region: hits inside the reservation, misses just
// outside it; removal makes all lookups miss.
TEST(PageMemoryRegionTreeTest, AddNormalLookupRemove) {
gc::test::BaseAllocator allocator;
auto pmr = std::make_unique<NormalPageMemoryRegion>(&allocator);
PageMemoryRegionTree tree;
tree.Add(pmr.get());
ASSERT_EQ(pmr.get(), tree.Lookup(pmr->reserved_region().base()));
ASSERT_EQ(pmr.get(), tree.Lookup(pmr->reserved_region().end() - 1));
ASSERT_EQ(nullptr, tree.Lookup(pmr->reserved_region().base() - 1));
ASSERT_EQ(nullptr, tree.Lookup(pmr->reserved_region().end()));
tree.Remove(pmr.get());
ASSERT_EQ(nullptr, tree.Lookup(pmr->reserved_region().base()));
ASSERT_EQ(nullptr, tree.Lookup(pmr->reserved_region().end() - 1));
}
// Same contract for a large region.
TEST(PageMemoryRegionTreeTest, AddLargeLookupRemove) {
gc::test::BaseAllocator allocator;
constexpr size_t kLargeSize = 5012;
auto pmr = std::make_unique<LargePageMemoryRegion>(&allocator, kLargeSize);
PageMemoryRegionTree tree;
tree.Add(pmr.get());
ASSERT_EQ(pmr.get(), tree.Lookup(pmr->reserved_region().base()));
ASSERT_EQ(pmr.get(), tree.Lookup(pmr->reserved_region().end() - 1));
ASSERT_EQ(nullptr, tree.Lookup(pmr->reserved_region().base() - 1));
ASSERT_EQ(nullptr, tree.Lookup(pmr->reserved_region().end()));
tree.Remove(pmr.get());
ASSERT_EQ(nullptr, tree.Lookup(pmr->reserved_region().base()));
ASSERT_EQ(nullptr, tree.Lookup(pmr->reserved_region().end() - 1));
}
// Multiple regions coexist in the tree and are removable independently.
TEST(PageMemoryRegionTreeTest, AddLookupRemoveMultiple) {
gc::test::BaseAllocator allocator;
auto pmr1 = std::make_unique<NormalPageMemoryRegion>(&allocator);
constexpr size_t kLargeSize = 3127;
auto pmr2 = std::make_unique<LargePageMemoryRegion>(&allocator, kLargeSize);
PageMemoryRegionTree tree;
tree.Add(pmr1.get());
tree.Add(pmr2.get());
ASSERT_EQ(pmr1.get(), tree.Lookup(pmr1->reserved_region().base()));
ASSERT_EQ(pmr1.get(), tree.Lookup(pmr1->reserved_region().end() - 1));
ASSERT_EQ(pmr2.get(), tree.Lookup(pmr2->reserved_region().base()));
ASSERT_EQ(pmr2.get(), tree.Lookup(pmr2->reserved_region().end() - 1));
tree.Remove(pmr1.get());
ASSERT_EQ(pmr2.get(), tree.Lookup(pmr2->reserved_region().base()));
ASSERT_EQ(pmr2.get(), tree.Lookup(pmr2->reserved_region().end() - 1));
tree.Remove(pmr2.get());
ASSERT_EQ(nullptr, tree.Lookup(pmr2->reserved_region().base()));
ASSERT_EQ(nullptr, tree.Lookup(pmr2->reserved_region().end() - 1));
}
// An empty pool hands out nullptr.
TEST(NormalPageMemoryPool, ConstructorEmpty) {
gc::test::BaseAllocator allocator;
NormalPageMemoryPool pool;
EXPECT_EQ(nullptr, pool.Take());
}
// Take() returns the page most recently Add()ed.
TEST(NormalPageMemoryPool, AddTakeSameBucket) {
gc::test::BaseAllocator allocator;
auto pmr = std::make_unique<NormalPageMemoryRegion>(&allocator);
const PageMemory pm = pmr->GetPageMemory(0);
NormalPageMemoryPool pool;
pool.Add(pm.writeable_region().base());
EXPECT_EQ(pm.writeable_region().base(), pool.Take());
}
// Freed normal pages are recycled through the backend's pool.
TEST(PageBackendTest, AllocateNormalUsesPool) {
gc::test::BaseAllocator allocator;
PageBackend backend(&allocator);
Address writeable_base1 = backend.AllocateNormalPageMemory();
EXPECT_NE(nullptr, writeable_base1);
backend.FreeNormalPageMemory(writeable_base1);
Address writeable_base2 = backend.AllocateNormalPageMemory();
EXPECT_NE(nullptr, writeable_base2);
EXPECT_EQ(writeable_base1, writeable_base2);
}
// Large allocations are distinct regions and can be freed independently.
TEST(PageBackendTest, AllocateLarge) {
gc::test::BaseAllocator allocator;
PageBackend backend(&allocator);
Address writeable_base1 = backend.AllocateLargePageMemory(13731);
EXPECT_NE(nullptr, writeable_base1);
Address writeable_base2 = backend.AllocateLargePageMemory(9478);
EXPECT_NE(nullptr, writeable_base2);
EXPECT_NE(writeable_base1, writeable_base2);
backend.FreeLargePageMemory(writeable_base1);
backend.FreeLargePageMemory(writeable_base2);
}
// Lookup() resolves only addresses in the writeable part of a normal page.
TEST(PageBackendTest, LookupNormal) {
gc::test::BaseAllocator allocator;
PageBackend backend(&allocator);
Address writeable_base = backend.AllocateNormalPageMemory();
EXPECT_EQ(nullptr, backend.Lookup(writeable_base - kGuardPageSize));
EXPECT_EQ(nullptr, backend.Lookup(writeable_base - 1));
EXPECT_EQ(writeable_base, backend.Lookup(writeable_base));
EXPECT_EQ(writeable_base, backend.Lookup(writeable_base + kPageSize -
2 * kGuardPageSize - 1));
EXPECT_EQ(nullptr,
backend.Lookup(writeable_base + kPageSize - 2 * kGuardPageSize));
EXPECT_EQ(nullptr,
backend.Lookup(writeable_base - kGuardPageSize + kPageSize - 1));
}
// Lookup() resolves only addresses in the writeable part of a large page.
TEST(PageBackendTest, LookupLarge) {
gc::test::BaseAllocator allocator;
PageBackend backend(&allocator);
constexpr size_t kSize = 7934;
Address writeable_base = backend.AllocateLargePageMemory(kSize);
EXPECT_EQ(nullptr, backend.Lookup(writeable_base - kGuardPageSize));
EXPECT_EQ(nullptr, backend.Lookup(writeable_base - 1));
EXPECT_EQ(writeable_base, backend.Lookup(writeable_base));
EXPECT_EQ(writeable_base, backend.Lookup(writeable_base + kSize - 1));
}
// Destroying the backend releases all page memory it handed out.
TEST(PageBackendDeathTest, DestructingBackendDestroysPageMemory) {
gc::test::BaseAllocator allocator;
Address base;
{
PageBackend backend(&allocator);
base = backend.AllocateNormalPageMemory();
}
EXPECT_DEATH(access(base[0]), "");
}
} // namespace test
} // namespace internal
} // namespace gc
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef COMPONENTS_GC_PUBLIC_PLATFORM_H_
#define COMPONENTS_GC_PUBLIC_PLATFORM_H_
#include <stddef.h>
#include "components/gc/core/gc_export.h"
namespace gc {
// Allocator used to get memory from the embedder. Pure-virtual interface;
// implementations supply the actual page reservation and protection.
class GC_EXPORT PageAllocator {
public:
// Memory permissions.
enum Permission { kNoAccess, kRead, kReadWrite, kReadExecute };
virtual ~PageAllocator() = default;
// Page granularity for |AllocatePages()| and |FreePages()|. Addresses and
// lengths must be multiples of |AllocatePageSize()|.
virtual size_t AllocatePageSize() const = 0;
// Page granularity for |SetPermissions()| and |DiscardSystemPages()|.
// Addresses and lengths must be multiples of |CommitPageSize()|.
virtual size_t CommitPageSize() const = 0;
// Allocates memory at the given |address| (hint) with the provided |length|,
// |alignment|, and |permissions|.
virtual void* AllocatePages(void* address,
size_t length,
size_t alignment,
Permission permissions) = 0;
// Frees memory in a range that was allocated by |AllocatePages()|.
virtual bool FreePages(void* address, size_t length) = 0;
// Sets permissions in a range that was allocated by |AllocatePages()|.
virtual bool SetPermissions(void* address,
size_t length,
Permission permissions) = 0;
// Potentially frees physical memory in the range [address, address+length).
// Address and size should be aligned with |CommitPageSize()|. Note that this
// call transparently brings back physical memory at an unknown state.
//
// Returns true on success, and false otherwise.
virtual bool DiscardSystemPages(void* address, size_t size) = 0;
};
} // namespace gc
#endif // COMPONENTS_GC_PUBLIC_PLATFORM_H_
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "components/gc/test/base_allocator.h"
#include "base/allocator/partition_allocator/page_allocator.h"
#include "base/allocator/partition_allocator/page_allocator_constants.h"
namespace gc {
namespace test {
namespace {
// Maps the GC PageAllocator permission enum onto the corresponding
// PartitionAlloc page accessibility configuration.
base::PageAccessibilityConfiguration GetPageAccessibility(
    PageAllocator::Permission permission) {
  switch (permission) {
    case PageAllocator::Permission::kRead:
      return base::PageRead;
    case PageAllocator::Permission::kReadWrite:
      return base::PageReadWrite;
    case PageAllocator::Permission::kReadExecute:
      return base::PageReadExecute;
    case PageAllocator::Permission::kNoAccess:
      return base::PageInaccessible;
  }
  // The switch above is exhaustive. This fallback silences compilers that
  // cannot prove it (-Wreturn-type) and avoids undefined behavior should an
  // out-of-range enum value ever be passed.
  return base::PageInaccessible;
}
} // namespace
// Reservation granularity: PartitionAlloc's page allocation granularity.
size_t BaseAllocator::AllocatePageSize() const {
return base::kPageAllocationGranularity;
}
// Commit granularity: the OS system page size.
size_t BaseAllocator::CommitPageSize() const {
return base::kSystemPageSize;
}
// Reserves |length| bytes near |address| (hint) via PartitionAlloc; commits
// them unless the request is for inaccessible memory.
void* BaseAllocator::AllocatePages(void* address,
size_t length,
size_t alignment,
Permission permissions) {
base::PageAccessibilityConfiguration config =
GetPageAccessibility(permissions);
const bool commit = (permissions != PageAllocator::Permission::kNoAccess);
// Use generic PartitionAlloc page tag as the allocator is only used for
// testing.
const base::PageTag page_tag = base::PageTag::kChromium;
return base::AllocPages(address, length, alignment, config, page_tag, commit);
}
// Frees a range previously returned by AllocatePages(). Always reports
// success, as base::FreePages() has no failure signal.
bool BaseAllocator::FreePages(void* address, size_t length) {
base::FreePages(address, length);
return true;
}
// Changes page permissions. kNoAccess decommits the pages (which also makes
// them inaccessible); other permissions are applied directly.
bool BaseAllocator::SetPermissions(void* address,
size_t length,
Permission permissions) {
if (permissions == Permission::kNoAccess) {
base::DecommitSystemPages(address, length);
return true;
}
return base::TrySetSystemPagesAccess(address, length,
GetPageAccessibility(permissions));
}
// Discarding is intentionally unsupported in this test allocator; callers
// must handle the failure.
bool BaseAllocator::DiscardSystemPages(void* address, size_t size) {
return false;
}
} // namespace test
} // namespace gc
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef COMPONENTS_GC_TEST_BASE_ALLOCATOR_H_
#define COMPONENTS_GC_TEST_BASE_ALLOCATOR_H_
#include "components/gc/public/platform.h"
namespace gc {
namespace test {
// Test PageAllocator backed by PartitionAlloc's low-level page allocator.
class BaseAllocator final : public PageAllocator {
public:
size_t AllocatePageSize() const final;
size_t CommitPageSize() const final;
void* AllocatePages(void*, size_t, size_t, Permission) final;
bool FreePages(void*, size_t) final;
bool SetPermissions(void*, size_t, Permission) final;
bool DiscardSystemPages(void*, size_t) final;
};
} // namespace test
} // namespace gc
#endif // COMPONENTS_GC_TEST_BASE_ALLOCATOR_H_
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "components/gc/test/base_allocator.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace gc {
namespace test {
// Smoke test: a page-size allocation is writable across its full extent and
// can be freed.
TEST(BaseAllocatorTest, AllocateAndFreePage) {
BaseAllocator alloc;
const size_t allocation_size = alloc.AllocatePageSize();
uint8_t* memory = static_cast<uint8_t*>(
alloc.AllocatePages(nullptr, allocation_size, alloc.AllocatePageSize(),
BaseAllocator::Permission::kReadWrite));
ASSERT_TRUE(memory);
memory[0] = 1;
ASSERT_EQ(1u, memory[0]);
memory[allocation_size - 1] = 2;
ASSERT_EQ(2u, memory[allocation_size - 1]);
ASSERT_TRUE(alloc.FreePages(memory, allocation_size));
}
} // namespace test
} // namespace gc
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "testing/gtest/include/gtest/gtest.h"
// Test binary entry point: initializes GoogleTest and runs all registered
// tests.
int main(int argc, char** argv) {
testing::InitGoogleTest(&argc, argv);
return RUN_ALL_TESTS();
}
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment