Commit bb975bfe authored by Alexei Filippov, committed by Commit Bot

Sampling Heap Profiler: Avoid SEGV crashes when allocation is aligned with a page.

BUG=803276

Change-Id: I1722ef6d2a9b529919189d7babb767bfc7cbbf83
Reviewed-on: https://chromium-review.googlesource.com/888328
Commit-Queue: Alexei Filippov <alph@chromium.org>
Reviewed-by: Pavel Feldman <pfeldman@chromium.org>
Cr-Commit-Position: refs/heads/master@{#532243}
parent 3b2682d7
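
Judging from the code in this diff (the offset bookkeeping and the `[-1]` writes), the profiler tags each sampled allocation by writing a magic uint32_t into the four bytes immediately before the address it returns, and HasBeenSampledFastCheck later looks for that tag by reading address[-1]. When an allocation starts exactly at (or within four bytes of) a page boundary, that read touches the preceding page, which may be unmapped, and the process takes a SEGV. A minimal POSIX-only sketch of the failure mode (illustration only, not part of the commit; whether the preceding page is actually unmapped depends on where the kernel places the mapping):

```cpp
// Sketch: why reading the 4 bytes before an allocation can fault.
#include <stdint.h>
#include <sys/mman.h>

int main() {
  // Map a single page; the page right before it is typically unmapped.
  void* page = mmap(nullptr, 4096, PROT_READ | PROT_WRITE,
                    MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
  uint32_t* p = static_cast<uint32_t*>(page);
  // p is page-aligned, so p[-1] names the last 4 bytes of the previous
  // page. Performing the read would crash if that page is unmapped --
  // exactly the situation the fast check could run into.
  // uint32_t magic = p[-1];
  munmap(page, 4096);
  return 0;
}
```
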
@@ -13,6 +13,7 @@
 #include "base/debug/stack_trace.h"
 #include "base/memory/singleton.h"
 #include "base/rand_util.h"
+#include "base/sys_info.h"
 #include "build/build_config.h"

 namespace blink {
@@ -23,10 +24,11 @@ using base::subtle::AtomicWord;

 namespace {

-const unsigned kMagicSignature = 0x14690ca5;
+const uint32_t kMagicSignature = 0x14690ca5;
 const unsigned kDefaultAlignment = 16;
 const unsigned kSkipAllocatorFrames = 4;
 const size_t kDefaultSamplingInterval = 128 * 1024;
+const uintptr_t kPageMask = 0xfff;  // Assume page size is at least 4 KB.

 bool g_deterministic;
 Atomic32 g_running;
@@ -36,7 +38,11 @@ AtomicWord g_sampling_interval = kDefaultSamplingInterval;
 uint32_t g_last_sample_ordinal = 0;

 inline bool HasBeenSampledFastCheck(void* address) {
-  return address && reinterpret_cast<unsigned*>(address)[-1] == kMagicSignature;
+  // If address falls onto the beginning of a page, pessimistically return true.
+  if (UNLIKELY((reinterpret_cast<uintptr_t>(address) & kPageMask) <
+               sizeof(uint32_t)))
+    return address;
+  return reinterpret_cast<uint32_t*>(address)[-1] == kMagicSignature;
 }

 }  // namespace
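
Restated outside the diff, the new check reads as follows; a standalone sketch (UNLIKELY dropped, and the implicit pointer-to-bool conversion of `return address;` spelled out) that can be tested in isolation:

```cpp
#include <cstdint>

constexpr uintptr_t kPageMask = 0xfff;  // in-page offset bits for >= 4 KB pages
constexpr uint32_t kMagicSignature = 0x14690ca5;

inline bool HasBeenSampledFastCheck(void* address) {
  uintptr_t addr = reinterpret_cast<uintptr_t>(address);
  // Fewer than sizeof(uint32_t) bytes lie between |address| and the start
  // of its page, so address[-1] would touch the previous, possibly
  // unmapped, page. Pessimistically report "sampled" (true for any
  // non-null pointer, false for nullptr) and let the slow path decide.
  if ((addr & kPageMask) < sizeof(uint32_t))
    return address != nullptr;
  return reinterpret_cast<uint32_t*>(address)[-1] == kMagicSignature;
}
```

The pessimistic answer is cheap to tolerate: judging from RecordFree below, a false "sampled" only sends the caller to the samples map, which simply fails to find the address and returns it unchanged.
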
@@ -47,6 +53,11 @@ SamplingNativeHeapProfiler::Sample::Sample(size_t size,
                                            unsigned offset)
     : size(size), count(count), ordinal(ordinal), offset(offset) {}

+SamplingNativeHeapProfiler::SamplingNativeHeapProfiler() {
+  size_t page_size = base::SysInfo::VMAllocationGranularity();
+  CHECK_GT(page_size, kPageMask);
+}
+
 SamplingHeapProfiler* SamplingHeapProfiler::GetInstance() {
   return SamplingNativeHeapProfiler::GetInstance();
 }
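
The new constructor exists to guard the hardcoded mask: 0xfff only catches every page start if real pages are at least 4 KB. If pages were smaller, say 2 KB, an address ending in 0x800 would begin a page yet sail past the mask test, reintroducing the crash; CHECK_GT(page_size, kPageMask) turns that configuration into a loud startup failure instead of a latent SEGV. A hypothetical alternative (not what the commit does) would derive the mask from the reported granularity at runtime:

```cpp
#include <cstdint>

#include "base/sys_info.h"

// Hypothetical helper, not in the commit: compute the in-page offset mask
// from the actual allocation granularity instead of hardcoding 4 KB.
// VMAllocationGranularity() is a power of two on the supported platforms,
// so subtracting one yields exactly the offset bits.
uintptr_t ComputePageMask() {
  size_t granularity = base::SysInfo::VMAllocationGranularity();
  return static_cast<uintptr_t>(granularity) - 1;
}
```

A compile-time constant presumably won out because the mask sits on the allocation fast path, and the CHECK makes the 4 KB assumption safe everywhere the code actually runs.
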
@@ -317,15 +328,22 @@ void* SamplingNativeHeapProfiler::RecordAlloc(size_t total_allocated,
 }

 void* SamplingNativeHeapProfiler::RecordFree(void* address) {
+  // We never record allocations made by the profiler itself. Bail out if reentering.
+  if (entered_.Get())
+    return address;
   base::AutoLock lock(mutex_);
+  entered_.Set(true);
   auto& samples = GetInstance()->samples_;
   auto it = samples.find(address);
-  if (it == samples.end())
+  if (it == samples.end()) {
+    entered_.Set(false);
     return address;
+  }
   void* address_to_free = reinterpret_cast<char*>(address) - it->second.offset;
   if (it->second.offset)
     reinterpret_cast<unsigned*>(address)[-1] = 0;
   samples.erase(it);
+  entered_.Set(false);
   return address_to_free;
 }
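
The other half of the change makes RecordFree reentrancy-safe: entered_ appears to be a thread-local flag (its Get/Set API matches base::ThreadLocalBoolean), and the erase of a samples map node deallocates memory, which presumably re-enters the free hook. Note that every early return now has to remember to clear the flag. A sketch of the same idea as an RAII scope (hypothetical refactoring, not the commit's code), which would make the Set(false) calls impossible to miss:

```cpp
#include "base/threading/thread_local.h"

// Hypothetical RAII guard: marks the current thread as "inside the
// profiler" for the lifetime of a scope, so re-entrant alloc/free calls
// made by the profiler's own bookkeeping can bail out early.
class ReentryScope {
 public:
  explicit ReentryScope(base::ThreadLocalBoolean* entered)
      : entered_(entered) {
    entered_->Set(true);
  }
  ~ReentryScope() { entered_->Set(false); }

 private:
  base::ThreadLocalBoolean* entered_;
};
```

With such a guard, RecordFree would construct one ReentryScope right after the entered_.Get() bailout and drop both explicit entered_.Set(false) calls.
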
@@ -345,11 +363,10 @@ SamplingNativeHeapProfiler::GetSamples(uint32_t profile_id) {
   CHECK(!entered_.Get());
   entered_.Set(true);
   std::vector<Sample> samples;
-  for (auto it = samples_.begin(); it != samples_.end(); ++it) {
-    Sample& sample = it->second;
-    if (sample.ordinal <= profile_id)
-      continue;
-    samples.push_back(sample);
+  for (auto& it : samples_) {
+    Sample& sample = it.second;
+    if (sample.ordinal > profile_id)
+      samples.push_back(sample);
   }
   entered_.Set(false);
   return samples;
...
@@ -40,8 +40,6 @@ class PLATFORM_EXPORT SamplingNativeHeapProfiler : public SamplingHeapProfiler {
     uint32_t offset;
   };

-  SamplingNativeHeapProfiler() = default;
-
   uint32_t Start() override;
   void Stop() override;
   void SetSamplingInterval(size_t sampling_interval) override;
@@ -52,6 +50,8 @@ class PLATFORM_EXPORT SamplingNativeHeapProfiler : public SamplingHeapProfiler {
   static SamplingNativeHeapProfiler* GetInstance();

  private:
+  SamplingNativeHeapProfiler();
+
   static void InstallAllocatorHooksOnce();
   static bool InstallAllocatorHooks();
   static size_t GetNextSampleInterval(size_t base_interval);
...