Commit 70e7884a authored by Alexei Filippov, committed by Commit Bot

Sampling Heap Profiler: Optimize fast path of the allocation hooks.

Do not create the Sample object ahead of time.
Moved all the hooks into the SamplingNativeHeapProfiler class to improve
code readability.

BUG=803276

Change-Id: I88a920c338d19fc89689c3f797d2c002a3d976cb
Reviewed-on: https://chromium-review.googlesource.com/879595
Commit-Queue: Alexei Filippov <alph@chromium.org>
Reviewed-by: Pavel Feldman <pfeldman@chromium.org>
Reviewed-by: Primiano Tucci <primiano@chromium.org>
Cr-Commit-Position: refs/heads/master@{#532028}
parent 4db68ed9
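The point of the change, per the message above, is that the hot path of the allocation hooks no longer constructs a Sample; a sample is materialized only once the sampling interval is exhausted. The sketch below illustrates that pattern in a self-contained form. It is not the Chromium implementation: the counter name, the interval value, and both function bodies are assumptions, and std::atomic stands in for the base/atomicops.h primitives this change pulls in.

// Illustrative sketch only, not Chromium code. Assumes a byte-countdown
// sampling scheme: most allocations take the lock-free fast path, and
// only sampled ones reach the slow path where a Sample would be built.
#include <atomic>
#include <cstddef>
#include <cstdint>
#include <cstdio>

namespace {

// Hypothetical sampling interval and counter (128 KiB between samples).
constexpr intptr_t kSamplingInterval = 131072;
std::atomic<intptr_t> g_bytes_until_sample{kSamplingInterval};

// Fast path: one relaxed atomic subtraction per allocation, no lock,
// no Sample object. Returns true only when a sample is due.
inline bool ShouldRecordSample(std::size_t allocation_size) {
  intptr_t old_value = g_bytes_until_sample.fetch_sub(
      static_cast<intptr_t>(allocation_size), std::memory_order_relaxed);
  return old_value - static_cast<intptr_t>(allocation_size) <= 0;
}

// Slow path: only here would the real profiler construct a Sample,
// capture a stack trace, and rearm the counter.
void RecordAlloc(std::size_t allocation_size, void* address) {
  g_bytes_until_sample.store(kSamplingInterval, std::memory_order_relaxed);
  std::printf("sampled allocation of %zu bytes at %p\n", allocation_size,
              address);
}

}  // namespace

int main() {
  char buffer[64];
  for (int i = 0; i < 10000; ++i) {
    if (ShouldRecordSample(sizeof(buffer)))  // common case: false
      RecordAlloc(sizeof(buffer), buffer);   // rare case: record a sample
  }
  return 0;
}

The design point is that the common case costs a single relaxed atomic operation; locks, map lookups, and Sample construction are confined to the rare sampling branch.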
@@ -8,6 +8,7 @@
 #include <unordered_map>
 #include <vector>
 
 #include "base/allocator/allocator_shim.h"
+#include "base/atomicops.h"
 #include "base/macros.h"
 #include "base/synchronization/lock.h"
@@ -23,12 +24,19 @@ namespace blink {
 class PLATFORM_EXPORT SamplingNativeHeapProfiler {
  public:
-  struct Sample {
+  class Sample {
+   public:
     size_t size;
     size_t count;
-    std::vector<void*> stack;
 
+   private:
+    friend class SamplingNativeHeapProfiler;
+    Sample(size_t, size_t count, unsigned ordinal, unsigned offset);
+
+    uint32_t ordinal;
+    uint32_t offset;
+    std::vector<void*> stack;
   };
 
   SamplingNativeHeapProfiler() = default;
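The struct-to-class change in the hunk above pairs a private constructor with a friend declaration, so Sample objects can only be created by the profiler itself, on the recording path. A minimal sketch of that pattern follows; the Profiler::Record method and its body are hypothetical stand-ins for the real recording logic.

// Minimal sketch of the friend/private-constructor pattern; not the
// Chromium code. Callers can copy and read Samples but never create one.
#include <cstddef>
#include <vector>

class Profiler {
 public:
  class Sample {
   public:
    std::size_t size;
    std::size_t count;
    std::vector<void*> stack;

   private:
    friend class Profiler;  // Only the profiler may construct Samples.
    Sample(std::size_t size, std::size_t count) : size(size), count(count) {}
  };

  // Samples come into existence only here, on the recording path.
  Sample Record(std::size_t size, std::size_t count) {
    return Sample(size, count);
  }
};

int main() {
  Profiler profiler;
  Profiler::Sample sample = profiler.Record(64, 1);
  // Profiler::Sample s(64, 1);  // would not compile: constructor is private
  return sample.count == 1 ? 0 : 1;
}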
@@ -42,25 +50,64 @@ class PLATFORM_EXPORT SamplingNativeHeapProfiler {
   static SamplingNativeHeapProfiler* GetInstance();
 
-  static inline bool CreateAllocSample(size_t, Sample*);
-  void* RecordAlloc(Sample&,
+ private:
+  static void InstallAllocatorHooksOnce();
+  static bool InstallAllocatorHooks();
+  static size_t GetNextSampleInterval(size_t base_interval);
+  static inline bool ShouldRecordSample(size_t, size_t* accumulated);
+
+  void* RecordAlloc(size_t total_allocated,
+                    size_t allocation_size,
                     void* address,
+                    uint32_t offset,
                     unsigned skip_frames,
                     bool preserve_data = false);
   void* RecordFree(void* address);
 
- private:
-  static void InstallAllocatorHooksOnce();
-  static bool InstallAllocatorHooks();
-  static intptr_t GetNextSampleInterval(uint64_t base_interval);
   void RecordStackTrace(Sample*, unsigned skip_frames);
 
+  static void* AllocFn(const base::allocator::AllocatorDispatch* self,
+                       size_t,
+                       void* context);
+  static void* AllocZeroInitializedFn(
+      const base::allocator::AllocatorDispatch* self,
+      size_t n,
+      size_t,
+      void* context);
+  static void* AllocAlignedFn(const base::allocator::AllocatorDispatch* self,
+                              size_t alignment,
+                              size_t,
+                              void* context);
+  static void* ReallocFn(const base::allocator::AllocatorDispatch* self,
+                         void* address,
+                         size_t,
+                         void* context);
+  static void FreeFn(const base::allocator::AllocatorDispatch* self,
+                     void* address,
+                     void* context);
+  static size_t GetSizeEstimateFn(
+      const base::allocator::AllocatorDispatch* self,
+      void* address,
+      void* context);
+  static unsigned BatchMallocFn(const base::allocator::AllocatorDispatch* self,
+                                size_t,
+                                void** results,
+                                unsigned num_requested,
+                                void* context);
+  static void BatchFreeFn(const base::allocator::AllocatorDispatch* self,
+                          void** to_be_freed,
+                          unsigned num_to_be_freed,
+                          void* context);
+  static void FreeDefiniteSizeFn(
+      const base::allocator::AllocatorDispatch* self,
+      void* ptr,
+      size_t,
+      void* context);
 
   base::ThreadLocalBoolean entered_;
   base::Lock mutex_;
   std::unordered_map<void*, Sample> samples_;
+  static base::allocator::AllocatorDispatch allocator_dispatch_;
 
   friend struct base::DefaultSingletonTraits<SamplingNativeHeapProfiler>;
   DISALLOW_COPY_AND_ASSIGN(SamplingNativeHeapProfiler);
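Each hook added above receives its own dispatch node (self) so it can forward the call to the next allocator in the shim chain before or after recording. The sketch below models that flow with a locally defined two-entry dispatch struct: the field names echo base/allocator/allocator_shim.h, but the struct and the hook bodies are simplified assumptions rather than the real interface.

// Simplified model of an allocator-shim dispatch chain; not Chromium code.
#include <cstddef>
#include <cstdio>
#include <cstdlib>

struct AllocatorDispatch {
  void* (*alloc_function)(const AllocatorDispatch* self, std::size_t size,
                          void* context);
  void (*free_function)(const AllocatorDispatch* self, void* address,
                        void* context);
  const AllocatorDispatch* next;  // The allocator this node wraps.
};

// Terminal node: the underlying system allocator.
void* SystemAlloc(const AllocatorDispatch*, std::size_t size, void*) {
  return std::malloc(size);
}
void SystemFree(const AllocatorDispatch*, void* address, void*) {
  std::free(address);
}
const AllocatorDispatch g_system = {&SystemAlloc, &SystemFree, nullptr};

// Profiler node: forward to next, then record (stand-ins for the class's
// AllocFn/FreeFn calling RecordAlloc/RecordFree).
void* ProfilerAlloc(const AllocatorDispatch* self, std::size_t size,
                    void* context) {
  void* address = self->next->alloc_function(self->next, size, context);
  std::printf("alloc %zu -> %p\n", size, address);  // RecordAlloc stand-in
  return address;
}
void ProfilerFree(const AllocatorDispatch* self, void* address,
                  void* context) {
  std::printf("free %p\n", address);  // RecordFree stand-in
  self->next->free_function(self->next, address, context);
}
const AllocatorDispatch g_profiler = {&ProfilerAlloc, &ProfilerFree,
                                      &g_system};

int main() {
  void* p = g_profiler.alloc_function(&g_profiler, 32, nullptr);
  g_profiler.free_function(&g_profiler, p, nullptr);
  return 0;
}

Making the hooks static members of SamplingNativeHeapProfiler, as this change does, keeps the dispatch table and the recording logic in one place, which is the readability improvement the commit message refers to.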