//===-- asan_allocator.cc -------------------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of AddressSanitizer, an address sanity checker.
//
// Implementation of ASan's memory allocator, second version.
// This variant uses the allocator from sanitizer_common, i.e. the one shared
// with ThreadSanitizer and MemorySanitizer.
//
//===----------------------------------------------------------------------===//

#include "asan_allocator.h"
#include "asan_mapping.h"
#include "asan_poisoning.h"
#include "asan_report.h"
#include "asan_stack.h"
#include "asan_thread.h"
#include "sanitizer_common/sanitizer_allocator_interface.h"
#include "sanitizer_common/sanitizer_flags.h"
#include "sanitizer_common/sanitizer_internal_defs.h"
#include "sanitizer_common/sanitizer_list.h"
#include "sanitizer_common/sanitizer_stackdepot.h"
#include "sanitizer_common/sanitizer_quarantine.h"
#include "lsan/lsan_common.h"

namespace __asan {

// Valid redzone sizes are 16, 32, 64, ... 2048, so we encode them in 3 bits.
// We use adaptive redzones: for larger allocations, larger redzones are used.
static u32 RZLog2Size(u32 rz_log) {
  CHECK_LT(rz_log, 8);
  return 16 << rz_log;
}

static u32 RZSize2Log(u32 rz_size) {
  CHECK_GE(rz_size, 16);
  CHECK_LE(rz_size, 2048);
  CHECK(IsPowerOfTwo(rz_size));
  u32 res = Log2(rz_size) - 4;
  CHECK_EQ(rz_size, RZLog2Size(res));
  return res;
}
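
// Illustrative round trip: RZSize2Log(32) == 1 and RZLog2Size(1) == 32,
// since Log2(32) - 4 == 1 and 16 << 1 == 32.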

static AsanAllocator &get_allocator();

// The memory chunk allocated from the underlying allocator looks like this:
// L L L L L L H H U U U U U U R R
//   L -- left redzone words (0 or more bytes)
//   H -- ChunkHeader (16 bytes), which is also a part of the left redzone.
//   U -- user memory.
//   R -- right redzone (0 or more bytes)
// ChunkBase consists of ChunkHeader and other bytes that overlap with user
// memory.

// If the left redzone is greater than the ChunkHeader size we store a magic
// value in the first uptr word of the memory block and store the address of
// ChunkBase in the next uptr.
// M B L L L L L L L L L  H H U U U U U U
//   |                    ^
//   ---------------------|
//   M -- magic value kAllocBegMagic
//   B -- address of ChunkHeader pointing to the first 'H'
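// For example, with a 32-byte left redzone on a 64-bit target the block is
// laid out as: kAllocBegMagic at offset 0, the ChunkHeader address at
// offset 8, the 16-byte ChunkHeader at offset 16, and user memory at 32.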
static const uptr kAllocBegMagic = 0xCC6E96B9;

struct ChunkHeader {
  // First 8 bytes.
  u32 chunk_state       : 8;  // Must be first.
  u32 alloc_tid         : 24;

  u32 free_tid          : 24;
  u32 from_memalign     : 1;
  u32 alloc_type        : 2;
  u32 rz_log            : 3;
  u32 lsan_tag          : 2;
  // Second 8 bytes.
  // This field is used for small sizes. For large sizes it is equal to
  // SizeClassMap::kMaxSize and the actual size is stored in the
  // SecondaryAllocator's metadata.
  u32 user_requested_size;
  u32 alloc_context_id;
};

struct ChunkBase : ChunkHeader {
  // Header2, intersects with user memory.
  u32 free_context_id;
};

static const uptr kChunkHeaderSize = sizeof(ChunkHeader);
static const uptr kChunkHeader2Size = sizeof(ChunkBase) - kChunkHeaderSize;
COMPILER_CHECK(kChunkHeaderSize == 16);
COMPILER_CHECK(kChunkHeader2Size <= 16);
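
// The first COMPILER_CHECK holds because the bit fields pack exactly:
// chunk_state(8) + alloc_tid(24) == 32 bits, and free_tid(24) +
// from_memalign(1) + alloc_type(2) + rz_log(3) + lsan_tag(2) == 32 bits,
// plus user_requested_size(32) and alloc_context_id(32): 16 bytes total.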

// Every chunk of memory allocated by this allocator can be in one of 3 states:
// CHUNK_AVAILABLE: the chunk is in the free list and ready to be allocated.
// CHUNK_ALLOCATED: the chunk is allocated and not yet freed.
// CHUNK_QUARANTINE: the chunk was freed and put into quarantine zone.
enum {
  CHUNK_AVAILABLE  = 0,  // 0 is the default value even if we didn't set it.
  CHUNK_ALLOCATED  = 2,
  CHUNK_QUARANTINE = 3
};
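
// A chunk cycles CHUNK_AVAILABLE -> CHUNK_ALLOCATED (in Allocate) ->
// CHUNK_QUARANTINE (in Deallocate) -> CHUNK_AVAILABLE again (in Recycle).
// The state byte is always flipped atomically; see
// AtomicallySetQuarantineFlagIfAllocated below.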

struct AsanChunk: ChunkBase {
  uptr Beg() { return reinterpret_cast<uptr>(this) + kChunkHeaderSize; }
  uptr UsedSize(bool locked_version = false) {
    if (user_requested_size != SizeClassMap::kMaxSize)
      return user_requested_size;
    return *reinterpret_cast<uptr *>(
               get_allocator().GetMetaData(AllocBeg(locked_version)));
  }
  void *AllocBeg(bool locked_version = false) {
    if (from_memalign) {
      if (locked_version)
        return get_allocator().GetBlockBeginFastLocked(
            reinterpret_cast<void *>(this));
      return get_allocator().GetBlockBegin(reinterpret_cast<void *>(this));
    }
    return reinterpret_cast<void*>(Beg() - RZLog2Size(rz_log));
  }
  bool AddrIsInside(uptr addr, bool locked_version = false) {
    return (addr >= Beg()) && (addr < Beg() + UsedSize(locked_version));
  }
};

struct QuarantineCallback {
  explicit QuarantineCallback(AllocatorCache *cache)
      : cache_(cache) {
  }

  void Recycle(AsanChunk *m) {
    CHECK_EQ(m->chunk_state, CHUNK_QUARANTINE);
    atomic_store((atomic_uint8_t*)m, CHUNK_AVAILABLE, memory_order_relaxed);
    CHECK_NE(m->alloc_tid, kInvalidTid);
    CHECK_NE(m->free_tid, kInvalidTid);
    PoisonShadow(m->Beg(),
                 RoundUpTo(m->UsedSize(), SHADOW_GRANULARITY),
                 kAsanHeapLeftRedzoneMagic);
    void *p = reinterpret_cast<void *>(m->AllocBeg());
    if (p != m) {
      uptr *alloc_magic = reinterpret_cast<uptr *>(p);
      CHECK_EQ(alloc_magic[0], kAllocBegMagic);
      // Clear the magic value, as allocator internals may overwrite the
      // contents of deallocated chunk, confusing GetAsanChunk lookup.
      alloc_magic[0] = 0;
      CHECK_EQ(alloc_magic[1], reinterpret_cast<uptr>(m));
    }

    // Statistics.
    AsanStats &thread_stats = GetCurrentThreadStats();
    thread_stats.real_frees++;
    thread_stats.really_freed += m->UsedSize();

    get_allocator().Deallocate(cache_, p);
  }

  void *Allocate(uptr size) {
    return get_allocator().Allocate(cache_, size, 1, false);
  }

  void Deallocate(void *p) {
    get_allocator().Deallocate(cache_, p);
  }

  AllocatorCache *cache_;
};

typedef Quarantine<QuarantineCallback, AsanChunk> AsanQuarantine;
typedef AsanQuarantine::Cache QuarantineCache;

void AsanMapUnmapCallback::OnMap(uptr p, uptr size) const {
  PoisonShadow(p, size, kAsanHeapLeftRedzoneMagic);
  // Statistics.
  AsanStats &thread_stats = GetCurrentThreadStats();
  thread_stats.mmaps++;
  thread_stats.mmaped += size;
}
void AsanMapUnmapCallback::OnUnmap(uptr p, uptr size) const {
  PoisonShadow(p, size, 0);
  // We are about to unmap a chunk of user memory.
  // Mark the corresponding shadow memory as not needed.
  FlushUnneededASanShadowMemory(p, size);
  // Statistics.
  AsanStats &thread_stats = GetCurrentThreadStats();
  thread_stats.munmaps++;
  thread_stats.munmaped += size;
}

// We cannot use THREADLOCAL because it is not supported on some of the
// platforms we care about (OSX 10.6, Android).
// static THREADLOCAL AllocatorCache cache;
AllocatorCache *GetAllocatorCache(AsanThreadLocalMallocStorage *ms) {
  CHECK(ms);
  return &ms->allocator_cache;
}

QuarantineCache *GetQuarantineCache(AsanThreadLocalMallocStorage *ms) {
  CHECK(ms);
  CHECK_LE(sizeof(QuarantineCache), sizeof(ms->quarantine_cache));
  return reinterpret_cast<QuarantineCache *>(ms->quarantine_cache);
}

void AllocatorOptions::SetFrom(const Flags *f, const CommonFlags *cf) {
  quarantine_size_mb = f->quarantine_size_mb;
  min_redzone = f->redzone;
  max_redzone = f->max_redzone;
  may_return_null = cf->allocator_may_return_null;
  alloc_dealloc_mismatch = f->alloc_dealloc_mismatch;
}

void AllocatorOptions::CopyTo(Flags *f, CommonFlags *cf) {
  f->quarantine_size_mb = quarantine_size_mb;
  f->redzone = min_redzone;
  f->max_redzone = max_redzone;
  cf->allocator_may_return_null = may_return_null;
  f->alloc_dealloc_mismatch = alloc_dealloc_mismatch;
}

struct Allocator {
  static const uptr kMaxAllowedMallocSize =
      FIRST_32_SECOND_64(3UL << 30, 1ULL << 40);
  static const uptr kMaxThreadLocalQuarantine =
      FIRST_32_SECOND_64(1 << 18, 1 << 20);

  AsanAllocator allocator;
  AsanQuarantine quarantine;
  StaticSpinMutex fallback_mutex;
  AllocatorCache fallback_allocator_cache;
  QuarantineCache fallback_quarantine_cache;

  // ------------------- Options --------------------------
  atomic_uint16_t min_redzone;
  atomic_uint16_t max_redzone;
  atomic_uint8_t alloc_dealloc_mismatch;

  // ------------------- Initialization ------------------------
  explicit Allocator(LinkerInitialized)
      : quarantine(LINKER_INITIALIZED),
        fallback_quarantine_cache(LINKER_INITIALIZED) {}

  void CheckOptions(const AllocatorOptions &options) const {
    CHECK_GE(options.min_redzone, 16);
    CHECK_GE(options.max_redzone, options.min_redzone);
    CHECK_LE(options.max_redzone, 2048);
    CHECK(IsPowerOfTwo(options.min_redzone));
    CHECK(IsPowerOfTwo(options.max_redzone));
  }

  void SharedInitCode(const AllocatorOptions &options) {
    CheckOptions(options);
    quarantine.Init((uptr)options.quarantine_size_mb << 20,
                    kMaxThreadLocalQuarantine);
    atomic_store(&alloc_dealloc_mismatch, options.alloc_dealloc_mismatch,
                 memory_order_release);
    atomic_store(&min_redzone, options.min_redzone, memory_order_release);
    atomic_store(&max_redzone, options.max_redzone, memory_order_release);
  }

  void Initialize(const AllocatorOptions &options) {
    allocator.Init(options.may_return_null);
    SharedInitCode(options);
  }

  void ReInitialize(const AllocatorOptions &options) {
    allocator.SetMayReturnNull(options.may_return_null);
    SharedInitCode(options);
  }

  void GetOptions(AllocatorOptions *options) const {
    options->quarantine_size_mb = quarantine.GetSize() >> 20;
    options->min_redzone = atomic_load(&min_redzone, memory_order_acquire);
    options->max_redzone = atomic_load(&max_redzone, memory_order_acquire);
    options->may_return_null = allocator.MayReturnNull();
    options->alloc_dealloc_mismatch =
        atomic_load(&alloc_dealloc_mismatch, memory_order_acquire);
  }

  // -------------------- Helper methods. -------------------------
  uptr ComputeRZLog(uptr user_requested_size) {
    u32 rz_log =
      user_requested_size <= 64        - 16   ? 0 :
      user_requested_size <= 128       - 32   ? 1 :
      user_requested_size <= 512       - 64   ? 2 :
      user_requested_size <= 4096      - 128  ? 3 :
      user_requested_size <= (1 << 14) - 256  ? 4 :
      user_requested_size <= (1 << 15) - 512  ? 5 :
      user_requested_size <= (1 << 16) - 1024 ? 6 : 7;
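    // e.g. user_requested_size == 90 lands in the "<= 128 - 32" bucket, so
    // rz_log == 1, i.e. a 32-byte redzone before the clamping below.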
    u32 min_rz = atomic_load(&min_redzone, memory_order_acquire);
    u32 max_rz = atomic_load(&max_redzone, memory_order_acquire);
    return Min(Max(rz_log, RZSize2Log(min_rz)), RZSize2Log(max_rz));
  }

  // We have an address between two chunks, and we want to report just one.
  AsanChunk *ChooseChunk(uptr addr, AsanChunk *left_chunk,
                         AsanChunk *right_chunk) {
    // Prefer an allocated chunk over a freed chunk, and a freed chunk over
    // an available chunk.
    if (left_chunk->chunk_state != right_chunk->chunk_state) {
      if (left_chunk->chunk_state == CHUNK_ALLOCATED)
        return left_chunk;
      if (right_chunk->chunk_state == CHUNK_ALLOCATED)
        return right_chunk;
      if (left_chunk->chunk_state == CHUNK_QUARANTINE)
        return left_chunk;
      if (right_chunk->chunk_state == CHUNK_QUARANTINE)
        return right_chunk;
    }
    // Same chunk_state: choose based on offset.
    sptr l_offset = 0, r_offset = 0;
    CHECK(AsanChunkView(left_chunk).AddrIsAtRight(addr, 1, &l_offset));
    CHECK(AsanChunkView(right_chunk).AddrIsAtLeft(addr, 1, &r_offset));
    if (l_offset < r_offset)
      return left_chunk;
    return right_chunk;
  }

  // -------------------- Allocation/Deallocation routines ---------------
  void *Allocate(uptr size, uptr alignment, BufferedStackTrace *stack,
                 AllocType alloc_type, bool can_fill) {
    if (UNLIKELY(!asan_inited))
      AsanInitFromRtl();
    Flags &fl = *flags();
    CHECK(stack);
    const uptr min_alignment = SHADOW_GRANULARITY;
    if (alignment < min_alignment)
      alignment = min_alignment;
    if (size == 0) {
      // We'd be happy to avoid allocating memory for zero-size requests, but
      // some programs/tests depend on this behavior and assume that malloc
      // would not return NULL even for zero-size allocations. Moreover, it
      // looks like operator new should never return NULL, and results of
      // consecutive "new" calls must be different even if the allocated size
      // is zero.
      size = 1;
    }
    CHECK(IsPowerOfTwo(alignment));
    uptr rz_log = ComputeRZLog(size);
    uptr rz_size = RZLog2Size(rz_log);
    uptr rounded_size = RoundUpTo(Max(size, kChunkHeader2Size), alignment);
    uptr needed_size = rounded_size + rz_size;
    if (alignment > min_alignment)
      needed_size += alignment;
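    // Example (assuming the default 8-byte shadow granularity on 64-bit):
    // a 90-byte malloc gives rz_size == 32 and rounded_size == 96, so
    // needed_size == 128; alignment == 8 adds nothing extra.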
    bool using_primary_allocator = true;
    // If we are allocating from the secondary allocator, there will be no
    // automatic right redzone, so add the right redzone manually.
    if (!PrimaryAllocator::CanAllocate(needed_size, alignment)) {
      needed_size += rz_size;
      using_primary_allocator = false;
    }
    CHECK(IsAligned(needed_size, min_alignment));
    if (size > kMaxAllowedMallocSize || needed_size > kMaxAllowedMallocSize) {
      Report("WARNING: AddressSanitizer failed to allocate 0x%zx bytes\n",
             (void*)size);
      return allocator.ReturnNullOrDie();
    }

    AsanThread *t = GetCurrentThread();
    void *allocated;
    bool check_rss_limit = true;
    if (t) {
      AllocatorCache *cache = GetAllocatorCache(&t->malloc_storage());
      allocated =
          allocator.Allocate(cache, needed_size, 8, false, check_rss_limit);
    } else {
      SpinMutexLock l(&fallback_mutex);
      AllocatorCache *cache = &fallback_allocator_cache;
      allocated =
          allocator.Allocate(cache, needed_size, 8, false, check_rss_limit);
    }

    if (!allocated)
      return allocator.ReturnNullOrDie();

    if (*(u8 *)MEM_TO_SHADOW((uptr)allocated) == 0 && CanPoisonMemory()) {
      // Heap poisoning is enabled, but the allocator provides an unpoisoned
      // chunk. This is possible if CanPoisonMemory() was false for some
      // time, for example, due to flags()->start_disabled.
      // Anyway, poison the block before using it for anything else.
      uptr allocated_size = allocator.GetActuallyAllocatedSize(allocated);
      PoisonShadow((uptr)allocated, allocated_size, kAsanHeapLeftRedzoneMagic);
    }

    uptr alloc_beg = reinterpret_cast<uptr>(allocated);
    uptr alloc_end = alloc_beg + needed_size;
    uptr beg_plus_redzone = alloc_beg + rz_size;
    uptr user_beg = beg_plus_redzone;
    if (!IsAligned(user_beg, alignment))
      user_beg = RoundUpTo(user_beg, alignment);
    uptr user_end = user_beg + size;
    CHECK_LE(user_end, alloc_end);
    uptr chunk_beg = user_beg - kChunkHeaderSize;
    AsanChunk *m = reinterpret_cast<AsanChunk *>(chunk_beg);
    m->alloc_type = alloc_type;
    m->rz_log = rz_log;
    u32 alloc_tid = t ? t->tid() : 0;
    m->alloc_tid = alloc_tid;
    CHECK_EQ(alloc_tid, m->alloc_tid);  // Does alloc_tid fit into the bitfield?
    m->free_tid = kInvalidTid;
    m->from_memalign = user_beg != beg_plus_redzone;
    if (alloc_beg != chunk_beg) {
      CHECK_LE(alloc_beg + 2 * sizeof(uptr), chunk_beg);
      reinterpret_cast<uptr *>(alloc_beg)[0] = kAllocBegMagic;
      reinterpret_cast<uptr *>(alloc_beg)[1] = chunk_beg;
    }
    if (using_primary_allocator) {
      CHECK(size);
      m->user_requested_size = size;
      CHECK(allocator.FromPrimary(allocated));
    } else {
      CHECK(!allocator.FromPrimary(allocated));
      m->user_requested_size = SizeClassMap::kMaxSize;
      uptr *meta = reinterpret_cast<uptr *>(allocator.GetMetaData(allocated));
      meta[0] = size;
      meta[1] = chunk_beg;
    }

    m->alloc_context_id = StackDepotPut(*stack);

    uptr size_rounded_down_to_granularity =
        RoundDownTo(size, SHADOW_GRANULARITY);
    // Unpoison the bulk of the memory region.
    if (size_rounded_down_to_granularity)
      PoisonShadow(user_beg, size_rounded_down_to_granularity, 0);
    // Deal with the end of the region if size is not aligned to granularity.
    if (size != size_rounded_down_to_granularity && CanPoisonMemory()) {
      u8 *shadow =
          (u8 *)MemToShadow(user_beg + size_rounded_down_to_granularity);
      *shadow = fl.poison_partial ? (size & (SHADOW_GRANULARITY - 1)) : 0;
    }
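    // e.g. for size == 90 with 8-byte granularity, the first 88 bytes are
    // unpoisoned above and the last shadow byte becomes 90 & 7 == 2,
    // meaning only the first 2 bytes of the final granule are addressable.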

    AsanStats &thread_stats = GetCurrentThreadStats();
    thread_stats.mallocs++;
    thread_stats.malloced += size;
    thread_stats.malloced_redzones += needed_size - size;
    if (needed_size > SizeClassMap::kMaxSize)
      thread_stats.malloc_large++;
    else
      thread_stats.malloced_by_size[SizeClassMap::ClassID(needed_size)]++;

    void *res = reinterpret_cast<void *>(user_beg);
    if (can_fill && fl.max_malloc_fill_size) {
      uptr fill_size = Min(size, (uptr)fl.max_malloc_fill_size);
      REAL(memset)(res, fl.malloc_fill_byte, fill_size);
    }
#if CAN_SANITIZE_LEAKS
    m->lsan_tag = __lsan::DisabledInThisThread() ? __lsan::kIgnored
                                                 : __lsan::kDirectlyLeaked;
#endif
    // Must be the last mutation of metadata in this function.
    atomic_store((atomic_uint8_t *)m, CHUNK_ALLOCATED, memory_order_release);
    ASAN_MALLOC_HOOK(res, size);
    return res;
  }

  // Set the quarantine flag if the chunk is allocated; issue an ASan error
  // report on available and quarantined chunks. Return true on success,
  // false otherwise.
  bool AtomicallySetQuarantineFlagIfAllocated(AsanChunk *m, void *ptr,
                                              BufferedStackTrace *stack) {
    u8 old_chunk_state = CHUNK_ALLOCATED;
    // Flip the chunk_state atomically to avoid race on double-free.
    if (!atomic_compare_exchange_strong((atomic_uint8_t *)m, &old_chunk_state,
                                        CHUNK_QUARANTINE,
                                        memory_order_acquire)) {
      ReportInvalidFree(ptr, old_chunk_state, stack);
      // It's not safe to push a chunk in quarantine on invalid free.
      return false;
    }
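    // If two threads race to free the same pointer, exactly one CAS above
    // succeeds; the loser observes CHUNK_QUARANTINE in old_chunk_state and
    // reports a double-free instead of corrupting the quarantine.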
    CHECK_EQ(CHUNK_ALLOCATED, old_chunk_state);
    return true;
  }

  // Expects the chunk to already be marked as quarantined by using
  // AtomicallySetQuarantineFlagIfAllocated.
  void QuarantineChunk(AsanChunk *m, void *ptr, BufferedStackTrace *stack,
                       AllocType alloc_type) {
    CHECK_EQ(m->chunk_state, CHUNK_QUARANTINE);
    CHECK_GE(m->alloc_tid, 0);
    if (SANITIZER_WORDSIZE == 64)  // On 32-bit this resides in the user area.
      CHECK_EQ(m->free_tid, kInvalidTid);
    AsanThread *t = GetCurrentThread();
    m->free_tid = t ? t->tid() : 0;
    m->free_context_id = StackDepotPut(*stack);
    // Poison the region.
    PoisonShadow(m->Beg(),
                 RoundUpTo(m->UsedSize(), SHADOW_GRANULARITY),
                 kAsanHeapFreeMagic);

    AsanStats &thread_stats = GetCurrentThreadStats();
    thread_stats.frees++;
    thread_stats.freed += m->UsedSize();

    // Push into quarantine.
    if (t) {
      AsanThreadLocalMallocStorage *ms = &t->malloc_storage();
      AllocatorCache *ac = GetAllocatorCache(ms);
      quarantine.Put(GetQuarantineCache(ms), QuarantineCallback(ac), m,
                     m->UsedSize());
    } else {
      SpinMutexLock l(&fallback_mutex);
      AllocatorCache *ac = &fallback_allocator_cache;
      quarantine.Put(&fallback_quarantine_cache, QuarantineCallback(ac), m,
                     m->UsedSize());
    }
  }

  void Deallocate(void *ptr, uptr delete_size, BufferedStackTrace *stack,
                  AllocType alloc_type) {
    uptr p = reinterpret_cast<uptr>(ptr);
    if (p == 0) return;

    uptr chunk_beg = p - kChunkHeaderSize;
    AsanChunk *m = reinterpret_cast<AsanChunk *>(chunk_beg);

    ASAN_FREE_HOOK(ptr);
    // Must mark the chunk as quarantined before any changes to its metadata.
    // Do not quarantine the given chunk if we failed to set the
    // CHUNK_QUARANTINE flag.
    if (!AtomicallySetQuarantineFlagIfAllocated(m, ptr, stack)) return;

    if (m->alloc_type != alloc_type) {
      if (atomic_load(&alloc_dealloc_mismatch, memory_order_acquire)) {
        ReportAllocTypeMismatch((uptr)ptr, stack, (AllocType)m->alloc_type,
                                (AllocType)alloc_type);
      }
    }

    if (delete_size && flags()->new_delete_type_mismatch &&
        delete_size != m->UsedSize()) {
      ReportNewDeleteSizeMismatch(p, m->UsedSize(), delete_size, stack);
    }

    QuarantineChunk(m, ptr, stack, alloc_type);
  }

  void *Reallocate(void *old_ptr, uptr new_size, BufferedStackTrace *stack) {
    CHECK(old_ptr && new_size);
    uptr p = reinterpret_cast<uptr>(old_ptr);
    uptr chunk_beg = p - kChunkHeaderSize;
    AsanChunk *m = reinterpret_cast<AsanChunk *>(chunk_beg);

    AsanStats &thread_stats = GetCurrentThreadStats();
    thread_stats.reallocs++;
    thread_stats.realloced += new_size;

    void *new_ptr = Allocate(new_size, 8, stack, FROM_MALLOC, true);
    if (new_ptr) {
      u8 chunk_state = m->chunk_state;
      if (chunk_state != CHUNK_ALLOCATED)
        ReportInvalidFree(old_ptr, chunk_state, stack);
      CHECK_NE(REAL(memcpy), nullptr);
      uptr memcpy_size = Min(new_size, m->UsedSize());
      // If realloc() races with free(), we may start copying freed memory.
      // However, we will report racy double-free later anyway.
      REAL(memcpy)(new_ptr, old_ptr, memcpy_size);
      Deallocate(old_ptr, 0, stack, FROM_MALLOC);
    }
    return new_ptr;
  }

  void *Calloc(uptr nmemb, uptr size, BufferedStackTrace *stack) {
    if (CallocShouldReturnNullDueToOverflow(size, nmemb))
      return allocator.ReturnNullOrDie();
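    // e.g. on a 32-bit target nmemb == size == 0x10000 makes nmemb * size
    // wrap to 0; the check above catches this before we allocate.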
    void *ptr = Allocate(nmemb * size, 8, stack, FROM_MALLOC, false);
    // If the memory comes from the secondary allocator, there is no need to
    // clear it as it comes directly from mmap.
    if (ptr && allocator.FromPrimary(ptr))
      REAL(memset)(ptr, 0, nmemb * size);
    return ptr;
  }

  void ReportInvalidFree(void *ptr, u8 chunk_state, BufferedStackTrace *stack) {
    if (chunk_state == CHUNK_QUARANTINE)
      ReportDoubleFree((uptr)ptr, stack);
    else
      ReportFreeNotMalloced((uptr)ptr, stack);
  }

  void CommitBack(AsanThreadLocalMallocStorage *ms) {
    AllocatorCache *ac = GetAllocatorCache(ms);
    quarantine.Drain(GetQuarantineCache(ms), QuarantineCallback(ac));
    allocator.SwallowCache(ac);
  }

  // -------------------------- Chunk lookup ----------------------

  // Assumes alloc_beg == allocator.GetBlockBegin(alloc_beg).
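  // Three cases: secondary-allocator chunks store chunk_beg in their
  // metadata; primary chunks whose header is not at alloc_beg (large
  // redzone or memalign) are found via the kAllocBegMagic pair; otherwise
  // the ChunkHeader starts right at alloc_beg.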
  AsanChunk *GetAsanChunk(void *alloc_beg) {
    if (!alloc_beg) return nullptr;
    if (!allocator.FromPrimary(alloc_beg)) {
      uptr *meta = reinterpret_cast<uptr *>(allocator.GetMetaData(alloc_beg));
      AsanChunk *m = reinterpret_cast<AsanChunk *>(meta[1]);
      return m;
    }
    uptr *alloc_magic = reinterpret_cast<uptr *>(alloc_beg);
    if (alloc_magic[0] == kAllocBegMagic)
      return reinterpret_cast<AsanChunk *>(alloc_magic[1]);
    return reinterpret_cast<AsanChunk *>(alloc_beg);
  }

  AsanChunk *GetAsanChunkByAddr(uptr p) {
    void *alloc_beg = allocator.GetBlockBegin(reinterpret_cast<void *>(p));
    return GetAsanChunk(alloc_beg);
  }

  // Allocator must be locked when this function is called.
  AsanChunk *GetAsanChunkByAddrFastLocked(uptr p) {
    void *alloc_beg =
        allocator.GetBlockBeginFastLocked(reinterpret_cast<void *>(p));
    return GetAsanChunk(alloc_beg);
  }

  uptr AllocationSize(uptr p) {
    AsanChunk *m = GetAsanChunkByAddr(p);
    if (!m) return 0;
    if (m->chunk_state != CHUNK_ALLOCATED) return 0;
    if (m->Beg() != p) return 0;
    return m->UsedSize();
  }

  AsanChunkView FindHeapChunkByAddress(uptr addr) {
    AsanChunk *m1 = GetAsanChunkByAddr(addr);
    if (!m1) return AsanChunkView(m1);
    sptr offset = 0;
    if (AsanChunkView(m1).AddrIsAtLeft(addr, 1, &offset)) {
      // The address is in the chunk's left redzone, so maybe it is actually
      // a right buffer overflow from the other chunk to the left.
      // Search a bit to the left to see if there is another chunk.
      AsanChunk *m2 = nullptr;
      for (uptr l = 1; l < GetPageSizeCached(); l++) {
        m2 = GetAsanChunkByAddr(addr - l);
        if (m2 == m1) continue;  // Still the same chunk.
        break;
      }
      if (m2 && AsanChunkView(m2).AddrIsAtRight(addr, 1, &offset))
        m1 = ChooseChunk(addr, m2, m1);
    }
    return AsanChunkView(m1);
  }

  void PrintStats() {
    allocator.PrintStats();
  }

  void ForceLock() {
    allocator.ForceLock();
    fallback_mutex.Lock();
  }

  void ForceUnlock() {
    fallback_mutex.Unlock();
    allocator.ForceUnlock();
  }
};

static Allocator instance(LINKER_INITIALIZED);

static AsanAllocator &get_allocator() {
  return instance.allocator;
}

bool AsanChunkView::IsValid() {
  return chunk_ && chunk_->chunk_state != CHUNK_AVAILABLE;
}
bool AsanChunkView::IsAllocated() {
  return chunk_ && chunk_->chunk_state == CHUNK_ALLOCATED;
}
uptr AsanChunkView::Beg() { return chunk_->Beg(); }
uptr AsanChunkView::End() { return Beg() + UsedSize(); }
uptr AsanChunkView::UsedSize() { return chunk_->UsedSize(); }
uptr AsanChunkView::AllocTid() { return chunk_->alloc_tid; }
uptr AsanChunkView::FreeTid() { return chunk_->free_tid; }

static StackTrace GetStackTraceFromId(u32 id) {
  CHECK(id);
  StackTrace res = StackDepotGet(id);
  CHECK(res.trace);
  return res;
}

u32 AsanChunkView::GetAllocStackId() { return chunk_->alloc_context_id; }
u32 AsanChunkView::GetFreeStackId() { return chunk_->free_context_id; }

StackTrace AsanChunkView::GetAllocStack() {
  return GetStackTraceFromId(GetAllocStackId());
}

StackTrace AsanChunkView::GetFreeStack() {
  return GetStackTraceFromId(GetFreeStackId());
}

void InitializeAllocator(const AllocatorOptions &options) {
  instance.Initialize(options);
}

void ReInitializeAllocator(const AllocatorOptions &options) {
  instance.ReInitialize(options);
}

void GetAllocatorOptions(AllocatorOptions *options) {
  instance.GetOptions(options);
}

AsanChunkView FindHeapChunkByAddress(uptr addr) {
  return instance.FindHeapChunkByAddress(addr);
}

void AsanThreadLocalMallocStorage::CommitBack() {
  instance.CommitBack(this);
}

void PrintInternalAllocatorStats() {
  instance.PrintStats();
}

void *asan_memalign(uptr alignment, uptr size, BufferedStackTrace *stack,
                    AllocType alloc_type) {
  return instance.Allocate(size, alignment, stack, alloc_type, true);
}

void asan_free(void *ptr, BufferedStackTrace *stack, AllocType alloc_type) {
  instance.Deallocate(ptr, 0, stack, alloc_type);
}

void asan_sized_free(void *ptr, uptr size, BufferedStackTrace *stack,
                     AllocType alloc_type) {
  instance.Deallocate(ptr, size, stack, alloc_type);
}

void *asan_malloc(uptr size, BufferedStackTrace *stack) {
  return instance.Allocate(size, 8, stack, FROM_MALLOC, true);
}

void *asan_calloc(uptr nmemb, uptr size, BufferedStackTrace *stack) {
  return instance.Calloc(nmemb, size, stack);
}

void *asan_realloc(void *p, uptr size, BufferedStackTrace *stack) {
  if (!p)
    return instance.Allocate(size, 8, stack, FROM_MALLOC, true);
  if (size == 0) {
    instance.Deallocate(p, 0, stack, FROM_MALLOC);
    return nullptr;
  }
  return instance.Reallocate(p, size, stack);
}

void *asan_valloc(uptr size, BufferedStackTrace *stack) {
  return instance.Allocate(size, GetPageSizeCached(), stack, FROM_MALLOC, true);
}

void *asan_pvalloc(uptr size, BufferedStackTrace *stack) {
  uptr PageSize = GetPageSizeCached();
  size = RoundUpTo(size, PageSize);
  if (size == 0) {
    // pvalloc(0) should allocate one page.
    size = PageSize;
  }
  return instance.Allocate(size, PageSize, stack, FROM_MALLOC, true);
}

int asan_posix_memalign(void **memptr, uptr alignment, uptr size,
                        BufferedStackTrace *stack) {
  void *ptr = instance.Allocate(size, alignment, stack, FROM_MALLOC, true);
  CHECK(IsAligned((uptr)ptr, alignment));
  *memptr = ptr;
  return 0;
}

uptr asan_malloc_usable_size(const void *ptr, uptr pc, uptr bp) {
  if (!ptr) return 0;
  uptr usable_size = instance.AllocationSize(reinterpret_cast<uptr>(ptr));
  if (flags()->check_malloc_usable_size && (usable_size == 0)) {
    GET_STACK_TRACE_FATAL(pc, bp);
    ReportMallocUsableSizeNotOwned((uptr)ptr, &stack);
  }
  return usable_size;
}

uptr asan_mz_size(const void *ptr) {
  return instance.AllocationSize(reinterpret_cast<uptr>(ptr));
}

void asan_mz_force_lock() {
  instance.ForceLock();
}

void asan_mz_force_unlock() {
  instance.ForceUnlock();
}

void AsanSoftRssLimitExceededCallback(bool exceeded) {
  instance.allocator.SetRssLimitIsExceeded(exceeded);
}

}  // namespace __asan

// --- Implementation of LSan-specific functions --- {{{1
namespace __lsan {
void LockAllocator() {
  __asan::get_allocator().ForceLock();
}

void UnlockAllocator() {
  __asan::get_allocator().ForceUnlock();
}

void GetAllocatorGlobalRange(uptr *begin, uptr *end) {
  *begin = (uptr)&__asan::get_allocator();
  *end = *begin + sizeof(__asan::get_allocator());
}

uptr PointsIntoChunk(void* p) {
  uptr addr = reinterpret_cast<uptr>(p);
  __asan::AsanChunk *m = __asan::instance.GetAsanChunkByAddrFastLocked(addr);
  if (!m) return 0;
  uptr chunk = m->Beg();
  if (m->chunk_state != __asan::CHUNK_ALLOCATED)
    return 0;
  if (m->AddrIsInside(addr, /*locked_version=*/true))
    return chunk;
  if (IsSpecialCaseOfOperatorNew0(chunk, m->UsedSize(/*locked_version*/ true),
                                  addr))
    return chunk;
  return 0;
}

uptr GetUserBegin(uptr chunk) {
  __asan::AsanChunk *m = __asan::instance.GetAsanChunkByAddrFastLocked(chunk);
  CHECK(m);
  return m->Beg();
}

LsanMetadata::LsanMetadata(uptr chunk) {
  metadata_ = reinterpret_cast<void *>(chunk - __asan::kChunkHeaderSize);
}

bool LsanMetadata::allocated() const {
  __asan::AsanChunk *m = reinterpret_cast<__asan::AsanChunk *>(metadata_);
  return m->chunk_state == __asan::CHUNK_ALLOCATED;
}

ChunkTag LsanMetadata::tag() const {
  __asan::AsanChunk *m = reinterpret_cast<__asan::AsanChunk *>(metadata_);
  return static_cast<ChunkTag>(m->lsan_tag);
}

void LsanMetadata::set_tag(ChunkTag value) {
  __asan::AsanChunk *m = reinterpret_cast<__asan::AsanChunk *>(metadata_);
  m->lsan_tag = value;
}

uptr LsanMetadata::requested_size() const {
  __asan::AsanChunk *m = reinterpret_cast<__asan::AsanChunk *>(metadata_);
  return m->UsedSize(/*locked_version=*/true);
}

u32 LsanMetadata::stack_trace_id() const {
  __asan::AsanChunk *m = reinterpret_cast<__asan::AsanChunk *>(metadata_);
  return m->alloc_context_id;
}

void ForEachChunk(ForEachChunkCallback callback, void *arg) {
  __asan::get_allocator().ForEachChunk(callback, arg);
}

IgnoreObjectResult IgnoreObjectLocked(const void *p) {
  uptr addr = reinterpret_cast<uptr>(p);
  __asan::AsanChunk *m = __asan::instance.GetAsanChunkByAddr(addr);
  if (!m) return kIgnoreObjectInvalid;
  if ((m->chunk_state == __asan::CHUNK_ALLOCATED) && m->AddrIsInside(addr)) {
    if (m->lsan_tag == kIgnored)
      return kIgnoreObjectAlreadyIgnored;
    m->lsan_tag = __lsan::kIgnored;
    return kIgnoreObjectSuccess;
  } else {
    return kIgnoreObjectInvalid;
  }
}
}  // namespace __lsan

// ---------------------- Interface ---------------- {{{1
using namespace __asan;  // NOLINT

// ASan allocator doesn't reserve extra bytes, so normally we would
// just return "size". We don't want to expose our redzone sizes, etc. here.
uptr __sanitizer_get_estimated_allocated_size(uptr size) {
  return size;
}

int __sanitizer_get_ownership(const void *p) {
  uptr ptr = reinterpret_cast<uptr>(p);
  return instance.AllocationSize(ptr) > 0;
}

uptr __sanitizer_get_allocated_size(const void *p) {
  if (!p) return 0;
  uptr ptr = reinterpret_cast<uptr>(p);
  uptr allocated_size = instance.AllocationSize(ptr);
  // Die if p is not malloced or if it is already freed.
  if (allocated_size == 0) {
    GET_STACK_TRACE_FATAL_HERE;
    ReportSanitizerGetAllocatedSizeNotOwned(ptr, &stack);
  }
  return allocated_size;
}

#if !SANITIZER_SUPPORTS_WEAK_HOOKS
// Provide default (no-op) implementation of malloc hooks.
extern "C" {
SANITIZER_INTERFACE_ATTRIBUTE SANITIZER_WEAK_ATTRIBUTE
void __sanitizer_malloc_hook(void *ptr, uptr size) {
  (void)ptr;
  (void)size;
}
SANITIZER_INTERFACE_ATTRIBUTE SANITIZER_WEAK_ATTRIBUTE
void __sanitizer_free_hook(void *ptr) {
  (void)ptr;
}
}  // extern "C"
#endif