//===-- stack_depot.h -------------------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef SCUDO_STACK_DEPOT_H_
#define SCUDO_STACK_DEPOT_H_

#include "atomic_helpers.h"
#include "common.h"
#include "mutex.h"

namespace scudo {

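// Builds a 32-bit MurmurHash2 incrementally from a stream of u32 values.
//
// Usage sketch (Values and N are hypothetical, not part of this header):
//   MurMur2HashBuilder B;
//   for (u32 I = 0; I != N; ++I)
//     B.add(Values[I]);
//   u32 Hash = B.get();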
class MurMur2HashBuilder {
  static const u32 M = 0x5bd1e995;
  static const u32 Seed = 0x9747b28c;
  static const u32 R = 24;
  u32 H;

public:
  explicit MurMur2HashBuilder(u32 Init = 0) { H = Seed ^ Init; }
  void add(u32 K) {
    K *= M;
    K ^= K >> R;
    K *= M;
    H *= M;
    H ^= K;
  }
  u32 get() {
    u32 X = H;
    X ^= X >> 13;
    X *= M;
    X ^= X >> 15;
    return X;
  }
};

class alignas(atomic_u64) StackDepot {
  HybridMutex RingEndMu;
  u32 RingEnd = 0;

  // This data structure stores a stack trace for each allocation and
  // deallocation when stack trace recording is enabled. A trace may be looked
  // up using a hash of its frames. The lower bits of the hash are an index
  // into the Tab array, which stores an index into the Ring array where the
  // stack traces are stored. As the name implies, Ring is a ring buffer, so a
  // stack trace may wrap around to the start of the array.
  //
  // Each stack trace in Ring is prefixed by a stack trace marker consisting of
  // a fixed 1 bit in bit 0 (this allows disambiguation between stack frames
  // and stack trace markers in the case where instruction pointers are 4-byte
  // aligned, as they are on arm64), the stack trace hash in bits 1-32, and the
  // size of the stack trace in bits 33-63.
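  //
  // For example (illustrative values), a 3-frame trace with hash H is encoded
  // as the marker (u64(3) << 33) | (u64(H) << 1) | 1, followed by the three
  // frame addresses in the next three Ring slots.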
  //
  // The insert() function is potentially racy in its accesses to the Tab and
  // Ring arrays, but find() is resilient to races in the sense that, barring
  // hash collisions, it will either return the correct stack trace or no stack
  // trace at all, even if two instances of insert() raced with one another.
  // This is achieved by re-checking the hash of the stack trace before
  // returning the trace.

  u32 RingSize = 0;
  u32 RingMask = 0;
  u32 TabMask = 0;
  // This is immediately followed by RingSize atomic_u64 and
  // (TabMask + 1) atomic_u32.
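  //
  // A hypothetical layout for RingSize = 4 and TabMask = 3 (i.e. TabSize = 4):
  //   [StackDepot header][4 x atomic_u64 Ring][4 x atomic_u32 Tab]
  // so the backing buffer must be exactly
  //   sizeof(StackDepot) + 4 * sizeof(atomic_u64) + 4 * sizeof(atomic_u32)
  // bytes, which is what isValid() checks.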

  atomic_u64 *getRing() {
    return reinterpret_cast<atomic_u64 *>(reinterpret_cast<char *>(this) +
                                          sizeof(StackDepot));
  }

  atomic_u32 *getTab() {
    return reinterpret_cast<atomic_u32 *>(reinterpret_cast<char *>(this) +
                                          sizeof(StackDepot) +
                                          sizeof(atomic_u64) * RingSize);
  }

  const atomic_u64 *getRing() const {
    return reinterpret_cast<const atomic_u64 *>(
        reinterpret_cast<const char *>(this) + sizeof(StackDepot));
  }

  const atomic_u32 *getTab() const {
    return reinterpret_cast<const atomic_u32 *>(
        reinterpret_cast<const char *>(this) + sizeof(StackDepot) +
        sizeof(atomic_u64) * RingSize);
  }

public:
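  // Ring and table sizes must be powers of two; a hypothetical setup might be
  // init(/*RingSz=*/32768, /*TabSz=*/8192).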
  void init(u32 RingSz, u32 TabSz) {
    DCHECK(isPowerOfTwo(RingSz));
    DCHECK(isPowerOfTwo(TabSz));
    RingSize = RingSz;
    RingMask = RingSz - 1;
    TabMask = TabSz - 1;
  }

  // Ensure that RingSize, RingMask and TabMask are set up in a way that
  // all accesses are within range of BufSize.
  bool isValid(uptr BufSize) const {
    if (!isPowerOfTwo(RingSize))
      return false;
    uptr RingBytes = sizeof(atomic_u64) * RingSize;
    if (RingMask + 1 != RingSize)
      return false;

    if (TabMask == 0)
      return false;
    uptr TabSize = TabMask + 1;
    if (!isPowerOfTwo(TabSize))
      return false;
    uptr TabBytes = sizeof(atomic_u32) * TabSize;

    // Subtract and detect underflow.
    if (BufSize < sizeof(StackDepot))
      return false;
    BufSize -= sizeof(StackDepot);
    if (BufSize < TabBytes)
      return false;
    BufSize -= TabBytes;
    if (BufSize < RingBytes)
      return false;
    return BufSize == RingBytes;
  }

  // Insert the stack trace [Begin, End) into the stack depot, and return the
  // hash of the trace.
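  //
  // Usage sketch (Frames, collectStackTrace and Depot are hypothetical):
  //   uptr Frames[64];
  //   uptr NumFrames = collectStackTrace(Frames);
  //   u32 Hash = Depot->insert(Frames, Frames + NumFrames);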
  u32 insert(uptr *Begin, uptr *End) {
    auto *Tab = getTab();
    auto *Ring = getRing();

    MurMur2HashBuilder B;
    for (uptr *I = Begin; I != End; ++I)
      B.add(u32(*I) >> 2);
    u32 Hash = B.get();

    u32 Pos = Hash & TabMask;
    u32 RingPos = atomic_load_relaxed(&Tab[Pos]);
    u64 Entry = atomic_load_relaxed(&Ring[RingPos]);
    u64 Id = (u64(End - Begin) << 33) | (u64(Hash) << 1) | 1;
    if (Entry == Id)
      return Hash;

    ScopedLock Lock(RingEndMu);
    RingPos = RingEnd;
    atomic_store_relaxed(&Tab[Pos], RingPos);
    atomic_store_relaxed(&Ring[RingPos], Id);
    for (uptr *I = Begin; I != End; ++I) {
      RingPos = (RingPos + 1) & RingMask;
      atomic_store_relaxed(&Ring[RingPos], *I);
    }
    RingEnd = (RingPos + 1) & RingMask;
    return Hash;
  }

  // Look up a stack trace by hash. Returns true if successful. The trace may
  // be accessed via at(), passing indexes between *RingPosPtr and
  // *RingPosPtr + *SizePtr.
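  //
  // Usage sketch (Depot and processFrame are hypothetical):
  //   uptr RingPos, Size;
  //   if (Depot->find(Hash, &RingPos, &Size))
  //     for (uptr I = 0; I < Size; ++I)
  //       processFrame(Depot->at(RingPos + I));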
  bool find(u32 Hash, uptr *RingPosPtr, uptr *SizePtr) const {
    auto *Tab = getTab();
    auto *Ring = getRing();

    u32 Pos = Hash & TabMask;
    u32 RingPos = atomic_load_relaxed(&Tab[Pos]);
    if (RingPos >= RingSize)
      return false;
    u64 Entry = atomic_load_relaxed(&Ring[RingPos]);
    u64 HashWithTagBit = (u64(Hash) << 1) | 1;
    if ((Entry & 0x1ffffffff) != HashWithTagBit)
      return false;
    u32 Size = u32(Entry >> 33);
    if (Size >= RingSize)
      return false;
    *RingPosPtr = (RingPos + 1) & RingMask;
    *SizePtr = Size;
    MurMur2HashBuilder B;
    for (uptr I = 0; I != Size; ++I) {
      RingPos = (RingPos + 1) & RingMask;
      B.add(u32(atomic_load_relaxed(&Ring[RingPos])) >> 2);
    }
    return B.get() == Hash;
  }

  u64 at(uptr RingPos) const {
    auto *Ring = getRing();
    return atomic_load_relaxed(&Ring[RingPos & RingMask]);
  }


  // This is done for the purpose of fork safety in multithreaded programs and
  // does not fully disable StackDepot. In particular, find() still works and
  // only insert() is blocked.
  void disable() NO_THREAD_SAFETY_ANALYSIS { RingEndMu.lock(); }

  void enable() NO_THREAD_SAFETY_ANALYSIS { RingEndMu.unlock(); }
};

// We need StackDepot to be aligned to 8 bytes so the ring we store after it
// is correctly aligned.
static_assert(sizeof(StackDepot) % alignof(atomic_u64) == 0);

} // namespace scudo

#endif // SCUDO_STACK_DEPOT_H_