// Copyright 2023 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/debug/allocation_trace.h"

#include <algorithm>
#include <cstddef>
#include <iterator>
#include <memory>
#include <sstream>
#include <string>

#include "base/allocator/dispatcher/dispatcher.h"
#include "base/allocator/dispatcher/testing/tools.h"
#include "base/debug/stack_trace.h"
#include "partition_alloc/partition_alloc_allocation_data.h"
#include "partition_alloc/partition_alloc_config.h"
#include "testing/gmock/include/gmock/gmock-matchers.h"
#include "testing/gtest/include/gtest/gtest.h"

using base::allocator::dispatcher::AllocationNotificationData;
using base::allocator::dispatcher::AllocationSubsystem;
using base::allocator::dispatcher::FreeNotificationData;
using base::allocator::dispatcher::MTEMode;
using testing::Combine;
using testing::ContainerEq;
using testing::Message;
using testing::Test;
using testing::Values;

namespace base::debug::tracer {
namespace {

template <typename Iterator>
std::string MakeString(Iterator begin, Iterator end) {
  using value_type = decltype(*begin);
  std::ostringstream oss;
  oss << '[';
  if (begin != end) {
    auto last_element = end - 1;
    std::copy(begin, last_element, std::ostream_iterator<value_type>(oss, ","));
    oss << *last_element;
  }
  oss << ']';

  return oss.str();
}

template <typename C>
std::string MakeString(const C& data) {
  return MakeString(std::begin(data), std::end(data));
}

void AreEqual(const base::debug::tracer::OperationRecord& expected,
              const base::debug::tracer::OperationRecord& is) {
  EXPECT_EQ(is.GetOperationType(), expected.GetOperationType());
  EXPECT_EQ(is.GetAddress(), expected.GetAddress());
  EXPECT_EQ(is.GetSize(), expected.GetSize());
  EXPECT_THAT(is.GetStackTrace(), ContainerEq(expected.GetStackTrace()));
}

}  // namespace

class AllocationTraceRecorderTest : public Test {
 protected:
  AllocationTraceRecorder& GetSubjectUnderTest() const {
    return *subject_under_test_;
  }
  // During the test, Buffer holds a binary copy of the AllocationTraceRecorder
  // under test.
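  // The alignas specifier ensures this raw storage satisfies the recorder's
  // alignment requirements before it is reinterpreted as an
  // AllocationTraceRecorder in VerifyBinaryCopy below.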
  struct Buffer {
    alignas(
        AllocationTraceRecorder) uint8_t data[sizeof(AllocationTraceRecorder)];
  };

 protected:
  AllocationNotificationData CreateAllocationData(
      void* address,
      size_t size,
      MTEMode mte_mode = MTEMode::kUndefined) {
    return AllocationNotificationData(address, size, nullptr,
                                      AllocationSubsystem::kPartitionAllocator)
#if BUILDFLAG(HAS_MEMORY_TAGGING)
        .SetMteReportingMode(mte_mode)
#endif
        ;
  }

  FreeNotificationData CreateFreeData(void* address,
                                      MTEMode mte_mode = MTEMode::kUndefined) {
    return FreeNotificationData(address,
                                AllocationSubsystem::kPartitionAllocator)
#if BUILDFLAG(HAS_MEMORY_TAGGING)
        .SetMteReportingMode(mte_mode)
#endif
        ;
  }

 private:
  // The recorder under test. Depending on the number and size of traces, it
  // requires quite a lot of space. Therefore, we create it on the heap to
  // avoid running out of stack space.
  std::unique_ptr<AllocationTraceRecorder> const subject_under_test_ =
      std::make_unique<AllocationTraceRecorder>();
};

TEST_F(AllocationTraceRecorderTest, VerifyBinaryCopy) {
  AllocationTraceRecorder& subject_under_test = GetSubjectUnderTest();

  // Fill the recorder with some fake allocations and frees.
  constexpr size_t number_of_records = 100;

  for (size_t index = 0; index < number_of_records; ++index) {
    if (index & 0x1) {
      subject_under_test.OnAllocation(
          CreateAllocationData(this, sizeof(*this)));
    } else {
      subject_under_test.OnFree(CreateFreeData(this));
    }
  }

  ASSERT_EQ(number_of_records, subject_under_test.size());

  // Create a copy of the recorder using a Buffer as storage for the copy.
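  // Note: the copy below is a plain byte-for-byte memcpy of the recorder; the
  // test then inspects the copy only through the recorder's public accessors.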
  auto const buffer = std::make_unique<Buffer>();

  ASSERT_TRUE(buffer);

  auto* const buffered_recorder =
      reinterpret_cast<AllocationTraceRecorder*>(&(buffer->data[0]));

  memcpy(buffered_recorder, &subject_under_test,
         sizeof(AllocationTraceRecorder));

  // Verify that the original recorder and the buffered recorder are equal.
  ASSERT_EQ(subject_under_test.size(), buffered_recorder->size());

  for (size_t index = 0; index < subject_under_test.size(); ++index) {
    SCOPED_TRACE(Message("difference detected at index ") << index);
    AreEqual(subject_under_test[index], (*buffered_recorder)[index]);
  }
}

TEST_F(AllocationTraceRecorderTest, VerifySingleAllocation) {
  AllocationTraceRecorder& subject_under_test = GetSubjectUnderTest();

  subject_under_test.OnAllocation(
      CreateAllocationData(&subject_under_test, sizeof(subject_under_test)));

  EXPECT_EQ(1ul, subject_under_test.size());

  const auto& record_data = subject_under_test[0];
  const auto& stack_trace = record_data.GetStackTrace();

  EXPECT_EQ(OperationType::kAllocation, record_data.GetOperationType());
  EXPECT_EQ(&subject_under_test, record_data.GetAddress());
  EXPECT_EQ(sizeof(subject_under_test), record_data.GetSize());
  EXPECT_NE(nullptr, stack_trace.at(0));
}

TEST_F(AllocationTraceRecorderTest, VerifySingleFree) {
  AllocationTraceRecorder& subject_under_test = GetSubjectUnderTest();

  subject_under_test.OnFree(CreateFreeData(&subject_under_test));

  EXPECT_EQ(1ul, subject_under_test.size());

  const auto& record_data = subject_under_test[0];
  const auto& stack_trace = record_data.GetStackTrace();

  EXPECT_EQ(OperationType::kFree, record_data.GetOperationType());
  EXPECT_EQ(&subject_under_test, record_data.GetAddress());
  EXPECT_EQ(0ul, record_data.GetSize());
  EXPECT_NE(nullptr, stack_trace.at(0));
}

TEST_F(AllocationTraceRecorderTest, VerifyMultipleOperations) {
  AllocationTraceRecorder& subject_under_test = GetSubjectUnderTest();

  // We perform a number of operations.
  subject_under_test.OnAllocation(
      CreateAllocationData(this, 1 * sizeof(*this)));
  subject_under_test.OnFree(CreateFreeData(this + 2));
  subject_under_test.OnAllocation(
      CreateAllocationData(this + 3, 3 * sizeof(*this)));
  subject_under_test.OnAllocation(
      CreateAllocationData(this + 4, 4 * sizeof(*this)));
  subject_under_test.OnFree(CreateFreeData(this + 5));
  subject_under_test.OnFree(CreateFreeData(this + 6));

  ASSERT_EQ(subject_under_test.size(), 6ul);

  // Verify that the stored operations match the expected ones.
  {
    const auto& entry = subject_under_test[0];
    ASSERT_EQ(entry.GetOperationType(), OperationType::kAllocation);
    ASSERT_EQ(entry.GetAddress(), this);
    ASSERT_EQ(entry.GetSize(), 1 * sizeof(*this));
    ASSERT_NE(entry.GetStackTrace()[0], nullptr);
  }
  {
    const auto& entry = subject_under_test[1];
    ASSERT_EQ(entry.GetOperationType(), OperationType::kFree);
    ASSERT_EQ(entry.GetAddress(), (this + 2));
    ASSERT_EQ(entry.GetSize(), 0ul);
    ASSERT_NE(entry.GetStackTrace()[0], nullptr);
  }
  {
    const auto& entry = subject_under_test[2];
    ASSERT_EQ(entry.GetOperationType(), OperationType::kAllocation);
    ASSERT_EQ(entry.GetAddress(), (this + 3));
    ASSERT_EQ(entry.GetSize(), 3 * sizeof(*this));
    ASSERT_NE(entry.GetStackTrace()[0], nullptr);
  }
  {
    const auto& entry = subject_under_test[3];
    ASSERT_EQ(entry.GetOperationType(), OperationType::kAllocation);
    ASSERT_EQ(entry.GetAddress(), (this + 4));
    ASSERT_EQ(entry.GetSize(), 4 * sizeof(*this));
    ASSERT_NE(entry.GetStackTrace()[0], nullptr);
  }
  {
    const auto& entry = subject_under_test[4];
    ASSERT_EQ(entry.GetOperationType(), OperationType::kFree);
    ASSERT_EQ(entry.GetAddress(), (this + 5));
    ASSERT_EQ(entry.GetSize(), 0ul);
    ASSERT_NE(entry.GetStackTrace()[0], nullptr);
  }
  {
    const auto& entry = subject_under_test[5];
    ASSERT_EQ(entry.GetOperationType(), OperationType::kFree);
    ASSERT_EQ(entry.GetAddress(), (this + 6));
    ASSERT_EQ(entry.GetSize(), 0ul);
    ASSERT_NE(entry.GetStackTrace()[0], nullptr);
  }
}

TEST_F(AllocationTraceRecorderTest, VerifyOverflowOfOperations) {
  AllocationTraceRecorder& subject_under_test = GetSubjectUnderTest();

  decltype(subject_under_test.GetMaximumNumberOfTraces()) idx;
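  // idx is declared outside the loop because the overflow check after the loop
  // reuses its final value (one past the last index written inside the loop).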
  for (idx = 0; idx < subject_under_test.GetMaximumNumberOfTraces(); ++idx) {
    ASSERT_EQ(subject_under_test.size(), idx);
    const bool is_allocation = !(idx & 0x1);

    // Record an allocation or free.
    if (is_allocation) {
      subject_under_test.OnAllocation(CreateAllocationData(this + idx, idx));
    } else {
      subject_under_test.OnFree(CreateFreeData(this + idx));
    }

    // Verify the recorder's state after this operation.
    {
      ASSERT_EQ(subject_under_test.size(), (idx + 1));

      // Verify the entry that was just added.
      {
        const auto& last_entry = subject_under_test[idx];
        ASSERT_EQ(last_entry.GetAddress(), (this + idx));
        // No full verification intended, just a check that something has been
        // written.
        ASSERT_NE(last_entry.GetStackTrace()[0], nullptr);
        if (is_allocation) {
          ASSERT_EQ(last_entry.GetOperationType(), OperationType::kAllocation);
          ASSERT_EQ(last_entry.GetSize(), idx);
        } else {
          ASSERT_EQ(last_entry.GetOperationType(), OperationType::kFree);
          ASSERT_EQ(last_entry.GetSize(), 0ul);
        }
      }

      // The first entry must remain unchanged.
      {
        const auto& first_entry = subject_under_test[0];
        ASSERT_EQ(first_entry.GetOperationType(), OperationType::kAllocation);
        ASSERT_EQ(first_entry.GetAddress(), this);
        ASSERT_EQ(first_entry.GetSize(), 0ul);
      }
    }
  }

  // By now we have written all available records, including the last one.
  // So the following allocation should overwrite the first record.
  {
    const auto& old_second_entry = subject_under_test[1];

    subject_under_test.OnAllocation(CreateAllocationData(this + idx, idx));
    ASSERT_EQ(subject_under_test.size(),
              subject_under_test.GetMaximumNumberOfTraces());
    const auto& last_entry =
        subject_under_test[subject_under_test.GetMaximumNumberOfTraces() - 1];
    ASSERT_EQ(last_entry.GetOperationType(), OperationType::kAllocation);
    ASSERT_EQ(last_entry.GetAddress(), (this + idx));

    // Check that the previous first entry (an allocation) is gone. Accessing
    // the first record now yields what was previously the second record (a
    // free operation).
    const auto& first_entry = subject_under_test[0];

    ASSERT_EQ(&old_second_entry, &first_entry);
    ASSERT_EQ(first_entry.GetOperationType(), OperationType::kFree);
    ASSERT_EQ(first_entry.GetAddress(), (this + 1));
  }
}

TEST_F(AllocationTraceRecorderTest, VerifyWithHooks) {
  auto& dispatcher = base::allocator::dispatcher::Dispatcher::GetInstance();
  AllocationTraceRecorder& subject_under_test = GetSubjectUnderTest();

  dispatcher.InitializeForTesting(&subject_under_test);

  // Perform an allocation and a free.
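  // The temporary std::unique_ptr returned by make_unique is destroyed at the
  // end of the full expression, so both the allocation and the matching free
  // of the string happen within this single statement.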
  std::make_unique<std::string>(
      "Just enforce an allocation and free to trigger notification of the "
      "subject under test. Hopefully this string is long enough to bypass any "
      "small string optimizations that the STL implementation might use.");

  dispatcher.ResetForTesting();

  // We only check for greater-or-equal since allocations from other parts of
  // GTest might have interfered.
  EXPECT_GE(subject_under_test.size(), 2ul);
}

class OperationRecordTest : public Test {
 protected:
  using ReferenceStackTrace = std::vector<const void*>;

  ReferenceStackTrace GetReferenceTrace() {
    constexpr size_t max_trace_size = 128;
    const void* frame_pointers[max_trace_size]{nullptr};
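    // Capture the current call stack as the reference; the stack traces
    // recorded by OperationRecord within the same test body are expected to
    // share a common run of frames with it (see VerifyStackTrace).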
    const auto num_frames = base::debug::TraceStackFramePointers(
        &frame_pointers[0], max_trace_size, 0);
    ReferenceStackTrace trace;
    std::copy_n(std::begin(frame_pointers), num_frames,
                std::back_inserter(trace));
    return trace;
  }

  void VerifyStackTrace(
      const ReferenceStackTrace& reference_stack_trace,
      const base::debug::tracer::StackTraceContainer& stack_trace) {
    // Verify we have at least one entry in the stack.
    ASSERT_NE(nullptr, stack_trace.at(0));
    ASSERT_GT(stack_trace.size(), 0ul);

    // Although functions are marked ALWAYS_INLINE, the compiler may choose not
    // to inline, depending e.g. on the optimization level. Therefore, we
    // search for the first common frame in both stack traces. From there on,
    // both must be equal for the remaining number of frames.
    auto const it_stack_trace_begin = std::begin(stack_trace);
    auto const it_stack_trace_end =
        std::find(it_stack_trace_begin, std::end(stack_trace), nullptr);
    auto const it_reference_stack_trace_end = std::end(reference_stack_trace);

    auto const it_stack_trace = std::find_first_of(
        it_stack_trace_begin, it_stack_trace_end,
        std::begin(reference_stack_trace), it_reference_stack_trace_end);

    ASSERT_NE(it_stack_trace, it_stack_trace_end)
        << "stack-trace and reference-stack-trace share no common frame!\n"
        << "stack trace = " << MakeString(stack_trace) << '\n'
        << "reference stack trace = " << MakeString(reference_stack_trace);

    // Find the common frame in the reference-stack-trace.
    const auto it_reference_stack_trace =
        std::find(std::begin(reference_stack_trace),
                  it_reference_stack_trace_end, *it_stack_trace);

    const auto number_of_expected_common_frames = std::min(
        std::distance(it_stack_trace, it_stack_trace_end),
        std::distance(it_reference_stack_trace, it_reference_stack_trace_end));

    // Check if we have any difference within the section of frames that we
    // expect to be equal.
    const auto mismatch = std::mismatch(
        it_reference_stack_trace,
        it_reference_stack_trace + number_of_expected_common_frames,
        it_stack_trace);

    ASSERT_EQ(mismatch.first,
              (it_reference_stack_trace + number_of_expected_common_frames))
        << "found difference in the range of frames expected to be equal!\n"
        << "position = "
        << std::distance(it_reference_stack_trace, mismatch.first) << '\n'
        << "stack trace = "
        << MakeString(it_stack_trace,
                      it_stack_trace + number_of_expected_common_frames)
        << '\n'
        << "reference stack trace = "
        << MakeString(
               it_reference_stack_trace,
               it_reference_stack_trace + number_of_expected_common_frames);
  }
};

TEST_F(OperationRecordTest, VerifyConstructor) {
  OperationRecord subject_under_test;

  EXPECT_EQ(subject_under_test.GetOperationType(), OperationType::kNone);
  EXPECT_EQ(subject_under_test.GetAddress(), nullptr);
  EXPECT_EQ(subject_under_test.GetSize(), 0ul);
  EXPECT_FALSE(subject_under_test.IsRecording());

  // The stack trace is not initialized by the constructor. Therefore, we do
  // not check it here.
}

TEST_F(OperationRecordTest, VerifyRecordAllocation) {
  const ReferenceStackTrace reference_trace = GetReferenceTrace();

  void* const address = this;
  size_t const size = sizeof(*this);

  OperationRecord subject_under_test;

  ASSERT_TRUE(subject_under_test.InitializeAllocation(address, size));

  EXPECT_EQ(OperationType::kAllocation, subject_under_test.GetOperationType());
  EXPECT_EQ(address, subject_under_test.GetAddress());
  EXPECT_EQ(size, subject_under_test.GetSize());
  EXPECT_FALSE(subject_under_test.IsRecording());

  VerifyStackTrace(reference_trace, subject_under_test.GetStackTrace());
}

TEST_F(OperationRecordTest, VerifyRecordFree) {
  const ReferenceStackTrace reference_trace = GetReferenceTrace();

  void* const address = this;
  size_t const size = 0;

  OperationRecord subject_under_test;

  ASSERT_TRUE(subject_under_test.InitializeFree(address));

  EXPECT_EQ(OperationType::kFree, subject_under_test.GetOperationType());
  EXPECT_EQ(address, subject_under_test.GetAddress());
  EXPECT_EQ(size, subject_under_test.GetSize());
  EXPECT_FALSE(subject_under_test.IsRecording());

  VerifyStackTrace(reference_trace, subject_under_test.GetStackTrace());
}

}  // namespace base::debug::tracer