1 // Copyright 2023 The Chromium Authors
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "base/debug/allocation_trace.h"
6
7 #include <array>
8 #include <atomic>
9
10 #include "base/check_op.h"
11
12 namespace base::debug::tracer {
13
IsRecording() const14 bool OperationRecord::IsRecording() const {
15 if (is_recording_.test_and_set()) {
16 return true;
17 }
18
19 is_recording_.clear();
20 return false;
21 }
22
// Returns the type of memory operation (e.g. allocation vs. free) this
// record captured.
OperationType OperationRecord::GetOperationType() const {
  return operation_type_;
}
26
// Returns the address the recorded operation acted on. Non-owning; the
// pointee may have been freed since recording.
const void* OperationRecord::GetAddress() const {
  return address_;
}
30
// Returns the size, in bytes, associated with the recorded operation.
// NOTE(review): presumably 0 for frees, since OnFree() records no size —
// confirm against OperationRecord::InitializeFree().
size_t OperationRecord::GetSize() const {
  return size_;
}
34
// Returns the stack trace captured at recording time. The reference is valid
// only as long as this record is not overwritten by a subsequent operation.
const StackTraceContainer& OperationRecord::GetStackTrace() const {
  return stack_trace_;
}
38
#if BUILDFLAG(ENABLE_ALLOCATION_TRACE_RECORDER_FULL_REPORTING)
// With full reporting enabled, statistics carry both the total number of
// recorded operations and the number of slot collisions observed while
// recording. Parameter names intentionally shadow the member names.
AllocationTraceRecorderStatistics::AllocationTraceRecorderStatistics(
    size_t total_number_of_allocations,
    size_t total_number_of_collisions)
    : total_number_of_allocations(total_number_of_allocations),
      total_number_of_collisions(total_number_of_collisions) {}
#else
// Without full reporting, only the total operation count is tracked.
AllocationTraceRecorderStatistics::AllocationTraceRecorderStatistics(
    size_t total_number_of_allocations)
    : total_number_of_allocations(total_number_of_allocations) {}
#endif
50
OnAllocation(const void * allocated_address,size_t allocated_size)51 void AllocationTraceRecorder::OnAllocation(const void* allocated_address,
52 size_t allocated_size) {
53 // Record the allocation into the next available slot, allowing for failure
54 // due to the slot already being in-use by another
55 // OperationRecord::Initialize*() call from another thread.
56 for (auto index = GetNextIndex();
57 !alloc_trace_buffer_[index].InitializeAllocation(allocated_address,
58 allocated_size);
59 index = GetNextIndex()) {
60 #if BUILDFLAG(ENABLE_ALLOCATION_TRACE_RECORDER_FULL_REPORTING)
61 total_number_of_collisions_.fetch_add(1, std::memory_order_relaxed);
62 #endif
63 }
64 }
65
OnFree(const void * freed_address)66 void AllocationTraceRecorder::OnFree(const void* freed_address) {
67 // Record the free into the next available slot, allowing for failure due to
68 // the slot already being in-use by another OperationRecord::Initialize*()
69 // call from another thread.
70 for (auto index = GetNextIndex();
71 !alloc_trace_buffer_[index].InitializeFree(freed_address);
72 index = GetNextIndex()) {
73 #if BUILDFLAG(ENABLE_ALLOCATION_TRACE_RECORDER_FULL_REPORTING)
74 total_number_of_collisions_.fetch_add(1, std::memory_order_relaxed);
75 #endif
76 }
77 }
78
size() const79 size_t AllocationTraceRecorder::size() const {
80 return std::min(kMaximumNumberOfMemoryOperationTraces,
81 total_number_of_records_.load(std::memory_order_relaxed));
82 }
83
// Returns the |idx|-th record, oldest first. Before the buffer wraps,
// records sit at their insertion index. Once full, the buffer is a ring:
// the oldest record lives at WrapIdxIfNeeded(total_number_of_records_), and
// |idx| is an offset from there.
//
// NOTE(review): total_number_of_records_ is loaded here and again inside
// size() without synchronization; presumably a concurrent recording thread
// can shift indices mid-call. Looks acceptable for a best-effort crash
// diagnostics buffer — confirm with callers.
const OperationRecord& AllocationTraceRecorder::operator[](size_t idx) const {
  DCHECK_LT(idx, size());

  const size_t array_index =
      size() < GetMaximumNumberOfTraces()
          ? idx
          : WrapIdxIfNeeded(
                total_number_of_records_.load(std::memory_order_relaxed) + idx);

  DCHECK_LT(array_index, alloc_trace_buffer_.size());

  return alloc_trace_buffer_[array_index];
}
97
98 AllocationTraceRecorderStatistics
GetRecorderStatistics() const99 AllocationTraceRecorder::GetRecorderStatistics() const {
100 #if BUILDFLAG(ENABLE_ALLOCATION_TRACE_RECORDER_FULL_REPORTING)
101 return {total_number_of_records_.load(std::memory_order_relaxed),
102 total_number_of_collisions_.load(std::memory_order_relaxed)};
103 #else
104 return {total_number_of_records_.load(std::memory_order_relaxed)};
105 #endif
106 }
107
108 } // namespace base::debug::tracer