// Copyright 2024 The Pigweed Authors
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy of
// the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.

15 #include "pw_allocator/block_allocator_testing.h"
16 
17 #include <cstdint>
18 
19 #include "lib/stdcompat/bit.h"
20 #include "pw_allocator/layout.h"
21 #include "pw_assert/check.h"
22 #include "pw_bytes/alignment.h"
23 #include "pw_status/status.h"
24 
namespace pw::allocator::test {

// Test fixtures.

void BlockAllocatorTestBase::SetUp() { ptrs_.fill(nullptr); }

void BlockAllocatorTestBase::Store(size_t index, void* ptr) {
  PW_CHECK_UINT_LT(index, kNumPtrs, "index is out of bounds");
  PW_CHECK(ptr == nullptr || ptrs_[index] == nullptr,
           "assigning pointer would clobber existing allocation");
  ptrs_[index] = ptr;
}

void* BlockAllocatorTestBase::Fetch(size_t index) {
  return index < kNumPtrs ? ptrs_[index] : nullptr;
}

void BlockAllocatorTestBase::Swap(size_t i, size_t j) {
  std::swap(ptrs_[i], ptrs_[j]);
}

void BlockAllocatorTestBase::UseMemory(void* ptr, size_t size) {
  std::memset(ptr, 0x5a, size);
}

// Unit tests.

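// Verifies that the allocator reports the capacity of the region it manages.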
void BlockAllocatorTestBase::GetCapacity() {
  Allocator& allocator = GetGenericAllocator();
  StatusWithSize capacity = allocator.GetCapacity();
  EXPECT_EQ(capacity.status(), OkStatus());
  EXPECT_EQ(capacity.size(), kCapacity);
}

void BlockAllocatorTestBase::AllocateLarge() {
  Allocator& allocator = GetGenericAllocator();
  constexpr Layout layout = Layout::Of<std::byte[kLargeInnerSize]>();
  Store(0, allocator.Allocate(layout));
  ASSERT_NE(Fetch(0), nullptr);
  // The returned pointer should fall within the allocator's memory region.
  ByteSpan bytes = GetBytes();
  EXPECT_GE(Fetch(0), bytes.data());
  EXPECT_LE(Fetch(0), bytes.data() + bytes.size());
  UseMemory(Fetch(0), layout.size());
}

void BlockAllocatorTestBase::AllocateSmall() {
  Allocator& allocator = GetGenericAllocator();
  constexpr Layout layout = Layout::Of<std::byte[kSmallInnerSize]>();
  Store(0, allocator.Allocate(layout));
  ASSERT_NE(Fetch(0), nullptr);
  ByteSpan bytes = GetBytes();
  EXPECT_GE(Fetch(0), bytes.data());
  EXPECT_LE(Fetch(0), bytes.data() + bytes.size());
  UseMemory(Fetch(0), layout.size());
}

void BlockAllocatorTestBase::AllocateTooLarge() {
  Allocator& allocator = GetGenericAllocator();
  Store(0, allocator.Allocate(Layout::Of<std::byte[kCapacity * 2]>()));
  EXPECT_EQ(Fetch(0), nullptr);
}

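// Verifies that allocations with an explicit alignment requirement return
// suitably aligned pointers.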
void BlockAllocatorTestBase::AllocateLargeAlignment() {
  Allocator& allocator = GetGenericAllocator();

  constexpr size_t kAlignment = 64;
  Store(0, allocator.Allocate(Layout(kLargeInnerSize, kAlignment)));
  ASSERT_NE(Fetch(0), nullptr);
  EXPECT_TRUE(IsAlignedAs(Fetch(0), kAlignment));
  UseMemory(Fetch(0), kLargeInnerSize);

  Store(1, allocator.Allocate(Layout(kLargeInnerSize, kAlignment)));
  ASSERT_NE(Fetch(1), nullptr);
  EXPECT_TRUE(IsAlignedAs(Fetch(1), kAlignment));
  UseMemory(Fetch(1), kLargeInnerSize);
}

void BlockAllocatorTestBase::AllocateAlignmentFailure() {
  // Allocate two blocks with an unaligned region between them.
  constexpr size_t kAlignment = 128;
  ByteSpan bytes = GetBytes();
  size_t outer_size =
      GetAlignedOffsetAfter(bytes.data(), kAlignment, kSmallInnerSize) +
      kAlignment;
  Allocator& allocator = GetGenericAllocator({
      {outer_size, Preallocation::kUsed},
      {kLargeOuterSize, Preallocation::kFree},
      {Preallocation::kSizeRemaining, Preallocation::kUsed},
  });

  // The allocator should be unable to create an aligned region.
  Store(1, allocator.Allocate(Layout(kLargeInnerSize, kAlignment)));
  EXPECT_EQ(Fetch(1), nullptr);
}

void BlockAllocatorTestBase::DeallocateNull() {
  Allocator& allocator = GetGenericAllocator();
  // Deallocating a null pointer should do nothing.
  allocator.Deallocate(nullptr);
}

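// Verifies that blocks can be freed in a different order than they were
// allocated, which exercises merging of neighboring free blocks.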
void BlockAllocatorTestBase::DeallocateShuffled() {
  Allocator& allocator = GetGenericAllocator();
  constexpr Layout layout = Layout::Of<std::byte[kSmallInnerSize]>();
  // Allocate until the pool is exhausted or every pointer slot is in use.
  for (size_t i = 0; i < kNumPtrs; ++i) {
    Store(i, allocator.Allocate(layout));
    if (Fetch(i) == nullptr) {
      break;
    }
  }

  // Mix up the order of allocations.
  for (size_t i = 0; i < kNumPtrs; ++i) {
    if (i % 2 == 0 && i + 1 < kNumPtrs) {
      Swap(i, i + 1);
    }
    if (i % 3 == 0 && i + 2 < kNumPtrs) {
      Swap(i, i + 2);
    }
  }

  // Deallocate everything.
  for (size_t i = 0; i < kNumPtrs; ++i) {
    allocator.Deallocate(Fetch(i));
    Store(i, nullptr);
  }
}

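// Verifies that resizing a null pointer fails.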
void BlockAllocatorTestBase::ResizeNull() {
  Allocator& allocator = GetGenericAllocator();
  size_t new_size = 1;
  EXPECT_FALSE(allocator.Resize(nullptr, new_size));
}

void BlockAllocatorTestBase::ResizeLargeSame() {
  Allocator& allocator = GetGenericAllocator({
      {kLargeOuterSize, Preallocation::kUsed},
      {Preallocation::kSizeRemaining, Preallocation::kUsed},
  });
  size_t new_size = kLargeInnerSize;
  ASSERT_TRUE(allocator.Resize(Fetch(0), new_size));
  UseMemory(Fetch(0), kLargeInnerSize);
}

void BlockAllocatorTestBase::ResizeLargeSmaller() {
  Allocator& allocator = GetGenericAllocator({
      {kLargeOuterSize, Preallocation::kUsed},
      {Preallocation::kSizeRemaining, Preallocation::kUsed},
  });
  size_t new_size = kSmallInnerSize;
  ASSERT_TRUE(allocator.Resize(Fetch(0), new_size));
  UseMemory(Fetch(0), kSmallInnerSize);
}

void BlockAllocatorTestBase::ResizeLargeLarger() {
  Allocator& allocator = GetGenericAllocator({
      {kLargeOuterSize, Preallocation::kUsed},
      {kLargeOuterSize, Preallocation::kFree},
      {Preallocation::kSizeRemaining, Preallocation::kUsed},
  });
  // Memory after ptr is free, so `Resize` can grow the block in place.
  size_t new_size = kLargeInnerSize * 2;
  ASSERT_TRUE(allocator.Resize(Fetch(0), new_size));
  UseMemory(Fetch(0), kLargeInnerSize * 2);
}

void BlockAllocatorTestBase::ResizeLargeLargerFailure() {
  Allocator& allocator = GetGenericAllocator({
      {kLargeOuterSize, Preallocation::kUsed},
      {Preallocation::kSizeRemaining, Preallocation::kUsed},
  });
  // Memory after ptr is already allocated, so `Resize` should fail.
  size_t new_size = kLargeInnerSize * 2;
  EXPECT_FALSE(allocator.Resize(Fetch(0), new_size));
}

void BlockAllocatorTestBase::ResizeSmallSame() {
  Allocator& allocator = GetGenericAllocator({
      {kSmallOuterSize, Preallocation::kUsed},
      {Preallocation::kSizeRemaining, Preallocation::kUsed},
  });
  size_t new_size = kSmallInnerSize;
  ASSERT_TRUE(allocator.Resize(Fetch(0), new_size));
  UseMemory(Fetch(0), kSmallInnerSize);
}

void BlockAllocatorTestBase::ResizeSmallSmaller() {
  Allocator& allocator = GetGenericAllocator({
      {kSmallOuterSize, Preallocation::kUsed},
      {Preallocation::kSizeRemaining, Preallocation::kUsed},
  });
  size_t new_size = kSmallInnerSize / 2;
  ASSERT_TRUE(allocator.Resize(Fetch(0), new_size));
  UseMemory(Fetch(0), kSmallInnerSize / 2);
}

void BlockAllocatorTestBase::ResizeSmallLarger() {
  Allocator& allocator = GetGenericAllocator({
      {kSmallOuterSize, Preallocation::kUsed},
      {kSmallOuterSize, Preallocation::kFree},
      {Preallocation::kSizeRemaining, Preallocation::kUsed},
  });
  // Memory after ptr is free, so `Resize` can grow the block in place.
  size_t new_size = kSmallInnerSize * 2;
  ASSERT_TRUE(allocator.Resize(Fetch(0), new_size));
  UseMemory(Fetch(0), kSmallInnerSize * 2);
}

void BlockAllocatorTestBase::ResizeSmallLargerFailure() {
  Allocator& allocator = GetGenericAllocator({
      {kSmallOuterSize, Preallocation::kUsed},
      {Preallocation::kSizeRemaining, Preallocation::kUsed},
  });
  // Memory after ptr is already allocated, so `Resize` should fail.
  size_t new_size = kSmallInnerSize * 2 + kDefaultBlockOverhead;
  EXPECT_FALSE(allocator.Resize(Fetch(0), new_size));
}

}  // namespace pw::allocator::test