1 // Copyright 2023 The Pigweed Authors
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License"); you may not
4 // use this file except in compliance with the License. You may obtain a copy of
5 // the License at
6 //
7 // https://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
11 // WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
12 // License for the specific language governing permissions and limitations under
13 // the License.
14
15 #include "pw_multibuf/chunk.h"
16
17 #include <memory>
18
19 #if __cplusplus >= 202002L
20 #include <ranges>
21 #endif // __cplusplus >= 202002L
22
23 #include "pw_allocator/testing.h"
24 #include "pw_multibuf/header_chunk_region_tracker.h"
25 #include "pw_unit_test/framework.h"
26
27 namespace pw::multibuf {
28 namespace {
29
30 using ::pw::allocator::test::AllocatorForTest;
31
/// User-defined literal that yields its value as a ``size_t``.
///
/// Lets tests compare sizes (e.g. ``EXPECT_EQ(chunk.size(), 0_size)``)
/// without sprinkling explicit, verbose casts everywhere.
constexpr size_t operator""_size(unsigned long long n) {
  return static_cast<size_t>(n);
}
37
// Arbitrary sizes used throughout the tests below. ``constexpr`` (rather
// than ``const``) makes the compile-time-constant intent explicit; the
// allocator size is used as a template argument.
constexpr size_t kArbitraryAllocatorSize = 1024;
constexpr size_t kArbitraryChunkSize = 32;
40
#if __cplusplus >= 202002L
// ``Chunk`` exposes contiguous bytes via data()/size(), so it should model
// the C++20 ``contiguous_range`` concept; verify that at compile time.
static_assert(std::ranges::contiguous_range<Chunk>);
#endif  // __cplusplus >= 202002L
44
TakesSpan(ByteSpan span)45 void TakesSpan([[maybe_unused]] ByteSpan span) {}
46
TEST(Chunk, IsImplicitlyConvertibleToSpan) {
  AllocatorForTest<kArbitraryAllocatorSize> allocator;
  auto maybe_chunk = HeaderChunkRegionTracker::AllocateRegionAsChunk(
      allocator, kArbitraryChunkSize);
  ASSERT_TRUE(maybe_chunk.has_value());
  // Passing ``**maybe_chunk`` (a ``Chunk``) where a ``ByteSpan`` parameter
  // is expected exercises the implicit conversion.
  TakesSpan(**maybe_chunk);
}
56
TEST(OwnedChunk, ReleaseDestroysChunkRegion) {
  AllocatorForTest<kArbitraryAllocatorSize> allocator;
  const auto& metrics = allocator.metrics();

  // Allocating the region performs one allocation (the tracker itself).
  auto region = HeaderChunkRegionTracker::AllocateRegion(allocator,
                                                         kArbitraryChunkSize);
  ASSERT_NE(region, nullptr);
  EXPECT_EQ(metrics.num_allocations.value(), 1_size);

  // Creating the first chunk performs a second allocation.
  std::optional<OwnedChunk> maybe_chunk = region->CreateFirstChunk();
  ASSERT_TRUE(maybe_chunk.has_value());
  OwnedChunk& owned = *maybe_chunk;
  EXPECT_EQ(metrics.num_allocations.value(), 2_size);
  EXPECT_EQ(owned.size(), kArbitraryChunkSize);

  // Releasing the only chunk must free both the chunk and the region.
  owned.Release();
  EXPECT_EQ(owned.size(), 0_size);
  EXPECT_EQ(metrics.num_deallocations.value(), 2_size);
  EXPECT_EQ(metrics.allocated_bytes.value(), 0_size);
}
76
TEST(OwnedChunk, DestructorDestroysChunkRegion) {
  AllocatorForTest<kArbitraryAllocatorSize> allocator;
  const auto& metrics = allocator.metrics();
  auto region = HeaderChunkRegionTracker::AllocateRegion(allocator,
                                                         kArbitraryChunkSize);
  ASSERT_NE(region, nullptr);
  EXPECT_EQ(metrics.num_allocations.value(), 1_size);

  {
    // Scope the chunk so its destructor runs before the checks below.
    std::optional<OwnedChunk> maybe_chunk = region->CreateFirstChunk();
    ASSERT_TRUE(maybe_chunk.has_value());
    EXPECT_EQ(metrics.num_allocations.value(), 2_size);
    EXPECT_EQ(maybe_chunk->size(), kArbitraryChunkSize);
  }

  // Destroying the only chunk must free both the chunk and the region.
  EXPECT_EQ(metrics.num_deallocations.value(), 2_size);
  EXPECT_EQ(metrics.allocated_bytes.value(), 0_size);
}
95
TEST(Chunk, DiscardPrefixDiscardsPrefixOfSpan) {
  AllocatorForTest<kArbitraryAllocatorSize> allocator;
  auto maybe_chunk = HeaderChunkRegionTracker::AllocateRegionAsChunk(
      allocator, kArbitraryChunkSize);
  ASSERT_TRUE(maybe_chunk.has_value());
  OwnedChunk& chunk = *maybe_chunk;
  const ConstByteSpan original = chunk;
  const size_t kPrefixLen = 4;
  chunk->DiscardPrefix(kPrefixLen);
  // The chunk now starts ``kPrefixLen`` bytes in and is that much shorter.
  EXPECT_EQ(chunk.data(), original.data() + kPrefixLen);
  EXPECT_EQ(chunk.size(), original.size() - kPrefixLen);
}
109
TEST(Chunk, TakePrefixTakesPrefixOfSpan) {
  AllocatorForTest<kArbitraryAllocatorSize> allocator;
  auto maybe_chunk = HeaderChunkRegionTracker::AllocateRegionAsChunk(
      allocator, kArbitraryChunkSize);
  ASSERT_TRUE(maybe_chunk.has_value());
  OwnedChunk& chunk = *maybe_chunk;
  const ConstByteSpan original = chunk;
  const size_t kTaken = 4;
  std::optional<OwnedChunk> maybe_front = chunk->TakePrefix(kTaken);
  ASSERT_TRUE(maybe_front.has_value());
  OwnedChunk& front = *maybe_front;
  // The new chunk owns the first ``kTaken`` bytes of the region...
  EXPECT_EQ(front->data(), original.data());
  EXPECT_EQ(front->size(), kTaken);
  // ...and the original chunk retains the remainder.
  EXPECT_EQ(chunk.data(), original.data() + kTaken);
  EXPECT_EQ(chunk.size(), original.size() - kTaken);
}
127
TEST(Chunk, TruncateDiscardsEndOfSpan) {
  AllocatorForTest<kArbitraryAllocatorSize> allocator;
  auto maybe_chunk = HeaderChunkRegionTracker::AllocateRegionAsChunk(
      allocator, kArbitraryChunkSize);
  ASSERT_TRUE(maybe_chunk.has_value());
  OwnedChunk& chunk = *maybe_chunk;
  const ConstByteSpan original = chunk;
  const size_t kShorter = 5;
  chunk->Truncate(original.size() - kShorter);
  // Truncation trims the tail; the start of the chunk is unchanged.
  EXPECT_EQ(chunk.data(), original.data());
  EXPECT_EQ(chunk.size(), original.size() - kShorter);
}
141
TEST(Chunk, TakeSuffixTakesEndOfSpan) {
  AllocatorForTest<kArbitraryAllocatorSize> allocator;
  auto maybe_chunk = HeaderChunkRegionTracker::AllocateRegionAsChunk(
      allocator, kArbitraryChunkSize);
  ASSERT_TRUE(maybe_chunk.has_value());
  OwnedChunk& chunk = *maybe_chunk;
  const ConstByteSpan original = chunk;
  const size_t kTaken = 5;
  std::optional<OwnedChunk> maybe_tail = chunk->TakeSuffix(kTaken);
  ASSERT_TRUE(maybe_tail.has_value());
  OwnedChunk& tail = *maybe_tail;
  // The new chunk owns the final ``kTaken`` bytes of the region...
  EXPECT_EQ(tail.size(), kTaken);
  EXPECT_EQ(tail.data(), original.data() + original.size() - kTaken);
  // ...and the original chunk keeps the front of the region.
  EXPECT_EQ(chunk.size(), original.size() - kTaken);
  EXPECT_EQ(chunk.data(), original.data());
}
159
TEST(Chunk, SliceRemovesSidesOfSpan) {
  AllocatorForTest<kArbitraryAllocatorSize> allocator;
  auto maybe_chunk = HeaderChunkRegionTracker::AllocateRegionAsChunk(
      allocator, kArbitraryChunkSize);
  ASSERT_TRUE(maybe_chunk.has_value());
  OwnedChunk& chunk = *maybe_chunk;
  const ConstByteSpan original = chunk;
  const size_t kBegin = 4;
  const size_t kEnd = 9;
  // Slice keeps only the half-open range [kBegin, kEnd), dropping bytes on
  // both sides.
  chunk->Slice(kBegin, kEnd);
  EXPECT_EQ(chunk.data(), original.data() + kBegin);
  EXPECT_EQ(chunk.size(), kEnd - kBegin);
}
174
TEST(Chunk, RegionPersistsUntilAllChunksReleased) {
  AllocatorForTest<kArbitraryAllocatorSize> allocator;
  const auto& metrics = allocator.metrics();
  std::optional<OwnedChunk> chunk_opt =
      HeaderChunkRegionTracker::AllocateRegionAsChunk(allocator,
                                                      kArbitraryChunkSize);
  ASSERT_TRUE(chunk_opt.has_value());
  auto& chunk = *chunk_opt;
  // One allocation for the region tracker, one for the chunk.
  EXPECT_EQ(metrics.num_allocations.value(), 2_size);
  const size_t kSplitPoint = 13;
  auto split_opt = chunk->TakePrefix(kSplitPoint);
  ASSERT_TRUE(split_opt.has_value());
  auto& split = *split_opt;
  // One allocation for the region tracker, one for each of two chunks.
  EXPECT_EQ(metrics.num_allocations.value(), 3_size);
  // Releasing one of the two chunks frees only that chunk; the shared
  // region (and its tracker) must stay alive for the remaining chunk.
  chunk.Release();
  EXPECT_EQ(metrics.num_deallocations.value(), 1_size);
  // Releasing the last chunk frees both it and the region tracker.
  split.Release();
  EXPECT_EQ(metrics.num_deallocations.value(), 3_size);
}
196
TEST(Chunk, ClaimPrefixReclaimsDiscardedPrefix) {
  AllocatorForTest<kArbitraryAllocatorSize> allocator;
  auto maybe_chunk = HeaderChunkRegionTracker::AllocateRegionAsChunk(
      allocator, kArbitraryChunkSize);
  ASSERT_TRUE(maybe_chunk.has_value());
  OwnedChunk& chunk = *maybe_chunk;
  const ConstByteSpan original = chunk;
  const size_t kPrefixLen = 4;
  chunk->DiscardPrefix(kPrefixLen);
  // No other chunk occupies the discarded bytes, so they can be reclaimed,
  // restoring the original span exactly.
  EXPECT_TRUE(chunk->ClaimPrefix(kPrefixLen));
  EXPECT_EQ(chunk.data(), original.data());
  EXPECT_EQ(chunk.size(), original.size());
}
211
TEST(Chunk, ClaimPrefixFailsOnFullRegionChunk) {
  AllocatorForTest<kArbitraryAllocatorSize> allocator;
  auto maybe_chunk = HeaderChunkRegionTracker::AllocateRegionAsChunk(
      allocator, kArbitraryChunkSize);
  ASSERT_TRUE(maybe_chunk.has_value());
  // The chunk already spans the entire region, so there is no room to grow
  // toward the front.
  EXPECT_FALSE((*maybe_chunk)->ClaimPrefix(1));
}
221
TEST(Chunk, ClaimPrefixFailsOnNeighboringChunk) {
  AllocatorForTest<kArbitraryAllocatorSize> allocator;
  auto maybe_chunk = HeaderChunkRegionTracker::AllocateRegionAsChunk(
      allocator, kArbitraryChunkSize);
  ASSERT_TRUE(maybe_chunk.has_value());
  OwnedChunk& chunk = *maybe_chunk;
  const size_t kSplitPoint = 22;
  std::optional<OwnedChunk> front = chunk->TakePrefix(kSplitPoint);
  ASSERT_TRUE(front.has_value());
  // The bytes ahead of ``chunk`` are owned by ``front``; they cannot be
  // claimed while that neighbor is alive.
  EXPECT_FALSE(chunk->ClaimPrefix(1));
}
234
TEST(Chunk,
     ClaimPrefixFailsAtStartOfRegionEvenAfterReleasingChunkAtEndOfRegion) {
  AllocatorForTest<kArbitraryAllocatorSize> allocator;
  auto maybe_chunk = HeaderChunkRegionTracker::AllocateRegionAsChunk(
      allocator, kArbitraryChunkSize);
  ASSERT_TRUE(maybe_chunk.has_value());
  OwnedChunk& chunk = *maybe_chunk;
  const size_t kTaken = 13;
  std::optional<OwnedChunk> tail = chunk->TakeSuffix(kTaken);
  ASSERT_TRUE(tail.has_value());
  tail->Release();
  // Even with the tail released, ``chunk`` still starts at the very
  // beginning of the region, so no bytes exist in front of it to claim.
  EXPECT_FALSE(chunk->ClaimPrefix(1));
}
249
TEST(Chunk, ClaimPrefixReclaimsPrecedingChunksDiscardedSuffix) {
  AllocatorForTest<kArbitraryAllocatorSize> allocator;
  std::optional<OwnedChunk> chunk_opt =
      HeaderChunkRegionTracker::AllocateRegionAsChunk(allocator,
                                                      kArbitraryChunkSize);
  ASSERT_TRUE(chunk_opt.has_value());
  auto& chunk = *chunk_opt;
  // Split off a preceding chunk, then shrink it from the back so a gap of
  // unowned bytes sits immediately before ``chunk``.
  const size_t kSplitPoint = 13;
  auto split_opt = chunk->TakePrefix(kSplitPoint);
  ASSERT_TRUE(split_opt.has_value());
  auto& split = *split_opt;
  const size_t kDiscard = 3;
  split->Truncate(split.size() - kDiscard);
  // ``chunk`` can reclaim exactly the discarded gap, and not a byte more.
  EXPECT_TRUE(chunk->ClaimPrefix(kDiscard));
  EXPECT_FALSE(chunk->ClaimPrefix(1));
}
266
TEST(Chunk, ClaimSuffixReclaimsTruncatedEnd) {
  AllocatorForTest<kArbitraryAllocatorSize> allocator;
  auto maybe_chunk = HeaderChunkRegionTracker::AllocateRegionAsChunk(
      allocator, kArbitraryChunkSize);
  ASSERT_TRUE(maybe_chunk.has_value());
  OwnedChunk& chunk = *maybe_chunk;
  const ConstByteSpan original = *chunk;
  const size_t kSuffixLen = 4;
  chunk->Truncate(original.size() - kSuffixLen);
  // The truncated bytes are unowned, so the chunk can grow back over them,
  // restoring the original span exactly.
  EXPECT_TRUE(chunk->ClaimSuffix(kSuffixLen));
  EXPECT_EQ(chunk->size(), original.size());
  EXPECT_EQ(chunk->data(), original.data());
}
281
TEST(Chunk, ClaimSuffixFailsOnFullRegionChunk) {
  AllocatorForTest<kArbitraryAllocatorSize> allocator;
  auto maybe_chunk = HeaderChunkRegionTracker::AllocateRegionAsChunk(
      allocator, kArbitraryChunkSize);
  ASSERT_TRUE(maybe_chunk.has_value());
  // The chunk already covers the whole region; no trailing bytes exist for
  // it to claim.
  EXPECT_FALSE((*maybe_chunk)->ClaimSuffix(1));
}
291
TEST(Chunk, ClaimSuffixFailsWithNeighboringChunk) {
  AllocatorForTest<kArbitraryAllocatorSize> allocator;
  auto maybe_chunk = HeaderChunkRegionTracker::AllocateRegionAsChunk(
      allocator, kArbitraryChunkSize);
  ASSERT_TRUE(maybe_chunk.has_value());
  OwnedChunk& chunk = *maybe_chunk;
  const size_t kSplitPoint = 22;
  std::optional<OwnedChunk> maybe_front = chunk->TakePrefix(kSplitPoint);
  ASSERT_TRUE(maybe_front.has_value());
  OwnedChunk& front = *maybe_front;
  // The bytes after ``front`` are owned by ``chunk``, so ``front`` cannot
  // grow toward the back while that neighbor is alive.
  EXPECT_FALSE(front->ClaimSuffix(1));
}
305
TEST(Chunk, ClaimSuffixFailsAtEndOfRegionEvenAfterReleasingFirstChunkInRegion) {
  AllocatorForTest<kArbitraryAllocatorSize> allocator;
  auto maybe_chunk = HeaderChunkRegionTracker::AllocateRegionAsChunk(
      allocator, kArbitraryChunkSize);
  ASSERT_TRUE(maybe_chunk.has_value());
  OwnedChunk& chunk = *maybe_chunk;
  const size_t kTaken = 22;
  std::optional<OwnedChunk> maybe_tail = chunk->TakeSuffix(kTaken);
  ASSERT_TRUE(maybe_tail.has_value());
  OwnedChunk& tail = *maybe_tail;
  // ``tail`` ends exactly at the end of the region, so there is nothing
  // after it to claim, regardless of what happens to earlier chunks.
  EXPECT_FALSE(tail->ClaimSuffix(1));
}
319
TEST(Chunk, ClaimSuffixReclaimsFollowingChunksDiscardedPrefix) {
  AllocatorForTest<kArbitraryAllocatorSize> allocator;
  std::optional<OwnedChunk> chunk_opt =
      HeaderChunkRegionTracker::AllocateRegionAsChunk(allocator,
                                                      kArbitraryChunkSize);
  ASSERT_TRUE(chunk_opt.has_value());
  auto& chunk = *chunk_opt;
  // After the split, ``split`` precedes ``chunk`` within the region.
  const size_t kSplitPoint = 22;
  auto split_opt = chunk->TakePrefix(kSplitPoint);
  ASSERT_TRUE(split_opt.has_value());
  auto& split = *split_opt;
  // Discarding ``chunk``'s prefix opens a gap right after ``split``.
  const size_t kDiscarded = 3;
  chunk->DiscardPrefix(kDiscarded);
  // ``split`` can reclaim exactly the discarded gap, and not a byte more.
  EXPECT_TRUE(split->ClaimSuffix(kDiscarded));
  EXPECT_FALSE(split->ClaimSuffix(1));
}
336
TEST(Chunk, MergeReturnsFalseForChunksFromDifferentRegions) {
  AllocatorForTest<kArbitraryAllocatorSize> allocator;
  auto maybe_first = HeaderChunkRegionTracker::AllocateRegionAsChunk(
      allocator, kArbitraryChunkSize);
  ASSERT_TRUE(maybe_first.has_value());
  OwnedChunk& first = *maybe_first;
  auto maybe_second = HeaderChunkRegionTracker::AllocateRegionAsChunk(
      allocator, kArbitraryChunkSize);
  ASSERT_TRUE(maybe_second.has_value());
  OwnedChunk& second = *maybe_second;
  // Chunks backed by unrelated regions can never be merged.
  EXPECT_FALSE(first->CanMerge(*second));
  EXPECT_FALSE(first->Merge(second));
  // A failed merge must leave both chunks untouched.
  EXPECT_EQ(first.size(), kArbitraryChunkSize);
  EXPECT_EQ(second.size(), kArbitraryChunkSize);
}
355
TEST(Chunk, MergeReturnsFalseForNonAdjacentChunksFromSameRegion) {
  const size_t kTakenFromOne = 8;
  const size_t kTakenFromTwo = 4;

  AllocatorForTest<kArbitraryAllocatorSize> allocator;
  auto maybe_chunk_1 = HeaderChunkRegionTracker::AllocateRegionAsChunk(
      allocator, kArbitraryChunkSize);
  ASSERT_TRUE(maybe_chunk_1.has_value());
  OwnedChunk& chunk_1 = *maybe_chunk_1;

  auto maybe_chunk_2 = chunk_1->TakeSuffix(kTakenFromOne);
  ASSERT_TRUE(maybe_chunk_2.has_value());
  OwnedChunk& chunk_2 = *maybe_chunk_2;

  auto maybe_chunk_3 = chunk_2->TakeSuffix(kTakenFromTwo);
  ASSERT_TRUE(maybe_chunk_3.has_value());
  OwnedChunk& chunk_3 = *maybe_chunk_3;

  // ``chunk_2`` sits between ``chunk_1`` and ``chunk_3``, so the outer two
  // are not adjacent and must refuse to merge.
  EXPECT_FALSE(chunk_1->CanMerge(*chunk_3));
  EXPECT_FALSE(chunk_1->Merge(chunk_3));
  // All three chunks retain their sizes after the failed merge.
  EXPECT_EQ(chunk_1.size(), kArbitraryChunkSize - kTakenFromOne);
  EXPECT_EQ(chunk_2.size(), kTakenFromOne - kTakenFromTwo);
  EXPECT_EQ(chunk_3.size(), kTakenFromTwo);
}
381
TEST(Chunk, MergeJoinsMultipleAdjacentChunksFromSameRegion) {
  const size_t kTakenFromOne = 8;
  const size_t kTakenFromTwo = 4;

  AllocatorForTest<kArbitraryAllocatorSize> allocator;
  std::optional<OwnedChunk> chunk_1_opt =
      HeaderChunkRegionTracker::AllocateRegionAsChunk(allocator,
                                                      kArbitraryChunkSize);
  ASSERT_TRUE(chunk_1_opt.has_value());
  OwnedChunk& chunk_1 = *chunk_1_opt;

  // Split the region into three adjacent chunks: 1 | 2 | 3.
  std::optional<OwnedChunk> chunk_2_opt = chunk_1->TakeSuffix(kTakenFromOne);
  ASSERT_TRUE(chunk_2_opt.has_value());
  OwnedChunk& chunk_2 = *chunk_2_opt;

  std::optional<OwnedChunk> chunk_3_opt = chunk_2->TakeSuffix(kTakenFromTwo);
  ASSERT_TRUE(chunk_3_opt.has_value());
  OwnedChunk& chunk_3 = *chunk_3_opt;

  // Merge 2 into 1, then 3 into the combined front chunk.
  EXPECT_TRUE(chunk_1->CanMerge(*chunk_2));
  EXPECT_TRUE(chunk_1->Merge(chunk_2));
  EXPECT_TRUE(chunk_1->CanMerge(*chunk_3));
  EXPECT_TRUE(chunk_1->Merge(chunk_3));

  // ``chunk_1`` now spans the whole region; merged-from chunks are emptied.
  EXPECT_EQ(chunk_1.size(), kArbitraryChunkSize);
  EXPECT_EQ(chunk_2.size(), 0_size);
  EXPECT_EQ(chunk_3.size(), 0_size);
}
410
TEST(Chunk, MergeJoinsAdjacentChunksFromSameRegion) {
  const size_t kTaken = 4;

  AllocatorForTest<kArbitraryAllocatorSize> allocator;
  auto maybe_front = HeaderChunkRegionTracker::AllocateRegionAsChunk(
      allocator, kArbitraryChunkSize);
  ASSERT_TRUE(maybe_front.has_value());
  OwnedChunk& front = *maybe_front;
  std::optional<OwnedChunk> maybe_back = front->TakeSuffix(kTaken);
  ASSERT_TRUE(maybe_back.has_value());
  OwnedChunk& back = *maybe_back;
  EXPECT_EQ(front.size(), kArbitraryChunkSize - kTaken);
  EXPECT_EQ(back.size(), kTaken);

  // Merging the adjacent pair restores a single full-region chunk and
  // empties the merged-from chunk.
  EXPECT_TRUE(front->CanMerge(*back));
  EXPECT_TRUE(front->Merge(back));
  EXPECT_EQ(front.size(), kArbitraryChunkSize);
  EXPECT_EQ(back.size(), 0_size);
}
431
432 } // namespace
433 } // namespace pw::multibuf
434