// Copyright 2024 The Pigweed Authors
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy of
// the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
#pragma once
15
#include <cstddef>
#include <initializer_list>
#include <iterator>
#include <limits>
#include <utility>

#include "lib/stdcompat/bit.h"
#include "pw_allocator/block/result.h"
#include "pw_allocator/layout.h"
#include "pw_assert/assert.h"
#include "pw_bytes/alignment.h"
#include "pw_bytes/span.h"
#include "pw_result/result.h"
#include "pw_status/status.h"
27
28 namespace pw::allocator::test {
29
30 /// Utility function that returns the offset from an addres a given number of
31 /// bytes `after` a given `ptr` to the next address that has a given
32 /// `alignment`.
33 ///
34 /// In other words, if offset is `GetAlignedOffsetAfter(ptr, alignment, after)`,
35 /// then `((uintptr_t)ptr + after + offset) % alignment` is 0.
36 ///
37 /// This is useful when dealing with blocks that need their usable space to be
38 /// aligned, e.g.
39 /// GetAlignedOffsetAfter(bytes_.data(), layout.alignment(), kBlockOverhead);
GetAlignedOffsetAfter(const void * ptr,size_t alignment,size_t after)40 inline size_t GetAlignedOffsetAfter(const void* ptr,
41 size_t alignment,
42 size_t after) {
43 auto addr = cpp20::bit_cast<uintptr_t>(ptr) + after;
44 return pw::AlignUp(addr, alignment) - addr;
45 }
46
47 /// Returns the minimum outer size for a block allocated from a layout with the
48 /// given `min_inner_size`.
49 template <typename BlockType>
GetOuterSize(size_t min_inner_size)50 constexpr size_t GetOuterSize(size_t min_inner_size) {
51 return BlockType::kBlockOverhead +
52 pw::AlignUp(min_inner_size, BlockType::kAlignment);
53 }
54
/// Represents an initial state for a memory block.
///
/// Unit tests can specify an initial block layout by passing a list of these
/// structs to `Preallocate`.
///
/// The outer size of each block must be more than `kBlockOverhead` for the
/// block type in use. The special `kSizeRemaining` may be used for at most
/// one block to give it any space not assigned to other blocks.
///
/// The state must be either `kFree` or `kUsed`.
///
/// Example:
/// @code{.cpp}
///   // BlockType = UnpoisonedBlock<uint32_t>, so kBlockOverhead == 8.
///   BlockType* block1 = Preallocate({
///     {32, kUsed},
///     {24, kFree},
///     {48, kUsed},
///     {kSizeRemaining, kFree},
///     {64, kUsed},
///   });
/// @endcode
struct Preallocation {
  /// The outer size of the block to preallocate.
  size_t outer_size;

  /// Whether the block should be preallocated in an allocated ("used") or
  /// deallocated ("free") state.
  enum class State {
    kUsed,
    kFree,
  } state;

  static constexpr State kUsed = State::kUsed;
  static constexpr State kFree = State::kFree;

  /// Special value indicating the block should comprise of the all remaining
  /// space not preallocated to any other block. May be used at most once.
  static constexpr size_t kSizeRemaining = std::numeric_limits<size_t>::max();
};
95
96 template <typename BlockType>
Preallocate(ByteSpan bytes,std::initializer_list<Preallocation> preallocs)97 BlockType* Preallocate(ByteSpan bytes,
98 std::initializer_list<Preallocation> preallocs) {
99 // First, look if any blocks use kSizeRemaining, and calculate how large
100 // that will be.
101 auto init_result = BlockType::Init(bytes);
102 PW_ASSERT(init_result.ok());
103 BlockType* block = *init_result;
104 size_t remaining_outer_size = block->OuterSize();
105 for (auto& preallocation : preallocs) {
106 if (preallocation.outer_size != Preallocation::kSizeRemaining) {
107 size_t outer_size =
108 AlignUp(preallocation.outer_size, BlockType::kAlignment);
109 PW_ASSERT(outer_size > BlockType::kBlockOverhead);
110 PW_ASSERT(remaining_outer_size >= outer_size);
111 remaining_outer_size -= outer_size;
112 }
113 }
114
115 // Now, construct objects in place.
116 bool next_is_free = false;
117 BlockType* next = nullptr;
118 for (auto it = std::rbegin(preallocs); it != std::rend(preallocs); ++it) {
119 PW_ASSERT(block != nullptr);
120 const Preallocation& preallocation = *it;
121 size_t outer_size = preallocation.outer_size;
122 if (outer_size == Preallocation::kSizeRemaining) {
123 outer_size = remaining_outer_size;
124 remaining_outer_size = 0;
125 } else {
126 outer_size = AlignUp(preallocation.outer_size, BlockType::kAlignment);
127 }
128 Layout layout(outer_size - BlockType::kBlockOverhead, 1);
129 auto alloc_result = BlockType::AllocLast(std::move(block), layout);
130 PW_ASSERT(alloc_result.ok());
131
132 using Next = internal::GenericBlockResult::Next;
133 PW_ASSERT(alloc_result.next() == Next::kUnchanged);
134
135 block = alloc_result.block();
136
137 if (next_is_free) {
138 BlockType::Free(std::move(next)).IgnoreUnlessStrict();
139 }
140 next_is_free = preallocation.state == Preallocation::kFree;
141 next = block;
142 block = block->Prev();
143 }
144
145 // Handle the edge case of the first block being free.
146 PW_ASSERT(block == nullptr);
147 if (next_is_free) {
148 auto free_result = BlockType::Free(std::move(next));
149 next = free_result.block();
150 }
151 return next;
152 }
153
154 } // namespace pw::allocator::test
155