xref: /aosp_15_r20/external/pigweed/pw_allocator/public/pw_allocator/block_allocator.h (revision 61c4878ac05f98d0ceed94b57d316916de578985)
// Copyright 2024 The Pigweed Authors
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy of
// the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
#pragma once

#include <cstddef>

#include "pw_allocator/allocator.h"
#include "pw_allocator/block/basic.h"
#include "pw_allocator/block/iterable.h"
#include "pw_allocator/block/poisonable.h"
#include "pw_allocator/block/result.h"
#include "pw_allocator/block/with_layout.h"
#include "pw_allocator/capability.h"
#include "pw_allocator/config.h"
#include "pw_allocator/fragmentation.h"
#include "pw_assert/assert.h"
#include "pw_bytes/span.h"
#include "pw_result/result.h"
#include "pw_status/status.h"

namespace pw::allocator {
namespace internal {

/// Block-independent base class of ``BlockAllocator``.
///
/// This class contains static methods which do not depend on the template
/// parameters of ``BlockAllocator`` that are used to determine the block type.
/// This allows the methods to be defined in a separate source file and use
/// macros that cannot be used in headers, e.g. PW_CHECK.
///
/// This class should not be used directly. Instead, use ``BlockAllocator`` or
/// one of its specializations.
class GenericBlockAllocator : public Allocator {
 public:
  // Not copyable or movable.
  GenericBlockAllocator(const GenericBlockAllocator&) = delete;
  GenericBlockAllocator& operator=(const GenericBlockAllocator&) = delete;
  GenericBlockAllocator(GenericBlockAllocator&&) = delete;
  GenericBlockAllocator& operator=(GenericBlockAllocator&&) = delete;

 protected:
  template <typename BlockType>
  static constexpr Capabilities GetCapabilities() {
    Capabilities common = kImplementsGetUsableLayout |
                          kImplementsGetAllocatedLayout |
                          kImplementsGetCapacity | kImplementsRecognizes;
    if constexpr (has_layout_v<BlockType>) {
      return common | kImplementsGetRequestedLayout;
    } else {
      return common;
    }
  }

  constexpr explicit GenericBlockAllocator(Capabilities capabilities)
      : Allocator(capabilities) {}

  /// Crashes with an informational message that a given block is allocated.
  ///
  /// This method is meant to be called by a ``BlockAllocator``'s destructor.
  /// There must not be any outstanding allocations from an allocator when it
  /// is destroyed.
  static void CrashOnAllocated(void* allocated);

  /// Crashes with an informational message that a given pointer does not
  /// belong to this allocator.
  static void CrashOnInvalidFree(void* freed);

  /// Crashes with an informational message that a given block was freed twice.
  static void CrashOnDoubleFree(void* freed);
};

}  // namespace internal

/// A memory allocator that uses a list of blocks.
///
/// This class does not implement `ChooseBlock` and cannot be used directly.
/// Instead, use one of its specializations.
///
/// NOTE!! Do NOT use memory returned from this allocator as the backing for
/// another allocator. If this is done, the `Query` method may incorrectly
/// think pointers returned by that allocator were created by this one, and
/// report that this allocator can de/reallocate them.
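///
/// The sketch below illustrates one way a specialization might be written. It
/// is an assumption-laden example, not part of this API: the class name
/// ``MyFirstFitAllocator`` is hypothetical, and it assumes the chosen block
/// type provides ``AllocFirst`` for claiming a free block.
///
/// @code{.cpp}
///   template <typename BlockType>
///   class MyFirstFitAllocator : public BlockAllocator<BlockType> {
///    private:
///     // Walks the block list and claims the first free block that can
///     // satisfy the requested layout.
///     BlockResult<BlockType> ChooseBlock(Layout layout) override {
///       for (BlockType* block : this->blocks()) {
///         auto result = BlockType::AllocFirst(std::move(block), layout);
///         if (result.ok()) {
///           return result;
///         }
///       }
///       return BlockResult<BlockType>(nullptr, Status::NotFound());
///     }
///   };
/// @endcode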
template <typename BlockType_>
class BlockAllocator : public internal::GenericBlockAllocator {
 private:
  using Base = internal::GenericBlockAllocator;

 public:
  using BlockType = BlockType_;
  using Range = typename BlockType::Range;

  static constexpr Capabilities kCapabilities =
      Base::GetCapabilities<BlockType>();
  static constexpr size_t kPoisonInterval = PW_ALLOCATOR_BLOCK_POISON_INTERVAL;

  ~BlockAllocator() override { Reset(); }

  /// Returns a ``Range`` of blocks tracking the memory of this allocator.
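  ///
  /// For example, the range can be used to walk every block and tally free
  /// space. This is an illustrative sketch only; `allocator` stands in for an
  /// initialized instance of a concrete specialization.
  ///
  /// @code{.cpp}
  ///   size_t free_bytes = 0;
  ///   for (auto* block : allocator.blocks()) {
  ///     if (block->IsFree()) {
  ///       free_bytes += block->InnerSize();
  ///     }
  ///   }
  /// @endcode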
  Range blocks() const;

  /// Returns fragmentation information for the block allocator's memory
  /// region.
  Fragmentation MeasureFragmentation() const;

  /// Sets the memory region to be used by this allocator.
  ///
  /// This method will instantiate an initial block using the memory region.
  ///
  /// @param[in]  region  Region of memory to use when satisfying allocation
  ///                     requests. The region MUST be valid as an argument to
  ///                     `BlockType::Init`.
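  ///
  /// A minimal usage sketch, assuming a hypothetical concrete specialization
  /// named ``MyFirstFitAllocator``; substitute whichever specialization and
  /// block type a project actually uses.
  ///
  /// @code{.cpp}
  ///   alignas(BlockType) std::array<std::byte, 0x1000> buffer;
  ///   MyFirstFitAllocator<BlockType> allocator;
  ///   allocator.Init(buffer);  // The region becomes the initial block.
  ///   void* ptr = allocator.Allocate(Layout::Of<uint32_t>());
  ///   allocator.Deallocate(ptr);
  /// @endcode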
  void Init(ByteSpan region);

  /// Sets the blocks to be used by this allocator.
  ///
  /// This method will use the sequence of blocks including and following
  /// `begin`. These blocks must be valid.
  ///
  /// @param[in]  begin               The first block for this allocator.
  ///                                 The block must not have a previous block.
  void Init(BlockType* begin) { Init(begin, nullptr); }

  /// Sets the blocks to be used by this allocator.
  ///
  /// This method will use the sequence of blocks as-is, which must be valid.
  ///
  /// @param[in]  begin   The first block for this allocator.
  /// @param[in]  end     The last block for this allocator. May be null, in
  ///                     which case the sequence including and following
  ///                     `begin` is used. If not null, the block must not have
  ///                     a next block.
  void Init(BlockType* begin, BlockType* end);

  /// Resets the allocator to an uninitialized state.
  ///
  /// At the time of the call, there MUST NOT be any outstanding allocated
  /// blocks from this allocator.
  void Reset();

 protected:
  using ReverseRange = typename BlockType::ReverseRange;

  constexpr explicit BlockAllocator() : Base(kCapabilities) {}

  /// Returns a ``ReverseRange`` of blocks tracking the memory of this
  /// allocator.
  ReverseRange rblocks();

  /// Returns the block associated with a pointer.
  ///
  /// If the given pointer is to this allocator's memory region, but not to a
  /// valid block, the memory is corrupted and this method will crash to assist
  /// in uncovering the underlying bug.
  ///
  /// @param  ptr           Pointer to an allocated block's usable space.
  ///
  /// @returns @rst
  ///
  /// .. pw-status-codes::
  ///
  ///    OK: Result contains a pointer to the block.
  ///
  ///    OUT_OF_RANGE: Given pointer is outside the allocator's memory.
  ///
  /// @endrst
  template <typename Ptr>
  Result<internal::copy_const_ptr_t<Ptr, BlockType*>> FromUsableSpace(
      Ptr ptr) const;

 private:
  using BlockResultPrev = internal::GenericBlockResult::Prev;
  using BlockResultNext = internal::GenericBlockResult::Next;

  /// @copydoc Allocator::Allocate
  void* DoAllocate(Layout layout) override;

  /// @copydoc Allocator::Deallocate
  void DoDeallocate(void* ptr) override;

  /// @copydoc Allocator::Deallocate
  void DoDeallocate(void* ptr, Layout) override { DoDeallocate(ptr); }

  /// @copydoc Allocator::Resize
  bool DoResize(void* ptr, size_t new_size) override;

  /// @copydoc Allocator::GetAllocated
  size_t DoGetAllocated() const override { return allocated_; }

  /// @copydoc Deallocator::GetInfo
  Result<Layout> DoGetInfo(InfoType info_type, const void* ptr) const override;

  /// Selects a free block to allocate from.
  ///
  /// This method represents the allocator-specific strategy of choosing which
  /// block should be used to satisfy allocation requests. If the returned
  /// result indicates success, it holds the chosen block.
  ///
  /// @param  layout  Same as ``Allocator::Allocate``.
  virtual BlockResult<BlockType> ChooseBlock(Layout layout) = 0;

  /// Indicates that a block will no longer be free.
  ///
  /// Does nothing by default. Derived classes may override this to do
  /// additional bookkeeping.
  ///
  /// @param  block   The block being reserved.
  virtual void ReserveBlock(BlockType&) {}

  /// Indicates that a block is now free.
  ///
  /// Does nothing by default. Derived classes may override this to do
  /// additional bookkeeping.
  ///
  /// @param  block   The block being freed.
  virtual void RecycleBlock(BlockType&) {}

  /// Returns whether the previous block exists and is free.
  static bool PrevIsFree(const BlockType* block) {
    auto* prev = block->Prev();
    return prev != nullptr && prev->IsFree();
  }

  /// Returns whether the next block exists and is free.
  static bool NextIsFree(const BlockType* block) {
    auto* next = block->Next();
    return next != nullptr && next->IsFree();
  }

  /// Ensures the pointer to the last block is correct after the given block is
  /// allocated or freed.
  void UpdateLast(BlockType* block);

  // Represents the range of blocks for this allocator.
  size_t capacity_ = 0;
  size_t allocated_ = 0;
  BlockType* first_ = nullptr;
  BlockType* last_ = nullptr;
  uint16_t unpoisoned_ = 0;
};

// Template method implementations

template <typename BlockType>
typename BlockAllocator<BlockType>::Range BlockAllocator<BlockType>::blocks()
    const {
  return Range(first_);
}

template <typename BlockType>
typename BlockAllocator<BlockType>::ReverseRange
BlockAllocator<BlockType>::rblocks() {
  if constexpr (is_reverse_iterable_v<BlockType>) {
    PW_ASSERT(last_ == nullptr || last_->Next() == nullptr);
    return ReverseRange(last_);
  }
}

template <typename BlockType>
void BlockAllocator<BlockType>::Init(ByteSpan region) {
  Result<BlockType*> result = BlockType::Init(region);
  Init(*result, nullptr);
}

template <typename BlockType>
void BlockAllocator<BlockType>::Init(BlockType* begin, BlockType* end) {
  PW_ASSERT(begin != nullptr);
  PW_ASSERT(begin->Prev() == nullptr);
  Reset();
  if (end == nullptr) {
    end = begin;
    for (BlockType* next = end->Next(); next != nullptr; next = end->Next()) {
      end = next;
    }
  } else {
    PW_ASSERT(begin <= end);
    PW_ASSERT(end->Next() == nullptr);
  }
  first_ = begin;
  last_ = end;

  for (auto* block : blocks()) {
    capacity_ += block->OuterSize();
    if (block->IsFree()) {
      RecycleBlock(*block);
    }
  }
}

template <typename BlockType>
void BlockAllocator<BlockType>::Reset() {
  for (auto* block : blocks()) {
    if (!block->IsFree()) {
      CrashOnAllocated(block);
    }
    ReserveBlock(*block);
  }
  capacity_ = 0;
  first_ = nullptr;
  last_ = nullptr;
  unpoisoned_ = 0;
}

template <typename BlockType>
void* BlockAllocator<BlockType>::DoAllocate(Layout layout) {
  if (capacity_ == 0) {
    // Not initialized.
    return nullptr;
  }

  PW_ASSERT(last_->Next() == nullptr);
  auto result = ChooseBlock(layout);
  if (!result.ok()) {
    // No valid block for request.
    return nullptr;
  }
  BlockType* block = result.block();
  allocated_ += block->OuterSize();
  switch (result.prev()) {
    case BlockResultPrev::kSplitNew:
      // New free blocks may be created when allocating.
      RecycleBlock(*(block->Prev()));
      break;
    case BlockResultPrev::kResizedLarger:
      // Extra bytes may be appended to the previous block.
      allocated_ += result.size();
      break;
    case BlockResultPrev::kUnchanged:
    case BlockResultPrev::kResizedSmaller:
      break;
  }
  if (result.next() == BlockResultNext::kSplitNew) {
    RecycleBlock(*(block->Next()));
  }

  UpdateLast(block);
  PW_ASSERT(block <= last_);

  return block->UsableSpace();
}

template <typename BlockType>
void BlockAllocator<BlockType>::DoDeallocate(void* ptr) {
  auto from_usable_space_result = FromUsableSpace(ptr);
  if (!from_usable_space_result.ok()) {
    CrashOnInvalidFree(ptr);
  }
  BlockType* block = *from_usable_space_result;
  if (block->IsFree()) {
    CrashOnDoubleFree(block);
  }

  // Neighboring blocks may be merged when freeing.
  if (auto* prev = block->Prev(); prev != nullptr && prev->IsFree()) {
    ReserveBlock(*prev);
  }
  if (auto* next = block->Next(); next != nullptr && next->IsFree()) {
    ReserveBlock(*next);
  }

  // Free the block and merge it with its neighbors, if possible.
  allocated_ -= block->OuterSize();
  auto free_result = BlockType::Free(std::move(block));
  block = free_result.block();
  UpdateLast(block);

  if (free_result.prev() == BlockResultPrev::kResizedSmaller) {
    // Bytes were reclaimed from the previous block.
    allocated_ -= free_result.size();
  }

  if constexpr (is_poisonable_v<BlockType> && kPoisonInterval != 0) {
    ++unpoisoned_;
    if (unpoisoned_ >= kPoisonInterval) {
      block->Poison();
      unpoisoned_ = 0;
    }
  }

  RecycleBlock(*block);
}

template <typename BlockType>
bool BlockAllocator<BlockType>::DoResize(void* ptr, size_t new_size) {
  auto result = FromUsableSpace(ptr);
  if (!result.ok()) {
    return false;
  }
  BlockType* block = *result;

  // Neighboring blocks may be merged when resizing.
  if (auto* next = block->Next(); next != nullptr && next->IsFree()) {
    ReserveBlock(*next);
  }

  size_t old_size = block->OuterSize();
  if (!block->Resize(new_size).ok()) {
    return false;
  }
  allocated_ -= old_size;
  allocated_ += block->OuterSize();
  UpdateLast(block);

  if (auto* next = block->Next(); next != nullptr && next->IsFree()) {
    RecycleBlock(*next);
  }

  return true;
}

template <typename BlockType>
Result<Layout> BlockAllocator<BlockType>::DoGetInfo(InfoType info_type,
                                                    const void* ptr) const {
  // Handle types not related to a block first.
  if (info_type == InfoType::kCapacity) {
    return Layout(capacity_);
  }
  // Get a block from the given pointer.
  auto result = FromUsableSpace(ptr);
  if (!result.ok()) {
    return Status::NotFound();
  }
  const BlockType* block = result.value();
  if (block->IsFree()) {
    return Status::FailedPrecondition();
  }
  if constexpr (kCapabilities.has(kImplementsGetRequestedLayout)) {
    if (info_type == InfoType::kRequestedLayoutOf) {
      return block->RequestedLayout();
    }
  }
  switch (info_type) {
    case InfoType::kUsableLayoutOf:
      return Layout(block->InnerSize(), BlockType::kAlignment);
    case InfoType::kAllocatedLayoutOf:
      return Layout(block->OuterSize(), BlockType::kAlignment);
    case InfoType::kRecognizes:
      return Layout();
    case InfoType::kCapacity:
    case InfoType::kRequestedLayoutOf:
    default:
      return Status::Unimplemented();
  }
}

template <typename BlockType>
Fragmentation BlockAllocator<BlockType>::MeasureFragmentation() const {
  Fragmentation fragmentation;
  for (auto block : blocks()) {
    if (block->IsFree()) {
      fragmentation.AddFragment(block->InnerSize() / BlockType::kAlignment);
    }
  }
  return fragmentation;
}

template <typename BlockType>
template <typename Ptr>
Result<internal::copy_const_ptr_t<Ptr, BlockType*>>
BlockAllocator<BlockType>::FromUsableSpace(Ptr ptr) const {
  if (ptr < first_->UsableSpace() || last_->UsableSpace() < ptr) {
    return Status::OutOfRange();
  }
  return BlockType::FromUsableSpace(ptr);
}

template <typename BlockType>
void BlockAllocator<BlockType>::UpdateLast(BlockType* block) {
  BlockType* next = block->Next();
  if (next == nullptr) {
    last_ = block;
  } else if (next->Next() == nullptr) {
    last_ = next;
  }
}

}  // namespace pw::allocator