//
// Copyright 2022 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// AllocatorHelperPool:
//    Manages the pool allocators used in the command buffers.
//

#ifndef LIBANGLE_RENDERER_VULKAN_ALLOCATORHELPERPOOL_H_
#define LIBANGLE_RENDERER_VULKAN_ALLOCATORHELPERPOOL_H_

#include "common/PoolAlloc.h"
#include "common/vulkan/vk_headers.h"
#include "libANGLE/renderer/vulkan/vk_command_buffer_utils.h"
#include "libANGLE/renderer/vulkan/vk_wrapper.h"

namespace rx
{
namespace vk
{
namespace priv
{
class SecondaryCommandBuffer;
}  // namespace priv

using DedicatedCommandMemoryAllocator = angle::PoolAllocator;

// Used in CommandBufferHelperCommon
class DedicatedCommandBlockAllocator
{
  public:
    DedicatedCommandBlockAllocator() = default;
    void resetAllocator();
    bool hasAllocatorLinks() const { return false; }

    static constexpr size_t kDefaultPoolAllocatorPageSize = 16 * 1024;
    void init()
    {
        mAllocator.initialize(kDefaultPoolAllocatorPageSize, 1);
        // Push a scope into the pool allocator so we can easily free and re-init on reset()
        mAllocator.push();
    }

    // Placeholder functions for attaching and detaching the allocator.
    void attachAllocator(DedicatedCommandMemoryAllocator *allocator) {}
    DedicatedCommandMemoryAllocator *detachAllocator(bool isCommandBufferEmpty) { return nullptr; }

    DedicatedCommandMemoryAllocator *getAllocator() { return &mAllocator; }

  private:
    // Using a pool allocator per CBH to avoid threading issues that occur w/ a shared allocator
    // between multiple CBHs.
    DedicatedCommandMemoryAllocator mAllocator;
};

// Used in SecondaryCommandBuffer
class DedicatedCommandBlockPool final
{
  public:
    DedicatedCommandBlockPool()
        : mAllocator(nullptr),
          mCurrentWritePointer(nullptr),
          mCurrentBytesRemaining(0),
          mCommandBuffer(nullptr)
    {}

    static constexpr size_t kCommandHeaderSize = 4;
    using CommandHeaderIDType                  = uint16_t;
    // Make sure the size of the command header ID type is less than the total command header size.
    static_assert(sizeof(CommandHeaderIDType) < kCommandHeaderSize, "Check size of CommandHeader");
    // The pool allocator uses 16kB pages w/ a 16-byte header = 16368 usable bytes. To minimize
    // waste, the ideal block size would be 16368 / 12 = 1364; it is rounded down to 1360 to keep
    // 8-byte alignment. Also better perf than 1024 due to fewer block allocations.
    static constexpr size_t kBlockSize = 1360;
    // Make sure the block size is 8-byte aligned to avoid ASAN errors.
    static_assert((kBlockSize % 8) == 0, "Check kBlockSize alignment");

    void setCommandBuffer(priv::SecondaryCommandBuffer *commandBuffer)
    {
        mCommandBuffer = commandBuffer;
    }
    void resetCommandBuffer() { mCommandBuffer = nullptr; }

    void reset(CommandBufferCommandTracker *commandBufferTracker);

    // Initialize the SecondaryCommandBuffer by setting the allocator it will use
    angle::Result initialize(DedicatedCommandMemoryAllocator *allocator);

    bool valid() const { return mAllocator != nullptr; }
    bool empty() const;

    void getMemoryUsageStats(size_t *usedMemoryOut, size_t *allocatedMemoryOut) const;

    void onNewVariableSizedCommand(const size_t requiredSize,
                                   const size_t allocationSize,
                                   uint8_t **headerOut)
    {
        if (mCurrentBytesRemaining < requiredSize)
        {
            // variable size command can potentially exceed default cmd allocation blockSize
            if (requiredSize <= kBlockSize)
            {
                allocateNewBlock();
            }
            else
            {
                // Make sure allocation is 4-byte aligned
                const size_t alignedSize = roundUpPow2<size_t>(requiredSize, 4);
                ASSERT((alignedSize % 4) == 0);
                allocateNewBlock(alignedSize);
            }
        }

        *headerOut = updateHeaderAndAllocatorParams(allocationSize);
    }

    void onNewCommand(const size_t requiredSize, const size_t allocationSize, uint8_t **headerOut)
    {
        if (mCurrentBytesRemaining < requiredSize)
        {
            ASSERT(requiredSize <= kBlockSize);
            allocateNewBlock();
        }

        *headerOut = updateHeaderAndAllocatorParams(allocationSize);
    }

    // Placeholder functions
    void terminateLastCommandBlock() {}
    void attachAllocator(vk::DedicatedCommandMemoryAllocator *source) {}
    void detachAllocator(vk::DedicatedCommandMemoryAllocator *destination) {}

  private:
    void allocateNewBlock(size_t blockSize = kBlockSize);

    uint8_t *updateHeaderAndAllocatorParams(size_t allocationSize)
    {
        mCurrentBytesRemaining -= allocationSize;
        uint8_t *headerPointer = mCurrentWritePointer;
        mCurrentWritePointer += allocationSize;
        // Set next cmd header to Invalid (0) so cmd sequence will be terminated
        reinterpret_cast<CommandHeaderIDType &>(*mCurrentWritePointer) = 0;

        return headerPointer;
    }

    // Using a pool allocator per CBH to avoid threading issues that occur w/ shared allocator
    // between multiple CBHs.
    DedicatedCommandMemoryAllocator *mAllocator;
    uint8_t *mCurrentWritePointer;
    size_t mCurrentBytesRemaining;

    // Points to the parent command buffer.
    priv::SecondaryCommandBuffer *mCommandBuffer;
};

}  // namespace vk
}  // namespace rx

#endif  // LIBANGLE_RENDERER_VULKAN_ALLOCATORHELPERPOOL_H_
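
// Illustrative usage sketch (not part of the ANGLE API surface): how the two classes above are
// meant to fit together, inferred from this header alone. The real wiring lives in
// CommandBufferHelperCommon and priv::SecondaryCommandBuffer; `secondaryCommandBuffer` is a
// hypothetical priv::SecondaryCommandBuffer*, and the sizes are invented for the example.
//
//   vk::DedicatedCommandBlockAllocator blockAllocator;
//   blockAllocator.init();  // 16kB pool pages; one scope pushed so reset() can free and re-init
//
//   vk::DedicatedCommandBlockPool blockPool;
//   blockPool.setCommandBuffer(secondaryCommandBuffer);
//   (void)blockPool.initialize(blockAllocator.getAllocator());
//
//   // Recording a fixed-size command: the pool returns a header pointer inside the current
//   // 1360-byte block, allocating a fresh block from the pool allocator if the command
//   // does not fit. requiredSize is shown larger than allocationSize on the assumption that
//   // callers reserve room for the terminating header written just past the allocation.
//   uint8_t *commandStorage = nullptr;
//   blockPool.onNewCommand(/*requiredSize=*/28, /*allocationSize=*/24, &commandStorage);
//   // updateHeaderAndAllocatorParams() has already zeroed the 2-byte command ID following the
//   // returned storage, so the command stream stays terminated after every allocation.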