/*
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree.
 */

#include <executorch/backends/vulkan/runtime/vk_api/Descriptor.h>

#include <executorch/backends/vulkan/runtime/utils/VecUtils.h>

#include <algorithm>
#include <utility>

namespace vkcompute {
namespace vkapi {

//
// BufferBindInfo
//

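// BufferBindInfo records the handle, memory offset, and memory range of a
// VulkanBuffer so that the buffer can be bound to a descriptor set later
// without holding a reference to the VulkanBuffer itself.
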
BufferBindInfo::BufferBindInfo()
    : handle(VK_NULL_HANDLE), offset(0u), range(0u) {}

BufferBindInfo::BufferBindInfo(const VulkanBuffer& buffer_p)
    : handle(buffer_p.handle()),
      offset(buffer_p.mem_offset()),
      range(buffer_p.mem_range()) {}

//
// ParamsBindList
//

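// ParamsBindList is an appendable collection of BufferBindInfo entries; it can
// be built up one BufferBindInfo at a time or merged with another list via
// append().
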
ParamsBindList::ParamsBindList(
    std::initializer_list<const BufferBindInfo> init_list) {
  bind_infos.resize(init_list.size());
  std::copy(init_list.begin(), init_list.end(), bind_infos.begin());
}

void ParamsBindList::append(const BufferBindInfo& bind_info) {
  bind_infos.emplace_back(bind_info);
}

void ParamsBindList::append(const ParamsBindList& other) {
  bind_infos.insert(
      bind_infos.end(), other.bind_infos.begin(), other.bind_infos.end());
}

//
// DescriptorSet
//

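// DescriptorSet wraps a VkDescriptorSet handle together with the shader layout
// signature it was allocated for. Calls to bind() only record the requested
// bindings; the actual vkUpdateDescriptorSets call is deferred until
// get_bind_handle() is called.
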
DescriptorSet::DescriptorSet(
    VkDevice device,
    VkDescriptorSet handle,
    ShaderLayout::Signature shader_layout_signature)
    : device_(device),
      handle_(handle),
      shader_layout_signature_(std::move(shader_layout_signature)),
      bindings_{} {}

DescriptorSet::DescriptorSet(DescriptorSet&& other) noexcept
    : device_(other.device_),
      handle_(other.handle_),
      shader_layout_signature_(std::move(other.shader_layout_signature_)),
      bindings_(std::move(other.bindings_)) {
  other.handle_ = VK_NULL_HANDLE;
}

DescriptorSet& DescriptorSet::operator=(DescriptorSet&& other) noexcept {
  device_ = other.device_;
  handle_ = other.handle_;
  shader_layout_signature_ = std::move(other.shader_layout_signature_);
  bindings_ = std::move(other.bindings_);

  other.handle_ = VK_NULL_HANDLE;

  return *this;
}

DescriptorSet& DescriptorSet::bind(
    const uint32_t idx,
    const VulkanBuffer& buffer) {
  VK_CHECK_COND(
      buffer.has_memory(),
      "Buffer must be bound to memory for it to be usable");

  DescriptorSet::ResourceBinding binder{};
  binder.binding_idx = idx; // binding_idx
  binder.descriptor_type = shader_layout_signature_[idx]; // descriptor_type
  binder.is_image = false; // is_image
  binder.resource_info.buffer_info.buffer = buffer.handle(); // buffer
  binder.resource_info.buffer_info.offset = buffer.mem_offset(); // offset
  binder.resource_info.buffer_info.range = buffer.mem_range(); // range
  add_binding(binder);

  return *this;
}

DescriptorSet& DescriptorSet::bind(
    const uint32_t idx,
    const BufferBindInfo& bind_info) {
  DescriptorSet::ResourceBinding binder{};
  binder.binding_idx = idx; // binding_idx
  binder.descriptor_type = shader_layout_signature_[idx]; // descriptor_type
  binder.is_image = false; // is_image
  binder.resource_info.buffer_info.buffer = bind_info.handle; // buffer
  binder.resource_info.buffer_info.offset = bind_info.offset; // offset
  binder.resource_info.buffer_info.range = bind_info.range; // range
  add_binding(binder);

  return *this;
}

DescriptorSet& DescriptorSet::bind(
    const uint32_t idx,
    const VulkanImage& image) {
  // If the image does not have an allocator attached, then it is externally
  // allocated; assume it is already bound to memory. Otherwise, it must be
  // bound to a VmaAllocation to be used.
  VK_CHECK_COND(
      image.vma_allocator() == VK_NULL_HANDLE || image.has_memory(),
      "Image must be bound to memory for it to be usable");

  VkImageLayout binding_layout = image.layout();
  if (shader_layout_signature_[idx] == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) {
    binding_layout = VK_IMAGE_LAYOUT_GENERAL;
  }

  DescriptorSet::ResourceBinding binder{};
  binder.binding_idx = idx; // binding_idx
  binder.descriptor_type = shader_layout_signature_[idx]; // descriptor_type
  binder.is_image = true; // is_image
  binder.resource_info.image_info.sampler = image.sampler(); // sampler
  binder.resource_info.image_info.imageView = image.image_view(); // imageView
  binder.resource_info.image_info.imageLayout = binding_layout; // imageLayout
  add_binding(binder);

  return *this;
}

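// Flushes all recorded bindings to the underlying VkDescriptorSet via
// vkUpdateDescriptorSets and returns the raw handle for use with a command
// buffer. Illustrative usage (a sketch; `pool`, `set_layout`, `signature`,
// `uniform_buffer`, `storage_image`, `pipeline_layout`, and `cmd` are assumed
// to be defined by the caller):
//
//   DescriptorSet set = pool.get_descriptor_set(set_layout, signature);
//   set.bind(0u, uniform_buffer).bind(1u, storage_image);
//   VkDescriptorSet handle = set.get_bind_handle();
//   vkCmdBindDescriptorSets(
//       cmd,
//       VK_PIPELINE_BIND_POINT_COMPUTE,
//       pipeline_layout,
//       0u, // firstSet
//       1u, // descriptorSetCount
//       &handle,
//       0u, // dynamicOffsetCount
//       nullptr);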
VkDescriptorSet DescriptorSet::get_bind_handle() const {
  std::vector<VkWriteDescriptorSet> write_descriptor_sets;

  for (const ResourceBinding& binding : bindings_) {
    VkWriteDescriptorSet write{
        VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET, // sType
        nullptr, // pNext
        handle_, // dstSet
        binding.binding_idx, // dstBinding
        0u, // dstArrayElement
        1u, // descriptorCount
        binding.descriptor_type, // descriptorType
        nullptr, // pImageInfo
        nullptr, // pBufferInfo
        nullptr, // pTexelBufferView
    };

    if (binding.is_image) {
      write.pImageInfo = &binding.resource_info.image_info;
    } else {
      write.pBufferInfo = &binding.resource_info.buffer_info;
    }

    write_descriptor_sets.emplace_back(write);
  }

  vkUpdateDescriptorSets(
      device_,
      write_descriptor_sets.size(),
      write_descriptor_sets.data(),
      0u,
      nullptr);

  VkDescriptorSet ret = handle_;

  return ret;
}

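// Records a binding, replacing any previously recorded binding that uses the
// same binding index.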
void DescriptorSet::add_binding(const ResourceBinding& binding) {
  const auto bindings_itr = std::find_if(
      bindings_.begin(),
      bindings_.end(),
      [binding_idx = binding.binding_idx](const ResourceBinding& other) {
        return other.binding_idx == binding_idx;
      });

  if (bindings_.end() == bindings_itr) {
    bindings_.emplace_back(binding);
  } else {
    *bindings_itr = binding;
  }
}

//
// DescriptorSetPile
//

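// DescriptorSetPile amortizes descriptor set allocation by requesting
// pile_size descriptor sets of a single layout from a VkDescriptorPool in one
// batch and handing them out individually via get_descriptor_set().
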
DescriptorSetPile::DescriptorSetPile(
    const uint32_t pile_size,
    VkDescriptorSetLayout descriptor_set_layout,
    VkDevice device,
    VkDescriptorPool descriptor_pool)
    : pile_size_{pile_size},
      set_layout_{descriptor_set_layout},
      device_{device},
      pool_{descriptor_pool},
      descriptors_{},
      in_use_(0u) {
  descriptors_.resize(pile_size_);
  allocate_new_batch();
}

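// Returns the next pre-allocated descriptor set, allocating a fresh batch
// first if the current batch has been exhausted.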
VkDescriptorSet DescriptorSetPile::get_descriptor_set() {
  // No-ops if there are descriptor sets available
  allocate_new_batch();

  VkDescriptorSet handle = descriptors_[in_use_];
  descriptors_[in_use_] = VK_NULL_HANDLE;

  in_use_++;
  return handle;
}

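// Allocates a new batch of descriptor sets, one per slot in descriptors_, all
// using the pile's set layout. No-ops if unused sets from the previous batch
// remain.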
void DescriptorSetPile::allocate_new_batch() {
  // No-ops if there are still descriptor sets available
  if (in_use_ < descriptors_.size() &&
      descriptors_[in_use_] != VK_NULL_HANDLE) {
    return;
  }

  std::vector<VkDescriptorSetLayout> layouts(descriptors_.size());
  std::fill(layouts.begin(), layouts.end(), set_layout_);

  const VkDescriptorSetAllocateInfo allocate_info{
      VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, // sType
      nullptr, // pNext
      pool_, // descriptorPool
      utils::safe_downcast<uint32_t>(layouts.size()), // descriptorSetCount
      layouts.data(), // pSetLayouts
  };

  VK_CHECK(
      vkAllocateDescriptorSets(device_, &allocate_info, descriptors_.data()));

  in_use_ = 0u;
}

//
// DescriptorPool
//

DescriptorPool::DescriptorPool(
    VkDevice device,
    const DescriptorPoolConfig& config)
    : device_(device),
      pool_(VK_NULL_HANDLE),
      config_(config),
      mutex_{},
      piles_{} {
  if (config.descriptor_pool_max_sets > 0) {
    init(config);
  }
}

DescriptorPool::~DescriptorPool() {
  if (pool_ == VK_NULL_HANDLE) {
    return;
  }
  vkDestroyDescriptorPool(device_, pool_, nullptr);
}

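// Creates the underlying VkDescriptorPool using the per-descriptor-type counts
// and the overall maxSets limit specified in the DescriptorPoolConfig.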
void DescriptorPool::init(const DescriptorPoolConfig& config) {
  VK_CHECK_COND(
      pool_ == VK_NULL_HANDLE,
      "Trying to init a DescriptorPool that has already been created!");

  config_ = config;

  std::vector<VkDescriptorPoolSize> type_sizes{
      {
          VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
          config_.descriptor_uniform_buffer_count,
      },
      {
          VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
          config_.descriptor_storage_buffer_count,
      },
      {
          VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
          config_.descriptor_combined_sampler_count,
      },
      {
          VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
          config_.descriptor_storage_image_count,
      },
  };

  const VkDescriptorPoolCreateInfo create_info{
      VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO, // sType
      nullptr, // pNext
      0u, // flags
      config_.descriptor_pool_max_sets, // maxSets
      static_cast<uint32_t>(type_sizes.size()), // poolSizeCount
      type_sizes.data(), // pPoolSizes
  };

  VK_CHECK(vkCreateDescriptorPool(device_, &create_info, nullptr, &pool_));
}

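// Returns a DescriptorSet for the given set layout, lazily creating a
// DescriptorSetPile for that layout on first use and reusing it afterwards.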
DescriptorSet DescriptorPool::get_descriptor_set(
    VkDescriptorSetLayout set_layout,
    const ShaderLayout::Signature& signature) {
  VK_CHECK_COND(
      pool_ != VK_NULL_HANDLE, "DescriptorPool has not yet been initialized!");

  auto it = piles_.find(set_layout);
  if (piles_.cend() == it) {
    it = piles_
             .insert({
                 set_layout,
                 DescriptorSetPile(
                     config_.descriptor_pile_sizes, set_layout, device_, pool_),
             })
             .first;
  }

  VkDescriptorSet handle = it->second.get_descriptor_set();

  return DescriptorSet(device_, handle, signature);
}

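// Resets the underlying VkDescriptorPool, returning every descriptor set
// allocated from it back to the pool, and clears the cached piles. Handles
// obtained before flush() must not be used afterwards.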
void DescriptorPool::flush() {
  if (pool_ != VK_NULL_HANDLE) {
    VK_CHECK(vkResetDescriptorPool(device_, pool_, 0u));
    piles_.clear();
  }
}

} // namespace vkapi
} // namespace vkcompute