1 //
2 // Copyright 2019 The ANGLE Project Authors. All rights reserved.
3 // Use of this source code is governed by a BSD-style license that can be
4 // found in the LICENSE file.
5 //
6 // vk_wrapper:
//    Wrapper classes around Vulkan objects. In an ideal world we could generate this from
//    vk.xml, or reuse the generator in the vkhpp tool. For now this is manually written,
//    and missing functions and objects must be added as we need them.
10
11 #ifndef LIBANGLE_RENDERER_VULKAN_VK_WRAPPER_H_
12 #define LIBANGLE_RENDERER_VULKAN_VK_WRAPPER_H_
13
14 #include "common/vulkan/vk_headers.h"
15 #include "libANGLE/renderer/renderer_utils.h"
16 #include "libANGLE/renderer/vulkan/vk_mem_alloc_wrapper.h"
17 #include "libANGLE/trace.h"
18
19 namespace rx
20 {
21 enum class DescriptorSetIndex : uint32_t;
22
23 namespace vk
24 {
25 // Helper macros that apply to all the wrapped object types.
26 // Unimplemented handle types:
27 // Instance
28 // PhysicalDevice
29 // Device
30 // Queue
31 // DescriptorSet
32
33 #define ANGLE_HANDLE_TYPES_X(FUNC) \
34 FUNC(Allocation) \
35 FUNC(Allocator) \
36 FUNC(Buffer) \
37 FUNC(BufferBlock) \
38 FUNC(BufferView) \
39 FUNC(CommandPool) \
40 FUNC(DescriptorPool) \
41 FUNC(DescriptorSetLayout) \
42 FUNC(DeviceMemory) \
43 FUNC(Event) \
44 FUNC(Fence) \
45 FUNC(Framebuffer) \
46 FUNC(Image) \
47 FUNC(ImageView) \
48 FUNC(Pipeline) \
49 FUNC(PipelineCache) \
50 FUNC(PipelineLayout) \
51 FUNC(QueryPool) \
52 FUNC(RenderPass) \
53 FUNC(Sampler) \
54 FUNC(SamplerYcbcrConversion) \
55 FUNC(Semaphore) \
56 FUNC(ShaderModule)
57
58 #define ANGLE_COMMA_SEP_FUNC(TYPE) TYPE,
59
60 enum class HandleType
61 {
62 Invalid,
63 CommandBuffer,
64 ANGLE_HANDLE_TYPES_X(ANGLE_COMMA_SEP_FUNC) EnumCount
65 };
66
67 #undef ANGLE_COMMA_SEP_FUNC
68
69 #define ANGLE_PRE_DECLARE_CLASS_FUNC(TYPE) class TYPE;
ANGLE_HANDLE_TYPES_X(ANGLE_PRE_DECLARE_CLASS_FUNC)70 ANGLE_HANDLE_TYPES_X(ANGLE_PRE_DECLARE_CLASS_FUNC)
71 namespace priv
72 {
73 class CommandBuffer;
74 } // namespace priv
75 #undef ANGLE_PRE_DECLARE_CLASS_FUNC
76
77 // Returns the HandleType of a Vk Handle.
78 template <typename T>
79 struct HandleTypeHelper;
80
81 #define ANGLE_HANDLE_TYPE_HELPER_FUNC(TYPE) \
82 template <> \
83 struct HandleTypeHelper<TYPE> \
84 { \
85 constexpr static HandleType kHandleType = HandleType::TYPE; \
86 };
87
88 ANGLE_HANDLE_TYPES_X(ANGLE_HANDLE_TYPE_HELPER_FUNC)
89 template <>
90 struct HandleTypeHelper<priv::CommandBuffer>
91 {
92 constexpr static HandleType kHandleType = HandleType::CommandBuffer;
93 };
94
95 #undef ANGLE_HANDLE_TYPE_HELPER_FUNC
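// Illustrative example (not part of the original header): HandleTypeHelper lets templated code
// recover the HandleType enum value for a wrapper type at compile time, e.g.:
//
//   static_assert(HandleTypeHelper<Buffer>::kHandleType == HandleType::Buffer, "");
//   static_assert(HandleTypeHelper<priv::CommandBuffer>::kHandleType == HandleType::CommandBuffer,
//                 "");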
96
// Base class for all wrapped Vulkan objects. Implements several common helper routines.
98 template <typename DerivedT, typename HandleT>
99 class WrappedObject : angle::NonCopyable
100 {
101 public:
102 HandleT getHandle() const { return mHandle; }
103 void setHandle(HandleT handle) { mHandle = handle; }
104 bool valid() const { return (mHandle != VK_NULL_HANDLE); }
105
106 const HandleT *ptr() const { return &mHandle; }
107
108 HandleT release()
109 {
110 HandleT handle = mHandle;
111 mHandle = VK_NULL_HANDLE;
112 return handle;
113 }
114
115 protected:
116 WrappedObject() : mHandle(VK_NULL_HANDLE) {}
117 ~WrappedObject() { ASSERT(!valid()); }
118
119 WrappedObject(WrappedObject &&other) : mHandle(other.mHandle)
120 {
121 other.mHandle = VK_NULL_HANDLE;
122 }
123
124 // Only works to initialize empty objects, since we don't have the device handle.
125 WrappedObject &operator=(WrappedObject &&other)
126 {
127 ASSERT(!valid());
128 std::swap(mHandle, other.mHandle);
129 return *this;
130 }
131
132 HandleT mHandle;
133 };
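// Illustrative lifetime sketch (not part of the original header): wrapped objects do not free
// themselves, since the wrapper does not store the VkDevice. The destructor only asserts that the
// handle has already been released, so callers must destroy explicitly. Assuming a valid
// `VkDevice device` and a filled-in `VkSamplerCreateInfo createInfo` (hypothetical names):
//
//   Sampler sampler;
//   if (sampler.init(device, createInfo) == VK_SUCCESS)
//   {
//       VkSampler raw = sampler.getHandle();  // raw handle for direct Vulkan calls
//       // ... use the sampler ...
//   }
//   sampler.destroy(device);  // required before ~Sampler() runs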
134
135 class CommandPool final : public WrappedObject<CommandPool, VkCommandPool>
136 {
137 public:
138 CommandPool() = default;
139
140 void destroy(VkDevice device);
141 VkResult reset(VkDevice device, VkCommandPoolResetFlags flags);
142 void freeCommandBuffers(VkDevice device,
143 uint32_t commandBufferCount,
144 const VkCommandBuffer *commandBuffers);
145
146 VkResult init(VkDevice device, const VkCommandPoolCreateInfo &createInfo);
147 };
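// Minimal usage sketch (illustrative only), assuming a valid `VkDevice device` and a populated
// `VkCommandPoolCreateInfo poolCreateInfo`:
//
//   CommandPool commandPool;
//   if (commandPool.init(device, poolCreateInfo) == VK_SUCCESS)
//   {
//       // ... allocate and record command buffers from this pool ...
//       (void)commandPool.reset(device, 0);  // recycles every command buffer in the pool at once
//       commandPool.destroy(device);
//   }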
148
149 class Pipeline final : public WrappedObject<Pipeline, VkPipeline>
150 {
151 public:
152 Pipeline() = default;
153 void destroy(VkDevice device);
154
155 VkResult initGraphics(VkDevice device,
156 const VkGraphicsPipelineCreateInfo &createInfo,
157 const PipelineCache &pipelineCacheVk);
158 VkResult initCompute(VkDevice device,
159 const VkComputePipelineCreateInfo &createInfo,
160 const PipelineCache &pipelineCacheVk);
161 };
162
163 namespace priv
164 {
165
166 // Helper class that wraps a Vulkan command buffer.
167 class CommandBuffer : public WrappedObject<CommandBuffer, VkCommandBuffer>
168 {
169 public:
170 CommandBuffer() = default;
171
172 VkCommandBuffer releaseHandle();
173
    // This is used for normal pool-allocated command buffers. It only resets the handle;
    // the command buffer itself is freed when its pool is reset or destroyed.
175 void destroy(VkDevice device);
176
177 // This is used in conjunction with VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT.
178 void destroy(VkDevice device, const CommandPool &commandPool);
179
180 VkResult init(VkDevice device, const VkCommandBufferAllocateInfo &createInfo);
181
182 using WrappedObject::operator=;
183
184 static bool SupportsQueries(const VkPhysicalDeviceFeatures &features)
185 {
186 return (features.inheritedQueries == VK_TRUE);
187 }
188
    // When this wrapper backs ANGLE's secondary command buffers, they are genuine Vulkan
    // secondary command buffers executed within a primary command buffer, i.e. their commands
    // are not inlined into the primary command buffer.
191 static constexpr bool ExecutesInline() { return false; }
192
193 VkResult begin(const VkCommandBufferBeginInfo &info);
194
195 void beginQuery(const QueryPool &queryPool, uint32_t query, VkQueryControlFlags flags);
196
197 void beginRenderPass(const VkRenderPassBeginInfo &beginInfo, VkSubpassContents subpassContents);
198 void beginRendering(const VkRenderingInfo &beginInfo);
199
200 void bindDescriptorSets(const PipelineLayout &layout,
201 VkPipelineBindPoint pipelineBindPoint,
202 DescriptorSetIndex firstSet,
203 uint32_t descriptorSetCount,
204 const VkDescriptorSet *descriptorSets,
205 uint32_t dynamicOffsetCount,
206 const uint32_t *dynamicOffsets);
207 void bindGraphicsPipeline(const Pipeline &pipeline);
208 void bindComputePipeline(const Pipeline &pipeline);
209 void bindPipeline(VkPipelineBindPoint pipelineBindPoint, const Pipeline &pipeline);
210
211 void bindIndexBuffer(const Buffer &buffer, VkDeviceSize offset, VkIndexType indexType);
212 void bindVertexBuffers(uint32_t firstBinding,
213 uint32_t bindingCount,
214 const VkBuffer *buffers,
215 const VkDeviceSize *offsets);
216 void bindVertexBuffers2(uint32_t firstBinding,
217 uint32_t bindingCount,
218 const VkBuffer *buffers,
219 const VkDeviceSize *offsets,
220 const VkDeviceSize *sizes,
221 const VkDeviceSize *strides);
222
223 void blitImage(const Image &srcImage,
224 VkImageLayout srcImageLayout,
225 const Image &dstImage,
226 VkImageLayout dstImageLayout,
227 uint32_t regionCount,
228 const VkImageBlit *regions,
229 VkFilter filter);
230
231 void clearColorImage(const Image &image,
232 VkImageLayout imageLayout,
233 const VkClearColorValue &color,
234 uint32_t rangeCount,
235 const VkImageSubresourceRange *ranges);
236 void clearDepthStencilImage(const Image &image,
237 VkImageLayout imageLayout,
238 const VkClearDepthStencilValue &depthStencil,
239 uint32_t rangeCount,
240 const VkImageSubresourceRange *ranges);
241
242 void clearAttachments(uint32_t attachmentCount,
243 const VkClearAttachment *attachments,
244 uint32_t rectCount,
245 const VkClearRect *rects);
246
247 void copyBuffer(const Buffer &srcBuffer,
248 const Buffer &destBuffer,
249 uint32_t regionCount,
250 const VkBufferCopy *regions);
251
252 void copyBufferToImage(VkBuffer srcBuffer,
253 const Image &dstImage,
254 VkImageLayout dstImageLayout,
255 uint32_t regionCount,
256 const VkBufferImageCopy *regions);
257 void copyImageToBuffer(const Image &srcImage,
258 VkImageLayout srcImageLayout,
259 VkBuffer dstBuffer,
260 uint32_t regionCount,
261 const VkBufferImageCopy *regions);
262 void copyImage(const Image &srcImage,
263 VkImageLayout srcImageLayout,
264 const Image &dstImage,
265 VkImageLayout dstImageLayout,
266 uint32_t regionCount,
267 const VkImageCopy *regions);
268
269 void dispatch(uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ);
270 void dispatchIndirect(const Buffer &buffer, VkDeviceSize offset);
271
272 void draw(uint32_t vertexCount,
273 uint32_t instanceCount,
274 uint32_t firstVertex,
275 uint32_t firstInstance);
276 void drawIndexed(uint32_t indexCount,
277 uint32_t instanceCount,
278 uint32_t firstIndex,
279 int32_t vertexOffset,
280 uint32_t firstInstance);
281 void drawIndexedIndirect(const Buffer &buffer,
282 VkDeviceSize offset,
283 uint32_t drawCount,
284 uint32_t stride);
285 void drawIndirect(const Buffer &buffer,
286 VkDeviceSize offset,
287 uint32_t drawCount,
288 uint32_t stride);
289
290 VkResult end();
291 void endQuery(const QueryPool &queryPool, uint32_t query);
292 void endRenderPass();
293 void endRendering();
294 void executeCommands(uint32_t commandBufferCount, const CommandBuffer *commandBuffers);
295
296 void getMemoryUsageStats(size_t *usedMemoryOut, size_t *allocatedMemoryOut) const;
297
298 void fillBuffer(const Buffer &dstBuffer,
299 VkDeviceSize dstOffset,
300 VkDeviceSize size,
301 uint32_t data);
302
303 void imageBarrier(VkPipelineStageFlags srcStageMask,
304 VkPipelineStageFlags dstStageMask,
305 const VkImageMemoryBarrier &imageMemoryBarrier);
306
307 void imageBarrier2(const VkImageMemoryBarrier2 &imageMemoryBarrier2);
308
309 void imageWaitEvent(const VkEvent &event,
310 VkPipelineStageFlags srcStageMask,
311 VkPipelineStageFlags dstStageMask,
312 const VkImageMemoryBarrier &imageMemoryBarrier);
313
314 void nextSubpass(VkSubpassContents subpassContents);
315
316 void memoryBarrier(VkPipelineStageFlags srcStageMask,
317 VkPipelineStageFlags dstStageMask,
318 const VkMemoryBarrier &memoryBarrier);
319
320 void memoryBarrier2(const VkMemoryBarrier2 &memoryBarrier2);
321
322 void pipelineBarrier(VkPipelineStageFlags srcStageMask,
323 VkPipelineStageFlags dstStageMask,
324 VkDependencyFlags dependencyFlags,
325 uint32_t memoryBarrierCount,
326 const VkMemoryBarrier *memoryBarriers,
327 uint32_t bufferMemoryBarrierCount,
328 const VkBufferMemoryBarrier *bufferMemoryBarriers,
329 uint32_t imageMemoryBarrierCount,
330 const VkImageMemoryBarrier *imageMemoryBarriers);
331
332 void pipelineBarrier2(VkDependencyFlags dependencyFlags,
333 uint32_t memoryBarrierCount,
334 const VkMemoryBarrier2 *memoryBarriers2,
335 uint32_t bufferMemoryBarrierCount,
336 const VkBufferMemoryBarrier2 *bufferMemoryBarriers2,
337 uint32_t imageMemoryBarrierCount,
338 const VkImageMemoryBarrier2 *imageMemoryBarriers2);
339
340 void pushConstants(const PipelineLayout &layout,
341 VkShaderStageFlags flag,
342 uint32_t offset,
343 uint32_t size,
344 const void *data);
345
346 void setBlendConstants(const float blendConstants[4]);
347 void setCullMode(VkCullModeFlags cullMode);
348 void setDepthBias(float depthBiasConstantFactor,
349 float depthBiasClamp,
350 float depthBiasSlopeFactor);
351 void setDepthBiasEnable(VkBool32 depthBiasEnable);
352 void setDepthCompareOp(VkCompareOp depthCompareOp);
353 void setDepthTestEnable(VkBool32 depthTestEnable);
354 void setDepthWriteEnable(VkBool32 depthWriteEnable);
355 void setEvent(VkEvent event, VkPipelineStageFlags stageMask);
356 void setFragmentShadingRate(const VkExtent2D *fragmentSize,
357 VkFragmentShadingRateCombinerOpKHR ops[2]);
358 void setFrontFace(VkFrontFace frontFace);
359 void setLineWidth(float lineWidth);
360 void setLogicOp(VkLogicOp logicOp);
361 void setPrimitiveRestartEnable(VkBool32 primitiveRestartEnable);
362 void setRasterizerDiscardEnable(VkBool32 rasterizerDiscardEnable);
363 void setRenderingAttachmentLocations(const VkRenderingAttachmentLocationInfoKHR *info);
364 void setRenderingInputAttachmentIndicates(const VkRenderingInputAttachmentIndexInfoKHR *info);
365 void setScissor(uint32_t firstScissor, uint32_t scissorCount, const VkRect2D *scissors);
366 void setStencilCompareMask(uint32_t compareFrontMask, uint32_t compareBackMask);
367 void setStencilOp(VkStencilFaceFlags faceMask,
368 VkStencilOp failOp,
369 VkStencilOp passOp,
370 VkStencilOp depthFailOp,
371 VkCompareOp compareOp);
372 void setStencilReference(uint32_t frontReference, uint32_t backReference);
373 void setStencilTestEnable(VkBool32 stencilTestEnable);
374 void setStencilWriteMask(uint32_t writeFrontMask, uint32_t writeBackMask);
375 void setVertexInput(uint32_t vertexBindingDescriptionCount,
376 const VkVertexInputBindingDescription2EXT *vertexBindingDescriptions,
377 uint32_t vertexAttributeDescriptionCount,
378 const VkVertexInputAttributeDescription2EXT *vertexAttributeDescriptions);
379 void setViewport(uint32_t firstViewport, uint32_t viewportCount, const VkViewport *viewports);
380 VkResult reset();
381 void resetEvent(VkEvent event, VkPipelineStageFlags stageMask);
382 void resetQueryPool(const QueryPool &queryPool, uint32_t firstQuery, uint32_t queryCount);
383 void resolveImage(const Image &srcImage,
384 VkImageLayout srcImageLayout,
385 const Image &dstImage,
386 VkImageLayout dstImageLayout,
387 uint32_t regionCount,
388 const VkImageResolve *regions);
389 void waitEvents(uint32_t eventCount,
390 const VkEvent *events,
391 VkPipelineStageFlags srcStageMask,
392 VkPipelineStageFlags dstStageMask,
393 uint32_t memoryBarrierCount,
394 const VkMemoryBarrier *memoryBarriers,
395 uint32_t bufferMemoryBarrierCount,
396 const VkBufferMemoryBarrier *bufferMemoryBarriers,
397 uint32_t imageMemoryBarrierCount,
398 const VkImageMemoryBarrier *imageMemoryBarriers);
399
400 void writeTimestamp(VkPipelineStageFlagBits pipelineStage,
401 const QueryPool &queryPool,
402 uint32_t query);
403
404 void writeTimestamp2(VkPipelineStageFlagBits2 pipelineStage,
405 const QueryPool &queryPool,
406 uint32_t query);
407
408 // VK_EXT_transform_feedback
409 void beginTransformFeedback(uint32_t firstCounterBuffer,
410 uint32_t counterBufferCount,
411 const VkBuffer *counterBuffers,
412 const VkDeviceSize *counterBufferOffsets);
413 void endTransformFeedback(uint32_t firstCounterBuffer,
414 uint32_t counterBufferCount,
415 const VkBuffer *counterBuffers,
416 const VkDeviceSize *counterBufferOffsets);
417 void bindTransformFeedbackBuffers(uint32_t firstBinding,
418 uint32_t bindingCount,
419 const VkBuffer *buffers,
420 const VkDeviceSize *offsets,
421 const VkDeviceSize *sizes);
422
423 // VK_EXT_debug_utils
424 void beginDebugUtilsLabelEXT(const VkDebugUtilsLabelEXT &labelInfo);
425 void endDebugUtilsLabelEXT();
426 void insertDebugUtilsLabelEXT(const VkDebugUtilsLabelEXT &labelInfo);
427 };
428 } // namespace priv
429
430 using PrimaryCommandBuffer = priv::CommandBuffer;
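// Recording sketch (illustrative only), assuming a valid `VkDevice device`, an initialized
// `CommandPool pool`, a graphics `Pipeline pipeline`, and filled-in
// `VkCommandBufferAllocateInfo allocInfo`, `VkCommandBufferBeginInfo beginInfo`, and
// `VkRenderPassBeginInfo rpBeginInfo` (hypothetical names):
//
//   PrimaryCommandBuffer commandBuffer;
//   if (commandBuffer.init(device, allocInfo) == VK_SUCCESS &&
//       commandBuffer.begin(beginInfo) == VK_SUCCESS)
//   {
//       commandBuffer.beginRenderPass(rpBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
//       commandBuffer.bindGraphicsPipeline(pipeline);
//       commandBuffer.draw(3, 1, 0, 0);  // vertexCount, instanceCount, firstVertex, firstInstance
//       commandBuffer.endRenderPass();
//       (void)commandBuffer.end();
//   }
//   commandBuffer.destroy(device, pool);  // frees the buffer back to its pool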
431
432 class Image final : public WrappedObject<Image, VkImage>
433 {
434 public:
435 Image() = default;
436
437 // Use this method if the lifetime of the image is not controlled by ANGLE. (SwapChain)
438 void setHandle(VkImage handle);
439
440 // Called on shutdown when the helper class *doesn't* own the handle to the image resource.
441 void reset();
442
443 // Called on shutdown when the helper class *does* own the handle to the image resource.
444 void destroy(VkDevice device);
445
446 VkResult init(VkDevice device, const VkImageCreateInfo &createInfo);
447
448 void getMemoryRequirements(VkDevice device, VkMemoryRequirements *requirementsOut) const;
449 VkResult bindMemory(VkDevice device, const DeviceMemory &deviceMemory);
450 VkResult bindMemory2(VkDevice device, const VkBindImageMemoryInfoKHR &bindInfo);
451
452 void getSubresourceLayout(VkDevice device,
453 VkImageAspectFlagBits aspectMask,
454 uint32_t mipLevel,
455 uint32_t arrayLayer,
456 VkSubresourceLayout *outSubresourceLayout) const;
457
458 private:
459 friend class ImageMemorySuballocator;
460 };
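// Binding sketch (illustrative; in ANGLE the memory-type selection and allocation are handled by
// higher-level helpers). Assuming a valid `VkDevice device`, a filled-in
// `VkImageCreateInfo imageCreateInfo`, and a suitable `uint32_t memoryTypeIndex`:
//
//   Image image;
//   DeviceMemory memory;
//   if (image.init(device, imageCreateInfo) == VK_SUCCESS)
//   {
//       VkMemoryRequirements requirements;
//       image.getMemoryRequirements(device, &requirements);
//
//       VkMemoryAllocateInfo allocInfo = {};
//       allocInfo.sType                = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
//       allocInfo.allocationSize       = requirements.size;
//       allocInfo.memoryTypeIndex      = memoryTypeIndex;
//       if (memory.allocate(device, allocInfo) == VK_SUCCESS)
//       {
//           (void)image.bindMemory(device, memory);
//       }
//   }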
461
462 class ImageView final : public WrappedObject<ImageView, VkImageView>
463 {
464 public:
465 ImageView() = default;
466 void destroy(VkDevice device);
467
468 VkResult init(VkDevice device, const VkImageViewCreateInfo &createInfo);
469 };
470
471 class Semaphore final : public WrappedObject<Semaphore, VkSemaphore>
472 {
473 public:
474 Semaphore() = default;
475 void destroy(VkDevice device);
476
477 VkResult init(VkDevice device);
478 VkResult importFd(VkDevice device, const VkImportSemaphoreFdInfoKHR &importFdInfo) const;
479 };
480
481 class Framebuffer final : public WrappedObject<Framebuffer, VkFramebuffer>
482 {
483 public:
484 Framebuffer() = default;
485 void destroy(VkDevice device);
486
    // Use this method only when necessary. (RenderPass)
488 void setHandle(VkFramebuffer handle);
489
490 VkResult init(VkDevice device, const VkFramebufferCreateInfo &createInfo);
491 };
492
493 class DeviceMemory final : public WrappedObject<DeviceMemory, VkDeviceMemory>
494 {
495 public:
496 DeviceMemory() = default;
497 void destroy(VkDevice device);
498
499 VkResult allocate(VkDevice device, const VkMemoryAllocateInfo &allocInfo);
500 VkResult map(VkDevice device,
501 VkDeviceSize offset,
502 VkDeviceSize size,
503 VkMemoryMapFlags flags,
504 uint8_t **mapPointer) const;
505 void unmap(VkDevice device) const;
506 void flush(VkDevice device, VkMappedMemoryRange &memRange);
507 void invalidate(VkDevice device, VkMappedMemoryRange &memRange);
508 };
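// Mapping sketch (illustrative): `map` returns the pointer through a `uint8_t **` rather than a
// `void **`, so byte offsets can be applied directly. Assuming a valid `device`, an allocated
// host-visible `DeviceMemory memory` of at least `size` bytes, and a hypothetical `srcData`:
//
//   uint8_t *mapped = nullptr;
//   if (memory.map(device, 0, size, 0, &mapped) == VK_SUCCESS)
//   {
//       memcpy(mapped, srcData, static_cast<size_t>(size));
//       memory.unmap(device);
//   }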
509
510 class Allocator : public WrappedObject<Allocator, VmaAllocator>
511 {
512 public:
513 Allocator() = default;
514 void destroy();
515
516 VkResult init(VkPhysicalDevice physicalDevice,
517 VkDevice device,
518 VkInstance instance,
519 uint32_t apiVersion,
520 VkDeviceSize preferredLargeHeapBlockSize);
521
522 // Initializes the buffer handle and memory allocation.
523 VkResult createBuffer(const VkBufferCreateInfo &bufferCreateInfo,
524 VkMemoryPropertyFlags requiredFlags,
525 VkMemoryPropertyFlags preferredFlags,
526 bool persistentlyMappedBuffers,
527 uint32_t *memoryTypeIndexOut,
528 Buffer *bufferOut,
529 Allocation *allocationOut) const;
530
531 void getMemoryTypeProperties(uint32_t memoryTypeIndex, VkMemoryPropertyFlags *flagsOut) const;
532 VkResult findMemoryTypeIndexForBufferInfo(const VkBufferCreateInfo &bufferCreateInfo,
533 VkMemoryPropertyFlags requiredFlags,
534 VkMemoryPropertyFlags preferredFlags,
535 bool persistentlyMappedBuffers,
536 uint32_t *memoryTypeIndexOut) const;
537
538 void buildStatsString(char **statsString, VkBool32 detailedMap);
539 void freeStatsString(char *statsString);
540 };
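// VMA sketch (illustrative only), assuming an initialized `Allocator allocator`, a filled-in
// `VkBufferCreateInfo bufferCreateInfo`, and host-visible memory as the required property:
//
//   Buffer buffer;
//   Allocation allocation;
//   uint32_t memoryTypeIndex = 0;
//   VkResult result = allocator.createBuffer(bufferCreateInfo,
//                                            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
//                                            0,      // preferredFlags
//                                            false,  // persistentlyMappedBuffers
//                                            &memoryTypeIndex, &buffer, &allocation);
//   if (result == VK_SUCCESS)
//   {
//       // ... use buffer/allocation; clean up later with buffer.destroy(device) and
//       // allocation.destroy(allocator) ...
//   }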
541
542 class Allocation final : public WrappedObject<Allocation, VmaAllocation>
543 {
544 public:
545 Allocation() = default;
546 void destroy(const Allocator &allocator);
547
548 VkResult map(const Allocator &allocator, uint8_t **mapPointer) const;
549 void unmap(const Allocator &allocator) const;
550 void flush(const Allocator &allocator, VkDeviceSize offset, VkDeviceSize size) const;
551 void invalidate(const Allocator &allocator, VkDeviceSize offset, VkDeviceSize size) const;
552
553 private:
554 friend class Allocator;
555 friend class ImageMemorySuballocator;
556 };
557
558 class RenderPass final : public WrappedObject<RenderPass, VkRenderPass>
559 {
560 public:
561 RenderPass() = default;
562 void destroy(VkDevice device);
563
564 VkResult init(VkDevice device, const VkRenderPassCreateInfo &createInfo);
565 VkResult init2(VkDevice device, const VkRenderPassCreateInfo2 &createInfo);
566 };
567
568 enum class StagingUsage
569 {
570 Read,
571 Write,
572 Both,
573 };
574
575 class Buffer final : public WrappedObject<Buffer, VkBuffer>
576 {
577 public:
578 Buffer() = default;
579 void destroy(VkDevice device);
580
581 VkResult init(VkDevice device, const VkBufferCreateInfo &createInfo);
582 VkResult bindMemory(VkDevice device, const DeviceMemory &deviceMemory, VkDeviceSize offset);
583 void getMemoryRequirements(VkDevice device, VkMemoryRequirements *memoryRequirementsOut);
584
585 private:
586 friend class Allocator;
587 };
588
589 class BufferView final : public WrappedObject<BufferView, VkBufferView>
590 {
591 public:
592 BufferView() = default;
593 void destroy(VkDevice device);
594
595 VkResult init(VkDevice device, const VkBufferViewCreateInfo &createInfo);
596 };
597
598 class ShaderModule final : public WrappedObject<ShaderModule, VkShaderModule>
599 {
600 public:
601 ShaderModule() = default;
602 void destroy(VkDevice device);
603
604 VkResult init(VkDevice device, const VkShaderModuleCreateInfo &createInfo);
605 };
606
607 class PipelineLayout final : public WrappedObject<PipelineLayout, VkPipelineLayout>
608 {
609 public:
610 PipelineLayout() = default;
611 void destroy(VkDevice device);
612
613 VkResult init(VkDevice device, const VkPipelineLayoutCreateInfo &createInfo);
614 };
615
616 class PipelineCache final : public WrappedObject<PipelineCache, VkPipelineCache>
617 {
618 public:
619 PipelineCache() = default;
620 void destroy(VkDevice device);
621
622 VkResult init(VkDevice device, const VkPipelineCacheCreateInfo &createInfo);
623 VkResult getCacheData(VkDevice device, size_t *cacheSize, void *cacheData) const;
624 VkResult merge(VkDevice device, uint32_t srcCacheCount, const VkPipelineCache *srcCaches) const;
625 };
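// Serialization sketch (illustrative): `getCacheData` wraps vkGetPipelineCacheData and follows
// the usual Vulkan two-call pattern of querying the size first. Assuming a valid `device` and an
// initialized `PipelineCache pipelineCache`:
//
//   size_t cacheSize = 0;
//   if (pipelineCache.getCacheData(device, &cacheSize, nullptr) == VK_SUCCESS && cacheSize > 0)
//   {
//       std::vector<uint8_t> cacheData(cacheSize);
//       (void)pipelineCache.getCacheData(device, &cacheSize, cacheData.data());
//   }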
626
627 class DescriptorSetLayout final : public WrappedObject<DescriptorSetLayout, VkDescriptorSetLayout>
628 {
629 public:
630 DescriptorSetLayout() = default;
631 void destroy(VkDevice device);
632
633 VkResult init(VkDevice device, const VkDescriptorSetLayoutCreateInfo &createInfo);
634 };
635
636 class DescriptorPool final : public WrappedObject<DescriptorPool, VkDescriptorPool>
637 {
638 public:
639 DescriptorPool() = default;
640 void destroy(VkDevice device);
641
642 VkResult init(VkDevice device, const VkDescriptorPoolCreateInfo &createInfo);
643
644 VkResult allocateDescriptorSets(VkDevice device,
645 const VkDescriptorSetAllocateInfo &allocInfo,
646 VkDescriptorSet *descriptorSetsOut);
647 VkResult freeDescriptorSets(VkDevice device,
648 uint32_t descriptorSetCount,
649 const VkDescriptorSet *descriptorSets);
650 };
651
652 class Sampler final : public WrappedObject<Sampler, VkSampler>
653 {
654 public:
655 Sampler() = default;
656 void destroy(VkDevice device);
657 VkResult init(VkDevice device, const VkSamplerCreateInfo &createInfo);
658 };
659
660 class SamplerYcbcrConversion final
661 : public WrappedObject<SamplerYcbcrConversion, VkSamplerYcbcrConversion>
662 {
663 public:
664 SamplerYcbcrConversion() = default;
665 void destroy(VkDevice device);
666 VkResult init(VkDevice device, const VkSamplerYcbcrConversionCreateInfo &createInfo);
667 };
668
669 class Event final : public WrappedObject<Event, VkEvent>
670 {
671 public:
672 Event() = default;
673 void destroy(VkDevice device);
674 using WrappedObject::operator=;
675
676 VkResult init(VkDevice device, const VkEventCreateInfo &createInfo);
677 VkResult getStatus(VkDevice device) const;
678 VkResult set(VkDevice device) const;
679 VkResult reset(VkDevice device) const;
680 };
681
682 class Fence final : public WrappedObject<Fence, VkFence>
683 {
684 public:
685 Fence() = default;
686 void destroy(VkDevice device);
687 using WrappedObject::operator=;
688
689 VkResult init(VkDevice device, const VkFenceCreateInfo &createInfo);
690 VkResult reset(VkDevice device);
691 VkResult getStatus(VkDevice device) const;
692 VkResult wait(VkDevice device, uint64_t timeout) const;
693 VkResult importFd(VkDevice device, const VkImportFenceFdInfoKHR &importFenceFdInfo) const;
694 VkResult exportFd(VkDevice device, const VkFenceGetFdInfoKHR &fenceGetFdInfo, int *outFd) const;
695 };
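// Synchronization sketch (illustrative only), assuming a valid `device`, a filled-in
// `VkFenceCreateInfo fenceCreateInfo`, and a queue submission that signals the fence:
//
//   Fence fence;
//   if (fence.init(device, fenceCreateInfo) == VK_SUCCESS)
//   {
//       // ... submit work that signals fence.getHandle() ...
//       VkResult waitResult = fence.wait(device, 1'000'000'000);  // timeout in nanoseconds
//       ASSERT(waitResult == VK_SUCCESS || waitResult == VK_TIMEOUT);
//   }
//   fence.destroy(device);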
696
697 class QueryPool final : public WrappedObject<QueryPool, VkQueryPool>
698 {
699 public:
700 QueryPool() = default;
701 void destroy(VkDevice device);
702
703 VkResult init(VkDevice device, const VkQueryPoolCreateInfo &createInfo);
704 VkResult getResults(VkDevice device,
705 uint32_t firstQuery,
706 uint32_t queryCount,
707 size_t dataSize,
708 void *data,
709 VkDeviceSize stride,
710 VkQueryResultFlags flags) const;
711 };
712
713 // VirtualBlock
714 class VirtualBlock final : public WrappedObject<VirtualBlock, VmaVirtualBlock>
715 {
716 public:
717 VirtualBlock() = default;
718 void destroy(VkDevice device);
719 VkResult init(VkDevice device, vma::VirtualBlockCreateFlags flags, VkDeviceSize size);
720
721 VkResult allocate(VkDeviceSize size,
722 VkDeviceSize alignment,
723 VmaVirtualAllocation *allocationOut,
724 VkDeviceSize *offsetOut);
725 void free(VmaVirtualAllocation allocation, VkDeviceSize offset);
726 void calculateStats(vma::StatInfo *pStatInfo) const;
727 };
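// Suballocation sketch (illustrative only), assuming a valid `device`, some
// `vma::VirtualBlockCreateFlags flags`, and a parent resource of `blockSize` bytes:
//
//   VirtualBlock block;
//   if (block.init(device, flags, blockSize) == VK_SUCCESS)
//   {
//       VmaVirtualAllocation allocation;
//       VkDeviceSize offset = 0;
//       if (block.allocate(256, 16, &allocation, &offset) == VK_SUCCESS)
//       {
//           // ... the range [offset, offset + 256) of the parent resource is now reserved ...
//           block.free(allocation, offset);
//       }
//       block.destroy(device);
//   }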
728
729 // CommandPool implementation.
730 ANGLE_INLINE void CommandPool::destroy(VkDevice device)
731 {
732 if (valid())
733 {
734 vkDestroyCommandPool(device, mHandle, nullptr);
735 mHandle = VK_NULL_HANDLE;
736 }
737 }
738
739 ANGLE_INLINE VkResult CommandPool::reset(VkDevice device, VkCommandPoolResetFlags flags)
740 {
741 ASSERT(valid());
742 return vkResetCommandPool(device, mHandle, flags);
743 }
744
745 ANGLE_INLINE void CommandPool::freeCommandBuffers(VkDevice device,
746 uint32_t commandBufferCount,
747 const VkCommandBuffer *commandBuffers)
748 {
749 ASSERT(valid());
750 vkFreeCommandBuffers(device, mHandle, commandBufferCount, commandBuffers);
751 }
752
753 ANGLE_INLINE VkResult CommandPool::init(VkDevice device, const VkCommandPoolCreateInfo &createInfo)
754 {
755 ASSERT(!valid());
756 return vkCreateCommandPool(device, &createInfo, nullptr, &mHandle);
757 }
758
759 namespace priv
760 {
761
762 // CommandBuffer implementation.
763 ANGLE_INLINE VkCommandBuffer CommandBuffer::releaseHandle()
764 {
765 VkCommandBuffer handle = mHandle;
766 mHandle = nullptr;
767 return handle;
768 }
769
770 ANGLE_INLINE VkResult CommandBuffer::init(VkDevice device,
771 const VkCommandBufferAllocateInfo &createInfo)
772 {
773 ASSERT(!valid());
774 return vkAllocateCommandBuffers(device, &createInfo, &mHandle);
775 }
776
777 ANGLE_INLINE void CommandBuffer::blitImage(const Image &srcImage,
778 VkImageLayout srcImageLayout,
779 const Image &dstImage,
780 VkImageLayout dstImageLayout,
781 uint32_t regionCount,
782 const VkImageBlit *regions,
783 VkFilter filter)
784 {
785 ASSERT(valid() && srcImage.valid() && dstImage.valid());
786 ASSERT(regionCount == 1);
787 vkCmdBlitImage(mHandle, srcImage.getHandle(), srcImageLayout, dstImage.getHandle(),
788 dstImageLayout, 1, regions, filter);
789 }
790
791 ANGLE_INLINE VkResult CommandBuffer::begin(const VkCommandBufferBeginInfo &info)
792 {
793 ASSERT(valid());
794 return vkBeginCommandBuffer(mHandle, &info);
795 }
796
797 ANGLE_INLINE VkResult CommandBuffer::end()
798 {
799 ASSERT(valid());
800 return vkEndCommandBuffer(mHandle);
801 }
802
803 ANGLE_INLINE VkResult CommandBuffer::reset()
804 {
805 ASSERT(valid());
806 return vkResetCommandBuffer(mHandle, 0);
807 }
808
809 ANGLE_INLINE void CommandBuffer::nextSubpass(VkSubpassContents subpassContents)
810 {
811 ASSERT(valid());
812 vkCmdNextSubpass(mHandle, subpassContents);
813 }
814
815 ANGLE_INLINE void CommandBuffer::memoryBarrier(VkPipelineStageFlags srcStageMask,
816 VkPipelineStageFlags dstStageMask,
817 const VkMemoryBarrier &memoryBarrier)
818 {
819 ASSERT(valid());
820 vkCmdPipelineBarrier(mHandle, srcStageMask, dstStageMask, 0, 1, &memoryBarrier, 0, nullptr, 0,
821 nullptr);
822 }
823
824 ANGLE_INLINE void CommandBuffer::memoryBarrier2(const VkMemoryBarrier2 &memoryBarrier2)
825 {
826 ASSERT(valid());
    VkDependencyInfo dependencyInfo         = {};
    dependencyInfo.sType                    = VK_STRUCTURE_TYPE_DEPENDENCY_INFO;
    dependencyInfo.memoryBarrierCount       = 1;
    dependencyInfo.pMemoryBarriers          = &memoryBarrier2;
    dependencyInfo.bufferMemoryBarrierCount = 0;
    dependencyInfo.pBufferMemoryBarriers    = nullptr;
    dependencyInfo.imageMemoryBarrierCount  = 0;
    dependencyInfo.pImageMemoryBarriers     = nullptr;
    vkCmdPipelineBarrier2KHR(mHandle, &dependencyInfo);
836 }
837
838 ANGLE_INLINE void CommandBuffer::pipelineBarrier(VkPipelineStageFlags srcStageMask,
839 VkPipelineStageFlags dstStageMask,
840 VkDependencyFlags dependencyFlags,
841 uint32_t memoryBarrierCount,
842 const VkMemoryBarrier *memoryBarriers,
843 uint32_t bufferMemoryBarrierCount,
844 const VkBufferMemoryBarrier *bufferMemoryBarriers,
845 uint32_t imageMemoryBarrierCount,
846 const VkImageMemoryBarrier *imageMemoryBarriers)
847 {
848 ASSERT(valid());
849 vkCmdPipelineBarrier(mHandle, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount,
850 memoryBarriers, bufferMemoryBarrierCount, bufferMemoryBarriers,
851 imageMemoryBarrierCount, imageMemoryBarriers);
852 }
853
854 ANGLE_INLINE void CommandBuffer::pipelineBarrier2(
855 VkDependencyFlags dependencyFlags,
856 uint32_t memoryBarrierCount,
857 const VkMemoryBarrier2 *memoryBarriers2,
858 uint32_t bufferMemoryBarrierCount,
859 const VkBufferMemoryBarrier2 *bufferMemoryBarriers2,
860 uint32_t imageMemoryBarrierCount,
861 const VkImageMemoryBarrier2 *imageMemoryBarriers2)
862 {
863 ASSERT(valid());
864 VkDependencyInfo dependencyInfo = {};
865 dependencyInfo.sType = VK_STRUCTURE_TYPE_DEPENDENCY_INFO;
866 dependencyInfo.pNext = nullptr;
867 dependencyInfo.dependencyFlags = dependencyFlags;
868 dependencyInfo.memoryBarrierCount = memoryBarrierCount;
869 dependencyInfo.pMemoryBarriers = memoryBarriers2;
870 dependencyInfo.bufferMemoryBarrierCount = bufferMemoryBarrierCount;
871 dependencyInfo.pBufferMemoryBarriers = bufferMemoryBarriers2;
872 dependencyInfo.imageMemoryBarrierCount = imageMemoryBarrierCount;
873 dependencyInfo.pImageMemoryBarriers = imageMemoryBarriers2;
874 vkCmdPipelineBarrier2KHR(mHandle, &dependencyInfo);
875 }
876
877 ANGLE_INLINE void CommandBuffer::imageBarrier(VkPipelineStageFlags srcStageMask,
878 VkPipelineStageFlags dstStageMask,
879 const VkImageMemoryBarrier &imageMemoryBarrier)
880 {
881 ASSERT(valid());
882 vkCmdPipelineBarrier(mHandle, srcStageMask, dstStageMask, 0, 0, nullptr, 0, nullptr, 1,
883 &imageMemoryBarrier);
884 }
885
886 ANGLE_INLINE void CommandBuffer::imageBarrier2(const VkImageMemoryBarrier2 &imageMemoryBarrier2)
887 {
888 ASSERT(valid());
889
    VkDependencyInfo dependencyInfo         = {};
    dependencyInfo.sType                    = VK_STRUCTURE_TYPE_DEPENDENCY_INFO;
    dependencyInfo.memoryBarrierCount       = 0;
    dependencyInfo.pMemoryBarriers          = nullptr;
    dependencyInfo.bufferMemoryBarrierCount = 0;
    dependencyInfo.pBufferMemoryBarriers    = nullptr;
    dependencyInfo.imageMemoryBarrierCount  = 1;
    dependencyInfo.pImageMemoryBarriers     = &imageMemoryBarrier2;
    vkCmdPipelineBarrier2KHR(mHandle, &dependencyInfo);
899 }
900
901 ANGLE_INLINE void CommandBuffer::imageWaitEvent(const VkEvent &event,
902 VkPipelineStageFlags srcStageMask,
903 VkPipelineStageFlags dstStageMask,
904 const VkImageMemoryBarrier &imageMemoryBarrier)
905 {
906 ASSERT(valid());
907 vkCmdWaitEvents(mHandle, 1, &event, srcStageMask, dstStageMask, 0, nullptr, 0, nullptr, 1,
908 &imageMemoryBarrier);
909 }
910
911 ANGLE_INLINE void CommandBuffer::destroy(VkDevice device)
912 {
913 releaseHandle();
914 }
915
916 ANGLE_INLINE void CommandBuffer::destroy(VkDevice device, const vk::CommandPool &commandPool)
917 {
918 if (valid())
919 {
920 ASSERT(commandPool.valid());
921 vkFreeCommandBuffers(device, commandPool.getHandle(), 1, &mHandle);
922 mHandle = VK_NULL_HANDLE;
923 }
924 }
925
926 ANGLE_INLINE void CommandBuffer::copyBuffer(const Buffer &srcBuffer,
927 const Buffer &destBuffer,
928 uint32_t regionCount,
929 const VkBufferCopy *regions)
930 {
931 ASSERT(valid() && srcBuffer.valid() && destBuffer.valid());
932 vkCmdCopyBuffer(mHandle, srcBuffer.getHandle(), destBuffer.getHandle(), regionCount, regions);
933 }
934
935 ANGLE_INLINE void CommandBuffer::copyBufferToImage(VkBuffer srcBuffer,
936 const Image &dstImage,
937 VkImageLayout dstImageLayout,
938 uint32_t regionCount,
939 const VkBufferImageCopy *regions)
940 {
941 ASSERT(valid() && dstImage.valid());
942 ASSERT(srcBuffer != VK_NULL_HANDLE);
943 ASSERT(regionCount == 1);
944 vkCmdCopyBufferToImage(mHandle, srcBuffer, dstImage.getHandle(), dstImageLayout, 1, regions);
945 }
946
947 ANGLE_INLINE void CommandBuffer::copyImageToBuffer(const Image &srcImage,
948 VkImageLayout srcImageLayout,
949 VkBuffer dstBuffer,
950 uint32_t regionCount,
951 const VkBufferImageCopy *regions)
952 {
953 ASSERT(valid() && srcImage.valid());
954 ASSERT(dstBuffer != VK_NULL_HANDLE);
955 ASSERT(regionCount == 1);
956 vkCmdCopyImageToBuffer(mHandle, srcImage.getHandle(), srcImageLayout, dstBuffer, 1, regions);
957 }
958
959 ANGLE_INLINE void CommandBuffer::clearColorImage(const Image &image,
960 VkImageLayout imageLayout,
961 const VkClearColorValue &color,
962 uint32_t rangeCount,
963 const VkImageSubresourceRange *ranges)
964 {
965 ASSERT(valid());
966 ASSERT(rangeCount == 1);
967 vkCmdClearColorImage(mHandle, image.getHandle(), imageLayout, &color, 1, ranges);
968 }
969
970 ANGLE_INLINE void CommandBuffer::clearDepthStencilImage(
971 const Image &image,
972 VkImageLayout imageLayout,
973 const VkClearDepthStencilValue &depthStencil,
974 uint32_t rangeCount,
975 const VkImageSubresourceRange *ranges)
976 {
977 ASSERT(valid());
978 ASSERT(rangeCount == 1);
979 vkCmdClearDepthStencilImage(mHandle, image.getHandle(), imageLayout, &depthStencil, 1, ranges);
980 }
981
982 ANGLE_INLINE void CommandBuffer::clearAttachments(uint32_t attachmentCount,
983 const VkClearAttachment *attachments,
984 uint32_t rectCount,
985 const VkClearRect *rects)
986 {
987 ASSERT(valid());
988 vkCmdClearAttachments(mHandle, attachmentCount, attachments, rectCount, rects);
989 }
990
991 ANGLE_INLINE void CommandBuffer::copyImage(const Image &srcImage,
992 VkImageLayout srcImageLayout,
993 const Image &dstImage,
994 VkImageLayout dstImageLayout,
995 uint32_t regionCount,
996 const VkImageCopy *regions)
997 {
998 ASSERT(valid() && srcImage.valid() && dstImage.valid());
999 ASSERT(regionCount == 1);
1000 vkCmdCopyImage(mHandle, srcImage.getHandle(), srcImageLayout, dstImage.getHandle(),
1001 dstImageLayout, 1, regions);
1002 }
1003
1004 ANGLE_INLINE void CommandBuffer::beginRenderPass(const VkRenderPassBeginInfo &beginInfo,
1005 VkSubpassContents subpassContents)
1006 {
1007 ASSERT(valid());
1008 vkCmdBeginRenderPass(mHandle, &beginInfo, subpassContents);
1009 }
1010
1011 ANGLE_INLINE void CommandBuffer::beginRendering(const VkRenderingInfo &beginInfo)
1012 {
1013 ASSERT(valid());
1014 vkCmdBeginRenderingKHR(mHandle, &beginInfo);
1015 }
1016
1017 ANGLE_INLINE void CommandBuffer::endRenderPass()
1018 {
1019 ASSERT(valid());
1020 vkCmdEndRenderPass(mHandle);
1021 }
1022
1023 ANGLE_INLINE void CommandBuffer::endRendering()
1024 {
1025 ASSERT(valid());
1026 vkCmdEndRenderingKHR(mHandle);
1027 }
1028
1029 ANGLE_INLINE void CommandBuffer::bindIndexBuffer(const Buffer &buffer,
1030 VkDeviceSize offset,
1031 VkIndexType indexType)
1032 {
1033 ASSERT(valid());
1034 vkCmdBindIndexBuffer(mHandle, buffer.getHandle(), offset, indexType);
1035 }
1036
1037 ANGLE_INLINE void CommandBuffer::bindDescriptorSets(const PipelineLayout &layout,
1038 VkPipelineBindPoint pipelineBindPoint,
1039 DescriptorSetIndex firstSet,
1040 uint32_t descriptorSetCount,
1041 const VkDescriptorSet *descriptorSets,
1042 uint32_t dynamicOffsetCount,
1043 const uint32_t *dynamicOffsets)
1044 {
1045 ASSERT(valid() && layout.valid());
1046 vkCmdBindDescriptorSets(this->mHandle, pipelineBindPoint, layout.getHandle(),
1047 ToUnderlying(firstSet), descriptorSetCount, descriptorSets,
1048 dynamicOffsetCount, dynamicOffsets);
1049 }
1050
1051 ANGLE_INLINE void CommandBuffer::executeCommands(uint32_t commandBufferCount,
1052 const CommandBuffer *commandBuffers)
1053 {
1054 ASSERT(valid());
1055 vkCmdExecuteCommands(mHandle, commandBufferCount, commandBuffers[0].ptr());
1056 }
1057
1058 ANGLE_INLINE void CommandBuffer::getMemoryUsageStats(size_t *usedMemoryOut,
1059 size_t *allocatedMemoryOut) const
1060 {
1061 // No data available.
1062 *usedMemoryOut = 0;
1063 *allocatedMemoryOut = 1;
1064 }
1065
1066 ANGLE_INLINE void CommandBuffer::fillBuffer(const Buffer &dstBuffer,
1067 VkDeviceSize dstOffset,
1068 VkDeviceSize size,
1069 uint32_t data)
1070 {
1071 ASSERT(valid());
1072 vkCmdFillBuffer(mHandle, dstBuffer.getHandle(), dstOffset, size, data);
1073 }
1074
1075 ANGLE_INLINE void CommandBuffer::pushConstants(const PipelineLayout &layout,
1076 VkShaderStageFlags flag,
1077 uint32_t offset,
1078 uint32_t size,
1079 const void *data)
1080 {
1081 ASSERT(valid() && layout.valid());
1082 ASSERT(offset == 0);
1083 vkCmdPushConstants(mHandle, layout.getHandle(), flag, 0, size, data);
1084 }
1085
1086 ANGLE_INLINE void CommandBuffer::setBlendConstants(const float blendConstants[4])
1087 {
1088 ASSERT(valid());
1089 vkCmdSetBlendConstants(mHandle, blendConstants);
1090 }
1091
1092 ANGLE_INLINE void CommandBuffer::setCullMode(VkCullModeFlags cullMode)
1093 {
1094 ASSERT(valid());
1095 vkCmdSetCullModeEXT(mHandle, cullMode);
1096 }
1097
1098 ANGLE_INLINE void CommandBuffer::setDepthBias(float depthBiasConstantFactor,
1099 float depthBiasClamp,
1100 float depthBiasSlopeFactor)
1101 {
1102 ASSERT(valid());
1103 vkCmdSetDepthBias(mHandle, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
1104 }
1105
1106 ANGLE_INLINE void CommandBuffer::setDepthBiasEnable(VkBool32 depthBiasEnable)
1107 {
1108 ASSERT(valid());
1109 vkCmdSetDepthBiasEnableEXT(mHandle, depthBiasEnable);
1110 }
1111
1112 ANGLE_INLINE void CommandBuffer::setDepthCompareOp(VkCompareOp depthCompareOp)
1113 {
1114 ASSERT(valid());
1115 vkCmdSetDepthCompareOpEXT(mHandle, depthCompareOp);
1116 }
1117
1118 ANGLE_INLINE void CommandBuffer::setDepthTestEnable(VkBool32 depthTestEnable)
1119 {
1120 ASSERT(valid());
1121 vkCmdSetDepthTestEnableEXT(mHandle, depthTestEnable);
1122 }
1123
1124 ANGLE_INLINE void CommandBuffer::setDepthWriteEnable(VkBool32 depthWriteEnable)
1125 {
1126 ASSERT(valid());
1127 vkCmdSetDepthWriteEnableEXT(mHandle, depthWriteEnable);
1128 }
1129
1130 ANGLE_INLINE void CommandBuffer::setEvent(VkEvent event, VkPipelineStageFlags stageMask)
1131 {
1132 ASSERT(valid() && event != VK_NULL_HANDLE);
1133 vkCmdSetEvent(mHandle, event, stageMask);
1134 }
1135
1136 ANGLE_INLINE void CommandBuffer::setFragmentShadingRate(const VkExtent2D *fragmentSize,
1137 VkFragmentShadingRateCombinerOpKHR ops[2])
1138 {
1139 ASSERT(valid() && fragmentSize != nullptr);
1140 vkCmdSetFragmentShadingRateKHR(mHandle, fragmentSize, ops);
1141 }
1142
1143 ANGLE_INLINE void CommandBuffer::setFrontFace(VkFrontFace frontFace)
1144 {
1145 ASSERT(valid());
1146 vkCmdSetFrontFaceEXT(mHandle, frontFace);
1147 }
1148
1149 ANGLE_INLINE void CommandBuffer::setLineWidth(float lineWidth)
1150 {
1151 ASSERT(valid());
1152 vkCmdSetLineWidth(mHandle, lineWidth);
1153 }
1154
1155 ANGLE_INLINE void CommandBuffer::setLogicOp(VkLogicOp logicOp)
1156 {
1157 ASSERT(valid());
1158 vkCmdSetLogicOpEXT(mHandle, logicOp);
1159 }
1160
1161 ANGLE_INLINE void CommandBuffer::setPrimitiveRestartEnable(VkBool32 primitiveRestartEnable)
1162 {
1163 ASSERT(valid());
1164 vkCmdSetPrimitiveRestartEnableEXT(mHandle, primitiveRestartEnable);
1165 }
1166
1167 ANGLE_INLINE void CommandBuffer::setRasterizerDiscardEnable(VkBool32 rasterizerDiscardEnable)
1168 {
1169 ASSERT(valid());
1170 vkCmdSetRasterizerDiscardEnableEXT(mHandle, rasterizerDiscardEnable);
1171 }
1172
1173 ANGLE_INLINE void CommandBuffer::setRenderingAttachmentLocations(
1174 const VkRenderingAttachmentLocationInfoKHR *info)
1175 {
1176 ASSERT(valid());
1177 vkCmdSetRenderingAttachmentLocationsKHR(mHandle, info);
1178 }
1179
1180 ANGLE_INLINE void CommandBuffer::setRenderingInputAttachmentIndicates(
1181 const VkRenderingInputAttachmentIndexInfoKHR *info)
1182 {
1183 ASSERT(valid());
1184 vkCmdSetRenderingInputAttachmentIndicesKHR(mHandle, info);
1185 }
1186
1187 ANGLE_INLINE void CommandBuffer::setScissor(uint32_t firstScissor,
1188 uint32_t scissorCount,
1189 const VkRect2D *scissors)
1190 {
1191 ASSERT(valid() && scissors != nullptr);
1192 vkCmdSetScissor(mHandle, firstScissor, scissorCount, scissors);
1193 }
1194
1195 ANGLE_INLINE void CommandBuffer::setStencilCompareMask(uint32_t compareFrontMask,
1196 uint32_t compareBackMask)
1197 {
1198 ASSERT(valid());
1199 vkCmdSetStencilCompareMask(mHandle, VK_STENCIL_FACE_FRONT_BIT, compareFrontMask);
1200 vkCmdSetStencilCompareMask(mHandle, VK_STENCIL_FACE_BACK_BIT, compareBackMask);
1201 }
1202
1203 ANGLE_INLINE void CommandBuffer::setStencilOp(VkStencilFaceFlags faceMask,
1204 VkStencilOp failOp,
1205 VkStencilOp passOp,
1206 VkStencilOp depthFailOp,
1207 VkCompareOp compareOp)
1208 {
1209 ASSERT(valid());
1210 vkCmdSetStencilOpEXT(mHandle, faceMask, failOp, passOp, depthFailOp, compareOp);
1211 }
1212
1213 ANGLE_INLINE void CommandBuffer::setStencilReference(uint32_t frontReference,
1214 uint32_t backReference)
1215 {
1216 ASSERT(valid());
1217 vkCmdSetStencilReference(mHandle, VK_STENCIL_FACE_FRONT_BIT, frontReference);
1218 vkCmdSetStencilReference(mHandle, VK_STENCIL_FACE_BACK_BIT, backReference);
1219 }
1220
1221 ANGLE_INLINE void CommandBuffer::setStencilTestEnable(VkBool32 stencilTestEnable)
1222 {
1223 ASSERT(valid());
1224 vkCmdSetStencilTestEnableEXT(mHandle, stencilTestEnable);
1225 }
1226
1227 ANGLE_INLINE void CommandBuffer::setStencilWriteMask(uint32_t writeFrontMask,
1228 uint32_t writeBackMask)
1229 {
1230 ASSERT(valid());
1231 vkCmdSetStencilWriteMask(mHandle, VK_STENCIL_FACE_FRONT_BIT, writeFrontMask);
1232 vkCmdSetStencilWriteMask(mHandle, VK_STENCIL_FACE_BACK_BIT, writeBackMask);
1233 }
1234
1235 ANGLE_INLINE void CommandBuffer::setVertexInput(
1236 uint32_t vertexBindingDescriptionCount,
    const VkVertexInputBindingDescription2EXT *vertexBindingDescriptions,
    uint32_t vertexAttributeDescriptionCount,
    const VkVertexInputAttributeDescription2EXT *vertexAttributeDescriptions)
{
    ASSERT(valid());
    vkCmdSetVertexInputEXT(mHandle, vertexBindingDescriptionCount, vertexBindingDescriptions,
                           vertexAttributeDescriptionCount, vertexAttributeDescriptions);
1244 }
1245
1246 ANGLE_INLINE void CommandBuffer::setViewport(uint32_t firstViewport,
1247 uint32_t viewportCount,
1248 const VkViewport *viewports)
1249 {
1250 ASSERT(valid() && viewports != nullptr);
1251 vkCmdSetViewport(mHandle, firstViewport, viewportCount, viewports);
1252 }
1253
1254 ANGLE_INLINE void CommandBuffer::resetEvent(VkEvent event, VkPipelineStageFlags stageMask)
1255 {
1256 ASSERT(valid() && event != VK_NULL_HANDLE);
1257 vkCmdResetEvent(mHandle, event, stageMask);
1258 }
1259
1260 ANGLE_INLINE void CommandBuffer::waitEvents(uint32_t eventCount,
1261 const VkEvent *events,
1262 VkPipelineStageFlags srcStageMask,
1263 VkPipelineStageFlags dstStageMask,
1264 uint32_t memoryBarrierCount,
1265 const VkMemoryBarrier *memoryBarriers,
1266 uint32_t bufferMemoryBarrierCount,
1267 const VkBufferMemoryBarrier *bufferMemoryBarriers,
1268 uint32_t imageMemoryBarrierCount,
1269 const VkImageMemoryBarrier *imageMemoryBarriers)
1270 {
1271 ASSERT(valid());
1272 vkCmdWaitEvents(mHandle, eventCount, events, srcStageMask, dstStageMask, memoryBarrierCount,
1273 memoryBarriers, bufferMemoryBarrierCount, bufferMemoryBarriers,
1274 imageMemoryBarrierCount, imageMemoryBarriers);
1275 }
1276
1277 ANGLE_INLINE void CommandBuffer::resetQueryPool(const QueryPool &queryPool,
1278 uint32_t firstQuery,
1279 uint32_t queryCount)
1280 {
1281 ASSERT(valid() && queryPool.valid());
1282 vkCmdResetQueryPool(mHandle, queryPool.getHandle(), firstQuery, queryCount);
1283 }
1284
1285 ANGLE_INLINE void CommandBuffer::resolveImage(const Image &srcImage,
1286 VkImageLayout srcImageLayout,
1287 const Image &dstImage,
1288 VkImageLayout dstImageLayout,
1289 uint32_t regionCount,
1290 const VkImageResolve *regions)
1291 {
1292 ASSERT(valid() && srcImage.valid() && dstImage.valid());
1293 vkCmdResolveImage(mHandle, srcImage.getHandle(), srcImageLayout, dstImage.getHandle(),
1294 dstImageLayout, regionCount, regions);
1295 }
1296
1297 ANGLE_INLINE void CommandBuffer::beginQuery(const QueryPool &queryPool,
1298 uint32_t query,
1299 VkQueryControlFlags flags)
1300 {
1301 ASSERT(valid() && queryPool.valid());
1302 vkCmdBeginQuery(mHandle, queryPool.getHandle(), query, flags);
1303 }
1304
1305 ANGLE_INLINE void CommandBuffer::endQuery(const QueryPool &queryPool, uint32_t query)
1306 {
1307 ASSERT(valid() && queryPool.valid());
1308 vkCmdEndQuery(mHandle, queryPool.getHandle(), query);
1309 }
1310
1311 ANGLE_INLINE void CommandBuffer::writeTimestamp(VkPipelineStageFlagBits pipelineStage,
1312 const QueryPool &queryPool,
1313 uint32_t query)
1314 {
1315 ASSERT(valid());
1316 vkCmdWriteTimestamp(mHandle, pipelineStage, queryPool.getHandle(), query);
1317 }
1318
1319 ANGLE_INLINE void CommandBuffer::writeTimestamp2(VkPipelineStageFlagBits2 pipelineStage,
1320 const QueryPool &queryPool,
1321 uint32_t query)
1322 {
1323 ASSERT(valid());
1324 vkCmdWriteTimestamp2KHR(mHandle, pipelineStage, queryPool.getHandle(), query);
1325 }
1326
1327 ANGLE_INLINE void CommandBuffer::draw(uint32_t vertexCount,
1328 uint32_t instanceCount,
1329 uint32_t firstVertex,
1330 uint32_t firstInstance)
1331 {
1332 ASSERT(valid());
1333 vkCmdDraw(mHandle, vertexCount, instanceCount, firstVertex, firstInstance);
1334 }
1335
1336 ANGLE_INLINE void CommandBuffer::drawIndexed(uint32_t indexCount,
1337 uint32_t instanceCount,
1338 uint32_t firstIndex,
1339 int32_t vertexOffset,
1340 uint32_t firstInstance)
1341 {
1342 ASSERT(valid());
1343 vkCmdDrawIndexed(mHandle, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
1344 }
1345
1346 ANGLE_INLINE void CommandBuffer::drawIndexedIndirect(const Buffer &buffer,
1347 VkDeviceSize offset,
1348 uint32_t drawCount,
1349 uint32_t stride)
1350 {
1351 ASSERT(valid());
1352 vkCmdDrawIndexedIndirect(mHandle, buffer.getHandle(), offset, drawCount, stride);
1353 }
1354
1355 ANGLE_INLINE void CommandBuffer::drawIndirect(const Buffer &buffer,
1356 VkDeviceSize offset,
1357 uint32_t drawCount,
1358 uint32_t stride)
1359 {
1360 ASSERT(valid());
1361 vkCmdDrawIndirect(mHandle, buffer.getHandle(), offset, drawCount, stride);
1362 }
1363
1364 ANGLE_INLINE void CommandBuffer::dispatch(uint32_t groupCountX,
1365 uint32_t groupCountY,
1366 uint32_t groupCountZ)
1367 {
1368 ASSERT(valid());
1369 vkCmdDispatch(mHandle, groupCountX, groupCountY, groupCountZ);
1370 }
1371
1372 ANGLE_INLINE void CommandBuffer::dispatchIndirect(const Buffer &buffer, VkDeviceSize offset)
1373 {
1374 ASSERT(valid());
1375 vkCmdDispatchIndirect(mHandle, buffer.getHandle(), offset);
1376 }
1377
1378 ANGLE_INLINE void CommandBuffer::bindPipeline(VkPipelineBindPoint pipelineBindPoint,
1379 const Pipeline &pipeline)
1380 {
1381 ASSERT(valid() && pipeline.valid());
1382 vkCmdBindPipeline(mHandle, pipelineBindPoint, pipeline.getHandle());
1383 }
1384
1385 ANGLE_INLINE void CommandBuffer::bindGraphicsPipeline(const Pipeline &pipeline)
1386 {
1387 ASSERT(valid() && pipeline.valid());
1388 vkCmdBindPipeline(mHandle, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline.getHandle());
1389 }
1390
1391 ANGLE_INLINE void CommandBuffer::bindComputePipeline(const Pipeline &pipeline)
1392 {
1393 ASSERT(valid() && pipeline.valid());
1394 vkCmdBindPipeline(mHandle, VK_PIPELINE_BIND_POINT_COMPUTE, pipeline.getHandle());
1395 }
1396
1397 ANGLE_INLINE void CommandBuffer::bindVertexBuffers(uint32_t firstBinding,
1398 uint32_t bindingCount,
1399 const VkBuffer *buffers,
1400 const VkDeviceSize *offsets)
1401 {
1402 ASSERT(valid());
1403 vkCmdBindVertexBuffers(mHandle, firstBinding, bindingCount, buffers, offsets);
1404 }
1405
1406 ANGLE_INLINE void CommandBuffer::bindVertexBuffers2(uint32_t firstBinding,
1407 uint32_t bindingCount,
1408 const VkBuffer *buffers,
1409 const VkDeviceSize *offsets,
1410 const VkDeviceSize *sizes,
1411 const VkDeviceSize *strides)
1412 {
1413 ASSERT(valid());
1414 vkCmdBindVertexBuffers2EXT(mHandle, firstBinding, bindingCount, buffers, offsets, sizes,
1415 strides);
1416 }
1417
1418 ANGLE_INLINE void CommandBuffer::beginTransformFeedback(uint32_t firstCounterBuffer,
1419 uint32_t counterBufferCount,
1420 const VkBuffer *counterBuffers,
1421 const VkDeviceSize *counterBufferOffsets)
1422 {
1423 ASSERT(valid());
1424 ASSERT(vkCmdBeginTransformFeedbackEXT);
1425 vkCmdBeginTransformFeedbackEXT(mHandle, firstCounterBuffer, counterBufferCount, counterBuffers,
1426 counterBufferOffsets);
1427 }
1428
1429 ANGLE_INLINE void CommandBuffer::endTransformFeedback(uint32_t firstCounterBuffer,
1430 uint32_t counterBufferCount,
1431 const VkBuffer *counterBuffers,
1432 const VkDeviceSize *counterBufferOffsets)
1433 {
1434 ASSERT(valid());
1435 ASSERT(vkCmdEndTransformFeedbackEXT);
1436 vkCmdEndTransformFeedbackEXT(mHandle, firstCounterBuffer, counterBufferCount, counterBuffers,
1437 counterBufferOffsets);
1438 }
1439
1440 ANGLE_INLINE void CommandBuffer::bindTransformFeedbackBuffers(uint32_t firstBinding,
1441 uint32_t bindingCount,
1442 const VkBuffer *buffers,
1443 const VkDeviceSize *offsets,
1444 const VkDeviceSize *sizes)
1445 {
1446 ASSERT(valid());
1447 ASSERT(vkCmdBindTransformFeedbackBuffersEXT);
1448 vkCmdBindTransformFeedbackBuffersEXT(mHandle, firstBinding, bindingCount, buffers, offsets,
1449 sizes);
1450 }
1451
1452 ANGLE_INLINE void CommandBuffer::beginDebugUtilsLabelEXT(const VkDebugUtilsLabelEXT &labelInfo)
1453 {
1454 ASSERT(valid());
1455 {
1456 #if !defined(ANGLE_SHARED_LIBVULKAN)
        // When the vulkan-loader is statically linked, we need to use the extension
        // functions defined in ANGLE's rx namespace. When it's dynamically linked
        // with volk, this will default to the function definitions with no namespace.
1460 using rx::vkCmdBeginDebugUtilsLabelEXT;
1461 #endif // !defined(ANGLE_SHARED_LIBVULKAN)
1462 ASSERT(vkCmdBeginDebugUtilsLabelEXT);
1463 vkCmdBeginDebugUtilsLabelEXT(mHandle, &labelInfo);
1464 }
1465 }
1466
1467 ANGLE_INLINE void CommandBuffer::endDebugUtilsLabelEXT()
1468 {
1469 ASSERT(valid());
1470 ASSERT(vkCmdEndDebugUtilsLabelEXT);
1471 vkCmdEndDebugUtilsLabelEXT(mHandle);
1472 }
1473
1474 ANGLE_INLINE void CommandBuffer::insertDebugUtilsLabelEXT(const VkDebugUtilsLabelEXT &labelInfo)
1475 {
1476 ASSERT(valid());
1477 ASSERT(vkCmdInsertDebugUtilsLabelEXT);
1478 vkCmdInsertDebugUtilsLabelEXT(mHandle, &labelInfo);
1479 }
1480 } // namespace priv
1481
1482 // Image implementation.
1483 ANGLE_INLINE void Image::setHandle(VkImage handle)
1484 {
1485 mHandle = handle;
1486 }
1487
1488 ANGLE_INLINE void Image::reset()
1489 {
1490 mHandle = VK_NULL_HANDLE;
1491 }
1492
1493 ANGLE_INLINE void Image::destroy(VkDevice device)
1494 {
1495 if (valid())
1496 {
1497 vkDestroyImage(device, mHandle, nullptr);
1498 mHandle = VK_NULL_HANDLE;
1499 }
1500 }
1501
1502 ANGLE_INLINE VkResult Image::init(VkDevice device, const VkImageCreateInfo &createInfo)
1503 {
1504 ASSERT(!valid());
1505 return vkCreateImage(device, &createInfo, nullptr, &mHandle);
1506 }
1507
1508 ANGLE_INLINE void Image::getMemoryRequirements(VkDevice device,
1509 VkMemoryRequirements *requirementsOut) const
1510 {
1511 ASSERT(valid());
1512 vkGetImageMemoryRequirements(device, mHandle, requirementsOut);
1513 }
1514
1515 ANGLE_INLINE VkResult Image::bindMemory(VkDevice device, const vk::DeviceMemory &deviceMemory)
1516 {
1517 ASSERT(valid() && deviceMemory.valid());
1518 return vkBindImageMemory(device, mHandle, deviceMemory.getHandle(), 0);
1519 }
1520
1521 ANGLE_INLINE VkResult Image::bindMemory2(VkDevice device, const VkBindImageMemoryInfoKHR &bindInfo)
1522 {
1523 ASSERT(valid());
1524 return vkBindImageMemory2(device, 1, &bindInfo);
1525 }
1526
1527 ANGLE_INLINE void Image::getSubresourceLayout(VkDevice device,
1528 VkImageAspectFlagBits aspectMask,
1529 uint32_t mipLevel,
1530 uint32_t arrayLayer,
1531 VkSubresourceLayout *outSubresourceLayout) const
1532 {
1533 VkImageSubresource subresource = {};
1534 subresource.aspectMask = aspectMask;
1535 subresource.mipLevel = mipLevel;
1536 subresource.arrayLayer = arrayLayer;
1537
1538 vkGetImageSubresourceLayout(device, getHandle(), &subresource, outSubresourceLayout);
1539 }
1540
1541 // ImageView implementation.
1542 ANGLE_INLINE void ImageView::destroy(VkDevice device)
1543 {
1544 if (valid())
1545 {
1546 vkDestroyImageView(device, mHandle, nullptr);
1547 mHandle = VK_NULL_HANDLE;
1548 }
1549 }
1550
1551 ANGLE_INLINE VkResult ImageView::init(VkDevice device, const VkImageViewCreateInfo &createInfo)
1552 {
1553 return vkCreateImageView(device, &createInfo, nullptr, &mHandle);
1554 }
1555
1556 // Semaphore implementation.
1557 ANGLE_INLINE void Semaphore::destroy(VkDevice device)
1558 {
1559 if (valid())
1560 {
1561 vkDestroySemaphore(device, mHandle, nullptr);
1562 mHandle = VK_NULL_HANDLE;
1563 }
1564 }
1565
1566 ANGLE_INLINE VkResult Semaphore::init(VkDevice device)
1567 {
1568 ASSERT(!valid());
1569
1570 VkSemaphoreCreateInfo semaphoreInfo = {};
1571 semaphoreInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
1572 semaphoreInfo.flags = 0;
1573
1574 return vkCreateSemaphore(device, &semaphoreInfo, nullptr, &mHandle);
1575 }
1576
1577 ANGLE_INLINE VkResult Semaphore::importFd(VkDevice device,
1578 const VkImportSemaphoreFdInfoKHR &importFdInfo) const
1579 {
1580 ASSERT(valid());
1581 return vkImportSemaphoreFdKHR(device, &importFdInfo);
1582 }
1583
1584 // Framebuffer implementation.
1585 ANGLE_INLINE void Framebuffer::destroy(VkDevice device)
1586 {
1587 if (valid())
1588 {
1589 vkDestroyFramebuffer(device, mHandle, nullptr);
1590 mHandle = VK_NULL_HANDLE;
1591 }
1592 }
1593
1594 ANGLE_INLINE VkResult Framebuffer::init(VkDevice device, const VkFramebufferCreateInfo &createInfo)
1595 {
1596 ASSERT(!valid());
1597 return vkCreateFramebuffer(device, &createInfo, nullptr, &mHandle);
1598 }
1599
1600 ANGLE_INLINE void Framebuffer::setHandle(VkFramebuffer handle)
1601 {
1602 mHandle = handle;
1603 }
1604
1605 // DeviceMemory implementation.
1606 ANGLE_INLINE void DeviceMemory::destroy(VkDevice device)
1607 {
1608 if (valid())
1609 {
1610 vkFreeMemory(device, mHandle, nullptr);
1611 mHandle = VK_NULL_HANDLE;
1612 }
1613 }
1614
1615 ANGLE_INLINE VkResult DeviceMemory::allocate(VkDevice device, const VkMemoryAllocateInfo &allocInfo)
1616 {
1617 ASSERT(!valid());
1618 return vkAllocateMemory(device, &allocInfo, nullptr, &mHandle);
1619 }
1620
1621 ANGLE_INLINE VkResult DeviceMemory::map(VkDevice device,
1622 VkDeviceSize offset,
1623 VkDeviceSize size,
1624 VkMemoryMapFlags flags,
1625 uint8_t **mapPointer) const
1626 {
1627 ASSERT(valid());
1628 return vkMapMemory(device, mHandle, offset, size, flags, reinterpret_cast<void **>(mapPointer));
1629 }
1630
1631 ANGLE_INLINE void DeviceMemory::unmap(VkDevice device) const
1632 {
1633 ASSERT(valid());
1634 vkUnmapMemory(device, mHandle);
1635 }
1636
1637 ANGLE_INLINE void DeviceMemory::flush(VkDevice device, VkMappedMemoryRange &memRange)
1638 {
1639 vkFlushMappedMemoryRanges(device, 1, &memRange);
1640 }
1641
1642 ANGLE_INLINE void DeviceMemory::invalidate(VkDevice device, VkMappedMemoryRange &memRange)
1643 {
1644 vkInvalidateMappedMemoryRanges(device, 1, &memRange);
1645 }
1646
// Allocator implementation.
ANGLE_INLINE void Allocator::destroy()
{
    if (valid())
    {
        vma::DestroyAllocator(mHandle);
        mHandle = VK_NULL_HANDLE;
    }
}

ANGLE_INLINE VkResult Allocator::init(VkPhysicalDevice physicalDevice,
                                      VkDevice device,
                                      VkInstance instance,
                                      uint32_t apiVersion,
                                      VkDeviceSize preferredLargeHeapBlockSize)
{
    ASSERT(!valid());
    return vma::InitAllocator(physicalDevice, device, instance, apiVersion,
                              preferredLargeHeapBlockSize, &mHandle);
}

ANGLE_INLINE VkResult Allocator::createBuffer(const VkBufferCreateInfo &bufferCreateInfo,
                                              VkMemoryPropertyFlags requiredFlags,
                                              VkMemoryPropertyFlags preferredFlags,
                                              bool persistentlyMappedBuffers,
                                              uint32_t *memoryTypeIndexOut,
                                              Buffer *bufferOut,
                                              Allocation *allocationOut) const
{
    ASSERT(valid());
    ASSERT(bufferOut && !bufferOut->valid());
    ASSERT(allocationOut && !allocationOut->valid());
    return vma::CreateBuffer(mHandle, &bufferCreateInfo, requiredFlags, preferredFlags,
                             persistentlyMappedBuffers, memoryTypeIndexOut, &bufferOut->mHandle,
                             &allocationOut->mHandle);
}

ANGLE_INLINE void Allocator::getMemoryTypeProperties(uint32_t memoryTypeIndex,
                                                     VkMemoryPropertyFlags *flagsOut) const
{
    ASSERT(valid());
    vma::GetMemoryTypeProperties(mHandle, memoryTypeIndex, flagsOut);
}

ANGLE_INLINE VkResult
Allocator::findMemoryTypeIndexForBufferInfo(const VkBufferCreateInfo &bufferCreateInfo,
                                            VkMemoryPropertyFlags requiredFlags,
                                            VkMemoryPropertyFlags preferredFlags,
                                            bool persistentlyMappedBuffers,
                                            uint32_t *memoryTypeIndexOut) const
{
    ASSERT(valid());
    return vma::FindMemoryTypeIndexForBufferInfo(mHandle, &bufferCreateInfo, requiredFlags,
                                                 preferredFlags, persistentlyMappedBuffers,
                                                 memoryTypeIndexOut);
}

ANGLE_INLINE void Allocator::buildStatsString(char **statsString, VkBool32 detailedMap)
{
    ASSERT(valid());
    vma::BuildStatsString(mHandle, statsString, detailedMap);
}

ANGLE_INLINE void Allocator::freeStatsString(char *statsString)
{
    ASSERT(valid());
    vma::FreeStatsString(mHandle, statsString);
}

// Allocation implementation.
ANGLE_INLINE void Allocation::destroy(const Allocator &allocator)
{
    if (valid())
    {
        vma::FreeMemory(allocator.getHandle(), mHandle);
        mHandle = VK_NULL_HANDLE;
    }
}

ANGLE_INLINE VkResult Allocation::map(const Allocator &allocator, uint8_t **mapPointer) const
{
    ASSERT(valid());
    return vma::MapMemory(allocator.getHandle(), mHandle, reinterpret_cast<void **>(mapPointer));
}

ANGLE_INLINE void Allocation::unmap(const Allocator &allocator) const
{
    ASSERT(valid());
    vma::UnmapMemory(allocator.getHandle(), mHandle);
}

ANGLE_INLINE void Allocation::flush(const Allocator &allocator,
                                    VkDeviceSize offset,
                                    VkDeviceSize size) const
{
    ASSERT(valid());
    vma::FlushAllocation(allocator.getHandle(), mHandle, offset, size);
}

ANGLE_INLINE void Allocation::invalidate(const Allocator &allocator,
                                         VkDeviceSize offset,
                                         VkDeviceSize size) const
{
    ASSERT(valid());
    vma::InvalidateAllocation(allocator.getHandle(), mHandle, offset, size);
}

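// Illustrative sketch (not part of the wrapper API): creating a VMA-backed buffer and writing to
// it through its Allocation, using only the Allocator/Allocation/Buffer methods defined in this
// file.  `allocator`, `device`, and `bufferCreateInfo` are assumed to exist at the call site.
//
//     vk::Buffer buffer;
//     vk::Allocation allocation;
//     uint32_t memoryTypeIndex = 0;
//     if (allocator.createBuffer(bufferCreateInfo, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, 0, false,
//                                &memoryTypeIndex, &buffer, &allocation) == VK_SUCCESS)
//     {
//         uint8_t *mapped = nullptr;
//         if (allocation.map(allocator, &mapped) == VK_SUCCESS)
//         {
//             // ... write CPU data through `mapped` ...
//             allocation.flush(allocator, 0, VK_WHOLE_SIZE);
//             allocation.unmap(allocator);
//         }
//         buffer.destroy(device);
//         allocation.destroy(allocator);
//     }
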
// RenderPass implementation.
ANGLE_INLINE void RenderPass::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyRenderPass(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

ANGLE_INLINE VkResult RenderPass::init(VkDevice device, const VkRenderPassCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateRenderPass(device, &createInfo, nullptr, &mHandle);
}

ANGLE_INLINE VkResult RenderPass::init2(VkDevice device, const VkRenderPassCreateInfo2 &createInfo)
{
    ASSERT(!valid());
    return vkCreateRenderPass2KHR(device, &createInfo, nullptr, &mHandle);
}

// Buffer implementation.
ANGLE_INLINE void Buffer::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyBuffer(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

ANGLE_INLINE VkResult Buffer::init(VkDevice device, const VkBufferCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateBuffer(device, &createInfo, nullptr, &mHandle);
}

ANGLE_INLINE VkResult Buffer::bindMemory(VkDevice device,
                                         const DeviceMemory &deviceMemory,
                                         VkDeviceSize offset)
{
    ASSERT(valid() && deviceMemory.valid());
    return vkBindBufferMemory(device, mHandle, deviceMemory.getHandle(), offset);
}

ANGLE_INLINE void Buffer::getMemoryRequirements(VkDevice device,
                                                VkMemoryRequirements *memoryRequirementsOut)
{
    ASSERT(valid());
    vkGetBufferMemoryRequirements(device, mHandle, memoryRequirementsOut);
}

// BufferView implementation.
ANGLE_INLINE void BufferView::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyBufferView(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

ANGLE_INLINE VkResult BufferView::init(VkDevice device, const VkBufferViewCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateBufferView(device, &createInfo, nullptr, &mHandle);
}

// ShaderModule implementation.
ANGLE_INLINE void ShaderModule::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyShaderModule(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

ANGLE_INLINE VkResult ShaderModule::init(VkDevice device,
                                         const VkShaderModuleCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateShaderModule(device, &createInfo, nullptr, &mHandle);
}

// PipelineLayout implementation.
ANGLE_INLINE void PipelineLayout::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyPipelineLayout(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

ANGLE_INLINE VkResult PipelineLayout::init(VkDevice device,
                                           const VkPipelineLayoutCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreatePipelineLayout(device, &createInfo, nullptr, &mHandle);
}

// PipelineCache implementation.
ANGLE_INLINE void PipelineCache::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyPipelineCache(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

ANGLE_INLINE VkResult PipelineCache::init(VkDevice device,
                                          const VkPipelineCacheCreateInfo &createInfo)
{
    ASSERT(!valid());
    // Note: if we are concerned with memory usage of this cache, we should give it custom
    // allocators. Also, failure of this function is of little importance.
    return vkCreatePipelineCache(device, &createInfo, nullptr, &mHandle);
}

ANGLE_INLINE VkResult PipelineCache::merge(VkDevice device,
                                           uint32_t srcCacheCount,
                                           const VkPipelineCache *srcCaches) const
{
    ASSERT(valid());
    return vkMergePipelineCaches(device, mHandle, srcCacheCount, srcCaches);
}

ANGLE_INLINE VkResult PipelineCache::getCacheData(VkDevice device,
                                                  size_t *cacheSize,
                                                  void *cacheData) const
{
    ASSERT(valid());

    // Note: vkGetPipelineCacheData can return VK_INCOMPLETE if *cacheSize is smaller than the
    // actual cache size.  This function is used in two ways: with *cacheSize == 0 to query the
    // size of the cache, and with a large-enough buffer to retrieve the cache contents.
    // VK_INCOMPLETE is an expected result in the first case.  In the second case VK_INCOMPLETE is
    // also acceptable, and per the spec the returned buffer still contains valid data.  ANGLE
    // currently ensures *cacheSize is either 0 or large enough, so VK_INCOMPLETE is not expected.
    return vkGetPipelineCacheData(device, mHandle, cacheSize, cacheData);
}

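// Illustrative sketch (not part of the wrapper API) of the two-call pattern described above:
// first query the size, then retrieve the contents.  `device` and `pipelineCache` are assumed to
// exist and be valid at the call site.
//
//     size_t cacheSize = 0;
//     if (pipelineCache.getCacheData(device, &cacheSize, nullptr) == VK_SUCCESS && cacheSize > 0)
//     {
//         std::vector<uint8_t> cacheData(cacheSize);
//         if (pipelineCache.getCacheData(device, &cacheSize, cacheData.data()) == VK_SUCCESS)
//         {
//             // ... persist `cacheData` (e.g. in a blob cache) ...
//         }
//     }
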
// Pipeline implementation.
ANGLE_INLINE void Pipeline::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyPipeline(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

ANGLE_INLINE VkResult Pipeline::initGraphics(VkDevice device,
                                             const VkGraphicsPipelineCreateInfo &createInfo,
                                             const PipelineCache &pipelineCacheVk)
{
    ASSERT(!valid());
    return vkCreateGraphicsPipelines(device, pipelineCacheVk.getHandle(), 1, &createInfo, nullptr,
                                     &mHandle);
}

ANGLE_INLINE VkResult Pipeline::initCompute(VkDevice device,
                                            const VkComputePipelineCreateInfo &createInfo,
                                            const PipelineCache &pipelineCacheVk)
{
    ASSERT(!valid());
    return vkCreateComputePipelines(device, pipelineCacheVk.getHandle(), 1, &createInfo, nullptr,
                                    &mHandle);
}

// DescriptorSetLayout implementation.
ANGLE_INLINE void DescriptorSetLayout::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyDescriptorSetLayout(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

ANGLE_INLINE VkResult DescriptorSetLayout::init(VkDevice device,
                                                const VkDescriptorSetLayoutCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateDescriptorSetLayout(device, &createInfo, nullptr, &mHandle);
}

// DescriptorPool implementation.
ANGLE_INLINE void DescriptorPool::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyDescriptorPool(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

ANGLE_INLINE VkResult DescriptorPool::init(VkDevice device,
                                           const VkDescriptorPoolCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateDescriptorPool(device, &createInfo, nullptr, &mHandle);
}

ANGLE_INLINE VkResult
DescriptorPool::allocateDescriptorSets(VkDevice device,
                                       const VkDescriptorSetAllocateInfo &allocInfo,
                                       VkDescriptorSet *descriptorSetsOut)
{
    ASSERT(valid());
    return vkAllocateDescriptorSets(device, &allocInfo, descriptorSetsOut);
}

ANGLE_INLINE VkResult DescriptorPool::freeDescriptorSets(VkDevice device,
                                                         uint32_t descriptorSetCount,
                                                         const VkDescriptorSet *descriptorSets)
{
    ASSERT(valid());
    ASSERT(descriptorSetCount > 0);
    return vkFreeDescriptorSets(device, mHandle, descriptorSetCount, descriptorSets);
}

// Sampler implementation.
ANGLE_INLINE void Sampler::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroySampler(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

ANGLE_INLINE VkResult Sampler::init(VkDevice device, const VkSamplerCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateSampler(device, &createInfo, nullptr, &mHandle);
}

// SamplerYcbcrConversion implementation.
ANGLE_INLINE void SamplerYcbcrConversion::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroySamplerYcbcrConversion(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

ANGLE_INLINE VkResult
SamplerYcbcrConversion::init(VkDevice device, const VkSamplerYcbcrConversionCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateSamplerYcbcrConversion(device, &createInfo, nullptr, &mHandle);
}

// Event implementation.
ANGLE_INLINE void Event::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyEvent(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

ANGLE_INLINE VkResult Event::init(VkDevice device, const VkEventCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateEvent(device, &createInfo, nullptr, &mHandle);
}

ANGLE_INLINE VkResult Event::getStatus(VkDevice device) const
{
    ASSERT(valid());
    return vkGetEventStatus(device, mHandle);
}

ANGLE_INLINE VkResult Event::set(VkDevice device) const
{
    ASSERT(valid());
    return vkSetEvent(device, mHandle);
}

ANGLE_INLINE VkResult Event::reset(VkDevice device) const
{
    ASSERT(valid());
    return vkResetEvent(device, mHandle);
}

// Fence implementation.
ANGLE_INLINE void Fence::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyFence(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

ANGLE_INLINE VkResult Fence::init(VkDevice device, const VkFenceCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateFence(device, &createInfo, nullptr, &mHandle);
}

ANGLE_INLINE VkResult Fence::reset(VkDevice device)
{
    ASSERT(valid());
    return vkResetFences(device, 1, &mHandle);
}

ANGLE_INLINE VkResult Fence::getStatus(VkDevice device) const
{
    ASSERT(valid());
    return vkGetFenceStatus(device, mHandle);
}

ANGLE_INLINE VkResult Fence::wait(VkDevice device, uint64_t timeout) const
{
    ASSERT(valid());
    return vkWaitForFences(device, 1, &mHandle, VK_TRUE, timeout);
}

ANGLE_INLINE VkResult Fence::importFd(VkDevice device,
                                      const VkImportFenceFdInfoKHR &importFenceFdInfo) const
{
    ASSERT(valid());
    return vkImportFenceFdKHR(device, &importFenceFdInfo);
}

ANGLE_INLINE VkResult Fence::exportFd(VkDevice device,
                                      const VkFenceGetFdInfoKHR &fenceGetFdInfo,
                                      int *fdOut) const
{
    ASSERT(valid());
    return vkGetFenceFdKHR(device, &fenceGetFdInfo, fdOut);
}

// QueryPool implementation.
ANGLE_INLINE void QueryPool::destroy(VkDevice device)
{
    if (valid())
    {
        vkDestroyQueryPool(device, mHandle, nullptr);
        mHandle = VK_NULL_HANDLE;
    }
}

ANGLE_INLINE VkResult QueryPool::init(VkDevice device, const VkQueryPoolCreateInfo &createInfo)
{
    ASSERT(!valid());
    return vkCreateQueryPool(device, &createInfo, nullptr, &mHandle);
}

ANGLE_INLINE VkResult QueryPool::getResults(VkDevice device,
                                            uint32_t firstQuery,
                                            uint32_t queryCount,
                                            size_t dataSize,
                                            void *data,
                                            VkDeviceSize stride,
                                            VkQueryResultFlags flags) const
{
    ASSERT(valid());
    return vkGetQueryPoolResults(device, mHandle, firstQuery, queryCount, dataSize, data, stride,
                                 flags);
}

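// Illustrative sketch (not part of the wrapper API): reading back a single 64-bit query result,
// waiting for availability via VK_QUERY_RESULT_WAIT_BIT.  `device` and `queryPool` are assumed to
// exist and be valid, and the query is assumed to have been ended in a submitted command buffer.
//
//     uint64_t result  = 0;
//     VkResult status  = queryPool.getResults(device, 0, 1, sizeof(result), &result,
//                                             sizeof(result),
//                                             VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT);
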
// VirtualBlock implementation.
ANGLE_INLINE void VirtualBlock::destroy(VkDevice device)
{
    if (valid())
    {
        vma::DestroyVirtualBlock(mHandle);
        mHandle = VK_NULL_HANDLE;
    }
}

ANGLE_INLINE VkResult VirtualBlock::init(VkDevice device,
                                         vma::VirtualBlockCreateFlags flags,
                                         VkDeviceSize size)
{
    return vma::CreateVirtualBlock(size, flags, &mHandle);
}

ANGLE_INLINE VkResult VirtualBlock::allocate(VkDeviceSize size,
                                             VkDeviceSize alignment,
                                             VmaVirtualAllocation *allocationOut,
                                             VkDeviceSize *offsetOut)
{
    return vma::VirtualAllocate(mHandle, size, alignment, allocationOut, offsetOut);
}

ANGLE_INLINE void VirtualBlock::free(VmaVirtualAllocation allocation, VkDeviceSize offset)
{
    vma::VirtualFree(mHandle, allocation, offset);
}

ANGLE_INLINE void VirtualBlock::calculateStats(vma::StatInfo *pStatInfo) const
{
    vma::CalculateVirtualBlockStats(mHandle, pStatInfo);
}
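
// Illustrative sketch (not part of the wrapper API): using VirtualBlock to sub-allocate offsets
// within a larger, pre-existing buffer.  VirtualBlock only does offset bookkeeping and never
// touches GPU memory itself.  `device` and `blockSize` are assumed to exist at the call site, and
// `flags` is assumed to be a valid vma::VirtualBlockCreateFlags value.
//
//     vk::VirtualBlock block;
//     if (block.init(device, flags, blockSize) == VK_SUCCESS)
//     {
//         VmaVirtualAllocation allocation;
//         VkDeviceSize offset = 0;
//         if (block.allocate(256, 16, &allocation, &offset) == VK_SUCCESS)
//         {
//             // ... use `offset` as the sub-allocation's offset into the backing buffer ...
//             block.free(allocation, offset);
//         }
//         block.destroy(device);
//     }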
} // namespace vk
} // namespace rx

#endif // LIBANGLE_RENDERER_VULKAN_VK_WRAPPER_H_