/*
 * Copyright 2022 Google LLC
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef skgpu_graphite_VulkanCommandBuffer_DEFINED
#define skgpu_graphite_VulkanCommandBuffer_DEFINED

#include "src/gpu/graphite/CommandBuffer.h"

#include "include/gpu/vk/VulkanTypes.h"
#include "src/gpu/graphite/DrawPass.h"
#include "src/gpu/graphite/vk/VulkanGraphicsPipeline.h"
#include "src/gpu/graphite/vk/VulkanResourceProvider.h"

namespace skgpu::graphite {

class VulkanBuffer;
class VulkanDescriptorSet;
class VulkanSharedContext;
class VulkanTexture;
class Buffer;

class VulkanCommandBuffer final : public CommandBuffer {
public:
    static std::unique_ptr<VulkanCommandBuffer> Make(const VulkanSharedContext*,
                                                     VulkanResourceProvider*,
                                                     Protected);
    ~VulkanCommandBuffer() override;

    bool setNewCommandBufferResources() override;

    bool submit(VkQueue);

    bool isFinished();

    void waitUntilFinished();

    void addBufferMemoryBarrier(const Resource* resource,
                                VkPipelineStageFlags srcStageMask,
                                VkPipelineStageFlags dstStageMask,
                                VkBufferMemoryBarrier* barrier);
    void addBufferMemoryBarrier(VkPipelineStageFlags srcStageMask,
                                VkPipelineStageFlags dstStageMask,
                                VkBufferMemoryBarrier* barrier);
    void addImageMemoryBarrier(const Resource*,
                               VkPipelineStageFlags srcStageMask,
                               VkPipelineStageFlags dstStageMask,
                               bool byRegion,
                               VkImageMemoryBarrier* barrier);

private:
    VulkanCommandBuffer(VkCommandPool pool,
                        VkCommandBuffer primaryCommandBuffer,
                        const VulkanSharedContext* sharedContext,
                        VulkanResourceProvider* resourceProvider,
                        Protected);

    ResourceProvider* resourceProvider() const override { return fResourceProvider; }

    void onResetCommandBuffer() override;

    void begin();
    void end();

    void addWaitSemaphores(size_t numWaitSemaphores,
                           const BackendSemaphore* waitSemaphores) override;
    void addSignalSemaphores(size_t numSignalSemaphores,
                             const BackendSemaphore* signalSemaphores) override;
    void prepareSurfaceForStateUpdate(SkSurface* targetSurface,
                                      const MutableTextureState* newState) override;

    bool onAddRenderPass(const RenderPassDesc&,
                         SkIRect renderPassBounds,
                         const Texture* colorTexture,
                         const Texture* resolveTexture,
                         const Texture* depthStencilTexture,
                         SkIRect viewport,
                         const DrawPassList&) override;

    bool beginRenderPass(const RenderPassDesc&,
                         SkIRect renderPassBounds,
                         const Texture* colorTexture,
                         const Texture* resolveTexture,
                         const Texture* depthStencilTexture);
    void endRenderPass();

    void addDrawPass(const DrawPass*);

    // Track descriptor changes for binding prior to draw calls
    void recordBufferBindingInfo(const BindBufferInfo& info, UniformSlot);
    // Either both arguments are non-null, or both must be null (to reset or handle just the
    // dstCopy intrinsic w/o requiring a DrawPass command).
    void recordTextureAndSamplerDescSet(
            const DrawPass*, const DrawPassCommands::BindTexturesAndSamplers*);

    void bindTextureSamplers();
    void bindUniformBuffers();
    void syncDescriptorSets();

    void bindGraphicsPipeline(const GraphicsPipeline*);
    void setBlendConstants(float* blendConstants);
    void bindDrawBuffers(const BindBufferInfo& vertices,
                         const BindBufferInfo& instances,
                         const BindBufferInfo& indices,
                         const BindBufferInfo& indirect);
    void bindVertexBuffers(const Buffer* vertexBuffer, size_t vertexOffset,
                           const Buffer* instanceBuffer, size_t instanceOffset);
    void bindInputBuffer(const Buffer* buffer, VkDeviceSize offset, uint32_t binding);
    void bindIndexBuffer(const Buffer* indexBuffer, size_t offset);
    void bindIndirectBuffer(const Buffer* indirectBuffer, size_t offset);
    void setScissor(const Scissor&);
    void setScissor(const SkIRect&);

    void draw(PrimitiveType type, unsigned int baseVertex, unsigned int vertexCount);
    void drawIndexed(PrimitiveType type, unsigned int baseIndex, unsigned int indexCount,
                     unsigned int baseVertex);
    void drawInstanced(PrimitiveType type,
                       unsigned int baseVertex, unsigned int vertexCount,
                       unsigned int baseInstance, unsigned int instanceCount);
    void drawIndexedInstanced(PrimitiveType type, unsigned int baseIndex,
                              unsigned int indexCount, unsigned int baseVertex,
                              unsigned int baseInstance, unsigned int instanceCount);
    void drawIndirect(PrimitiveType type);
    void drawIndexedIndirect(PrimitiveType type);

    // TODO: The virtuals in this class have not yet been implemented as we still haven't
    // implemented the objects they use.
    bool onAddComputePass(DispatchGroupSpan) override;

    bool onCopyBufferToBuffer(const Buffer* srcBuffer,
                              size_t srcOffset,
                              const Buffer* dstBuffer,
                              size_t dstOffset,
                              size_t size) override;
    bool onCopyTextureToBuffer(const Texture*,
                               SkIRect srcRect,
                               const Buffer*,
                               size_t bufferOffset,
                               size_t bufferRowBytes) override;
    bool onCopyBufferToTexture(const Buffer*,
                               const Texture*,
                               const BufferTextureCopyData* copyData,
                               int count) override;
    bool onCopyTextureToTexture(const Texture* src,
                                SkIRect srcRect,
                                const Texture* dst,
                                SkIPoint dstPoint,
                                int mipLevel) override;

    bool pushConstants(VkShaderStageFlags stageFlags,
                       uint32_t offset,
                       uint32_t size,
                       const void* values);

    bool onSynchronizeBufferToCpu(const Buffer*, bool* outDidResultInWork) override;
    bool onClearBuffer(const Buffer*, size_t offset, size_t size) override;

    enum BarrierType {
        kBufferMemory_BarrierType,
        kImageMemory_BarrierType
    };
    void pipelineBarrier(const Resource* resource,
                         VkPipelineStageFlags srcStageMask,
                         VkPipelineStageFlags dstStageMask,
                         bool byRegion,
                         BarrierType barrierType,
                         void* barrier);
    void submitPipelineBarriers(bool forSelfDependency = false);

    // Update the intrinsic constant uniform buffer and binding to reflect the updated viewport.
    // The resource provider is responsible for finding a suitable buffer and managing its
    // lifetime.
    bool updateIntrinsicUniforms(SkIRect viewport);

    bool loadMSAAFromResolve(const RenderPassDesc&,
                             VulkanTexture& resolveTexture,
                             SkISize dstDimensions,
                             SkIRect nativeBounds);
    bool updateAndBindLoadMSAAInputAttachment(const VulkanTexture& resolveTexture);
    void updateBuffer(const VulkanBuffer* buffer,
                      const void* data,
                      size_t dataSize,
                      size_t dstOffset = 0);
    void nextSubpass();
    void setViewport(SkIRect viewport);

    VkCommandPool fPool;
    VkCommandBuffer fPrimaryCommandBuffer;
    const VulkanSharedContext* fSharedContext;
    VulkanResourceProvider* fResourceProvider;

    // begin() has been called, but not end()
    bool fActive = false;
    // Track whether there is currently an active render pass (beginRenderPass has been called,
    // but not endRenderPass)
    bool fActiveRenderPass = false;

    const VulkanGraphicsPipeline* fActiveGraphicsPipeline = nullptr;

    VkFence fSubmitFence = VK_NULL_HANDLE;

    // Current semaphores
    skia_private::STArray<1, VkSemaphore> fWaitSemaphores;
    skia_private::STArray<1, VkSemaphore> fSignalSemaphores;

    // Tracking of memory barriers so that we can submit them all in a batch together.
    skia_private::STArray<1, VkBufferMemoryBarrier> fBufferBarriers;
    skia_private::STArray<2, VkImageMemoryBarrier> fImageBarriers;
    bool fBarriersByRegion = false;
    VkPipelineStageFlags fSrcStageMask = 0;
    VkPipelineStageFlags fDstStageMask = 0;

    // Track whether certain descriptor sets need to be bound
    bool fBindUniformBuffers = false;
    bool fBindTextureSamplers = false;

    std::array<BindBufferInfo, VulkanGraphicsPipeline::kNumUniformBuffers> fUniformBuffersToBind;
    VkDescriptorSet fTextureSamplerDescSetToBind = VK_NULL_HANDLE;

    int fNumTextureSamplers = 0;

    VkBuffer fBoundInputBuffers[VulkanGraphicsPipeline::kNumInputBuffers];
    size_t fBoundInputBufferOffsets[VulkanGraphicsPipeline::kNumInputBuffers];

    VkBuffer fBoundIndexBuffer = VK_NULL_HANDLE;
    VkBuffer fBoundIndirectBuffer = VK_NULL_HANDLE;
    size_t fBoundIndexBufferOffset = 0;
    size_t fBoundIndirectBufferOffset = 0;

    float fCachedBlendConstant[4];

    class IntrinsicConstantsManager;
    std::unique_ptr<IntrinsicConstantsManager> fIntrinsicConstants;
};

} // namespace skgpu::graphite

#endif // skgpu_graphite_VulkanCommandBuffer_DEFINED
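
// A minimal usage sketch (not part of the upstream header), based only on the public API
// declared above. The sharedContext, resourceProvider, and queue variables are assumed to be
// supplied by the surrounding Graphite Vulkan backend setup, and skgpu::Protected::kNo is
// used purely as an illustrative argument; the real caller is Graphite's queue management,
// not user code.
//
//     std::unique_ptr<VulkanCommandBuffer> cmdBuffer =
//             VulkanCommandBuffer::Make(sharedContext, resourceProvider, Protected::kNo);
//     if (cmdBuffer && cmdBuffer->setNewCommandBufferResources()) {
//         // Record work through the base CommandBuffer interface, then submit to the queue
//         // and block until the GPU has finished with it.
//         if (cmdBuffer->submit(queue)) {
//             cmdBuffer->waitUntilFinished();
//         }
//     }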