// Copyright 2018 The Amber Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "src/vulkan/pipeline.h"

#include <algorithm>
#include <limits>
#include <utility>

#include "src/command.h"
#include "src/engine.h"
#include "src/make_unique.h"
#include "src/vulkan/buffer_descriptor.h"
#include "src/vulkan/compute_pipeline.h"
#include "src/vulkan/device.h"
#include "src/vulkan/graphics_pipeline.h"
#include "src/vulkan/image_descriptor.h"
#include "src/vulkan/sampler_descriptor.h"

namespace amber {
namespace vulkan {
namespace {

const char* kDefaultEntryPointName = "main";

}  // namespace

Pipeline::Pipeline(
    PipelineType type,
    Device* device,
    uint32_t fence_timeout_ms,
    bool pipeline_runtime_layer_enabled,
    const std::vector<VkPipelineShaderStageCreateInfo>& shader_stage_info)
    : device_(device),
      pipeline_type_(type),
      shader_stage_info_(shader_stage_info),
      fence_timeout_ms_(fence_timeout_ms),
      pipeline_runtime_layer_enabled_(pipeline_runtime_layer_enabled) {}

Pipeline::~Pipeline() {
  // Command must be reset before we destroy descriptors or we get a validation
  // error.
  command_ = nullptr;

  for (auto& info : descriptor_set_info_) {
    if (info.layout != VK_NULL_HANDLE) {
      device_->GetPtrs()->vkDestroyDescriptorSetLayout(device_->GetVkDevice(),
                                                       info.layout, nullptr);
    }

    if (info.empty)
      continue;

    if (info.pool != VK_NULL_HANDLE) {
      device_->GetPtrs()->vkDestroyDescriptorPool(device_->GetVkDevice(),
                                                  info.pool, nullptr);
    }
  }
}

GraphicsPipeline* Pipeline::AsGraphics() {
  return static_cast<GraphicsPipeline*>(this);
}

ComputePipeline* Pipeline::AsCompute() {
  return static_cast<ComputePipeline*>(this);
}

Result Pipeline::Initialize(CommandPool* pool) {
  push_constant_ = MakeUnique<PushConstant>(device_);

  command_ = MakeUnique<CommandBuffer>(device_, pool);
  return command_->Initialize();
}

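// Creates a VkDescriptorSetLayout for every descriptor set used by the
// pipeline. Each binding is exposed to all shader stages via
// VK_SHADER_STAGE_ALL, so a binding works regardless of which stage
// consumes it.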
Result Pipeline::CreateDescriptorSetLayouts() {
  for (auto& info : descriptor_set_info_) {
    VkDescriptorSetLayoutCreateInfo desc_info =
        VkDescriptorSetLayoutCreateInfo();
    desc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;

    // If there are no descriptors for this descriptor set we only
    // need to create its layout and there will be no bindings.
    std::vector<VkDescriptorSetLayoutBinding> bindings;
    for (auto& desc : info.descriptors) {
      bindings.emplace_back();
      bindings.back().binding = desc->GetBinding();
      bindings.back().descriptorType = desc->GetVkDescriptorType();
      bindings.back().descriptorCount = desc->GetDescriptorCount();
      bindings.back().stageFlags = VK_SHADER_STAGE_ALL;
    }
    desc_info.bindingCount = static_cast<uint32_t>(bindings.size());
    desc_info.pBindings = bindings.data();

    if (device_->GetPtrs()->vkCreateDescriptorSetLayout(
            device_->GetVkDevice(), &desc_info, nullptr, &info.layout) !=
        VK_SUCCESS) {
      return Result("Vulkan::Calling vkCreateDescriptorSetLayout Fail");
    }
  }

  return {};
}

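// Creates one VkDescriptorPool per non-empty descriptor set. Pool sizes are
// aggregated per VkDescriptorType, and each pool allocates exactly the one
// set (maxSets = 1) it backs.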
Result Pipeline::CreateDescriptorPools() {
  for (auto& info : descriptor_set_info_) {
    if (info.empty)
      continue;

    std::vector<VkDescriptorPoolSize> pool_sizes;
    for (auto& desc : info.descriptors) {
      VkDescriptorType type = desc->GetVkDescriptorType();
      auto it = std::find_if(pool_sizes.begin(), pool_sizes.end(),
                             [&type](const VkDescriptorPoolSize& size) {
                               return size.type == type;
                             });
      if (it != pool_sizes.end()) {
        it->descriptorCount += desc->GetDescriptorCount();
        continue;
      }

      pool_sizes.emplace_back();
      pool_sizes.back().type = type;
      pool_sizes.back().descriptorCount = desc->GetDescriptorCount();
    }

    VkDescriptorPoolCreateInfo pool_info = VkDescriptorPoolCreateInfo();
    pool_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
    pool_info.maxSets = 1;
    pool_info.poolSizeCount = static_cast<uint32_t>(pool_sizes.size());
    pool_info.pPoolSizes = pool_sizes.data();

    if (device_->GetPtrs()->vkCreateDescriptorPool(device_->GetVkDevice(),
                                                   &pool_info, nullptr,
                                                   &info.pool) != VK_SUCCESS) {
      return Result("Vulkan::Calling vkCreateDescriptorPool Fail");
    }
  }

  return {};
}

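// Allocates one VkDescriptorSet from each non-empty set's pool, using the
// layout created in CreateDescriptorSetLayouts().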
Result Pipeline::CreateDescriptorSets() {
  for (size_t i = 0; i < descriptor_set_info_.size(); ++i) {
    if (descriptor_set_info_[i].empty)
      continue;

    VkDescriptorSetAllocateInfo desc_set_info = VkDescriptorSetAllocateInfo();
    desc_set_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
    desc_set_info.descriptorPool = descriptor_set_info_[i].pool;
    desc_set_info.descriptorSetCount = 1;
    desc_set_info.pSetLayouts = &descriptor_set_info_[i].layout;

    VkDescriptorSet desc_set = VK_NULL_HANDLE;
    if (device_->GetPtrs()->vkAllocateDescriptorSets(
            device_->GetVkDevice(), &desc_set_info, &desc_set) != VK_SUCCESS) {
      return Result("Vulkan::Calling vkAllocateDescriptorSets Fail");
    }
    descriptor_set_info_[i].vk_desc_set = desc_set;
  }

  return {};
}

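// Builds the VkPipelineLayout from every descriptor set layout (including
// layouts for empty sets, which keeps set numbers contiguous) plus a single
// push-constant range when any push-constant data has been added.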
Result Pipeline::CreateVkPipelineLayout(VkPipelineLayout* pipeline_layout) {
  Result r = CreateVkDescriptorRelatedObjectsIfNeeded();
  if (!r.IsSuccess())
    return r;

  std::vector<VkDescriptorSetLayout> descriptor_set_layouts;
  for (const auto& desc_set : descriptor_set_info_)
    descriptor_set_layouts.push_back(desc_set.layout);

  VkPipelineLayoutCreateInfo pipeline_layout_info =
      VkPipelineLayoutCreateInfo();
  pipeline_layout_info.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
  pipeline_layout_info.setLayoutCount =
      static_cast<uint32_t>(descriptor_set_layouts.size());
  pipeline_layout_info.pSetLayouts = descriptor_set_layouts.data();

  VkPushConstantRange push_const_range =
      push_constant_->GetVkPushConstantRange();
  if (push_const_range.size > 0) {
    pipeline_layout_info.pushConstantRangeCount = 1U;
    pipeline_layout_info.pPushConstantRanges = &push_const_range;
  }

  if (device_->GetPtrs()->vkCreatePipelineLayout(
          device_->GetVkDevice(), &pipeline_layout_info, nullptr,
          pipeline_layout) != VK_SUCCESS) {
    return Result("Vulkan::Calling vkCreatePipelineLayout Fail");
  }

  return {};
}

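// Lazily creates the descriptor set layouts, pools, and sets, in that order.
// Runs at most once; after it succeeds, no new descriptor sets may be
// introduced (see GetDescriptorSlot()).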
Result Pipeline::CreateVkDescriptorRelatedObjectsIfNeeded() {
  if (descriptor_related_objects_already_created_)
    return {};

  Result r = CreateDescriptorSetLayouts();
  if (!r.IsSuccess())
    return r;

  r = CreateDescriptorPools();
  if (!r.IsSuccess())
    return r;

  r = CreateDescriptorSets();
  if (!r.IsSuccess())
    return r;

  descriptor_related_objects_already_created_ = true;
  return {};
}

void Pipeline::UpdateDescriptorSetsIfNeeded() {
  for (auto& info : descriptor_set_info_) {
    for (auto& desc : info.descriptors)
      desc->UpdateDescriptorSetIfNeeded(info.vk_desc_set);
  }
}

Result Pipeline::RecordPushConstant(const VkPipelineLayout& pipeline_layout) {
  return push_constant_->RecordPushConstantVkCommand(command_.get(),
                                                     pipeline_layout);
}

Result Pipeline::AddPushConstantBuffer(const Buffer* buf, uint32_t offset) {
  if (!buf)
    return Result("Missing push constant buffer data");
  return push_constant_->AddBuffer(buf, offset);
}

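// Looks up the descriptor at |desc_set| / |binding|, growing
// descriptor_set_info_ so |desc_set| is a valid index. On return, *desc is
// the existing descriptor for that binding, or nullptr if none has been
// added yet. A hypothetical caller (names illustrative only):
//
//   Descriptor* desc = nullptr;
//   Result r = GetDescriptorSlot(/*desc_set=*/0, /*binding=*/1, &desc);
//   if (r.IsSuccess() && desc == nullptr) {
//     // Nothing bound at set 0, binding 1 yet; create a new descriptor.
//   }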
Result Pipeline::GetDescriptorSlot(uint32_t desc_set,
                                   uint32_t binding,
                                   Descriptor** desc) {
  *desc = nullptr;

  if (desc_set >= descriptor_set_info_.size()) {
    for (size_t i = descriptor_set_info_.size();
         i <= static_cast<size_t>(desc_set); ++i) {
      descriptor_set_info_.emplace_back();
    }
  }

  if (descriptor_set_info_[desc_set].empty &&
      descriptor_related_objects_already_created_) {
    return Result(
        "Vulkan: Pipeline descriptor related objects were already created but "
        "try to put data on empty descriptor set '" +
        std::to_string(desc_set) +
        "'. Note that all used descriptor sets must be allocated before the "
        "first compute or draw.");
  }
  descriptor_set_info_[desc_set].empty = false;

  auto& descriptors = descriptor_set_info_[desc_set].descriptors;
  for (auto& descriptor : descriptors) {
    if (descriptor->GetBinding() == binding)
      *desc = descriptor.get();
  }

  return {};
}

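// Registers |amber_buffer| as a buffer backing a descriptor. Duplicate
// registrations are ignored.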
Result Pipeline::AddDescriptorBuffer(Buffer* amber_buffer) {
  // Don't add the buffer if it's already added.
  const auto& buffer = std::find_if(
      descriptor_buffers_.begin(), descriptor_buffers_.end(),
      [&](const Buffer* buf) { return buf == amber_buffer; });
  if (buffer != descriptor_buffers_.end()) {
    return {};
  }
  descriptor_buffers_.push_back(amber_buffer);
  return {};
}

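// Creates or extends the descriptor described by |cmd|: maps the
// BufferCommand type to a DescriptorType, creates an ImageDescriptor or
// BufferDescriptor on first use of the (set, binding) pair, and otherwise
// appends the buffer to the existing descriptor, whose type must match.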
Result Pipeline::AddBufferDescriptor(const BufferCommand* cmd) {
  if (cmd == nullptr)
    return Result("Pipeline::AddBufferDescriptor BufferCommand is nullptr");
  if (!cmd->IsSSBO() && !cmd->IsUniform() && !cmd->IsStorageImage() &&
      !cmd->IsSampledImage() && !cmd->IsCombinedImageSampler() &&
      !cmd->IsUniformTexelBuffer() && !cmd->IsStorageTexelBuffer() &&
      !cmd->IsUniformDynamic() && !cmd->IsSSBODynamic()) {
    return Result("Pipeline::AddBufferDescriptor not supported buffer type");
  }

  Descriptor* desc;
  Result r =
      GetDescriptorSlot(cmd->GetDescriptorSet(), cmd->GetBinding(), &desc);
  if (!r.IsSuccess())
    return r;

  auto& descriptors =
      descriptor_set_info_[cmd->GetDescriptorSet()].descriptors;

  bool is_image = false;
  DescriptorType desc_type = DescriptorType::kUniformBuffer;

  if (cmd->IsStorageImage()) {
    desc_type = DescriptorType::kStorageImage;
    is_image = true;
  } else if (cmd->IsSampledImage()) {
    desc_type = DescriptorType::kSampledImage;
    is_image = true;
  } else if (cmd->IsCombinedImageSampler()) {
    desc_type = DescriptorType::kCombinedImageSampler;
    is_image = true;
  } else if (cmd->IsUniformTexelBuffer()) {
    desc_type = DescriptorType::kUniformTexelBuffer;
  } else if (cmd->IsStorageTexelBuffer()) {
    desc_type = DescriptorType::kStorageTexelBuffer;
  } else if (cmd->IsSSBO()) {
    desc_type = DescriptorType::kStorageBuffer;
  } else if (cmd->IsUniformDynamic()) {
    desc_type = DescriptorType::kUniformBufferDynamic;
  } else if (cmd->IsSSBODynamic()) {
    desc_type = DescriptorType::kStorageBufferDynamic;
  }

  if (desc == nullptr) {
    if (is_image) {
      auto image_desc = MakeUnique<ImageDescriptor>(
          cmd->GetBuffer(), desc_type, device_, cmd->GetBaseMipLevel(),
          cmd->GetDescriptorSet(), cmd->GetBinding(), this);
      if (cmd->IsCombinedImageSampler())
        image_desc->SetAmberSampler(cmd->GetSampler());

      descriptors.push_back(std::move(image_desc));
    } else {
      auto buffer_desc = MakeUnique<BufferDescriptor>(
          cmd->GetBuffer(), desc_type, device_, cmd->GetDescriptorSet(),
          cmd->GetBinding(), this);
      descriptors.push_back(std::move(buffer_desc));
    }
    AddDescriptorBuffer(cmd->GetBuffer());
    desc = descriptors.back().get();
  } else {
    if (desc->GetDescriptorType() != desc_type) {
      return Result(
          "Descriptors bound to the same binding needs to have matching "
          "descriptor types");
    }
    desc->AsBufferBackedDescriptor()->AddAmberBuffer(cmd->GetBuffer());
    AddDescriptorBuffer(cmd->GetBuffer());
  }

  if (cmd->IsUniformDynamic() || cmd->IsSSBODynamic())
    desc->AsBufferDescriptor()->AddDynamicOffset(cmd->GetDynamicOffset());

  if (cmd->IsUniform() || cmd->IsUniformDynamic() || cmd->IsSSBO() ||
      cmd->IsSSBODynamic()) {
    desc->AsBufferDescriptor()->AddDescriptorOffset(cmd->GetDescriptorOffset());
    desc->AsBufferDescriptor()->AddDescriptorRange(cmd->GetDescriptorRange());
  }

  if (cmd->IsSSBO() && !desc->IsStorageBuffer()) {
    return Result(
        "Vulkan::AddBufferDescriptor BufferCommand for SSBO uses wrong "
        "descriptor set and binding");
  }

  if (cmd->IsUniform() && !desc->IsUniformBuffer()) {
    return Result(
        "Vulkan::AddBufferDescriptor BufferCommand for UBO uses wrong "
        "descriptor set and binding");
  }

  return {};
}

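// Creates or extends the sampler descriptor at |cmd|'s (set, binding) pair,
// mirroring AddBufferDescriptor() for DescriptorType::kSampler.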
Result Pipeline::AddSamplerDescriptor(const SamplerCommand* cmd) {
  if (cmd == nullptr)
    return Result("Pipeline::AddSamplerDescriptor SamplerCommand is nullptr");

  Descriptor* desc;
  Result r =
      GetDescriptorSlot(cmd->GetDescriptorSet(), cmd->GetBinding(), &desc);
  if (!r.IsSuccess())
    return r;

  auto& descriptors =
      descriptor_set_info_[cmd->GetDescriptorSet()].descriptors;

  if (desc == nullptr) {
    auto sampler_desc = MakeUnique<SamplerDescriptor>(
        cmd->GetSampler(), DescriptorType::kSampler, device_,
        cmd->GetDescriptorSet(), cmd->GetBinding());
    descriptors.push_back(std::move(sampler_desc));
  } else {
    if (desc->GetDescriptorType() != DescriptorType::kSampler) {
      return Result(
          "Descriptors bound to the same binding needs to have matching "
          "descriptor types");
    }
    desc->AsSamplerDescriptor()->AddAmberSampler(cmd->GetSampler());
  }

  return {};
}

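// Uploads descriptor data to the device in two submissions: the first
// creates the Vulkan resources and initializes each buffer's transfer
// resource, the second records the host-to-device copies (with image layout
// transitions where needed). See the in-function comment for why two
// submissions are required.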
Result Pipeline::SendDescriptorDataToDeviceIfNeeded() {
  {
    CommandBufferGuard guard(GetCommandBuffer());
    if (!guard.IsRecording())
      return guard.GetResult();

    for (auto& info : descriptor_set_info_) {
      for (auto& desc : info.descriptors) {
        Result r = desc->CreateResourceIfNeeded();
        if (!r.IsSuccess())
          return r;
      }
    }

    // Initialize transfer buffers / images.
    for (auto buffer : descriptor_buffers_) {
      if (descriptor_transfer_resources_.count(buffer) == 0) {
        return Result(
            "Vulkan: Pipeline::SendDescriptorDataToDeviceIfNeeded() "
            "descriptor's transfer resource is not found");
      }
      Result r = descriptor_transfer_resources_[buffer]->Initialize();
      if (!r.IsSuccess())
        return r;
    }

    // Note that if a buffer for a descriptor is host accessible and does not
    // need a command to copy its data to the device, it writes the data
    // directly. That direct write must happen after the backing buffer has
    // been resized, i.e. after the data has been copied from the old buffer
    // to the new one, so we must submit the commands here to guarantee the
    // ordering.
    Result r =
        guard.Submit(GetFenceTimeout(), GetPipelineRuntimeLayerEnabled());
    if (!r.IsSuccess())
      return r;
  }

  CommandBufferGuard guard(GetCommandBuffer());
  if (!guard.IsRecording())
    return guard.GetResult();

  // Copy descriptor data to transfer resources.
  for (auto& buffer : descriptor_buffers_) {
    if (auto transfer_buffer =
            descriptor_transfer_resources_[buffer]->AsTransferBuffer()) {
      BufferBackedDescriptor::RecordCopyBufferDataToTransferResourceIfNeeded(
          GetCommandBuffer(), buffer, transfer_buffer);
    } else if (auto transfer_image =
                   descriptor_transfer_resources_[buffer]->AsTransferImage()) {
      transfer_image->ImageBarrier(GetCommandBuffer(),
                                   VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                                   VK_PIPELINE_STAGE_TRANSFER_BIT);

      BufferBackedDescriptor::RecordCopyBufferDataToTransferResourceIfNeeded(
          GetCommandBuffer(), buffer, transfer_image);

      transfer_image->ImageBarrier(GetCommandBuffer(), VK_IMAGE_LAYOUT_GENERAL,
                                   VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT);
    } else {
      return Result(
          "Vulkan: Pipeline::SendDescriptorDataToDeviceIfNeeded() "
          "this should be unreachable");
    }
  }
  return guard.Submit(GetFenceTimeout(), GetPipelineRuntimeLayerEnabled());
}

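// Binds every non-empty descriptor set. Dynamic offsets are collected in
// ascending binding order, which is the order vkCmdBindDescriptorSets
// consumes them in.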
void Pipeline::BindVkDescriptorSets(const VkPipelineLayout& pipeline_layout) {
  for (size_t i = 0; i < descriptor_set_info_.size(); ++i) {
    if (descriptor_set_info_[i].empty)
      continue;

    // Sort descriptors by binding number to get the correct order of dynamic
    // offsets.
    typedef std::pair<uint32_t, std::vector<uint32_t>> binding_offsets_pair;
    std::vector<binding_offsets_pair> binding_offsets;
    for (const auto& desc : descriptor_set_info_[i].descriptors) {
      binding_offsets.push_back(
          {desc->GetBinding(), desc->GetDynamicOffsets()});
    }

    std::sort(
        std::begin(binding_offsets), std::end(binding_offsets),
        [](const binding_offsets_pair& a, const binding_offsets_pair& b) {
          return a.first < b.first;
        });

    // Add the sorted dynamic offsets.
    std::vector<uint32_t> dynamic_offsets;
    for (const auto& binding_offset : binding_offsets) {
      for (auto offset : binding_offset.second) {
        dynamic_offsets.push_back(offset);
      }
    }

    device_->GetPtrs()->vkCmdBindDescriptorSets(
        command_->GetVkCommandBuffer(),
        IsGraphics() ? VK_PIPELINE_BIND_POINT_GRAPHICS
                     : VK_PIPELINE_BIND_POINT_COMPUTE,
        pipeline_layout, static_cast<uint32_t>(i), 1,
        &descriptor_set_info_[i].vk_desc_set,
        static_cast<uint32_t>(dynamic_offsets.size()), dynamic_offsets.data());
  }
}

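// Reads descriptor data back from the device: first records and submits the
// copies from transfer resources to host-visible memory, then moves the
// results into the Amber output buffers and releases the transfer resources.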
Result Pipeline::ReadbackDescriptorsToHostDataQueue() {
  // Record required commands to copy the data to a host-visible buffer.
  {
    CommandBufferGuard guard(GetCommandBuffer());
    if (!guard.IsRecording())
      return guard.GetResult();

    for (auto& buffer : descriptor_buffers_) {
      if (descriptor_transfer_resources_.count(buffer) == 0) {
        return Result(
            "Vulkan: Pipeline::ReadbackDescriptorsToHostDataQueue() "
            "descriptor's transfer resource is not found");
      }
      if (auto transfer_buffer =
              descriptor_transfer_resources_[buffer]->AsTransferBuffer()) {
        Result r = BufferBackedDescriptor::RecordCopyTransferResourceToHost(
            GetCommandBuffer(), transfer_buffer);
        if (!r.IsSuccess())
          return r;
      } else if (auto transfer_image = descriptor_transfer_resources_[buffer]
                                           ->AsTransferImage()) {
        transfer_image->ImageBarrier(GetCommandBuffer(),
                                     VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                                     VK_PIPELINE_STAGE_TRANSFER_BIT);
        Result r = BufferBackedDescriptor::RecordCopyTransferResourceToHost(
            GetCommandBuffer(), transfer_image);
        if (!r.IsSuccess())
          return r;
      } else {
        return Result(
            "Vulkan: Pipeline::ReadbackDescriptorsToHostDataQueue() "
            "this should be unreachable");
      }
    }

    Result r =
        guard.Submit(GetFenceTimeout(), GetPipelineRuntimeLayerEnabled());
    if (!r.IsSuccess())
      return r;
  }

  // Move data from transfer buffers to output buffers.
  for (auto& buffer : descriptor_buffers_) {
    auto& transfer_resource = descriptor_transfer_resources_[buffer];
    Result r = BufferBackedDescriptor::MoveTransferResourceToBufferOutput(
        transfer_resource.get(), buffer);
    if (!r.IsSuccess())
      return r;
  }
  descriptor_transfer_resources_.clear();
  return {};
}

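// Returns the entry point name registered for |stage|, falling back to
// "main" when none has been set.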
const char* Pipeline::GetEntryPointName(VkShaderStageFlagBits stage) const {
  auto it = entry_points_.find(stage);
  if (it != entry_points_.end())
    return it->second.c_str();

  return kDefaultEntryPointName;
}

}  // namespace vulkan
}  // namespace amber