// Copyright 2018 The Amber Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "src/vulkan/engine_vulkan.h"

#include <algorithm>
#include <set>
#include <utility>

#include "amber/amber_vulkan.h"
#include "src/make_unique.h"
#include "src/type_parser.h"
#include "src/vulkan/compute_pipeline.h"
#include "src/vulkan/graphics_pipeline.h"

namespace amber {
namespace vulkan {
namespace {

const uint32_t kTrianglesPerCell = 2;
const uint32_t kVerticesPerTriangle = 3;

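// Maps an Amber ShaderType to the corresponding VkShaderStageFlagBits value.
// Returns a failing Result for types that have no single Vulkan stage.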
Result ToVkShaderStage(ShaderType type, VkShaderStageFlagBits* ret) {
  switch (type) {
    case kShaderTypeGeometry:
      *ret = VK_SHADER_STAGE_GEOMETRY_BIT;
      break;
    case kShaderTypeFragment:
      *ret = VK_SHADER_STAGE_FRAGMENT_BIT;
      break;
    case kShaderTypeVertex:
      *ret = VK_SHADER_STAGE_VERTEX_BIT;
      break;
    case kShaderTypeTessellationControl:
      *ret = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT;
      break;
    case kShaderTypeTessellationEvaluation:
      *ret = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT;
      break;
    case kShaderTypeCompute:
      *ret = VK_SHADER_STAGE_COMPUTE_BIT;
      break;
    case kShaderTypeMulti:
      // A multi-shader does not map to a single Vulkan stage; report an error.
      *ret = VK_SHADER_STAGE_FRAGMENT_BIT;
      return Result("Vulkan::Unknown shader stage");
  }

  return {};
}

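// Returns true if every extension named in |required_extensions| also appears
// in |available_extensions| (trivially true when nothing is required).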
bool AreAllExtensionsSupported(
    const std::vector<std::string>& available_extensions,
    const std::vector<std::string>& required_extensions) {
  if (required_extensions.empty())
    return true;

  std::set<std::string> required_extension_set(required_extensions.begin(),
                                               required_extensions.end());
  for (const auto& extension : available_extensions) {
    required_extension_set.erase(extension);
  }

  return required_extension_set.empty();
}

}  // namespace

EngineVulkan::EngineVulkan() : Engine() {}

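// Shader modules are cached in |shaders_| and may be shared between
// pipelines, so they are destroyed here rather than per pipeline.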
EngineVulkan::~EngineVulkan() {
  auto vk_device = device_->GetVkDevice();
  if (vk_device != VK_NULL_HANDLE) {
    for (auto shader : shaders_) {
      device_->GetPtrs()->vkDestroyShaderModule(vk_device, shader.second,
                                                nullptr);
    }
  }
}

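// Validates the externally supplied Vulkan handles and extension lists, then
// wraps them in a Device and creates the command pool used by all pipelines.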
Result EngineVulkan::Initialize(
    EngineConfig* config,
    Delegate* delegate,
    const std::vector<std::string>& features,
    const std::vector<std::string>& instance_extensions,
    const std::vector<std::string>& device_extensions) {
  if (device_)
    return Result("Vulkan::Initialize device_ already exists");

  VulkanEngineConfig* vk_config = static_cast<VulkanEngineConfig*>(config);
  if (!vk_config || vk_config->vkGetInstanceProcAddr == VK_NULL_HANDLE)
    return Result("Vulkan::Initialize vkGetInstanceProcAddr must be provided.");
  if (vk_config->device == VK_NULL_HANDLE)
    return Result("Vulkan::Initialize device must be provided");
  if (vk_config->physical_device == VK_NULL_HANDLE)
    return Result("Vulkan::Initialize physical device handle is null.");
  if (vk_config->queue == VK_NULL_HANDLE)
    return Result("Vulkan::Initialize queue handle is null.");

  // Validate instance extensions
  if (!AreAllExtensionsSupported(vk_config->available_instance_extensions,
                                 instance_extensions)) {
    return Result("Vulkan::Initialize not all instance extensions supported");
  }

  device_ = MakeUnique<Device>(vk_config->instance, vk_config->physical_device,
                               vk_config->queue_family_index, vk_config->device,
                               vk_config->queue);

  Result r = device_->Initialize(
      vk_config->vkGetInstanceProcAddr, delegate, features, device_extensions,
      vk_config->available_features, vk_config->available_features2,
      vk_config->available_device_extensions);
  if (!r.IsSuccess())
    return r;

  if (!pool_) {
    pool_ = MakeUnique<CommandPool>(device_.get());
    r = pool_->Initialize();
    if (!r.IsSuccess())
      return r;
  }

  return {};
}

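// Builds the Vulkan backing for an Amber pipeline: shader modules, the
// compute or graphics pipeline object, vertex and index buffers, push
// constants, and descriptors for each bound buffer and sampler.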
Result EngineVulkan::CreatePipeline(amber::Pipeline* pipeline) {
  // Create the pipeline data early so it can be accessed as needed.
  pipeline_map_[pipeline] = PipelineInfo();
  auto& info = pipeline_map_[pipeline];

  for (const auto& shader_info : pipeline->GetShaders()) {
    Result r = SetShader(pipeline, shader_info);
    if (!r.IsSuccess())
      return r;
  }

  for (const auto& colour_info : pipeline->GetColorAttachments()) {
    auto fmt = colour_info.buffer->GetFormat();
    if (!device_->IsFormatSupportedByPhysicalDevice(*fmt, colour_info.type))
      return Result("Vulkan color attachment format is not supported");
  }

  if (pipeline->GetDepthStencilBuffer().buffer) {
    const auto& depth_stencil_info = pipeline->GetDepthStencilBuffer();

    auto fmt = depth_stencil_info.buffer->GetFormat();
    if (!device_->IsFormatSupportedByPhysicalDevice(*fmt,
                                                    depth_stencil_info.type)) {
      return Result("Vulkan depth attachment format is not supported");
    }
  }

  std::vector<VkPipelineShaderStageCreateInfo> stage_create_info;
  Result r = GetVkShaderStageInfo(pipeline, &stage_create_info);
  if (!r.IsSuccess())
    return r;

  const auto& engine_data = GetEngineData();
  std::unique_ptr<Pipeline> vk_pipeline;
  if (pipeline->GetType() == PipelineType::kCompute) {
    vk_pipeline = MakeUnique<ComputePipeline>(
        device_.get(), engine_data.fence_timeout_ms,
        engine_data.pipeline_runtime_layer_enabled, stage_create_info);
    r = vk_pipeline->AsCompute()->Initialize(pool_.get());
    if (!r.IsSuccess())
      return r;
  } else {
    vk_pipeline = MakeUnique<GraphicsPipeline>(
        device_.get(), pipeline->GetColorAttachments(),
        pipeline->GetDepthStencilBuffer(), pipeline->GetResolveTargets(),
        engine_data.fence_timeout_ms,
        engine_data.pipeline_runtime_layer_enabled, stage_create_info);

    vk_pipeline->AsGraphics()->SetPatchControlPoints(
        pipeline->GetPipelineData()->GetPatchControlPoints());

    r = vk_pipeline->AsGraphics()->Initialize(pipeline->GetFramebufferWidth(),
                                              pipeline->GetFramebufferHeight(),
                                              pool_.get());
    if (!r.IsSuccess())
      return r;
  }

  info.vk_pipeline = std::move(vk_pipeline);

  // Set the entry point names for the pipeline.
  for (const auto& shader_info : pipeline->GetShaders()) {
    VkShaderStageFlagBits stage = VK_SHADER_STAGE_FLAG_BITS_MAX_ENUM;
    r = ToVkShaderStage(shader_info.GetShaderType(), &stage);
    if (!r.IsSuccess())
      return r;
    const auto& name = shader_info.GetEntryPoint();
    if (!name.empty()) {
      info.vk_pipeline->SetEntryPointName(stage, name);
    }
  }

  for (const auto& vtex_info : pipeline->GetVertexBuffers()) {
    auto fmt = vtex_info.buffer->GetFormat();
    if (!device_->IsFormatSupportedByPhysicalDevice(*fmt, vtex_info.type))
      return Result("Vulkan vertex buffer format is not supported");
    if (!info.vertex_buffer)
      info.vertex_buffer = MakeUnique<VertexBuffer>(device_.get());

    info.vertex_buffer->SetData(static_cast<uint8_t>(vtex_info.location),
                                vtex_info.buffer, vtex_info.input_rate,
                                vtex_info.format, vtex_info.offset,
                                vtex_info.stride);
  }

  if (pipeline->GetIndexBuffer()) {
    auto* buf = pipeline->GetIndexBuffer();
    info.vk_pipeline->AsGraphics()->SetIndexBuffer(buf);
  }

  if (pipeline->GetPushConstantBuffer().buffer != nullptr) {
    r = info.vk_pipeline->AddPushConstantBuffer(
        pipeline->GetPushConstantBuffer().buffer, 0);
    if (!r.IsSuccess())
      return r;
  }

  for (const auto& buf_info : pipeline->GetBuffers()) {
    auto type = BufferCommand::BufferType::kSSBO;
    if (buf_info.type == BufferType::kStorageImage) {
      type = BufferCommand::BufferType::kStorageImage;
    } else if (buf_info.type == BufferType::kSampledImage) {
      type = BufferCommand::BufferType::kSampledImage;
    } else if (buf_info.type == BufferType::kCombinedImageSampler) {
      type = BufferCommand::BufferType::kCombinedImageSampler;
    } else if (buf_info.type == BufferType::kUniformTexelBuffer) {
      type = BufferCommand::BufferType::kUniformTexelBuffer;
    } else if (buf_info.type == BufferType::kStorageTexelBuffer) {
      type = BufferCommand::BufferType::kStorageTexelBuffer;
    } else if (buf_info.type == BufferType::kUniform) {
      type = BufferCommand::BufferType::kUniform;
    } else if (buf_info.type == BufferType::kUniformDynamic) {
      type = BufferCommand::BufferType::kUniformDynamic;
    } else if (buf_info.type == BufferType::kStorageDynamic) {
      type = BufferCommand::BufferType::kSSBODynamic;
    } else if (buf_info.type != BufferType::kStorage) {
      return Result("Vulkan: CreatePipeline - unknown buffer type: " +
                    std::to_string(static_cast<uint32_t>(buf_info.type)));
    }

    auto cmd = MakeUnique<BufferCommand>(type, pipeline);
    cmd->SetDescriptorSet(buf_info.descriptor_set);
    cmd->SetBinding(buf_info.binding);
    cmd->SetBaseMipLevel(buf_info.base_mip_level);
    cmd->SetDynamicOffset(buf_info.dynamic_offset);
    cmd->SetDescriptorOffset(buf_info.descriptor_offset);
    cmd->SetDescriptorRange(buf_info.descriptor_range);
    cmd->SetBuffer(buf_info.buffer);
    cmd->SetSampler(buf_info.sampler);

    if (cmd->GetValues().empty()) {
      cmd->GetBuffer()->SetSizeInElements(cmd->GetBuffer()->ElementCount());
    } else {
      cmd->GetBuffer()->SetDataWithOffset(cmd->GetValues(), cmd->GetOffset());
    }

    r = info.vk_pipeline->AddBufferDescriptor(cmd.get());
    if (!r.IsSuccess())
      return r;
  }

  for (const auto& sampler_info : pipeline->GetSamplers()) {
    auto cmd = MakeUnique<SamplerCommand>(pipeline);
    cmd->SetDescriptorSet(sampler_info.descriptor_set);
    cmd->SetBinding(sampler_info.binding);
    cmd->SetSampler(sampler_info.sampler);

    r = info.vk_pipeline->AddSamplerDescriptor(cmd.get());
    if (!r.IsSuccess())
      return r;
  }

  return {};
}

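// Creates (or reuses a cached) VkShaderModule for |shader| and records the
// per-stage options requested for it: required subgroup size, subgroup size
// create flags, and specialization constants.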
Result EngineVulkan::SetShader(amber::Pipeline* pipeline,
                               const amber::Pipeline::ShaderInfo& shader) {
  const auto type = shader.GetShaderType();
  const auto& data = shader.GetData();
  const auto shader_name = shader.GetShader()->GetName();
  auto& info = pipeline_map_[pipeline];

  auto it = info.shader_info.find(type);
  if (it != info.shader_info.end())
    return Result("Vulkan::Setting Duplicated Shader Types Fail");

  VkShaderModule shader_module;
  if (shaders_.find(shader_name) != shaders_.end()) {
    shader_module = shaders_[shader_name];
  } else {
    VkShaderModuleCreateInfo create_info = VkShaderModuleCreateInfo();
    create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
    create_info.codeSize = data.size() * sizeof(uint32_t);
    create_info.pCode = data.data();

    if (device_->GetPtrs()->vkCreateShaderModule(
            device_->GetVkDevice(), &create_info, nullptr, &shader_module) !=
        VK_SUCCESS) {
      return Result("Vulkan::Calling vkCreateShaderModule Fail");
    }

    shaders_[shader_name] = shader_module;
  }

  info.shader_info[type].shader = shader_module;

  for (auto& shader_info : pipeline->GetShaders()) {
    if (shader_info.GetShaderType() != type)
      continue;

    const auto required_subgroup_size_setting =
        shader_info.GetRequiredSubgroupSizeSetting();
    uint32_t required_subgroup_size_uint = 0;
    switch (required_subgroup_size_setting) {
      case amber::Pipeline::ShaderInfo::RequiredSubgroupSizeSetting::
          kSetToMinimumSize:
        required_subgroup_size_uint = device_->GetMinSubgroupSize();
        break;
      case amber::Pipeline::ShaderInfo::RequiredSubgroupSizeSetting::
          kSetToMaximumSize:
        required_subgroup_size_uint = device_->GetMaxSubgroupSize();
        break;
      default:
        required_subgroup_size_uint = shader_info.GetRequiredSubgroupSize();
        break;
    }
    if (required_subgroup_size_uint > 0) {
      if (!device_->IsRequiredSubgroupSizeSupported(
              type, required_subgroup_size_uint)) {
        return Result(
            "Vulkan::Setting Required subgroup size is not supported by the "
            "device.");
      }
    }
    info.shader_info[type].required_subgroup_size =
        required_subgroup_size_uint;

    info.shader_info[type].create_flags = 0;
    if (shader_info.GetVaryingSubgroupSize()) {
      info.shader_info[type].create_flags |=
          VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT;
    }
    if (shader_info.GetRequireFullSubgroups()) {
      info.shader_info[type].create_flags |=
          VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT;
    }

    const auto& shader_spec_info = shader_info.GetSpecialization();
    if (shader_spec_info.empty())
      continue;

    auto& entries = info.shader_info[type].specialization_entries;
    entries.reset(new std::vector<VkSpecializationMapEntry>());
    auto& entry_data = info.shader_info[type].specialization_data;
    entry_data.reset(new std::vector<uint32_t>());
    uint32_t i = 0;
    for (auto pair : shader_spec_info) {
      entries->push_back({pair.first,
                          static_cast<uint32_t>(i * sizeof(uint32_t)),
                          static_cast<uint32_t>(sizeof(uint32_t))});
      entry_data->push_back(pair.second);
      ++i;
    }
    auto& spec_info = info.shader_info[type].specialization_info;
    spec_info.reset(new VkSpecializationInfo());
    spec_info->mapEntryCount = static_cast<uint32_t>(shader_spec_info.size());
    spec_info->pMapEntries = entries->data();
    spec_info->dataSize = sizeof(uint32_t) * shader_spec_info.size();
    spec_info->pData = entry_data->data();
  }

  return {};
}

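// Assembles one VkPipelineShaderStageCreateInfo per shader attached to the
// pipeline. Entry point names are filled in later via SetEntryPointName.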
Result EngineVulkan::GetVkShaderStageInfo(
    amber::Pipeline* pipeline,
    std::vector<VkPipelineShaderStageCreateInfo>* out) {
  auto& info = pipeline_map_[pipeline];

  std::vector<VkPipelineShaderStageCreateInfo> stage_info(
      info.shader_info.size());
  uint32_t stage_count = 0;
  for (auto& it : info.shader_info) {
    VkShaderStageFlagBits stage = VK_SHADER_STAGE_FLAG_BITS_MAX_ENUM;
    Result r = ToVkShaderStage(it.first, &stage);
    if (!r.IsSuccess())
      return r;

    stage_info[stage_count] = VkPipelineShaderStageCreateInfo();
    stage_info[stage_count].sType =
        VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
    stage_info[stage_count].flags = it.second.create_flags;
    stage_info[stage_count].stage = stage;
    stage_info[stage_count].module = it.second.shader;
    stage_info[stage_count].pName = nullptr;
    if (it.second.specialization_entries &&
        !it.second.specialization_entries->empty()) {
      stage_info[stage_count].pSpecializationInfo =
          it.second.specialization_info.get();
    }

    if (stage == VK_SHADER_STAGE_COMPUTE_BIT &&
        it.second.required_subgroup_size > 0) {
      // Allocated with new and attached to the stage's pNext chain; it is not
      // released by this function.
      VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT* pSubgroupSize =
          new VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT();
      pSubgroupSize->sType =
          VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT;  // NOLINT(whitespace/line_length)
      pSubgroupSize->pNext = nullptr;
      pSubgroupSize->requiredSubgroupSize = it.second.required_subgroup_size;
      stage_info[stage_count].pNext = pSubgroupSize;
    }
    ++stage_count;
  }
  *out = stage_info;
  return {};
}

Result EngineVulkan::DoClearColor(const ClearColorCommand* command) {
  auto& info = pipeline_map_[command->GetPipeline()];
  if (!info.vk_pipeline->IsGraphics())
    return Result("Vulkan::Clear Color Command for Non-Graphics Pipeline");

  return info.vk_pipeline->AsGraphics()->SetClearColor(
      command->GetR(), command->GetG(), command->GetB(), command->GetA());
}

Result EngineVulkan::DoClearStencil(const ClearStencilCommand* command) {
  auto& info = pipeline_map_[command->GetPipeline()];
  if (!info.vk_pipeline->IsGraphics())
    return Result("Vulkan::Clear Stencil Command for Non-Graphics Pipeline");

  return info.vk_pipeline->AsGraphics()->SetClearStencil(command->GetValue());
}

Result EngineVulkan::DoClearDepth(const ClearDepthCommand* command) {
  auto& info = pipeline_map_[command->GetPipeline()];
  if (!info.vk_pipeline->IsGraphics())
    return Result("Vulkan::Clear Depth Command for Non-Graphics Pipeline");

  return info.vk_pipeline->AsGraphics()->SetClearDepth(command->GetValue());
}

Result EngineVulkan::DoClear(const ClearCommand* command) {
  auto& info = pipeline_map_[command->GetPipeline()];
  if (!info.vk_pipeline->IsGraphics())
    return Result("Vulkan::Clear Command for Non-Graphics Pipeline");

  return info.vk_pipeline->AsGraphics()->Clear();
}

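// Draws an axis-aligned rectangle as a four-vertex triangle strip (or patch
// list). In ortho mode, pixel coordinates are first mapped to normalized
// device coordinates: ndc = (pixel / framebuffer_extent) * 2 - 1.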
Result EngineVulkan::DoDrawRect(const DrawRectCommand* command) {
  auto& info = pipeline_map_[command->GetPipeline()];
  if (!info.vk_pipeline->IsGraphics())
    return Result("Vulkan::DrawRect for Non-Graphics Pipeline");

  auto* graphics = info.vk_pipeline->AsGraphics();

  float x = command->GetX();
  float y = command->GetY();
  float width = command->GetWidth();
  float height = command->GetHeight();

  if (command->IsOrtho()) {
    const float frame_width = static_cast<float>(graphics->GetWidth());
    const float frame_height = static_cast<float>(graphics->GetHeight());
    x = ((x / frame_width) * 2.0f) - 1.0f;
    y = ((y / frame_height) * 2.0f) - 1.0f;
    width = (width / frame_width) * 2.0f;
    height = (height / frame_height) * 2.0f;
  }

  std::vector<Value> values(8);
  // Bottom left
  values[0].SetDoubleValue(static_cast<double>(x));
  values[1].SetDoubleValue(static_cast<double>(y + height));
  // Top left
  values[2].SetDoubleValue(static_cast<double>(x));
  values[3].SetDoubleValue(static_cast<double>(y));
  // Bottom right
  values[4].SetDoubleValue(static_cast<double>(x + width));
  values[5].SetDoubleValue(static_cast<double>(y + height));
  // Top right
  values[6].SetDoubleValue(static_cast<double>(x + width));
  values[7].SetDoubleValue(static_cast<double>(y));

  // The format here describes the vertex buffer, not the framebuffer. The
  // draw rect command carries its own vertex data and does not specify a
  // vertex buffer format, so any suitable format may be chosen;
  // VK_FORMAT_R32G32_SFLOAT is used.
  TypeParser parser;
  auto type = parser.Parse("R32G32_SFLOAT");
  Format fmt(type.get());

  auto buf = MakeUnique<Buffer>();
  buf->SetFormat(&fmt);
  buf->SetData(std::move(values));

  auto vertex_buffer = MakeUnique<VertexBuffer>(device_.get());
  vertex_buffer->SetData(0, buf.get(), InputRate::kVertex, buf->GetFormat(), 0,
                         buf->GetFormat()->SizeInBytes());

  DrawArraysCommand draw(command->GetPipeline(), *command->GetPipelineData());
  draw.SetTopology(command->IsPatch() ? Topology::kPatchList
                                      : Topology::kTriangleStrip);
  draw.SetFirstVertexIndex(0);
  draw.SetVertexCount(4);
  draw.SetInstanceCount(1);

  Result r = graphics->Draw(&draw, vertex_buffer.get());
  if (!r.IsSuccess())
    return r;

  return {};
}

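// Draws a columns x rows grid where each cell is built from two triangles
// (six vertices), using the same pixel-to-NDC mapping as DoDrawRect.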
Result EngineVulkan::DoDrawGrid(const DrawGridCommand* command) {
  auto& info = pipeline_map_[command->GetPipeline()];
  if (!info.vk_pipeline->IsGraphics())
    return Result("Vulkan::DrawGrid for Non-Graphics Pipeline");

  auto* graphics = info.vk_pipeline->AsGraphics();

  float x = command->GetX();
  float y = command->GetY();
  float width = command->GetWidth();
  float height = command->GetHeight();
  const uint32_t columns = command->GetColumns();
  const uint32_t rows = command->GetRows();
  const uint32_t vertices =
      columns * rows * kVerticesPerTriangle * kTrianglesPerCell;

  // Ortho calculation
  const float frame_width = static_cast<float>(graphics->GetWidth());
  const float frame_height = static_cast<float>(graphics->GetHeight());
  x = ((x / frame_width) * 2.0f) - 1.0f;
  y = ((y / frame_height) * 2.0f) - 1.0f;
  width = (width / frame_width) * 2.0f;
  height = (height / frame_height) * 2.0f;

  std::vector<Value> values(vertices * 2);

  const float cell_width = width / static_cast<float>(columns);
  const float cell_height = height / static_cast<float>(rows);

  for (uint32_t i = 0, c = 0; i < rows; i++) {
    for (uint32_t j = 0; j < columns; j++, c += 12) {
      // Calculate corners
      float x0 = x + cell_width * static_cast<float>(j);
      float y0 = y + cell_height * static_cast<float>(i);
      float x1 = x + cell_width * static_cast<float>(j + 1);
      float y1 = y + cell_height * static_cast<float>(i + 1);

      // Bottom right
      values[c + 0].SetDoubleValue(static_cast<double>(x1));
      values[c + 1].SetDoubleValue(static_cast<double>(y1));
      // Bottom left
      values[c + 2].SetDoubleValue(static_cast<double>(x0));
      values[c + 3].SetDoubleValue(static_cast<double>(y1));
      // Top left
      values[c + 4].SetDoubleValue(static_cast<double>(x0));
      values[c + 5].SetDoubleValue(static_cast<double>(y0));
      // Bottom right
      values[c + 6].SetDoubleValue(static_cast<double>(x1));
      values[c + 7].SetDoubleValue(static_cast<double>(y1));
      // Top left
      values[c + 8].SetDoubleValue(static_cast<double>(x0));
      values[c + 9].SetDoubleValue(static_cast<double>(y0));
      // Top right
      values[c + 10].SetDoubleValue(static_cast<double>(x1));
      values[c + 11].SetDoubleValue(static_cast<double>(y0));
    }
  }

  // The format here describes the vertex buffer, not the framebuffer. The
  // draw grid command carries its own vertex data and does not specify a
  // vertex buffer format, so any suitable format may be chosen;
  // VK_FORMAT_R32G32_SFLOAT is used.
  TypeParser parser;
  auto type = parser.Parse("R32G32_SFLOAT");
  Format fmt(type.get());

  auto buf = MakeUnique<Buffer>();
  buf->SetFormat(&fmt);
  buf->SetData(std::move(values));

  auto vertex_buffer = MakeUnique<VertexBuffer>(device_.get());
  vertex_buffer->SetData(0, buf.get(), InputRate::kVertex, buf->GetFormat(), 0,
                         buf->GetFormat()->SizeInBytes());

  DrawArraysCommand draw(command->GetPipeline(), *command->GetPipelineData());
  draw.SetTopology(Topology::kTriangleList);
  draw.SetFirstVertexIndex(0);
  draw.SetVertexCount(vertices);
  draw.SetInstanceCount(1);

  Result r = graphics->Draw(&draw, vertex_buffer.get());
  if (!r.IsSuccess())
    return r;

  return {};
}

Result EngineVulkan::DoDrawArrays(const DrawArraysCommand* command) {
  auto& info = pipeline_map_[command->GetPipeline()];
  if (!info.vk_pipeline)
    return Result("Vulkan::DrawArrays for Non-Graphics Pipeline");

  return info.vk_pipeline->AsGraphics()->Draw(command,
                                              info.vertex_buffer.get());
}

Result EngineVulkan::DoCompute(const ComputeCommand* command) {
  auto& info = pipeline_map_[command->GetPipeline()];
  if (info.vk_pipeline->IsGraphics())
    return Result("Vulkan: Compute called for graphics pipeline.");

  return info.vk_pipeline->AsCompute()->Compute(
      command->GetX(), command->GetY(), command->GetZ());
}

Result EngineVulkan::DoEntryPoint(const EntryPointCommand* command) {
  auto& info = pipeline_map_[command->GetPipeline()];
  if (!info.vk_pipeline)
    return Result("Vulkan::DoEntryPoint no Pipeline exists");

  VkShaderStageFlagBits stage = VK_SHADER_STAGE_FLAG_BITS_MAX_ENUM;
  Result r = ToVkShaderStage(command->GetShaderType(), &stage);
  if (!r.IsSuccess())
    return r;

  info.vk_pipeline->SetEntryPointName(stage, command->GetEntryPointName());
  return {};
}

Result EngineVulkan::DoPatchParameterVertices(
    const PatchParameterVerticesCommand* command) {
  auto& info = pipeline_map_[command->GetPipeline()];
  if (!info.vk_pipeline->IsGraphics())
    return Result("Vulkan::DoPatchParameterVertices for Non-Graphics Pipeline");

  info.vk_pipeline->AsGraphics()->SetPatchControlPoints(
      command->GetControlPointCount());
  return {};
}

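// Applies a buffer command: resizes or fills the target amber::Buffer and,
// for push constants, registers the buffer with the pipeline.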
Result EngineVulkan::DoBuffer(const BufferCommand* cmd) {
  if (!device_->IsDescriptorSetInBounds(cmd->GetDescriptorSet())) {
    return Result(
        "Vulkan::DoBuffer exceed maxBoundDescriptorSets limit of physical "
        "device");
  }
  if (cmd->GetValues().empty()) {
    cmd->GetBuffer()->SetSizeInElements(cmd->GetBuffer()->ElementCount());
  } else {
    cmd->GetBuffer()->SetDataWithOffset(cmd->GetValues(), cmd->GetOffset());
  }
  if (cmd->IsPushConstant()) {
    auto& info = pipeline_map_[cmd->GetPipeline()];
    return info.vk_pipeline->AddPushConstantBuffer(cmd->GetBuffer(),
                                                   cmd->GetOffset());
  }
  return {};
}

}  // namespace vulkan
}  // namespace amber