/*
 * Copyright © 2017 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */
#ifndef VK_UTIL_H
#define VK_UTIL_H

/* common inlines and macros for vulkan drivers */

#include <inttypes.h>
#include <stdio.h>
#include <stdlib.h>
#include <vulkan/vulkan.h>

#include <chrono>
#include <functional>
#include <memory>
#include <optional>
#include <string>
#include <thread>
#include <tuple>
#include <type_traits>
#include <vector>

#include "VkDecoderContext.h"
#include "VulkanDispatch.h"
#include "aemu/base/synchronization/Lock.h"
#include "host-common/GfxstreamFatalError.h"
#include "host-common/logging.h"
#include "vk_fn_info.h"
#include "vk_struct_id.h"
#include "vulkan/vk_enum_string_helper.h"

namespace gfxstream {
namespace vk {

struct vk_struct_common {
    VkStructureType sType;
    struct vk_struct_common* pNext;
};

struct vk_struct_chain_iterator {
    vk_struct_common* value;
};

#define vk_foreach_struct(__iter, __start)                                              \
    for (struct vk_struct_common* __iter = (struct vk_struct_common*)(__start); __iter; \
         __iter = __iter->pNext)

#define vk_foreach_struct_const(__iter, __start)                                            \
    for (const struct vk_struct_common* __iter = (const struct vk_struct_common*)(__start); \
         __iter; __iter = __iter->pNext)

/**
 * A wrapper for a Vulkan output array. A Vulkan output array is one that
 * follows the convention of the parameters to
 * vkGetPhysicalDeviceQueueFamilyProperties().
 *
 * Example Usage:
 *
 *    VkResult
 *    vkGetPhysicalDeviceQueueFamilyProperties(
 *       VkPhysicalDevice           physicalDevice,
 *       uint32_t*                  pQueueFamilyPropertyCount,
 *       VkQueueFamilyProperties*   pQueueFamilyProperties)
 *    {
 *       VK_OUTARRAY_MAKE(props, pQueueFamilyProperties,
 *                         pQueueFamilyPropertyCount);
 *
 *       vk_outarray_append(&props, p) {
 *          p->queueFlags = ...;
 *          p->queueCount = ...;
 *       }
 *
 *       vk_outarray_append(&props, p) {
 *          p->queueFlags = ...;
 *          p->queueCount = ...;
 *       }
 *
 *       return vk_outarray_status(&props);
 *    }
 */
struct __vk_outarray {
    /** May be null. */
    void* data;

    /**
     * Capacity, in number of elements. Capacity is unlimited (UINT32_MAX) if
     * data is null.
     */
    uint32_t cap;

    /**
     * Count of elements successfully written to the array. Every write is
     * considered successful if data is null.
     */
    uint32_t* filled_len;

    /**
     * Count of elements that would have been written to the array if its
     * capacity were sufficient. Vulkan functions often return VK_INCOMPLETE
     * when `*filled_len < wanted_len`.
     */
    uint32_t wanted_len;
};

static inline void __vk_outarray_init(struct __vk_outarray* a, void* data, uint32_t* len) {
    a->data = data;
    a->cap = *len;
    a->filled_len = len;
    *a->filled_len = 0;
    a->wanted_len = 0;

    if (a->data == NULL) a->cap = UINT32_MAX;
}

static inline VkResult __vk_outarray_status(const struct __vk_outarray* a) {
    if (*a->filled_len < a->wanted_len)
        return VK_INCOMPLETE;
    else
        return VK_SUCCESS;
}

static inline void* __vk_outarray_next(struct __vk_outarray* a, size_t elem_size) {
    void* p = NULL;

    a->wanted_len += 1;

    if (*a->filled_len >= a->cap) return NULL;

    if (a->data != NULL) p = ((uint8_t*)a->data) + (*a->filled_len) * elem_size;

    *a->filled_len += 1;

    return p;
}

#define vk_outarray(elem_t)        \
    struct {                       \
        struct __vk_outarray base; \
        elem_t meta[];             \
    }

#define vk_outarray_typeof_elem(a) __typeof__((a)->meta[0])
#define vk_outarray_sizeof_elem(a) sizeof((a)->meta[0])

#define vk_outarray_init(a, data, len) __vk_outarray_init(&(a)->base, (data), (len))

#define VK_OUTARRAY_MAKE(name, data, len)    \
    vk_outarray(__typeof__((data)[0])) name; \
    vk_outarray_init(&name, (data), (len))

#define vk_outarray_status(a) __vk_outarray_status(&(a)->base)

#define vk_outarray_next(a) \
    ((vk_outarray_typeof_elem(a)*)__vk_outarray_next(&(a)->base, vk_outarray_sizeof_elem(a)))

/**
 * Append to a Vulkan output array.
 *
 * This is a block-based macro. For example:
 *
 *    vk_outarray_append(&a, elem) {
 *       elem->foo = ...;
 *       elem->bar = ...;
 *    }
 *
 * The array `a` has type `vk_outarray(elem_t) *`. It is usually declared with
 * VK_OUTARRAY_MAKE(). The variable `elem` is block-scoped and has type
 * `elem_t *`.
 *
 * The macro unconditionally increments the array's `wanted_len`. If the array
 * is not full, the macro also increments its `filled_len` and then executes
 * the block. While the block executes, `elem` is non-null and points to the
 * newly appended element.
 */
#define vk_outarray_append(a, elem) \
    for (vk_outarray_typeof_elem(a)* elem = vk_outarray_next(a); elem != NULL; elem = NULL)

static inline void* __vk_find_struct(void* start, VkStructureType sType) {
    vk_foreach_struct(s, start) {
        if (s->sType == sType) return s;
    }

    return NULL;
}

template <class T, class H>
T* vk_find_struct(H* head) {
    (void)vk_get_vk_struct_id<H>::id;
    return static_cast<T*>(__vk_find_struct(static_cast<void*>(head), vk_get_vk_struct_id<T>::id));
}

template <class T, class H>
const T* vk_find_struct(const H* head) {
    (void)vk_get_vk_struct_id<H>::id;
    return static_cast<const T*>(__vk_find_struct(const_cast<void*>(static_cast<const void*>(head)),
                                                  vk_get_vk_struct_id<T>::id));
}
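
/*
 * Example: finding an extension struct in a pNext chain. A minimal sketch;
 * the two feature structs are illustrative locals, and both types are assumed
 * to be registered in vk_struct_id.h (vk_get_vk_struct_id<> must resolve).
 *
 *    VkPhysicalDeviceVulkan11Features vulkan11Features = {
 *        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES};
 *    VkPhysicalDeviceFeatures2 features2 = {
 *        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
 *        .pNext = &vulkan11Features};
 *
 *    // Walks the chain rooted at &features2 and returns &vulkan11Features;
 *    // returns null when no struct in the chain has the requested sType.
 *    auto* found = vk_find_struct<VkPhysicalDeviceVulkan11Features>(&features2);
 */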

uint32_t vk_get_driver_version(void);

uint32_t vk_get_version_override(void);

#define VK_EXT_OFFSET (1000000000UL)
#define VK_ENUM_EXTENSION(__enum) \
    ((__enum) >= VK_EXT_OFFSET ? ((((__enum)-VK_EXT_OFFSET) / 1000UL) + 1) : 0)
#define VK_ENUM_OFFSET(__enum) ((__enum) >= VK_EXT_OFFSET ? ((__enum) % 1000) : (__enum))
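
/*
 * Worked example: the Vulkan registry encodes extension-provided enum values
 * as 1000000000 + (extension_number - 1) * 1000 + offset. For
 * VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT (1000128004),
 * defined by VK_EXT_debug_utils (extension number 129):
 *
 *    VK_ENUM_EXTENSION(1000128004)  // ((128004 / 1000) + 1) == 129
 *    VK_ENUM_OFFSET(1000128004)     // 1000128004 % 1000 == 4
 */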

template <class T>
T vk_make_orphan_copy(const T& vk_struct) {
    T copy = vk_struct;
    copy.pNext = NULL;
    return copy;
}

template <class T>
vk_struct_chain_iterator vk_make_chain_iterator(T* vk_struct) {
    (void)vk_get_vk_struct_id<T>::id;
    vk_struct_chain_iterator result = {reinterpret_cast<vk_struct_common*>(vk_struct)};
    return result;
}

template <class T>
void vk_append_struct(vk_struct_chain_iterator* i, T* vk_struct) {
    (void)vk_get_vk_struct_id<T>::id;

    vk_struct_common* p = i->value;
    if (p->pNext) {
        ::abort();
    }

    p->pNext = reinterpret_cast<vk_struct_common*>(vk_struct);
    vk_struct->pNext = NULL;

    *i = vk_make_chain_iterator(vk_struct);
}
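
/*
 * Example: building a chain tail-first with a chain iterator. A minimal
 * sketch; both structs are illustrative locals whose remaining fields are
 * zero-initialized (the current tail's pNext must be null, or
 * vk_append_struct() aborts).
 *
 *    VkPhysicalDeviceFeatures2 features2 = {
 *        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2};
 *    VkPhysicalDeviceVulkan11Features vulkan11Features = {
 *        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES};
 *
 *    vk_struct_chain_iterator it = vk_make_chain_iterator(&features2);
 *    vk_append_struct(&it, &vulkan11Features);  // features2 -> vulkan11Features
 */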

// The caller must guarantee that none of the structs in the chain starting at
// nextChain are const objects; modifying a const object through this function
// is undefined behavior.
template <class T, class U, typename = std::enable_if_t<!std::is_const_v<T> && !std::is_const_v<U>>>
void vk_insert_struct(T& pos, U& nextChain) {
    vk_struct_common* nextChainTail = reinterpret_cast<vk_struct_common*>(&nextChain);
    for (; nextChainTail->pNext; nextChainTail = nextChainTail->pNext) {}

    nextChainTail->pNext = reinterpret_cast<vk_struct_common*>(const_cast<void*>(pos.pNext));
    pos.pNext = &nextChain;
}
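
/*
 * Example: splicing a chain in directly after `pos`. A minimal sketch with
 * illustrative locals; whatever was already chained behind `features2` ends
 * up behind the tail of the spliced-in chain.
 *
 *    // Before: features2 -> vulkan12Features
 *    vk_insert_struct(features2, vulkan11Features);
 *    // After:  features2 -> vulkan11Features -> ... -> vulkan12Features
 */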

template <class S, class T>
void vk_struct_chain_remove(S* unwanted, T* vk_struct) {
    if (!unwanted) return;

    vk_foreach_struct(current, vk_struct) {
        if ((void*)unwanted == current->pNext) {
            const vk_struct_common* unwanted_as_common =
                reinterpret_cast<const vk_struct_common*>(unwanted);
            current->pNext = unwanted_as_common->pNext;
        }
    }
}

template <class TypeToFilter, class H>
void vk_struct_chain_filter(H* head) {
    (void)vk_get_vk_struct_id<H>::id;

    auto* curr = reinterpret_cast<vk_struct_common*>(head);
    while (curr != nullptr) {
        if (curr->pNext != nullptr && curr->pNext->sType == vk_get_vk_struct_id<TypeToFilter>::id) {
            curr->pNext = curr->pNext->pNext;
        }
        curr = curr->pNext;
    }
}
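
/*
 * Example: pruning a chain. vk_struct_chain_remove() unlinks one specific
 * struct by pointer; vk_struct_chain_filter() unlinks every chained struct of
 * a given type. A minimal sketch with illustrative locals:
 *
 *    // Unlink the exact struct at &vulkan11Features from features2's chain.
 *    vk_struct_chain_remove(&vulkan11Features, &features2);
 *
 *    // Unlink every VkPhysicalDeviceVulkan12Features behind features2.
 *    vk_struct_chain_filter<VkPhysicalDeviceVulkan12Features>(&features2);
 */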

#define VK_CHECK(x)                                                                             \
    do {                                                                                        \
        VkResult err = x;                                                                       \
        if (err != VK_SUCCESS) {                                                                \
            if (err == VK_ERROR_DEVICE_LOST) {                                                  \
                vk_util::getVkCheckCallbacks().callIfExists(                                    \
                    &vk_util::VkCheckCallbacks::onVkErrorDeviceLost);                           \
            }                                                                                   \
            if (err == VK_ERROR_OUT_OF_HOST_MEMORY || err == VK_ERROR_OUT_OF_DEVICE_MEMORY ||   \
                err == VK_ERROR_OUT_OF_POOL_MEMORY) {                                           \
                vk_util::getVkCheckCallbacks().callIfExists(                                    \
                    &vk_util::VkCheckCallbacks::onVkErrorOutOfMemory, err, __func__, __LINE__); \
            }                                                                                   \
            GFXSTREAM_ABORT(::emugl::FatalError(err))                                           \
                << " VK_CHECK(" << #x << ") failed with " << string_VkResult(err) << " at "     \
                << __FILE__ << ":" << __LINE__;                                                 \
        }                                                                                       \
    } while (0)
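
/*
 * Example: wrapping a VkResult-returning call. A minimal sketch; `vk`,
 * `device`, and `fenceCreateInfo` are illustrative locals. On failure the
 * relevant VkCheckCallbacks hook (if registered) runs, then the process
 * aborts with the stringified VkResult and call site.
 *
 *    VkFence fence;
 *    VK_CHECK(vk.vkCreateFence(device, &fenceCreateInfo, nullptr, &fence));
 */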

#define VK_CHECK_MEMALLOC(x, allocateInfo)                                                       \
    do {                                                                                         \
        VkResult err = x;                                                                        \
        if (err != VK_SUCCESS) {                                                                 \
            if (err == VK_ERROR_OUT_OF_HOST_MEMORY || err == VK_ERROR_OUT_OF_DEVICE_MEMORY) {    \
                vk_util::getVkCheckCallbacks().callIfExists(                                     \
                    &vk_util::VkCheckCallbacks::onVkErrorOutOfMemoryOnAllocation, err, __func__, \
                    __LINE__, allocateInfo.allocationSize);                                      \
            }                                                                                    \
            GFXSTREAM_ABORT(::emugl::FatalError(err));                                           \
        }                                                                                        \
    } while (0)

typedef void* MTLTextureRef;
typedef void* MTLBufferRef;

namespace vk_util {

inline VkResult waitForVkQueueIdleWithRetry(const VulkanDispatch& vk, VkQueue queue) {
    using namespace std::chrono_literals;
    constexpr uint32_t retryLimit = 5;
    constexpr std::chrono::duration waitInterval = 4ms;
    VkResult res = vk.vkQueueWaitIdle(queue);
    for (uint32_t retryTimes = 1; retryTimes < retryLimit && res == VK_TIMEOUT; retryTimes++) {
        INFO("vkQueueWaitIdle returned VK_TIMEOUT on attempt %" PRIu32 ". Waiting %" PRIu32
             " ms before the next attempt.",
             retryTimes,
             static_cast<uint32_t>(
                 std::chrono::duration_cast<std::chrono::milliseconds>(waitInterval).count()));
        std::this_thread::sleep_for(waitInterval);
        res = vk.vkQueueWaitIdle(queue);
    }
    return res;
}
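
/*
 * Example usage; `vk` and `queue` are illustrative. The helper retries only
 * on VK_TIMEOUT, so any other result is returned from the first attempt.
 *
 *    VkResult res = waitForVkQueueIdleWithRetry(vk, queue);
 *    if (res != VK_SUCCESS) {
 *        // Still timing out after all retries, or a real error.
 *    }
 */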

typedef struct {
    std::function<void()> onVkErrorDeviceLost;
    std::function<void(VkResult, const char*, int)> onVkErrorOutOfMemory;
    std::function<void(VkResult, const char*, int, uint64_t)> onVkErrorOutOfMemoryOnAllocation;
} VkCheckCallbacks;

template <class T>
class CallbacksWrapper {
   public:
    CallbacksWrapper(std::unique_ptr<T> callbacks) : mCallbacks(std::move(callbacks)) {}
    // function should be a member function pointer to T.
    template <class U, class... Args>
    void callIfExists(U function, Args&&... args) const {
        if (mCallbacks && (*mCallbacks.*function)) {
            (*mCallbacks.*function)(std::forward<Args>(args)...);
        }
    }

    T* get() const { return mCallbacks.get(); }

   private:
    std::unique_ptr<T> mCallbacks;
};

std::optional<uint32_t> findMemoryType(const VulkanDispatch* ivk, VkPhysicalDevice physicalDevice,
                                       uint32_t typeFilter, VkMemoryPropertyFlags properties);

void setVkCheckCallbacks(std::unique_ptr<VkCheckCallbacks>);
const CallbacksWrapper<VkCheckCallbacks>& getVkCheckCallbacks();
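
/*
 * Example: registering VK_CHECK callbacks. A minimal sketch; the hooks called
 * in the lambda bodies (onDeviceLost, logOom) are hypothetical. Unset members
 * stay empty and callIfExists() silently skips them.
 *
 *    setVkCheckCallbacks(std::make_unique<VkCheckCallbacks>(VkCheckCallbacks{
 *        .onVkErrorDeviceLost = [] { onDeviceLost(); },
 *        .onVkErrorOutOfMemory =
 *            [](VkResult res, const char* func, int line) { logOom(res, func, line); },
 *    }));
 */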

class CrtpBase {};

// Utility class to make chaining inheritance of multiple CRTP classes more
// readable by allowing one to replace
//
//    class MyClass
//        : public vk_util::Crtp1<MyClass,
//                                vk_util::Crtp2<MyClass,
//                                               vk_util::Crtp3<MyClass>>> {};
//
// with
//
//    class MyClass
//        : public vk_util::MultiCrtp<MyClass,
//                                    vk_util::Crtp1,
//                                    vk_util::Crtp2,
//                                    vk_util::Crtp3> {};
namespace vk_util_internal {

// For the template "recursion", this is the base case where the list is empty
// and which just inherits from the last type.
template <typename T,  //
          typename U,  //
          template <typename, typename> class... CrtpClasses>
class MultiCrtpChainHelper : public U {};

// For the template "recursion", this is the case where the list is not empty
// and which uses the "current" CRTP class as the "U" type and passes the
// resulting type to the next step in the template "recursion".
template <typename T,                                //
          typename U,                                //
          template <typename, typename> class Crtp,  //
          template <typename, typename> class... Crtps>
class MultiCrtpChainHelper<T, U, Crtp, Crtps...>
    : public MultiCrtpChainHelper<T, Crtp<T, U>, Crtps...> {};

}  // namespace vk_util_internal

template <typename T,  //
          template <typename, typename> class... CrtpClasses>
class MultiCrtp : public vk_util_internal::MultiCrtpChainHelper<T, CrtpBase, CrtpClasses...> {};

template <class T, class U = CrtpBase>
class FindMemoryType : public U {
   protected:
    std::optional<uint32_t> findMemoryType(uint32_t typeFilter,
                                           VkMemoryPropertyFlags properties) const {
        const T& self = static_cast<const T&>(*this);
        return vk_util::findMemoryType(&self.m_vk, self.m_vkPhysicalDevice, typeFilter, properties);
    }
};

template <class T, class U = CrtpBase>
class RunSingleTimeCommand : public U {
   protected:
    void runSingleTimeCommands(VkQueue queue, std::shared_ptr<android::base::Lock> queueLock,
                               std::function<void(const VkCommandBuffer& commandBuffer)> f) const {
        const T& self = static_cast<const T&>(*this);
        VkCommandBuffer cmdBuff;
        VkCommandBufferAllocateInfo cmdBuffAllocInfo = {
            .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
            .commandPool = self.m_vkCommandPool,
            .level = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
            .commandBufferCount = 1};
        VK_CHECK(self.m_vk.vkAllocateCommandBuffers(self.m_vkDevice, &cmdBuffAllocInfo, &cmdBuff));
        VkCommandBufferBeginInfo beginInfo = {.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
                                              .flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT};
        VK_CHECK(self.m_vk.vkBeginCommandBuffer(cmdBuff, &beginInfo));
        f(cmdBuff);
        VK_CHECK(self.m_vk.vkEndCommandBuffer(cmdBuff));
        VkSubmitInfo submitInfo = {.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
                                   .commandBufferCount = 1,
                                   .pCommandBuffers = &cmdBuff};
        {
            std::unique_ptr<android::base::AutoLock> lock = nullptr;
            if (queueLock) {
                lock = std::make_unique<android::base::AutoLock>(*queueLock);
            }
            VK_CHECK(self.m_vk.vkQueueSubmit(queue, 1, &submitInfo, VK_NULL_HANDLE));
            VK_CHECK(self.m_vk.vkQueueWaitIdle(queue));
        }
        self.m_vk.vkFreeCommandBuffers(self.m_vkDevice, self.m_vkCommandPool, 1, &cmdBuff);
    }
};
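
/*
 * Example: consuming these mixins through MultiCrtp. A minimal sketch; the
 * member names are not arbitrary, since the mixins above read m_vk,
 * m_vkPhysicalDevice, m_vkDevice, and m_vkCommandPool off the derived class.
 *
 *    class MyVkHelper
 *        : public vk_util::MultiCrtp<MyVkHelper,
 *                                    vk_util::FindMemoryType,
 *                                    vk_util::RunSingleTimeCommand> {
 *       public:
 *        VulkanDispatch m_vk;
 *        VkPhysicalDevice m_vkPhysicalDevice;
 *        VkDevice m_vkDevice;
 *        VkCommandPool m_vkCommandPool;
 *    };
 */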
template <class T, class U = CrtpBase>
class RecordImageLayoutTransformCommands : public U {
   protected:
    void recordImageLayoutTransformCommands(VkCommandBuffer cmdBuff, VkImage image,
                                            VkImageLayout oldLayout,
                                            VkImageLayout newLayout) const {
        const T& self = static_cast<const T&>(*this);
        VkImageMemoryBarrier imageBarrier = {
            .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
            .srcAccessMask = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
            .dstAccessMask = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
            .oldLayout = oldLayout,
            .newLayout = newLayout,
            .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
            .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
            .image = image,
            .subresourceRange = {.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
                                 .baseMipLevel = 0,
                                 .levelCount = 1,
                                 .baseArrayLayer = 0,
                                 .layerCount = 1}};
        self.m_vk.vkCmdPipelineBarrier(cmdBuff, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
                                       VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr, 0,
                                       nullptr, 1, &imageBarrier);
    }
};

template <class T>
typename vk_fn_info::GetVkFnInfo<T>::type getVkInstanceProcAddrWithFallback(
    const std::vector<std::function<std::remove_pointer_t<PFN_vkGetInstanceProcAddr>>>&
        vkGetInstanceProcAddrs,
    VkInstance instance) {
    for (const auto& vkGetInstanceProcAddr : vkGetInstanceProcAddrs) {
        if (!vkGetInstanceProcAddr) {
            continue;
        }
        PFN_vkVoidFunction resWithCurrentVkGetInstanceProcAddr = std::apply(
            [&vkGetInstanceProcAddr, instance](auto&&... names) -> PFN_vkVoidFunction {
                for (const char* name : {names...}) {
                    if (PFN_vkVoidFunction resWithCurrentName =
                            vkGetInstanceProcAddr(instance, name)) {
                        return resWithCurrentName;
                    }
                }
                return nullptr;
            },
            vk_fn_info::GetVkFnInfo<T>::names);
        if (resWithCurrentVkGetInstanceProcAddr) {
            return reinterpret_cast<typename vk_fn_info::GetVkFnInfo<T>::type>(
                resWithCurrentVkGetInstanceProcAddr);
        }
    }
    return nullptr;
}
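
/*
 * Example: resolving an instance proc with fallback loaders. A minimal
 * sketch; it assumes a GetPhysicalDeviceProperties2 tag registered in
 * vk_fn_info.h, which supplies both the candidate names (e.g. the core and
 * KHR spellings) and the resulting function pointer type.
 *
 *    auto pfn = getVkInstanceProcAddrWithFallback<vk_fn_info::GetPhysicalDeviceProperties2>(
 *        {vk->vkGetInstanceProcAddr, fallbackGetInstanceProcAddr}, instance);
 *    if (!pfn) { ... }  // no loader resolved any candidate name
 */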

static inline bool vk_descriptor_type_has_image_view(VkDescriptorType type) {
    switch (type) {
        case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
        case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
        case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
        case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
            return true;
        default:
            return false;
    }
}

}  // namespace vk_util
}  // namespace vk
}  // namespace gfxstream

#endif /* VK_UTIL_H */