1 // Copyright 2018 The Android Open Source Project
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License");
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
6 //
7 // http://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 // See the License for the specific language governing permissions and
13 // limitations under the License.
14 #include "VkDecoderGlobalState.h"
15 
16 #include <algorithm>
17 #include <functional>
18 #include <list>
19 #include <memory>
20 #include <mutex>
21 #include <unordered_map>
22 #include <vector>
23 
24 #include "ExternalObjectManager.h"
25 #include "RenderThreadInfoVk.h"
26 #include "VkAndroidNativeBuffer.h"
27 #include "VkCommonOperations.h"
28 #include "VkDecoderContext.h"
29 #include "VkDecoderInternalStructs.h"
30 #include "VkDecoderSnapshot.h"
31 #include "VkDecoderSnapshotUtils.h"
32 #include "VkEmulatedPhysicalDeviceMemory.h"
33 #include "VulkanDispatch.h"
34 #include "VulkanStream.h"
35 #include "aemu/base/ManagedDescriptor.hpp"
36 #include "aemu/base/Optional.h"
37 #include "aemu/base/containers/EntityManager.h"
38 #include "aemu/base/containers/HybridEntityManager.h"
39 #include "aemu/base/containers/Lookup.h"
40 #include "aemu/base/files/Stream.h"
41 #include "aemu/base/memory/SharedMemory.h"
42 #include "aemu/base/synchronization/ConditionVariable.h"
43 #include "aemu/base/synchronization/Lock.h"
44 #include "aemu/base/system/System.h"
45 #include "common/goldfish_vk_deepcopy.h"
46 #include "common/goldfish_vk_dispatch.h"
47 #include "common/goldfish_vk_marshaling.h"
48 #include "common/goldfish_vk_reserved_marshaling.h"
49 #include "compressedTextureFormats/AstcCpuDecompressor.h"
50 #include "gfxstream/host/Tracing.h"
51 #include "host-common/GfxstreamFatalError.h"
52 #include "host-common/HostmemIdMapping.h"
53 #include "host-common/address_space_device_control_ops.h"
54 #include "host-common/emugl_vm_operations.h"
55 #include "host-common/vm_operations.h"
56 #include "utils/RenderDoc.h"
57 #include "vk_util.h"
58 #include "vulkan/VkFormatUtils.h"
59 #include "vulkan/emulated_textures/AstcTexture.h"
60 #include "vulkan/emulated_textures/CompressedImageInfo.h"
61 #include "vulkan/emulated_textures/GpuDecompressionPipeline.h"
62 #include "vulkan/vk_enum_string_helper.h"
63 
64 #ifndef _WIN32
65 #include <unistd.h>
66 #endif
67 
68 #ifdef __APPLE__
69 #include <CoreFoundation/CoreFoundation.h>
70 #include <vulkan/vulkan_beta.h> // for MoltenVK portability extensions
71 #endif
72 
73 #ifndef VERBOSE
74 #define VERBOSE(fmt, ...)                    \
75     if (android::base::isVerboseLogging()) { \
76         INFO(fmt, ##__VA_ARGS__);            \
77     }
78 #endif
79 
80 #include <climits>
81 
82 namespace gfxstream {
83 namespace vk {
84 
85 using android::base::AutoLock;
86 using android::base::ConditionVariable;
87 using android::base::DescriptorType;
88 using android::base::Lock;
89 using android::base::ManagedDescriptor;
90 using android::base::MetricEventBadPacketLength;
91 using android::base::MetricEventDuplicateSequenceNum;
92 using android::base::MetricEventVulkanOutOfMemory;
93 using android::base::Optional;
94 using android::base::SharedMemory;
95 using android::base::StaticLock;
96 using emugl::ABORT_REASON_OTHER;
97 using emugl::FatalError;
98 using emugl::GfxApiLogger;
99 using gfxstream::ExternalObjectManager;
100 using gfxstream::VulkanInfo;
101 
102 // TODO(b/261477138): Move to a shared aemu definition
103 #define __ALIGN_MASK(x, mask) (((x) + (mask)) & ~(mask))
104 #define __ALIGN(x, a) __ALIGN_MASK(x, (__typeof__(x))(a)-1)
105 
// Set to 1 to enable verbose debug logging from this file.
#define VKDGS_DEBUG 0

#if VKDGS_DEBUG
// Forward the format string and arguments to INFO. The previous expansion
// dropped them (expanding to the bare token INFO), which would not compile
// when VKDGS_DEBUG is enabled; this mirrors the VERBOSE macro above.
#define VKDGS_LOG(fmt, ...) INFO(fmt, ##__VA_ARGS__)
#else
#define VKDGS_LOG(fmt, ...)
#endif
113 
114 // Blob mem
115 #define STREAM_BLOB_MEM_GUEST 1
116 #define STREAM_BLOB_MEM_HOST3D 2
117 #define STREAM_BLOB_MEM_HOST3D_GUEST 3
118 
119 // Blob flags
120 #define STREAM_BLOB_FLAG_USE_MAPPABLE 1
121 #define STREAM_BLOB_FLAG_USE_SHAREABLE 2
122 #define STREAM_BLOB_FLAG_USE_CROSS_DEVICE 4
123 #define STREAM_BLOB_FLAG_CREATE_GUEST_HANDLE 8
124 
125 #define VALIDATE_REQUIRED_HANDLE(parameter) \
126     validateRequiredHandle(__FUNCTION__, #parameter, parameter)
127 
128 template <typename T>
validateRequiredHandle(const char * api_name,const char * parameter_name,T value)129 void validateRequiredHandle(const char* api_name, const char* parameter_name, T value) {
130     if (value == VK_NULL_HANDLE) {
131         GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER)) << api_name << ":" << parameter_name;
132     }
133 }
134 
135 #define VALIDATE_NEW_HANDLE_INFO_ENTRY(objectMap, newEntry) \
136     validateNewHandleInfoEntry(objectMap, newEntry, #objectMap)
137 
// Logs an error if |newEntry| is already tracked in |vkObjectMap|, which
// would indicate a duplicated handle registration. |typeName| names the map
// in the log message. Diagnostic only: the duplicate is not rejected.
template <typename T, typename K>
void validateNewHandleInfoEntry(const std::unordered_map<T, K>& vkObjectMap, const T& newEntry,
                                const char* typeName) {
    const bool alreadyTracked = vkObjectMap.count(newEntry) != 0;
    if (alreadyTracked) {
        ERR("Found duplicate in %s (%p)!", typeName, newEntry);
    }
}
145 
dupExternalSync(VK_EXT_SYNC_HANDLE h)146 VK_EXT_SYNC_HANDLE dupExternalSync(VK_EXT_SYNC_HANDLE h) {
147 #ifdef _WIN32
148     auto myProcessHandle = GetCurrentProcess();
149     VK_EXT_SYNC_HANDLE res;
150     DuplicateHandle(myProcessHandle, h,     // source process and handle
151                     myProcessHandle, &res,  // target process and pointer to handle
152                     0 /* desired access (ignored) */, true /* inherit */,
153                     DUPLICATE_SAME_ACCESS /* same access option */);
154     return res;
155 #else
156     return dup(h);
157 #endif
158 }
159 
160 // A list of device extensions that should not be passed to the host driver.
161 // These will mainly include Vulkan features that we emulate ourselves.
// Note: entries appear both as literal strings and as *_EXTENSION_NAME
// macros; all are compared by name when filtering.
static constexpr const char* const kEmulatedDeviceExtensions[] = {
    "VK_ANDROID_external_memory_android_hardware_buffer",
    "VK_ANDROID_native_buffer",
    "VK_FUCHSIA_buffer_collection",
    "VK_FUCHSIA_external_memory",
    "VK_FUCHSIA_external_semaphore",
    VK_EXT_DEVICE_MEMORY_REPORT_EXTENSION_NAME,
    VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME,
    VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME,
    VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME,
    VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME,
    VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME,
    VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME,
    VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME,
#if defined(__QNX__)
    // On QNX, external memory fd / dma-buf extensions are also emulated.
    VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME,
    VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME,
#endif
};
181 
182 // A list of instance extensions that should not be passed to the host driver.
183 // On older pre-1.1 Vulkan platforms, gfxstream emulates these features.
static constexpr const char* const kEmulatedInstanceExtensions[] = {
    VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME,
    VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME,
    VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME,
};

// Highest Vulkan API version reported to the guest, and the minimum accepted.
static constexpr uint32_t kMaxSafeVersion = VK_MAKE_VERSION(1, 3, 0);
static constexpr uint32_t kMinVersion = VK_MAKE_VERSION(1, 0, 0);

// Page size and mask used when carving out mappable blobs.
// NOTE(review): kPageMaskForBlob is ~(0xfff), i.e. ~(kPageSizeforBlob - 1);
// keep the two in sync if the blob page size ever changes.
static constexpr uint64_t kPageSizeforBlob = 4096;
static constexpr uint64_t kPageMaskForBlob = ~(0xfff);

// Monotonically increasing id handed out for host-owned blobs.
static uint64_t hostBlobId = 0;

// b/319729462
// On snapshot load, thread local data is not available, thus we use a
// fake context ID. We will eventually need to fix it once we start using
// snapshot with virtio.
static uint32_t kTemporaryContextIdForSnapshotLoading = 1;

// Application / engine names for which Vulkan snapshotting is allowed.
static std::unordered_set<std::string> kSnapshotAppAllowList = {"Chromium"};
static std::unordered_set<std::string> kSnapshotEngineAllowList = {"ANGLE", "ace"};
206 
// Expands to one enumerator per boxed handle type, e.g. Tag_VkInstance.
#define DEFINE_BOXED_HANDLE_TYPE_TAG(type) Tag_##type,

// Type tag stored alongside each boxed handle so that the handle's Vulkan
// type can be recovered at unbox time.
enum BoxedHandleTypeTag {
    Tag_Invalid = 0,
    GOLDFISH_VK_LIST_HANDLE_TYPES_BY_STAGE(DEFINE_BOXED_HANDLE_TYPE_TAG)

    // additional generic tag
    Tag_VkGeneric = 1001,
};
216 
// Manages "boxed" Vulkan handles: host-side wrappers around driver handles
// that carry extra per-handle state (dispatch table, type tag, ...). Also
// keeps a reverse map from the underlying (unboxed) handle value back to the
// boxed one.
template <class T>
class BoxedHandleManager {
   public:
    // The hybrid entity manager uses a sequence lock to protect access to
    // a working set of 16000 handles, allowing us to avoid using a regular
    // lock for those. Performance is degraded when going over this number,
    // as it will then fall back to a std::map.
    //
    // We use 16000 as the max number of live handles to track; we don't
    // expect the system to go over 16000 total live handles, outside some
    // dEQP object management tests.
    using Store = android::base::HybridEntityManager<16000, uint64_t, T>;

    // Guards |reverseMap| and |delayedRemoves|; |store| has its own internal
    // synchronization (see above).
    Lock lock;
    mutable Store store;
    // unboxed handle value -> boxed handle value.
    std::unordered_map<uint64_t, uint64_t> reverseMap;
    struct DelayedRemove {
        uint64_t handle;
        std::function<void()> callback;
    };
    // Per-device removals deferred until processDelayedRemovesGlobalStateLocked().
    std::unordered_map<VkDevice, std::vector<DelayedRemove>> delayedRemoves;

    // Drops all tracked handles and reverse mappings.
    // NOTE(review): does not take |lock|; presumably only called while no
    // other thread can touch this manager -- confirm.
    void clear() {
        reverseMap.clear();
        store.clear();
    }

    // Boxes |item| under a newly allocated handle tagged |tag|, records the
    // reverse mapping, and returns the boxed handle value.
    uint64_t add(const T& item, BoxedHandleTypeTag tag) {
        auto res = (uint64_t)store.add(item, (size_t)tag);
        AutoLock l(lock);
        reverseMap[(uint64_t)(item.underlying)] = res;
        return res;
    }

    // Boxes |item| under the caller-chosen |handle| value (e.g. to recreate
    // previously issued handles on snapshot load).
    uint64_t addFixed(uint64_t handle, const T& item, BoxedHandleTypeTag tag) {
        auto res = (uint64_t)store.addFixed(handle, item, (size_t)tag);
        AutoLock l(lock);
        reverseMap[(uint64_t)(item.underlying)] = res;
        return res;
    }

    // Replaces the item stored under |handle| and fixes up the reverse map,
    // erasing the stale unboxed->boxed entry if there was one.
    void update(uint64_t handle, const T& item, BoxedHandleTypeTag tag) {
        auto storedItem = store.get(handle);
        uint64_t oldHandle = (uint64_t)storedItem->underlying;
        *storedItem = item;
        AutoLock l(lock);
        if (oldHandle) {
            reverseMap.erase(oldHandle);
        }
        reverseMap[(uint64_t)(item.underlying)] = handle;
    }

    // Immediately removes boxed handle |h| and its reverse mapping.
    void remove(uint64_t h) {
        auto item = get(h);
        if (item) {
            AutoLock l(lock);
            reverseMap.erase((uint64_t)(item->underlying));
        }
        store.remove(h);
    }

    // Queues removal of |h| until the delayed removes for |device| are
    // processed; |callback| (may be empty) runs just before the removal.
    void removeDelayed(uint64_t h, VkDevice device, std::function<void()> callback) {
        AutoLock l(lock);
        delayedRemoves[device].push_back({h, callback});
    }

    // Runs and clears all removals queued for |device| via removeDelayed().
    void processDelayedRemovesGlobalStateLocked(VkDevice device) {
        AutoLock l(lock);
        auto it = delayedRemoves.find(device);
        if (it == delayedRemoves.end()) return;
        auto& delayedRemovesList = it->second;
        for (const auto& r : delayedRemovesList) {
            auto h = r.handle;
            // VkDecoderGlobalState is already locked when callback is called.
            if (r.callback) {
                r.callback();
            }
            store.remove(h);
        }
        delayedRemovesList.clear();
        delayedRemoves.erase(it);
    }

    // Looks up the item stored under boxed handle |h| (nullptr if absent).
    T* get(uint64_t h) { return (T*)store.get_const(h); }

    // Maps an unboxed handle value back to its boxed handle, or 0 if it is
    // not tracked.
    // NOTE(review): despite the "Locked" suffix, this method does not take
    // |lock| itself -- confirm callers hold it.
    uint64_t getBoxedFromUnboxedLocked(uint64_t unboxed) {
        auto* res = android::base::find(reverseMap, unboxed);
        if (!res) return 0;
        return *res;
    }
};
308 
// Per-handle bookkeeping used to keep decoder operations in sequence-number
// order across threads (waiters block on |cv| under |lock|).
struct OrderMaintenanceInfo {
    uint32_t sequenceNumber = 0;
    Lock lock;
    ConditionVariable cv;

    // Intrusive refcount; must only be manipulated via incRef()/decRef()
    // (see acquireOrderMaintInfo / releaseOrderMaintInfo below).
    uint32_t refcount = 1;

    // NOTE(review): uses GCC __atomic builtins on a plain uint32_t rather
    // than std::atomic -- confirm before changing the field type.
    void incRef() { __atomic_add_fetch(&refcount, 1, __ATOMIC_SEQ_CST); }

    // Returns true when this was the last reference; the caller then deletes
    // the object.
    bool decRef() { return 0 == __atomic_sub_fetch(&refcount, 1, __ATOMIC_SEQ_CST); }
};
320 
acquireOrderMaintInfo(OrderMaintenanceInfo * ord)321 static void acquireOrderMaintInfo(OrderMaintenanceInfo* ord) {
322     if (!ord) return;
323     ord->incRef();
324 }
325 
releaseOrderMaintInfo(OrderMaintenanceInfo * ord)326 static void releaseOrderMaintInfo(OrderMaintenanceInfo* ord) {
327     if (!ord) return;
328     if (ord->decRef()) delete ord;
329 }
330 
// Boxed-handle payload for dispatchable Vulkan handles (instance, device,
// queue, command buffer).
template <class T>
class DispatchableHandleInfo {
   public:
    T underlying;  // the driver's (unboxed) handle value
    VulkanDispatch* dispatch = nullptr;  // dispatch table for this handle
    bool ownDispatch = false;            // whether |dispatch| is owned by us
    OrderMaintenanceInfo* ordMaintInfo = nullptr;  // refcounted; may be null
    VulkanMemReadingStream* readStream = nullptr;  // may be null
};
340 
341 static BoxedHandleManager<DispatchableHandleInfo<uint64_t>> sBoxedHandleManager;
342 
343 struct ReadStreamRegistry {
344     Lock mLock;
345 
346     std::vector<VulkanMemReadingStream*> freeStreams;
347 
ReadStreamRegistrygfxstream::vk::ReadStreamRegistry348     ReadStreamRegistry() { freeStreams.reserve(100); };
349 
popgfxstream::vk::ReadStreamRegistry350     VulkanMemReadingStream* pop(const gfxstream::host::FeatureSet& features) {
351         AutoLock lock(mLock);
352         if (freeStreams.empty()) {
353             return new VulkanMemReadingStream(nullptr, features);
354         } else {
355             VulkanMemReadingStream* res = freeStreams.back();
356             freeStreams.pop_back();
357             return res;
358         }
359     }
360 
pushgfxstream::vk::ReadStreamRegistry361     void push(VulkanMemReadingStream* stream) {
362         AutoLock lock(mLock);
363         freeStreams.push_back(stream);
364     }
365 };
366 
367 static ReadStreamRegistry sReadStreamRegistry;
368 
369 class VkDecoderGlobalState::Impl {
370    public:
    // Reads the feature flags and environment variables that control decoder
    // behavior. The global Vulkan emulation state (m_emu) must already be
    // initialized before this runs.
    Impl()
        : m_vk(vkDispatch()),
          m_emu(getGlobalVkEmulation()),
          mRenderDocWithMultipleVkInstances(m_emu->guestRenderDoc.get()) {
        mSnapshotsEnabled = m_emu->features.VulkanSnapshots.enabled;
        mBatchedDescriptorSetUpdateEnabled =
            m_emu->features.VulkanBatchedDescriptorSetUpdate.enabled;
        // Vulkan object cleanup can be disabled via env var for debugging.
        mVkCleanupEnabled =
            android::base::getEnvironmentVariable("ANDROID_EMU_VK_NO_CLEANUP") != "1";
        mLogging = android::base::getEnvironmentVariable("ANDROID_EMU_VK_LOG_CALLS") == "1";
        mVerbosePrints = android::base::getEnvironmentVariable("ANDROID_EMUGL_VERBOSE") == "1";
        mEnableVirtualVkQueue = m_emu->features.VulkanVirtualQueue.enabled;

        if (get_emugl_address_space_device_control_ops().control_get_hw_funcs &&
            get_emugl_address_space_device_control_ops().control_get_hw_funcs()) {
            // Use the old memory cleanup path when the hw funcs report a zero
            // physical address start.
            // NOTE(review): the semantics of getPhysAddrStartLocked() == 0 are
            // not visible here -- confirm.
            mUseOldMemoryCleanupPath = 0 == get_emugl_address_space_device_control_ops()
                                                .control_get_hw_funcs()
                                                ->getPhysAddrStartLocked();
        }
    }
391 
392     ~Impl() = default;
393 
394     // Resets all internal tracking info.
395     // Assumes that the heavyweight cleanup operations
396     // have already happened.
clear()397     void clear() {
398         mInstanceInfo.clear();
399         mPhysdevInfo.clear();
400         mDeviceInfo.clear();
401         mImageInfo.clear();
402         mImageViewInfo.clear();
403         mSamplerInfo.clear();
404         mCommandBufferInfo.clear();
405         mCommandPoolInfo.clear();
406         mDeviceToPhysicalDevice.clear();
407         mPhysicalDeviceToInstance.clear();
408         mQueueInfo.clear();
409         mBufferInfo.clear();
410         mMemoryInfo.clear();
411         mShaderModuleInfo.clear();
412         mPipelineCacheInfo.clear();
413         mPipelineInfo.clear();
414         mRenderPassInfo.clear();
415         mFramebufferInfo.clear();
416         mSemaphoreInfo.clear();
417         mFenceInfo.clear();
418 #ifdef _WIN32
419         mSemaphoreId = 1;
420         mExternalSemaphoresById.clear();
421 #endif
422         mDescriptorUpdateTemplateInfo.clear();
423 
424         mCreatedHandlesForSnapshotLoad.clear();
425         mCreatedHandlesForSnapshotLoadIndex = 0;
426 
427         sBoxedHandleManager.clear();
428     }
429 
snapshotsEnabled() const430     bool snapshotsEnabled() const { return mSnapshotsEnabled; }
431 
batchedDescriptorSetUpdateEnabled() const432     bool batchedDescriptorSetUpdateEnabled() const { return mBatchedDescriptorSetUpdateEnabled; }
433 
vkCleanupEnabled() const434     bool vkCleanupEnabled() const { return mVkCleanupEnabled; }
435 
getFeatures() const436     const gfxstream::host::FeatureSet& getFeatures() const { return m_emu->features; }
437 
createSnapshotStateBlock(VkDevice unboxed_device)438     StateBlock createSnapshotStateBlock(VkDevice unboxed_device) {
439             const auto& device = unboxed_device;
440             const auto& deviceInfo = android::base::find(mDeviceInfo, device);
441             const auto physicalDevice = deviceInfo->physicalDevice;
442             const auto& physicalDeviceInfo = android::base::find(mPhysdevInfo, physicalDevice);
443             const auto& instanceInfo = android::base::find(mInstanceInfo, physicalDeviceInfo->instance);
444 
445             VulkanDispatch* ivk = dispatch_VkInstance(instanceInfo->boxed);
446             VulkanDispatch* dvk = dispatch_VkDevice(deviceInfo->boxed);
447 
448             StateBlock stateBlock{
449                 .physicalDevice = physicalDevice,
450                 .physicalDeviceInfo = physicalDeviceInfo,
451                 .device = device,
452                 .deviceDispatch = dvk,
453                 .queue = VK_NULL_HANDLE,
454                 .commandPool = VK_NULL_HANDLE,
455             };
456 
457             uint32_t queueFamilyCount = 0;
458             ivk->vkGetPhysicalDeviceQueueFamilyProperties(physicalDevice, &queueFamilyCount,
459                                                           nullptr);
460             std::vector<VkQueueFamilyProperties> queueFamilyProps(queueFamilyCount);
461             ivk->vkGetPhysicalDeviceQueueFamilyProperties(physicalDevice, &queueFamilyCount,
462                                                           queueFamilyProps.data());
463             uint32_t queueFamilyIndex = 0;
464             for (auto queue : deviceInfo->queues) {
465                 int idx = queue.first;
466                 if ((queueFamilyProps[idx].queueFlags & VK_QUEUE_GRAPHICS_BIT) == 0) {
467                     continue;
468                 }
469                 stateBlock.queue = queue.second[0];
470                 queueFamilyIndex = idx;
471                 break;
472             }
473 
474             VkCommandPoolCreateInfo commandPoolCi = {
475                 VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
476                 0,
477                 VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
478                 queueFamilyIndex,
479             };
480             dvk->vkCreateCommandPool(device, &commandPoolCi, nullptr, &stateBlock.commandPool);
481             return stateBlock;
482     }
483 
releaseSnapshotStateBlock(const StateBlock * stateBlock)484     void releaseSnapshotStateBlock(const StateBlock* stateBlock) {
485         stateBlock->deviceDispatch->vkDestroyCommandPool(stateBlock->device, stateBlock->commandPool, nullptr);
486     }
487 
save(android::base::Stream * stream)488     void save(android::base::Stream* stream) {
489         mSnapshotState = SnapshotState::Saving;
490 
491 #ifdef GFXSTREAM_BUILD_WITH_SNAPSHOT_SUPPORT
492         if (!mInstanceInfo.empty()) {
493             get_emugl_vm_operations().setStatSnapshotUseVulkan();
494         }
495 #endif
496 
497         {
498             std::unordered_map<VkDevice, uint32_t> deviceToContextId;
499             for (const auto& [device, deviceInfo] : mDeviceInfo) {
500                 if (!deviceInfo.virtioGpuContextId) {
501                     GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
502                         << "VkDevice" << device << " missing context id.";
503                 }
504                 deviceToContextId[deviceInfo.boxed] = *deviceInfo.virtioGpuContextId;
505             }
506             stream->putBe64(static_cast<uint64_t>(deviceToContextId.size()));
507             for (const auto [device, contextId] : deviceToContextId) {
508                 stream->putBe64(reinterpret_cast<uint64_t>(device));
509                 stream->putBe32(contextId);
510             }
511         }
512 
513         snapshot()->save(stream);
514 
515         // Save mapped memory
516         uint32_t memoryCount = 0;
517         for (const auto& it : mMemoryInfo) {
518             if (it.second.ptr) {
519                 memoryCount++;
520             }
521         }
522         stream->putBe32(memoryCount);
523         for (const auto& it : mMemoryInfo) {
524             if (!it.second.ptr) {
525                 continue;
526             }
527             stream->putBe64(reinterpret_cast<uint64_t>(
528                 unboxed_to_boxed_non_dispatchable_VkDeviceMemory(it.first)));
529             stream->putBe64(it.second.size);
530             stream->write(it.second.ptr, it.second.size);
531         }
532 
533         // Set up VK structs to snapshot other Vulkan objects
534         // TODO(b/323064243): group all images from the same device and reuse queue / command pool
535 
536         std::vector<VkImage> sortedBoxedImages;
537         for (const auto& imageIte : mImageInfo) {
538             sortedBoxedImages.push_back(unboxed_to_boxed_non_dispatchable_VkImage(imageIte.first));
539         }
540         // Image contents need to be saved and loaded in the same order.
541         // So sort them (by boxed handles) first.
542         std::sort(sortedBoxedImages.begin(), sortedBoxedImages.end());
543         for (const auto& boxedImage : sortedBoxedImages) {
544             auto unboxedImage = try_unbox_VkImage(boxedImage);
545             if (unboxedImage == VK_NULL_HANDLE) {
546                 //TODO(b/294277842): should return an error here.
547                 continue;
548             }
549             const ImageInfo& imageInfo = mImageInfo[unboxedImage];
550             if (imageInfo.memory == VK_NULL_HANDLE) {
551                 continue;
552             }
553             // Vulkan command playback doesn't recover image layout. We need to do it here.
554             stream->putBe32(imageInfo.layout);
555 
556             StateBlock stateBlock = createSnapshotStateBlock(imageInfo.device);
557             // TODO(b/294277842): make sure the queue is empty before using.
558             saveImageContent(stream, &stateBlock, unboxedImage, &imageInfo);
559             releaseSnapshotStateBlock(&stateBlock);
560         }
561 
562         // snapshot buffers
563         std::vector<VkBuffer> sortedBoxedBuffers;
564         for (const auto& bufferIte : mBufferInfo) {
565             sortedBoxedBuffers.push_back(
566                 unboxed_to_boxed_non_dispatchable_VkBuffer(bufferIte.first));
567         }
568         sort(sortedBoxedBuffers.begin(), sortedBoxedBuffers.end());
569         for (const auto& boxedBuffer : sortedBoxedBuffers) {
570             auto unboxedBuffer = try_unbox_VkBuffer(boxedBuffer);
571             if (unboxedBuffer == VK_NULL_HANDLE) {
572                 //TODO(b/294277842): should return an error here.
573                 continue;
574             }
575             const BufferInfo& bufferInfo = mBufferInfo[unboxedBuffer];
576             if (bufferInfo.memory == VK_NULL_HANDLE) {
577                 continue;
578             }
579             // TODO: add a special case for host mapped memory
580             StateBlock stateBlock = createSnapshotStateBlock(bufferInfo.device);
581 
582             // TODO(b/294277842): make sure the queue is empty before using.
583             saveBufferContent(stream, &stateBlock, unboxedBuffer, &bufferInfo);
584             releaseSnapshotStateBlock(&stateBlock);
585         }
586 
587         // snapshot descriptors
588         std::vector<VkDescriptorPool> sortedBoxedDescriptorPools;
589         for (const auto& descriptorPoolIte : mDescriptorPoolInfo) {
590             auto boxed =
591                 unboxed_to_boxed_non_dispatchable_VkDescriptorPool(descriptorPoolIte.first);
592             sortedBoxedDescriptorPools.push_back(boxed);
593         }
594         std::sort(sortedBoxedDescriptorPools.begin(), sortedBoxedDescriptorPools.end());
595         for (const auto& boxedDescriptorPool : sortedBoxedDescriptorPools) {
596             auto unboxedDescriptorPool = unbox_VkDescriptorPool(boxedDescriptorPool);
597             const DescriptorPoolInfo& poolInfo = mDescriptorPoolInfo[unboxedDescriptorPool];
598 
599             for (uint64_t poolId : poolInfo.poolIds) {
600                 DispatchableHandleInfo<uint64_t>* setHandleInfo = sBoxedHandleManager.get(poolId);
601                 bool allocated = setHandleInfo->underlying != 0;
602                 stream->putByte(allocated);
603                 if (!allocated) {
604                     continue;
605                 }
606 
607                 const DescriptorSetInfo& descriptorSetInfo =
608                     mDescriptorSetInfo[(VkDescriptorSet)setHandleInfo->underlying];
609                 VkDescriptorSetLayout boxedLayout =
610                     unboxed_to_boxed_non_dispatchable_VkDescriptorSetLayout(
611                         descriptorSetInfo.unboxedLayout);
612                 stream->putBe64((uint64_t)boxedLayout);
613                 // Count all valid descriptors.
614                 //
615                 // There is a use case where user can create an image, write it to a descriptor,
616                 // read/write the image by committing a command, then delete the image without
617                 // unbinding the descriptor. For example:
618                 //
619                 // T1: create "vkimage1" (original)
620                 // T2: update binding1 of vkdescriptorset1 with vkimage1
621                 // T3: draw
622                 // T4: delete "vkimage1" (original)
623                 // T5: create "vkimage1" (recycled)
624                 // T6: snapshot load
625                 //
626                 // At the point of the snapshot, the original vk image has been invalidated,
627                 // thus we cannot call vkUpdateDescriptorSets for it, and need to remove it
628                 // from the snapshot.
629                 //
630                 // The current implementation bases on smart pointers. A descriptor set info
631                 // holds weak pointers to their underlying resources (image, image view, buffer).
632                 // On snapshot load, we check if any of the smart pointers are invalidated.
633                 //
634                 // An alternative approach has been discussed by, instead of using smart
635                 // pointers, checking valid handles on snapshot save. This approach has the
636                 // advantage that it reduces number of smart pointer allocations. After discussion
637                 // we concluded that there is at least one corner case that will break the
638                 // alternative approach. That is when the user deletes a bound vkimage and creates
639                 // a new vkimage. The driver is free to reuse released handles, thus we might
640                 // end up having a new vkimage with the same handle as the old one (see T5 in the
641                 // example), and think the binding is still valid. And if we bind the new image
642                 // regardless, we might hit a Vulkan validation error because the new image might
643                 // have the "usage" flag that is unsuitable to bind to descriptors.
644                 std::vector<std::pair<int, int>> validWriteIndices;
645                 for (int bindingIdx = 0; bindingIdx < descriptorSetInfo.allWrites.size();
646                      bindingIdx++) {
647                     for (int bindingElemIdx = 0;
648                          bindingElemIdx < descriptorSetInfo.allWrites[bindingIdx].size();
649                          bindingElemIdx++) {
650                         const auto& entry = descriptorSetInfo.allWrites[bindingIdx][bindingElemIdx];
651                         if (entry.writeType == DescriptorSetInfo::DescriptorWriteType::Empty) {
652                             continue;
653                         }
654                         int dependencyObjCount =
655                             descriptorDependencyObjectCount(entry.descriptorType);
656                         if (entry.alives.size() < dependencyObjCount) {
657                             continue;
658                         }
659                         bool isValid = true;
660                         for (const auto& alive : entry.alives) {
661                             isValid &= !alive.expired();
662                             if (!isValid) {
663                                 break;
664                             }
665                         }
666                         if (!isValid) {
667                             continue;
668                         }
669                         validWriteIndices.push_back(std::make_pair(bindingIdx, bindingElemIdx));
670                     }
671                 }
672                 stream->putBe64(validWriteIndices.size());
673                 // Save all valid descriptors
674                 for (const auto& idx : validWriteIndices) {
675                     const auto& entry = descriptorSetInfo.allWrites[idx.first][idx.second];
676                     stream->putBe32(idx.first);
677                     stream->putBe32(idx.second);
678                     stream->putBe32(entry.writeType);
679                     // entry.descriptorType might be redundant.
680                     stream->putBe32(entry.descriptorType);
681                     switch (entry.writeType) {
682                         case DescriptorSetInfo::DescriptorWriteType::ImageInfo: {
683                             VkDescriptorImageInfo imageInfo = entry.imageInfo;
684                             // Get the unboxed version
685                             imageInfo.imageView =
686                                 descriptorTypeContainsImage(entry.descriptorType)
687                                     ? unboxed_to_boxed_non_dispatchable_VkImageView(
688                                           imageInfo.imageView)
689                                     : VK_NULL_HANDLE;
690                             imageInfo.sampler =
691                                 descriptorTypeContainsSampler(entry.descriptorType)
692                                     ? unboxed_to_boxed_non_dispatchable_VkSampler(imageInfo.sampler)
693                                     : VK_NULL_HANDLE;
694                             stream->write(&imageInfo, sizeof(imageInfo));
695                         } break;
696                         case DescriptorSetInfo::DescriptorWriteType::BufferInfo: {
697                             VkDescriptorBufferInfo bufferInfo = entry.bufferInfo;
698                             // Get the unboxed version
699                             bufferInfo.buffer =
700                                 unboxed_to_boxed_non_dispatchable_VkBuffer(bufferInfo.buffer);
701                             stream->write(&bufferInfo, sizeof(bufferInfo));
702                         } break;
703                         case DescriptorSetInfo::DescriptorWriteType::BufferView: {
704                             // Get the unboxed version
705                             VkBufferView bufferView =
706                                 unboxed_to_boxed_non_dispatchable_VkBufferView(entry.bufferView);
707                             stream->write(&bufferView, sizeof(bufferView));
708                         } break;
709                         case DescriptorSetInfo::DescriptorWriteType::InlineUniformBlock:
710                         case DescriptorSetInfo::DescriptorWriteType::AccelerationStructure:
711                             // TODO
712                             GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
713                                 << "Encountered pending inline uniform block or acceleration "
714                                    "structure "
715                                    "desc write, abort (NYI)";
716                         default:
717                             break;
718                     }
719                 }
720             }
721         }
722 
723         // Fences
724         std::vector<VkFence> unsignaledFencesBoxed;
725         for (const auto& fence : mFenceInfo) {
726             if (!fence.second.boxed) {
727                 continue;
728             }
729             const auto& device = fence.second.device;
730             const auto& deviceInfo = android::base::find(mDeviceInfo, device);
731             VulkanDispatch* dvk = dispatch_VkDevice(deviceInfo->boxed);
732             if (VK_NOT_READY == dvk->vkGetFenceStatus(device, fence.first)) {
733                 unsignaledFencesBoxed.push_back(fence.second.boxed);
734             }
735         }
736         stream->putBe64(unsignaledFencesBoxed.size());
737         stream->write(unsignaledFencesBoxed.data(), unsignaledFencesBoxed.size() * sizeof(VkFence));
738         mSnapshotState = SnapshotState::Normal;
739     }
740 
load(android::base::Stream * stream,GfxApiLogger & gfxLogger,HealthMonitor<> * healthMonitor)741     void load(android::base::Stream* stream, GfxApiLogger& gfxLogger,
742               HealthMonitor<>* healthMonitor) {
743         // assume that we already destroyed all instances
744         // from FrameBuffer's onLoad method.
745 
746         // destroy all current internal data structures
747         clear();
748         mSnapshotState = SnapshotState::Loading;
749 
750         // This needs to happen before the replay in the decoder so that virtio gpu context ids
751         // are available for operations involving `ExternalObjectManager`.
752         {
753             mSnapshotLoadVkDeviceToVirtioCpuContextId.emplace();
754             const uint64_t count = stream->getBe64();
755             for (uint64_t i = 0; i < count; i++) {
756                 const uint64_t device = stream->getBe64();
757                 const uint32_t contextId = stream->getBe32();
758                 (*mSnapshotLoadVkDeviceToVirtioCpuContextId)[reinterpret_cast<VkDevice>(device)] =
759                     contextId;
760             }
761         }
762 
763         android::base::BumpPool bumpPool;
764         // this part will replay in the decoder
765         snapshot()->load(stream, gfxLogger, healthMonitor);
766         // load mapped memory
767         uint32_t memoryCount = stream->getBe32();
768         for (uint32_t i = 0; i < memoryCount; i++) {
769             VkDeviceMemory boxedMemory = reinterpret_cast<VkDeviceMemory>(stream->getBe64());
770             VkDeviceMemory unboxedMemory = unbox_VkDeviceMemory(boxedMemory);
771             auto it = mMemoryInfo.find(unboxedMemory);
772             if (it == mMemoryInfo.end()) {
773                 GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
774                     << "Snapshot load failure: cannot find memory handle for " << boxedMemory;
775             }
776             VkDeviceSize size = stream->getBe64();
777             if (size != it->second.size || !it->second.ptr) {
778                 GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
779                     << "Snapshot load failure: memory size does not match for " << boxedMemory;
780             }
781             stream->read(it->second.ptr, size);
782         }
783         // Set up VK structs to snapshot other Vulkan objects
784         // TODO(b/323064243): group all images from the same device and reuse queue / command pool
785 
786         std::vector<VkImage> sortedBoxedImages;
787         for (const auto& imageIte : mImageInfo) {
788             sortedBoxedImages.push_back(unboxed_to_boxed_non_dispatchable_VkImage(imageIte.first));
789         }
790         sort(sortedBoxedImages.begin(), sortedBoxedImages.end());
791         for (const auto& boxedImage : sortedBoxedImages) {
792             auto unboxedImage = unbox_VkImage(boxedImage);
793             ImageInfo& imageInfo = mImageInfo[unboxedImage];
794             if (imageInfo.memory == VK_NULL_HANDLE) {
795                 continue;
796             }
797             // Playback doesn't recover image layout. We need to do it here.
798             //
799             // Layout transform was done by vkCmdPipelineBarrier but we don't record such command
800             // directly. Instead, we memorize the current layout and add our own
801             // vkCmdPipelineBarrier after load.
802             //
803             // We do the layout transform in loadImageContent. There are still use cases where it
804             // should recover the layout but does not.
805             //
806             // TODO(b/323059453): fix corner cases when image contents cannot be properly loaded.
807             imageInfo.layout = static_cast<VkImageLayout>(stream->getBe32());
808             StateBlock stateBlock = createSnapshotStateBlock(imageInfo.device);
809             // TODO(b/294277842): make sure the queue is empty before using.
810             loadImageContent(stream, &stateBlock, unboxedImage, &imageInfo);
811             releaseSnapshotStateBlock(&stateBlock);
812         }
813 
814         // snapshot buffers
815         std::vector<VkBuffer> sortedBoxedBuffers;
816         for (const auto& bufferIte : mBufferInfo) {
817             sortedBoxedBuffers.push_back(
818                 unboxed_to_boxed_non_dispatchable_VkBuffer(bufferIte.first));
819         }
820         sort(sortedBoxedBuffers.begin(), sortedBoxedBuffers.end());
821         for (const auto& boxedBuffer : sortedBoxedBuffers) {
822             auto unboxedBuffer = unbox_VkBuffer(boxedBuffer);
823             const BufferInfo& bufferInfo = mBufferInfo[unboxedBuffer];
824             if (bufferInfo.memory == VK_NULL_HANDLE) {
825                 continue;
826             }
827             // TODO: add a special case for host mapped memory
828             StateBlock stateBlock = createSnapshotStateBlock(bufferInfo.device);
829             // TODO(b/294277842): make sure the queue is empty before using.
830             loadBufferContent(stream, &stateBlock, unboxedBuffer, &bufferInfo);
831             releaseSnapshotStateBlock(&stateBlock);
832         }
833 
834         // snapshot descriptors
835         std::vector<VkDescriptorPool> sortedBoxedDescriptorPools;
836         for (const auto& descriptorPoolIte : mDescriptorPoolInfo) {
837             auto boxed =
838                 unboxed_to_boxed_non_dispatchable_VkDescriptorPool(descriptorPoolIte.first);
839             sortedBoxedDescriptorPools.push_back(boxed);
840         }
841         sort(sortedBoxedDescriptorPools.begin(), sortedBoxedDescriptorPools.end());
842         for (const auto& boxedDescriptorPool : sortedBoxedDescriptorPools) {
843             auto unboxedDescriptorPool = unbox_VkDescriptorPool(boxedDescriptorPool);
844             const DescriptorPoolInfo& poolInfo = mDescriptorPoolInfo[unboxedDescriptorPool];
845 
846             std::vector<VkDescriptorSetLayout> layouts;
847             std::vector<uint64_t> poolIds;
848             std::vector<VkWriteDescriptorSet> writeDescriptorSets;
849             std::vector<uint32_t> writeStartingIndices;
850 
851             // Temporary structures for the pointers in VkWriteDescriptorSet.
852             // Use unique_ptr so that the pointers don't change when vector resizes.
853             std::vector<std::unique_ptr<VkDescriptorImageInfo>> tmpImageInfos;
854             std::vector<std::unique_ptr<VkDescriptorBufferInfo>> tmpBufferInfos;
855             std::vector<std::unique_ptr<VkBufferView>> tmpBufferViews;
856 
857             for (uint64_t poolId : poolInfo.poolIds) {
858                 bool allocated = stream->getByte();
859                 if (!allocated) {
860                     continue;
861                 }
862                 poolIds.push_back(poolId);
863                 writeStartingIndices.push_back(writeDescriptorSets.size());
864                 VkDescriptorSetLayout boxedLayout = (VkDescriptorSetLayout)stream->getBe64();
865                 layouts.push_back(unbox_VkDescriptorSetLayout(boxedLayout));
866                 uint64_t validWriteCount = stream->getBe64();
867                 for (int write = 0; write < validWriteCount; write++) {
868                     uint32_t binding = stream->getBe32();
869                     uint32_t arrayElement = stream->getBe32();
870                     DescriptorSetInfo::DescriptorWriteType writeType =
871                         static_cast<DescriptorSetInfo::DescriptorWriteType>(stream->getBe32());
872                     VkDescriptorType descriptorType =
873                         static_cast<VkDescriptorType>(stream->getBe32());
874                     VkWriteDescriptorSet writeDescriptorSet = {
875                         .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
876                         .dstSet = (VkDescriptorSet)poolId,
877                         .dstBinding = binding,
878                         .dstArrayElement = arrayElement,
879                         .descriptorCount = 1,
880                         .descriptorType = descriptorType,
881                     };
882                     switch (writeType) {
883                         case DescriptorSetInfo::DescriptorWriteType::ImageInfo: {
884                             tmpImageInfos.push_back(std::make_unique<VkDescriptorImageInfo>());
885                             writeDescriptorSet.pImageInfo = tmpImageInfos.back().get();
886                             VkDescriptorImageInfo& imageInfo = *tmpImageInfos.back();
887                             stream->read(&imageInfo, sizeof(imageInfo));
888                             imageInfo.imageView = descriptorTypeContainsImage(descriptorType)
889                                                       ? unbox_VkImageView(imageInfo.imageView)
890                                                       : 0;
891                             imageInfo.sampler = descriptorTypeContainsSampler(descriptorType)
892                                                     ? unbox_VkSampler(imageInfo.sampler)
893                                                     : 0;
894                         } break;
895                         case DescriptorSetInfo::DescriptorWriteType::BufferInfo: {
896                             tmpBufferInfos.push_back(std::make_unique<VkDescriptorBufferInfo>());
897                             writeDescriptorSet.pBufferInfo = tmpBufferInfos.back().get();
898                             VkDescriptorBufferInfo& bufferInfo = *tmpBufferInfos.back();
899                             stream->read(&bufferInfo, sizeof(bufferInfo));
900                             bufferInfo.buffer = unbox_VkBuffer(bufferInfo.buffer);
901                         } break;
902                         case DescriptorSetInfo::DescriptorWriteType::BufferView: {
903                             tmpBufferViews.push_back(std::make_unique<VkBufferView>());
904                             writeDescriptorSet.pTexelBufferView = tmpBufferViews.back().get();
905                             VkBufferView& bufferView = *tmpBufferViews.back();
906                             stream->read(&bufferView, sizeof(bufferView));
907                             bufferView = unbox_VkBufferView(bufferView);
908                         } break;
909                         case DescriptorSetInfo::DescriptorWriteType::InlineUniformBlock:
910                         case DescriptorSetInfo::DescriptorWriteType::AccelerationStructure:
911                             // TODO
912                             GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
913                                 << "Encountered pending inline uniform block or acceleration "
914                                    "structure "
915                                    "desc write, abort (NYI)";
916                         default:
917                             break;
918                     }
919                     writeDescriptorSets.push_back(writeDescriptorSet);
920                 }
921             }
922             std::vector<uint32_t> whichPool(poolIds.size(), 0);
923             std::vector<uint32_t> pendingAlloc(poolIds.size(), true);
924 
925             const auto& device = poolInfo.device;
926             const auto& deviceInfo = android::base::find(mDeviceInfo, device);
927             VulkanDispatch* dvk = dispatch_VkDevice(deviceInfo->boxed);
928             on_vkQueueCommitDescriptorSetUpdatesGOOGLE(
929                 &bumpPool, dvk, device, 1, &unboxedDescriptorPool, poolIds.size(), layouts.data(),
930                 poolIds.data(), whichPool.data(), pendingAlloc.data(), writeStartingIndices.data(),
931                 writeDescriptorSets.size(), writeDescriptorSets.data());
932         }
933         // Fences
934         uint64_t fenceCount = stream->getBe64();
935         std::vector<VkFence> unsignaledFencesBoxed(fenceCount);
936         stream->read(unsignaledFencesBoxed.data(), fenceCount * sizeof(VkFence));
937         for (VkFence boxedFence : unsignaledFencesBoxed) {
938             VkFence unboxedFence = unbox_VkFence(boxedFence);
939             auto it = mFenceInfo.find(unboxedFence);
940             if (it == mFenceInfo.end()) {
941                 GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
942                     << "Snapshot load failure: unrecognized VkFence";
943             }
944             const auto& device = it->second.device;
945             const auto& deviceInfo = android::base::find(mDeviceInfo, device);
946             VulkanDispatch* dvk = dispatch_VkDevice(deviceInfo->boxed);
947             dvk->vkResetFences(device, 1, &unboxedFence);
948         }
949 #ifdef GFXSTREAM_BUILD_WITH_SNAPSHOT_SUPPORT
950         if (!mInstanceInfo.empty()) {
951             get_emugl_vm_operations().setStatSnapshotUseVulkan();
952         }
953 #endif
954 
955         mSnapshotState = SnapshotState::Normal;
956     }
957 
lock()958     void lock() { mLock.lock(); }
959 
unlock()960     void unlock() { mLock.unlock(); }
961 
setCreatedHandlesForSnapshotLoad(const unsigned char * buffer)962     size_t setCreatedHandlesForSnapshotLoad(const unsigned char* buffer) {
963         size_t consumed = 0;
964 
965         if (!buffer) return consumed;
966 
967         uint32_t bufferSize = *(uint32_t*)buffer;
968 
969         consumed += 4;
970 
971         uint32_t handleCount = bufferSize / 8;
972         VKDGS_LOG("incoming handle count: %u", handleCount);
973 
974         uint64_t* handles = (uint64_t*)(buffer + 4);
975 
976         mCreatedHandlesForSnapshotLoad.clear();
977         mCreatedHandlesForSnapshotLoadIndex = 0;
978 
979         for (uint32_t i = 0; i < handleCount; ++i) {
980             VKDGS_LOG("handle to load: 0x%llx", (unsigned long long)(uintptr_t)handles[i]);
981             mCreatedHandlesForSnapshotLoad.push_back(handles[i]);
982             consumed += 8;
983         }
984 
985         return consumed;
986     }
987 
clearCreatedHandlesForSnapshotLoad()988     void clearCreatedHandlesForSnapshotLoad() {
989         mCreatedHandlesForSnapshotLoad.clear();
990         mCreatedHandlesForSnapshotLoadIndex = 0;
991     }
992 
getContextIdForDeviceLocked(VkDevice device)993     std::optional<uint32_t> getContextIdForDeviceLocked(VkDevice device) {
994         auto deviceInfoIt = mDeviceInfo.find(device);
995         if (deviceInfoIt == mDeviceInfo.end()) {
996             return std::nullopt;
997         }
998         auto& deviceInfo = deviceInfoIt->second;
999         if (!deviceInfo.virtioGpuContextId) {
1000             return std::nullopt;
1001         }
1002         return *deviceInfo.virtioGpuContextId;
1003     }
1004 
on_vkEnumerateInstanceVersion(android::base::BumpPool * pool,uint32_t * pApiVersion)1005     VkResult on_vkEnumerateInstanceVersion(android::base::BumpPool* pool, uint32_t* pApiVersion) {
1006         if (m_vk->vkEnumerateInstanceVersion) {
1007             VkResult res = m_vk->vkEnumerateInstanceVersion(pApiVersion);
1008 
1009             if (*pApiVersion > kMaxSafeVersion) {
1010                 *pApiVersion = kMaxSafeVersion;
1011             }
1012 
1013             return res;
1014         }
1015         *pApiVersion = kMinVersion;
1016         return VK_SUCCESS;
1017     }
1018 
on_vkCreateInstance(android::base::BumpPool * pool,const VkInstanceCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkInstance * pInstance)1019     VkResult on_vkCreateInstance(android::base::BumpPool* pool,
1020                                  const VkInstanceCreateInfo* pCreateInfo,
1021                                  const VkAllocationCallbacks* pAllocator, VkInstance* pInstance) {
1022         std::vector<const char*> finalExts = filteredInstanceExtensionNames(
1023             pCreateInfo->enabledExtensionCount, pCreateInfo->ppEnabledExtensionNames);
1024 
1025         // Create higher version instance whenever it is possible.
1026         uint32_t apiVersion = VK_MAKE_VERSION(1, 0, 0);
1027         if (pCreateInfo->pApplicationInfo) {
1028             apiVersion = pCreateInfo->pApplicationInfo->apiVersion;
1029         }
1030         if (m_vk->vkEnumerateInstanceVersion) {
1031             uint32_t instanceVersion;
1032             VkResult result = m_vk->vkEnumerateInstanceVersion(&instanceVersion);
1033             if (result == VK_SUCCESS && instanceVersion >= VK_MAKE_VERSION(1, 1, 0)) {
1034                 apiVersion = instanceVersion;
1035             }
1036         }
1037 
1038         VkInstanceCreateInfo createInfoFiltered;
1039         VkApplicationInfo appInfo = {};
1040         deepcopy_VkInstanceCreateInfo(pool, VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO, pCreateInfo,
1041                                       &createInfoFiltered);
1042 
1043         createInfoFiltered.enabledExtensionCount = static_cast<uint32_t>(finalExts.size());
1044         createInfoFiltered.ppEnabledExtensionNames = finalExts.data();
1045         if (createInfoFiltered.pApplicationInfo != nullptr) {
1046             const_cast<VkApplicationInfo*>(createInfoFiltered.pApplicationInfo)->apiVersion =
1047                 apiVersion;
1048             appInfo = *createInfoFiltered.pApplicationInfo;
1049         }
1050 
1051         // remove VkDebugReportCallbackCreateInfoEXT and
1052         // VkDebugUtilsMessengerCreateInfoEXT from the chain.
1053         auto* curr = reinterpret_cast<vk_struct_common*>(&createInfoFiltered);
1054         while (curr != nullptr) {
1055             if (curr->pNext != nullptr &&
1056                 (curr->pNext->sType == VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT ||
1057                  curr->pNext->sType == VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT)) {
1058                 curr->pNext = curr->pNext->pNext;
1059             }
1060             curr = curr->pNext;
1061         }
1062 
1063 #if defined(__APPLE__)
1064         if (m_emu->instanceSupportsMoltenVK) {
1065             createInfoFiltered.flags |= VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR;
1066         }
1067 #endif
1068 
1069         // bug: 155795731
1070         bool swiftshader =
1071             (android::base::getEnvironmentVariable("ANDROID_EMU_VK_ICD").compare("swiftshader") ==
1072              0);
1073         std::unique_ptr<std::lock_guard<std::recursive_mutex>> lock = nullptr;
1074 
1075         if (swiftshader) {
1076             if (mLogging) {
1077                 INFO("%s: acquire lock", __func__);
1078             }
1079             lock = std::make_unique<std::lock_guard<std::recursive_mutex>>(mLock);
1080         }
1081 
1082         VkResult res = m_vk->vkCreateInstance(&createInfoFiltered, pAllocator, pInstance);
1083 
1084         if (res != VK_SUCCESS) {
1085             WARN("Failed to create Vulkan instance: %s.", string_VkResult(res));
1086             return res;
1087         }
1088 
1089         if (!swiftshader) {
1090             lock = std::make_unique<std::lock_guard<std::recursive_mutex>>(mLock);
1091         }
1092 
1093         InstanceInfo info;
1094         info.apiVersion = apiVersion;
1095         if (pCreateInfo->pApplicationInfo) {
1096             if (pCreateInfo->pApplicationInfo->pApplicationName) {
1097                 info.applicationName = pCreateInfo->pApplicationInfo->pApplicationName;
1098             }
1099             if (pCreateInfo->pApplicationInfo->pEngineName) {
1100                 info.engineName = pCreateInfo->pApplicationInfo->pEngineName;
1101             }
1102         }
1103         for (uint32_t i = 0; i < createInfoFiltered.enabledExtensionCount; ++i) {
1104             info.enabledExtensionNames.push_back(createInfoFiltered.ppEnabledExtensionNames[i]);
1105         }
1106 
1107         INFO("Created VkInstance:%p for application:%s engine:%s.", *pInstance,
1108              info.applicationName.c_str(), info.engineName.c_str());
1109 
1110 #ifdef GFXSTREAM_BUILD_WITH_SNAPSHOT_SUPPORT
1111         // TODO: bug 129484301
1112         if (!m_emu->features.VulkanSnapshots.enabled ||
1113             (kSnapshotAppAllowList.find(info.applicationName) == kSnapshotAppAllowList.end() &&
1114              kSnapshotEngineAllowList.find(info.engineName) == kSnapshotEngineAllowList.end())) {
1115             get_emugl_vm_operations().setSkipSnapshotSave(true);
1116             get_emugl_vm_operations().setSkipSnapshotSaveReason(SNAPSHOT_SKIP_UNSUPPORTED_VK_APP);
1117         }
1118 #endif
1119         // Box it up
1120         VkInstance boxed = new_boxed_VkInstance(*pInstance, nullptr, true /* own dispatch */);
1121         init_vulkan_dispatch_from_instance(m_vk, *pInstance, dispatch_VkInstance(boxed));
1122         info.boxed = boxed;
1123 
1124         std::string_view engineName = appInfo.pEngineName ? appInfo.pEngineName : "";
1125         info.isAngle = (engineName == "ANGLE");
1126 
1127         VALIDATE_NEW_HANDLE_INFO_ENTRY(mInstanceInfo, *pInstance);
1128         mInstanceInfo[*pInstance] = info;
1129 
1130         *pInstance = (VkInstance)info.boxed;
1131 
1132         if (vkCleanupEnabled()) {
1133             m_emu->callbacks.registerProcessCleanupCallback(unbox_VkInstance(boxed), [this, boxed] {
1134                 if (snapshotsEnabled()) {
1135                     snapshot()->vkDestroyInstance(nullptr, 0, nullptr, boxed, nullptr);
1136                 }
1137                 vkDestroyInstanceImpl(unbox_VkInstance(boxed), nullptr);
1138             });
1139         }
1140 
1141         return res;
1142     }
1143 
vkDestroyInstanceImpl(VkInstance instance,const VkAllocationCallbacks * pAllocator)1144     void vkDestroyInstanceImpl(VkInstance instance, const VkAllocationCallbacks* pAllocator) {
1145         // Do delayed removes out of the lock, but get the list of devices to destroy inside the
1146         // lock.
1147         {
1148             std::lock_guard<std::recursive_mutex> lock(mLock);
1149             std::vector<VkDevice> devicesToDestroy;
1150 
1151             for (auto it : mDeviceToPhysicalDevice) {
1152                 auto* otherInstance = android::base::find(mPhysicalDeviceToInstance, it.second);
1153                 if (!otherInstance) continue;
1154                 if (instance == *otherInstance) {
1155                     devicesToDestroy.push_back(it.first);
1156                 }
1157             }
1158 
1159             for (auto device : devicesToDestroy) {
1160                 sBoxedHandleManager.processDelayedRemovesGlobalStateLocked(device);
1161             }
1162         }
1163 
1164         InstanceObjects instanceObjects;
1165 
1166         {
1167             std::lock_guard<std::recursive_mutex> lock(mLock);
1168             extractInstanceAndDependenciesLocked(instance, instanceObjects);
1169         }
1170 
1171         if (mRenderDocWithMultipleVkInstances) {
1172             mRenderDocWithMultipleVkInstances->removeVkInstance(instance);
1173         }
1174 
1175         destroyInstanceObjects(instanceObjects);
1176     }
1177 
on_vkDestroyInstance(android::base::BumpPool * pool,VkInstance boxed_instance,const VkAllocationCallbacks * pAllocator)1178     void on_vkDestroyInstance(android::base::BumpPool* pool, VkInstance boxed_instance,
1179                               const VkAllocationCallbacks* pAllocator) {
1180         auto instance = try_unbox_VkInstance(boxed_instance);
1181         if (instance == VK_NULL_HANDLE) {
1182             return;
1183         }
1184 
1185         vkDestroyInstanceImpl(instance, pAllocator);
1186 
1187         m_emu->callbacks.unregisterProcessCleanupCallback(instance);
1188     }
1189 
GetPhysicalDevices(VkInstance instance,VulkanDispatch * vk,std::vector<VkPhysicalDevice> & outPhysicalDevices)1190     VkResult GetPhysicalDevices(VkInstance instance, VulkanDispatch* vk,
1191                                 std::vector<VkPhysicalDevice>& outPhysicalDevices) {
1192         uint32_t physicalDevicesCount = 0;
1193         auto res = vk->vkEnumeratePhysicalDevices(instance, &physicalDevicesCount, nullptr);
1194         if (res != VK_SUCCESS) {
1195             return res;
1196         }
1197 
1198         outPhysicalDevices.resize(physicalDevicesCount);
1199 
1200         res = vk->vkEnumeratePhysicalDevices(instance, &physicalDevicesCount,
1201                                              outPhysicalDevices.data());
1202         if (res != VK_SUCCESS) {
1203             outPhysicalDevices.clear();
1204             return res;
1205         }
1206 
1207         outPhysicalDevices.resize(physicalDevicesCount);
1208 
1209         return VK_SUCCESS;
1210     }
1211 
    // Narrows |toFilterPhysicalDevices| down to at most one device: when
    // device-ID properties are available, only devices whose UUID matches the
    // one recorded in VkCommonOperations survive; in all cases everything
    // after the first remaining device is dropped. Caller must hold mLock.
    void FilterPhysicalDevicesLocked(VkInstance instance, VulkanDispatch* vk,
                                     std::vector<VkPhysicalDevice>& toFilterPhysicalDevices) {
        if (m_emu->instanceSupportsGetPhysicalDeviceProperties2) {
            // Resolve vkGetPhysicalDeviceProperties2KHR via the instance
            // dispatch first, falling back to the global dispatch.
            PFN_vkGetPhysicalDeviceProperties2KHR getPhysdevProps2Func =
                vk_util::getVkInstanceProcAddrWithFallback<
                    vk_util::vk_fn_info::GetPhysicalDeviceProperties2>(
                    {
                        vk->vkGetInstanceProcAddr,
                        m_vk->vkGetInstanceProcAddr,
                    },
                    instance);

            if (getPhysdevProps2Func) {
                // Remove those devices whose UUIDs don't match the one in VkCommonOperations.
                toFilterPhysicalDevices.erase(
                    std::remove_if(toFilterPhysicalDevices.begin(), toFilterPhysicalDevices.end(),
                                   [getPhysdevProps2Func, this](VkPhysicalDevice physicalDevice) {
                                       // We can get the device UUID.
                                       VkPhysicalDeviceIDPropertiesKHR idProps = {
                                           VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR,
                                           nullptr,
                                       };
                                       VkPhysicalDeviceProperties2KHR propsWithId = {
                                           VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR,
                                           &idProps,
                                       };
                                       getPhysdevProps2Func(physicalDevice, &propsWithId);

                                       // Keep only devices matching the emulation layer's UUID.
                                       return memcmp(m_emu->deviceInfo.idProps.deviceUUID,
                                                     idProps.deviceUUID, VK_UUID_SIZE) != 0;
                                   }),
                    toFilterPhysicalDevices.end());
            } else {
                ERR("Failed to vkGetPhysicalDeviceProperties2KHR().");
            }
        } else {
            // If we don't support ID properties then just advertise only the
            // first physical device.
            WARN("Device ID not available, returning first physical device.");
        }
        // Advertise at most one device: drop everything after the first
        // surviving entry (also applies when UUID filtering was unavailable).
        if (!toFilterPhysicalDevices.empty()) {
            toFilterPhysicalDevices.erase(std::next(toFilterPhysicalDevices.begin()),
                                          toFilterPhysicalDevices.end());
        }
    }
1257 
    // Guest-facing vkEnumeratePhysicalDevices: enumerates and filters host
    // devices, then (when an output array is provided) records per-device
    // state, boxes the handles, and returns them following the standard
    // two-call count/array protocol (VK_INCOMPLETE when the array is small).
    VkResult on_vkEnumeratePhysicalDevices(android::base::BumpPool* pool, VkInstance boxed_instance,
                                           uint32_t* pPhysicalDeviceCount,
                                           VkPhysicalDevice* pPhysicalDevices) {
        auto instance = unbox_VkInstance(boxed_instance);
        auto vk = dispatch_VkInstance(boxed_instance);

        std::vector<VkPhysicalDevice> physicalDevices;
        auto res = GetPhysicalDevices(instance, vk, physicalDevices);
        if (res != VK_SUCCESS) {
            return res;
        }

        std::lock_guard<std::recursive_mutex> lock(mLock);

        // Keep only the device(s) the emulation layer is driving.
        FilterPhysicalDevicesLocked(instance, vk, physicalDevices);

        const uint32_t requestedCount = pPhysicalDeviceCount ? *pPhysicalDeviceCount : 0;
        const uint32_t availableCount = static_cast<uint32_t>(physicalDevices.size());

        if (pPhysicalDeviceCount) {
            *pPhysicalDeviceCount = availableCount;
        }

        if (pPhysicalDeviceCount && pPhysicalDevices) {
            // Box them up
            // NOTE(review): repeated enumeration re-registers the same host
            // handles here; VALIDATE_NEW_HANDLE_INFO_ENTRY presumably guards
            // against duplicates — confirm.
            for (uint32_t i = 0; i < std::min(requestedCount, availableCount); ++i) {
                VALIDATE_NEW_HANDLE_INFO_ENTRY(mPhysicalDeviceToInstance, physicalDevices[i]);
                mPhysicalDeviceToInstance[physicalDevices[i]] = instance;
                VALIDATE_NEW_HANDLE_INFO_ENTRY(mPhysdevInfo, physicalDevices[i]);
                auto& physdevInfo = mPhysdevInfo[physicalDevices[i]];
                physdevInfo.instance = instance;
                physdevInfo.boxed = new_boxed_VkPhysicalDevice(physicalDevices[i], vk,
                                                               false /* does not own dispatch */);

                vk->vkGetPhysicalDeviceProperties(physicalDevices[i], &physdevInfo.props);

                // Never advertise a newer API version than the emulation
                // layer is known to handle.
                if (physdevInfo.props.apiVersion > kMaxSafeVersion) {
                    physdevInfo.props.apiVersion = kMaxSafeVersion;
                }

                VkPhysicalDeviceMemoryProperties hostMemoryProperties;
                vk->vkGetPhysicalDeviceMemoryProperties(physicalDevices[i], &hostMemoryProperties);

                // Wraps host memory properties with the emulated view exposed
                // to the guest.
                physdevInfo.memoryPropertiesHelper =
                    std::make_unique<EmulatedPhysicalDeviceMemoryProperties>(
                        hostMemoryProperties,
                        m_emu->representativeColorBufferMemoryTypeInfo->hostMemoryTypeIndex,
                        getFeatures());

                uint32_t queueFamilyPropCount = 0;

                vk->vkGetPhysicalDeviceQueueFamilyProperties(physicalDevices[i],
                                                             &queueFamilyPropCount, nullptr);

                physdevInfo.queueFamilyProperties.resize((size_t)queueFamilyPropCount);

                vk->vkGetPhysicalDeviceQueueFamilyProperties(
                    physicalDevices[i], &queueFamilyPropCount,
                    physdevInfo.queueFamilyProperties.data());

                // Override queueCount for the virtual queue to be provided with device creations
                if (mEnableVirtualVkQueue) {
                    for (VkQueueFamilyProperties& qfp : physdevInfo.queueFamilyProperties) {
                        // Check if the queue requires a virtualized version. For Android, we need
                        // 2 graphics queues on the same queue family.
                        if ( (qfp.queueFlags & VK_QUEUE_GRAPHICS_BIT) && qfp.queueCount == 1 ) {
                            qfp.queueCount = 2;
                            physdevInfo.hasVirtualGraphicsQueues = true;
                        }
                    }
                }

                pPhysicalDevices[i] = (VkPhysicalDevice)physdevInfo.boxed;
            }
            // Standard Vulkan protocol: the guest's array was too small.
            if (requestedCount < availableCount) {
                res = VK_INCOMPLETE;
            }
        }

        return res;
    }
1339 
on_vkGetPhysicalDeviceFeatures(android::base::BumpPool * pool,VkPhysicalDevice boxed_physicalDevice,VkPhysicalDeviceFeatures * pFeatures)1340     void on_vkGetPhysicalDeviceFeatures(android::base::BumpPool* pool,
1341                                         VkPhysicalDevice boxed_physicalDevice,
1342                                         VkPhysicalDeviceFeatures* pFeatures) {
1343         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
1344         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
1345 
1346         vk->vkGetPhysicalDeviceFeatures(physicalDevice, pFeatures);
1347         pFeatures->textureCompressionETC2 |= enableEmulatedEtc2(physicalDevice, vk);
1348         pFeatures->textureCompressionASTC_LDR |= enableEmulatedAstc(physicalDevice, vk);
1349     }
1350 
    // Host-side handler for vkGetPhysicalDeviceFeatures2.
    //
    // Dispatches the query through the best available path (core 1.1, the KHR
    // get_physical_device_properties2 extension, or a core-1.0 fallback that
    // cannot fill pNext extension structs), then applies emulation overrides:
    //  - advertises emulated ETC2/ASTC texture compression,
    //  - advertises emulated samplerYcbcrConversion when enabled,
    //  - masks out features that are unsupported or broken under emulation
    //    (protected memory, private data, inline uniform blocks).
    void on_vkGetPhysicalDeviceFeatures2(android::base::BumpPool* pool,
                                         VkPhysicalDevice boxed_physicalDevice,
                                         VkPhysicalDeviceFeatures2* pFeatures) {
        auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
        auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);

        std::lock_guard<std::recursive_mutex> lock(mLock);

        // Cached per-device / per-instance info determines which query entry
        // point is safe to call.
        auto* physdevInfo = android::base::find(mPhysdevInfo, physicalDevice);
        if (!physdevInfo) return;

        auto instance = mPhysicalDeviceToInstance[physicalDevice];
        auto* instanceInfo = android::base::find(mInstanceInfo, instance);
        if (!instanceInfo) return;

        if (instanceInfo->apiVersion >= VK_MAKE_VERSION(1, 1, 0) &&
            physdevInfo->props.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) {
            // Both instance and device are at least Vulkan 1.1: core entry point.
            vk->vkGetPhysicalDeviceFeatures2(physicalDevice, pFeatures);
        } else if (hasInstanceExtension(instance,
                                        VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
            vk->vkGetPhysicalDeviceFeatures2KHR(physicalDevice, pFeatures);
        } else {
            // No instance extension, fake it!!!!
            // Core 1.0 only: any chained extension structs cannot be filled in.
            if (pFeatures->pNext) {
                fprintf(stderr,
                        "%s: Warning: Trying to use extension struct in "
                        "VkPhysicalDeviceFeatures2 without having enabled "
                        "the extension!\n",
                        __func__);
            }
            *pFeatures = {
                VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
                0,
            };
            vk->vkGetPhysicalDeviceFeatures(physicalDevice, &pFeatures->features);
        }

        // Advertise emulated compressed-texture support on top of the host's.
        pFeatures->features.textureCompressionETC2 |= enableEmulatedEtc2(physicalDevice, vk);
        pFeatures->features.textureCompressionASTC_LDR |= enableEmulatedAstc(physicalDevice, vk);
        VkPhysicalDeviceSamplerYcbcrConversionFeatures* ycbcrFeatures =
            vk_find_struct<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(pFeatures);
        if (ycbcrFeatures != nullptr) {
            ycbcrFeatures->samplerYcbcrConversion |= m_emu->enableYcbcrEmulation;
        }

        // Disable a set of Vulkan features if BypassVulkanDeviceFeatureOverrides is NOT enabled.
        if (!m_emu->features.BypassVulkanDeviceFeatureOverrides.enabled) {
            VkPhysicalDeviceProtectedMemoryFeatures* protectedMemoryFeatures =
                vk_find_struct<VkPhysicalDeviceProtectedMemoryFeatures>(pFeatures);
            if (protectedMemoryFeatures != nullptr) {
                // Protected memory is not supported on emulators. Override feature
                // information to mark as unsupported (see b/329845987).
                protectedMemoryFeatures->protectedMemory = VK_FALSE;
            }

            VkPhysicalDevicePrivateDataFeatures* privateDataFeatures =
                vk_find_struct<VkPhysicalDevicePrivateDataFeatures>(pFeatures);
            if (privateDataFeatures != nullptr) {
                // Private data from the guest side is not currently supported and causes emulator
                // crashes with the dEQP-VK.api.object_management.private_data tests (b/368009403).
                privateDataFeatures->privateData = VK_FALSE;
            }

            // privateData is mirrored in the Vulkan 1.3 umbrella struct; clear it there too.
            VkPhysicalDeviceVulkan13Features* vulkan13Features =
                vk_find_struct<VkPhysicalDeviceVulkan13Features>(pFeatures);
            if (vulkan13Features != nullptr) {
                vulkan13Features->privateData = VK_FALSE;
            }

            if (m_emu->features.VulkanBatchedDescriptorSetUpdate.enabled) {
                // Currently not supporting iub due to descriptor set optimization.
                // TODO: fix the non-optimized descriptor set path and re-enable the features afterwads.
                // b/372217918
                VkPhysicalDeviceInlineUniformBlockFeatures* iubFeatures =
                    vk_find_struct<VkPhysicalDeviceInlineUniformBlockFeatures>(pFeatures);
                if (iubFeatures != nullptr) {
                    iubFeatures->inlineUniformBlock = VK_FALSE;
                }
                // Also clear the 1.3 umbrella copy of inlineUniformBlock.
                if (vulkan13Features != nullptr) {
                    vulkan13Features->inlineUniformBlock = VK_FALSE;
                }
            }
        }
    }
1435 
on_vkGetPhysicalDeviceImageFormatProperties(android::base::BumpPool * pool,VkPhysicalDevice boxed_physicalDevice,VkFormat format,VkImageType type,VkImageTiling tiling,VkImageUsageFlags usage,VkImageCreateFlags flags,VkImageFormatProperties * pImageFormatProperties)1436     VkResult on_vkGetPhysicalDeviceImageFormatProperties(
1437         android::base::BumpPool* pool, VkPhysicalDevice boxed_physicalDevice, VkFormat format,
1438         VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags,
1439         VkImageFormatProperties* pImageFormatProperties) {
1440         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
1441         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
1442         const bool emulatedTexture = isEmulatedCompressedTexture(format, physicalDevice, vk);
1443         if (emulatedTexture) {
1444             if (!supportEmulatedCompressedImageFormatProperty(format, type, tiling, usage, flags)) {
1445                 memset(pImageFormatProperties, 0, sizeof(VkImageFormatProperties));
1446                 return VK_ERROR_FORMAT_NOT_SUPPORTED;
1447             }
1448             flags &= ~VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT;
1449             flags |= VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
1450             usage |= VK_IMAGE_USAGE_STORAGE_BIT;
1451             format = CompressedImageInfo::getCompressedMipmapsFormat(format);
1452         }
1453 
1454         VkResult res = vk->vkGetPhysicalDeviceImageFormatProperties(
1455             physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties);
1456         if (res != VK_SUCCESS) {
1457             return res;
1458         }
1459         if (emulatedTexture) {
1460             maskImageFormatPropertiesForEmulatedTextures(pImageFormatProperties);
1461         }
1462         return res;
1463     }
1464 
on_vkGetPhysicalDeviceImageFormatProperties2(android::base::BumpPool * pool,VkPhysicalDevice boxed_physicalDevice,const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo,VkImageFormatProperties2 * pImageFormatProperties)1465     VkResult on_vkGetPhysicalDeviceImageFormatProperties2(
1466         android::base::BumpPool* pool, VkPhysicalDevice boxed_physicalDevice,
1467         const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
1468         VkImageFormatProperties2* pImageFormatProperties) {
1469         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
1470         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
1471         VkPhysicalDeviceImageFormatInfo2 imageFormatInfo;
1472         VkFormat format = pImageFormatInfo->format;
1473         const bool emulatedTexture = isEmulatedCompressedTexture(format, physicalDevice, vk);
1474         if (emulatedTexture) {
1475             if (!supportEmulatedCompressedImageFormatProperty(
1476                     pImageFormatInfo->format, pImageFormatInfo->type, pImageFormatInfo->tiling,
1477                     pImageFormatInfo->usage, pImageFormatInfo->flags)) {
1478                 memset(&pImageFormatProperties->imageFormatProperties, 0,
1479                        sizeof(VkImageFormatProperties));
1480                 return VK_ERROR_FORMAT_NOT_SUPPORTED;
1481             }
1482             imageFormatInfo = *pImageFormatInfo;
1483             pImageFormatInfo = &imageFormatInfo;
1484             imageFormatInfo.flags &= ~VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT;
1485             imageFormatInfo.flags |= VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
1486             imageFormatInfo.usage |= VK_IMAGE_USAGE_STORAGE_BIT;
1487             imageFormatInfo.format = CompressedImageInfo::getCompressedMipmapsFormat(format);
1488         }
1489         std::lock_guard<std::recursive_mutex> lock(mLock);
1490 
1491         auto* physdevInfo = android::base::find(mPhysdevInfo, physicalDevice);
1492         if (!physdevInfo) {
1493             return VK_ERROR_OUT_OF_HOST_MEMORY;
1494         }
1495 
1496         VkResult res = VK_ERROR_INITIALIZATION_FAILED;
1497 
1498         auto instance = mPhysicalDeviceToInstance[physicalDevice];
1499         auto* instanceInfo = android::base::find(mInstanceInfo, instance);
1500         if (!instanceInfo) {
1501             return res;
1502         }
1503 
1504         if (instanceInfo->apiVersion >= VK_MAKE_VERSION(1, 1, 0) &&
1505             physdevInfo->props.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) {
1506             res = vk->vkGetPhysicalDeviceImageFormatProperties2(physicalDevice, pImageFormatInfo,
1507                                                                 pImageFormatProperties);
1508         } else if (hasInstanceExtension(instance,
1509                                         VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
1510             res = vk->vkGetPhysicalDeviceImageFormatProperties2KHR(physicalDevice, pImageFormatInfo,
1511                                                                    pImageFormatProperties);
1512         } else {
1513             // No instance extension, fake it!!!!
1514             if (pImageFormatProperties->pNext) {
1515                 fprintf(stderr,
1516                         "%s: Warning: Trying to use extension struct in "
1517                         "VkPhysicalDeviceFeatures2 without having enabled "
1518                         "the extension!!!!11111\n",
1519                         __func__);
1520             }
1521             *pImageFormatProperties = {
1522                 VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
1523                 0,
1524             };
1525             res = vk->vkGetPhysicalDeviceImageFormatProperties(
1526                 physicalDevice, pImageFormatInfo->format, pImageFormatInfo->type,
1527                 pImageFormatInfo->tiling, pImageFormatInfo->usage, pImageFormatInfo->flags,
1528                 &pImageFormatProperties->imageFormatProperties);
1529         }
1530         if (res != VK_SUCCESS) {
1531             return res;
1532         }
1533 
1534         const VkPhysicalDeviceExternalImageFormatInfo* extImageFormatInfo =
1535             vk_find_struct<VkPhysicalDeviceExternalImageFormatInfo>(pImageFormatInfo);
1536         VkExternalImageFormatProperties* extImageFormatProps =
1537             vk_find_struct<VkExternalImageFormatProperties>(pImageFormatProperties);
1538 
1539         // Only allow dedicated allocations for external images.
1540         if (extImageFormatInfo && extImageFormatProps) {
1541             extImageFormatProps->externalMemoryProperties.externalMemoryFeatures |=
1542                 VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT;
1543         }
1544 
1545         if (emulatedTexture) {
1546             maskImageFormatPropertiesForEmulatedTextures(
1547                 &pImageFormatProperties->imageFormatProperties);
1548         }
1549 
1550         return res;
1551     }
1552 
on_vkGetPhysicalDeviceFormatProperties(android::base::BumpPool * pool,VkPhysicalDevice boxed_physicalDevice,VkFormat format,VkFormatProperties * pFormatProperties)1553     void on_vkGetPhysicalDeviceFormatProperties(android::base::BumpPool* pool,
1554                                                 VkPhysicalDevice boxed_physicalDevice,
1555                                                 VkFormat format,
1556                                                 VkFormatProperties* pFormatProperties) {
1557         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
1558         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
1559         getPhysicalDeviceFormatPropertiesCore<VkFormatProperties>(
1560             [vk](VkPhysicalDevice physicalDevice, VkFormat format,
1561                  VkFormatProperties* pFormatProperties) {
1562                 vk->vkGetPhysicalDeviceFormatProperties(physicalDevice, format, pFormatProperties);
1563             },
1564             vk, physicalDevice, format, pFormatProperties);
1565     }
1566 
on_vkGetPhysicalDeviceFormatProperties2(android::base::BumpPool * pool,VkPhysicalDevice boxed_physicalDevice,VkFormat format,VkFormatProperties2 * pFormatProperties)1567     void on_vkGetPhysicalDeviceFormatProperties2(android::base::BumpPool* pool,
1568                                                  VkPhysicalDevice boxed_physicalDevice,
1569                                                  VkFormat format,
1570                                                  VkFormatProperties2* pFormatProperties) {
1571         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
1572         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
1573 
1574         std::lock_guard<std::recursive_mutex> lock(mLock);
1575 
1576         auto* physdevInfo = android::base::find(mPhysdevInfo, physicalDevice);
1577         if (!physdevInfo) return;
1578 
1579         auto instance = mPhysicalDeviceToInstance[physicalDevice];
1580         auto* instanceInfo = android::base::find(mInstanceInfo, instance);
1581         if (!instanceInfo) return;
1582 
1583         if (instanceInfo->apiVersion >= VK_MAKE_VERSION(1, 1, 0) &&
1584             physdevInfo->props.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) {
1585             getPhysicalDeviceFormatPropertiesCore<VkFormatProperties2>(
1586                 [vk](VkPhysicalDevice physicalDevice, VkFormat format,
1587                      VkFormatProperties2* pFormatProperties) {
1588                     vk->vkGetPhysicalDeviceFormatProperties2(physicalDevice, format,
1589                                                              pFormatProperties);
1590                 },
1591                 vk, physicalDevice, format, pFormatProperties);
1592         } else if (hasInstanceExtension(instance,
1593                                         VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
1594             getPhysicalDeviceFormatPropertiesCore<VkFormatProperties2>(
1595                 [vk](VkPhysicalDevice physicalDevice, VkFormat format,
1596                      VkFormatProperties2* pFormatProperties) {
1597                     vk->vkGetPhysicalDeviceFormatProperties2KHR(physicalDevice, format,
1598                                                                 pFormatProperties);
1599                 },
1600                 vk, physicalDevice, format, pFormatProperties);
1601         } else {
1602             // No instance extension, fake it!!!!
1603             if (pFormatProperties->pNext) {
1604                 fprintf(stderr,
1605                         "%s: Warning: Trying to use extension struct in "
1606                         "vkGetPhysicalDeviceFormatProperties2 without having "
1607                         "enabled the extension!!!!11111\n",
1608                         __func__);
1609             }
1610             pFormatProperties->sType = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2;
1611             getPhysicalDeviceFormatPropertiesCore<VkFormatProperties>(
1612                 [vk](VkPhysicalDevice physicalDevice, VkFormat format,
1613                      VkFormatProperties* pFormatProperties) {
1614                     vk->vkGetPhysicalDeviceFormatProperties(physicalDevice, format,
1615                                                             pFormatProperties);
1616                 },
1617                 vk, physicalDevice, format, &pFormatProperties->formatProperties);
1618         }
1619     }
1620 
on_vkGetPhysicalDeviceProperties(android::base::BumpPool * pool,VkPhysicalDevice boxed_physicalDevice,VkPhysicalDeviceProperties * pProperties)1621     void on_vkGetPhysicalDeviceProperties(android::base::BumpPool* pool,
1622                                           VkPhysicalDevice boxed_physicalDevice,
1623                                           VkPhysicalDeviceProperties* pProperties) {
1624         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
1625         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
1626 
1627         vk->vkGetPhysicalDeviceProperties(physicalDevice, pProperties);
1628 
1629         if (pProperties->apiVersion > kMaxSafeVersion) {
1630             pProperties->apiVersion = kMaxSafeVersion;
1631         }
1632     }
1633 
on_vkGetPhysicalDeviceProperties2(android::base::BumpPool * pool,VkPhysicalDevice boxed_physicalDevice,VkPhysicalDeviceProperties2 * pProperties)1634     void on_vkGetPhysicalDeviceProperties2(android::base::BumpPool* pool,
1635                                            VkPhysicalDevice boxed_physicalDevice,
1636                                            VkPhysicalDeviceProperties2* pProperties) {
1637         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
1638         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
1639 
1640         std::lock_guard<std::recursive_mutex> lock(mLock);
1641 
1642         auto* physdevInfo = android::base::find(mPhysdevInfo, physicalDevice);
1643         if (!physdevInfo) return;
1644 
1645         auto instance = mPhysicalDeviceToInstance[physicalDevice];
1646         auto* instanceInfo = android::base::find(mInstanceInfo, instance);
1647         if (!instanceInfo) return;
1648 
1649         if (instanceInfo->apiVersion >= VK_MAKE_VERSION(1, 1, 0) &&
1650             physdevInfo->props.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) {
1651             vk->vkGetPhysicalDeviceProperties2(physicalDevice, pProperties);
1652         } else if (hasInstanceExtension(instance,
1653                                         VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
1654             vk->vkGetPhysicalDeviceProperties2KHR(physicalDevice, pProperties);
1655         } else {
1656             // No instance extension, fake it!!!!
1657             if (pProperties->pNext) {
1658                 fprintf(stderr,
1659                         "%s: Warning: Trying to use extension struct in "
1660                         "VkPhysicalDeviceProperties2 without having enabled "
1661                         "the extension!!!!11111\n",
1662                         __func__);
1663             }
1664             *pProperties = {
1665                 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
1666                 0,
1667             };
1668             vk->vkGetPhysicalDeviceProperties(physicalDevice, &pProperties->properties);
1669         }
1670 
1671         if (pProperties->properties.apiVersion > kMaxSafeVersion) {
1672             pProperties->properties.apiVersion = kMaxSafeVersion;
1673         }
1674     }
1675 
on_vkGetPhysicalDeviceQueueFamilyProperties(android::base::BumpPool * pool,VkPhysicalDevice boxed_physicalDevice,uint32_t * pQueueFamilyPropertyCount,VkQueueFamilyProperties * pQueueFamilyProperties)1676     void on_vkGetPhysicalDeviceQueueFamilyProperties(
1677         android::base::BumpPool* pool, VkPhysicalDevice boxed_physicalDevice,
1678         uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties) {
1679         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
1680         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
1681 
1682         const bool requiresPropertyOverrides = mEnableVirtualVkQueue && pQueueFamilyProperties;
1683         if (!requiresPropertyOverrides) {
1684             // Can just use results from the driver
1685             return vk->vkGetPhysicalDeviceQueueFamilyProperties(
1686                 physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
1687         }
1688 
1689         // Use cached queue family properties to accommodate for any property overrides/emulation
1690         std::lock_guard<std::recursive_mutex> lock(mLock);
1691         const PhysicalDeviceInfo* physicalDeviceInfo =
1692             android::base::find(mPhysdevInfo, physicalDevice);
1693         if (!physicalDeviceInfo) {
1694             ERR("Failed to find physical device info.");
1695             return;
1696         }
1697 
1698         const auto& properties = physicalDeviceInfo->queueFamilyProperties;
1699         *pQueueFamilyPropertyCount =
1700             std::min((uint32_t)properties.size(), *pQueueFamilyPropertyCount);
1701         for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; i++) {
1702             pQueueFamilyProperties[i] = properties[i];
1703         }
1704     }
1705 
on_vkGetPhysicalDeviceQueueFamilyProperties2(android::base::BumpPool * pool,VkPhysicalDevice boxed_physicalDevice,uint32_t * pQueueFamilyPropertyCount,VkQueueFamilyProperties2 * pQueueFamilyProperties)1706     void on_vkGetPhysicalDeviceQueueFamilyProperties2(
1707         android::base::BumpPool* pool, VkPhysicalDevice boxed_physicalDevice,
1708         uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties) {
1709         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
1710         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
1711 
1712         const bool requiresPropertyOverrides = mEnableVirtualVkQueue && pQueueFamilyProperties;
1713         if (!requiresPropertyOverrides) {
1714             // Can just use results from the driver
1715             return vk->vkGetPhysicalDeviceQueueFamilyProperties2(
1716                 physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
1717         }
1718 
1719         if (pQueueFamilyProperties->pNext) {
1720             // We still need to call the driver version to fill in any pNext values
1721             vk->vkGetPhysicalDeviceQueueFamilyProperties2(physicalDevice, pQueueFamilyPropertyCount,
1722                                                           pQueueFamilyProperties);
1723         }
1724 
1725         // Use cached queue family properties to accommodate for any property overrides/emulation
1726         std::lock_guard<std::recursive_mutex> lock(mLock);
1727         const PhysicalDeviceInfo* physicalDeviceInfo =
1728             android::base::find(mPhysdevInfo, physicalDevice);
1729         if (!physicalDeviceInfo) {
1730             ERR("Failed to find physical device info.");
1731             return;
1732         }
1733 
1734         const auto& properties = physicalDeviceInfo->queueFamilyProperties;
1735         *pQueueFamilyPropertyCount =
1736             std::min((uint32_t)properties.size(), *pQueueFamilyPropertyCount);
1737         for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; i++) {
1738             pQueueFamilyProperties[i].queueFamilyProperties = properties[i];
1739         }
1740     }
1741 
on_vkGetPhysicalDeviceMemoryProperties(android::base::BumpPool * pool,VkPhysicalDevice boxed_physicalDevice,VkPhysicalDeviceMemoryProperties * pMemoryProperties)1742     void on_vkGetPhysicalDeviceMemoryProperties(
1743         android::base::BumpPool* pool, VkPhysicalDevice boxed_physicalDevice,
1744         VkPhysicalDeviceMemoryProperties* pMemoryProperties) {
1745         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
1746         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
1747 
1748         std::lock_guard<std::recursive_mutex> lock(mLock);
1749 
1750         auto* physicalDeviceInfo = android::base::find(mPhysdevInfo, physicalDevice);
1751         if (!physicalDeviceInfo) {
1752             ERR("Failed to find physical device info.");
1753             return;
1754         }
1755 
1756         auto& physicalDeviceMemoryHelper = physicalDeviceInfo->memoryPropertiesHelper;
1757         *pMemoryProperties = physicalDeviceMemoryHelper->getGuestMemoryProperties();
1758     }
1759 
on_vkGetPhysicalDeviceMemoryProperties2(android::base::BumpPool * pool,VkPhysicalDevice boxed_physicalDevice,VkPhysicalDeviceMemoryProperties2 * pMemoryProperties)1760     void on_vkGetPhysicalDeviceMemoryProperties2(
1761         android::base::BumpPool* pool, VkPhysicalDevice boxed_physicalDevice,
1762         VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {
1763         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
1764         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
1765 
1766         auto* physicalDeviceInfo = android::base::find(mPhysdevInfo, physicalDevice);
1767         if (!physicalDeviceInfo) return;
1768 
1769         auto instance = mPhysicalDeviceToInstance[physicalDevice];
1770         auto* instanceInfo = android::base::find(mInstanceInfo, instance);
1771         if (!instanceInfo) return;
1772 
1773         if (instanceInfo->apiVersion >= VK_MAKE_VERSION(1, 1, 0) &&
1774             physicalDeviceInfo->props.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) {
1775             vk->vkGetPhysicalDeviceMemoryProperties2(physicalDevice, pMemoryProperties);
1776         } else if (hasInstanceExtension(instance,
1777                                         VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
1778             vk->vkGetPhysicalDeviceMemoryProperties2KHR(physicalDevice, pMemoryProperties);
1779         } else {
1780             // No instance extension, fake it!!!!
1781             if (pMemoryProperties->pNext) {
1782                 fprintf(stderr,
1783                         "%s: Warning: Trying to use extension struct in "
1784                         "VkPhysicalDeviceMemoryProperties2 without having enabled "
1785                         "the extension!!!!11111\n",
1786                         __func__);
1787             }
1788             *pMemoryProperties = {
1789                 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2,
1790                 0,
1791             };
1792         }
1793 
1794         auto& physicalDeviceMemoryHelper = physicalDeviceInfo->memoryPropertiesHelper;
1795         pMemoryProperties->memoryProperties =
1796             physicalDeviceMemoryHelper->getGuestMemoryProperties();
1797     }
1798 
    // Host-side handler for vkEnumerateDeviceExtensionProperties.
    //
    // Passes straight through to the driver unless emulation requires
    // advertising extensions the host does not expose:
    //  - VK_MVK_moltenvk on macOS when MoltenVK backs the instance (the guest
    //    checks for it to enable AHB support),
    //  - VK_KHR_sampler_ycbcr_conversion when YCbCr emulation is enabled.
    // Follows the standard Vulkan two-call idiom: with pProperties == nullptr
    // only the count is returned; otherwise up to *pPropertyCount entries are
    // written and VK_INCOMPLETE is returned if the list was truncated.
    VkResult on_vkEnumerateDeviceExtensionProperties(android::base::BumpPool* pool,
                                                     VkPhysicalDevice boxed_physicalDevice,
                                                     const char* pLayerName,
                                                     uint32_t* pPropertyCount,
                                                     VkExtensionProperties* pProperties) {
        auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
        auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);

        // Fast path: no synthetic extensions needed, let the driver answer.
        bool shouldPassthrough = !m_emu->enableYcbcrEmulation;
#if defined(__APPLE__)
        shouldPassthrough = shouldPassthrough && !m_emu->instanceSupportsMoltenVK;
#endif
        if (shouldPassthrough) {
            return vk->vkEnumerateDeviceExtensionProperties(physicalDevice, pLayerName,
                                                            pPropertyCount, pProperties);
        }

        // If MoltenVK is supported on host, we need to ensure that we include
        // VK_MVK_moltenvk extension in returned properties.
        std::vector<VkExtensionProperties> properties;
        VkResult result =
            enumerateDeviceExtensionProperties(vk, physicalDevice, pLayerName, properties);
        if (result != VK_SUCCESS) {
            return result;
        }

#if defined(__APPLE__) && defined(VK_MVK_moltenvk)
        // Guest will check for VK_MVK_moltenvk extension for enabling AHB support
        if (m_emu->instanceSupportsMoltenVK &&
            !hasDeviceExtension(properties, VK_MVK_MOLTENVK_EXTENSION_NAME)) {
            VkExtensionProperties mvk_props;
            strncpy(mvk_props.extensionName, VK_MVK_MOLTENVK_EXTENSION_NAME,
                    sizeof(mvk_props.extensionName));
            mvk_props.specVersion = VK_MVK_MOLTENVK_SPEC_VERSION;
            properties.push_back(mvk_props);
        }
#endif

        // Synthesize the sampler YCbCr conversion extension when it is emulated.
        if (m_emu->enableYcbcrEmulation &&
            !hasDeviceExtension(properties, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME)) {
            VkExtensionProperties ycbcr_props;
            strncpy(ycbcr_props.extensionName, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME,
                    sizeof(ycbcr_props.extensionName));
            ycbcr_props.specVersion = VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION;
            properties.push_back(ycbcr_props);
        }
        if (pProperties == nullptr) {
            // First call of the two-call idiom: report the total count only.
            *pPropertyCount = properties.size();
        } else {
            // return number of structures actually written to pProperties.
            *pPropertyCount = std::min((uint32_t)properties.size(), *pPropertyCount);
            memcpy(pProperties, properties.data(), *pPropertyCount * sizeof(VkExtensionProperties));
        }
        return *pPropertyCount < properties.size() ? VK_INCOMPLETE : VK_SUCCESS;
    }
1854 
    // Creates the host VkDevice backing a guest logical device.
    //
    // Beyond forwarding to the driver's vkCreateDevice, this:
    //  - filters the guest's requested extensions/features down to what the
    //    host supports (and what the emulator intends to emulate instead);
    //  - appends host-side pNext structs (private data, NV diagnostics
    //    config, MoltenVK portability subset on Apple);
    //  - records per-device bookkeeping (DeviceInfo, queue maps, dispatch
    //    table) and returns a boxed handle through *pDevice.
    VkResult on_vkCreateDevice(android::base::BumpPool* pool, VkPhysicalDevice boxed_physicalDevice,
                               const VkDeviceCreateInfo* pCreateInfo,
                               const VkAllocationCallbacks* pAllocator, VkDevice* pDevice) {
        auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
        auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);

        // Drop guest-requested extensions the host cannot honor, then append
        // extensions required by host-side helpers (e.g. device-lost
        // diagnostics).
        std::vector<const char*> updatedDeviceExtensions =
            filteredDeviceExtensionNames(vk, physicalDevice, pCreateInfo->enabledExtensionCount,
                                         pCreateInfo->ppEnabledExtensionNames);

        m_emu->deviceLostHelper.addNeededDeviceExtensions(&updatedDeviceExtensions);

        uint32_t supportedFenceHandleTypes = 0;
        uint32_t supportedBinarySemaphoreHandleTypes = 0;
        // Run the underlying API call, filtering extensions.
        // Shallow copy: pNext chain still points into guest-owned structs, so
        // feature filtering below edits those chained structs in place.
        VkDeviceCreateInfo createInfoFiltered = *pCreateInfo;
        // According to the spec, it seems that the application can use compressed texture formats
        // without enabling the feature when creating the VkDevice, as long as
        // vkGetPhysicalDeviceFormatProperties and vkGetPhysicalDeviceImageFormatProperties reports
        // support: to query for additional properties, or if the feature is not enabled,
        // vkGetPhysicalDeviceFormatProperties and vkGetPhysicalDeviceImageFormatProperties can be
        // used to check for supported properties of individual formats as normal.
        bool emulateTextureEtc2 = needEmulatedEtc2(physicalDevice, vk);
        bool emulateTextureAstc = needEmulatedAstc(physicalDevice, vk);
        // Local copy of the guest's core feature struct so emulated-compression
        // bits can be cleared without mutating guest memory.
        VkPhysicalDeviceFeatures featuresFiltered;
        std::vector<VkPhysicalDeviceFeatures*> featuresToFilter;

        if (pCreateInfo->pEnabledFeatures) {
            featuresFiltered = *pCreateInfo->pEnabledFeatures;
            createInfoFiltered.pEnabledFeatures = &featuresFiltered;
            featuresToFilter.emplace_back(&featuresFiltered);
        }

        // TODO(b/378686769): Force enable private data feature when available to
        //  mitigate the issues with duplicated vulkan handles. This should be
        //  removed once the issue is properly fixed.
        // NOTE: must outlive the vkCreateDevice call below since it may be
        // linked into createInfoFiltered's pNext chain.
        VkPhysicalDevicePrivateDataFeatures forceEnablePrivateData = {
            VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES,
            nullptr,
            VK_TRUE,
        };
        if (m_emu->deviceInfo.supportsPrivateData) {
            VkPhysicalDevicePrivateDataFeatures* privateDataFeatures =
                vk_find_struct<VkPhysicalDevicePrivateDataFeatures>(&createInfoFiltered);
            if (privateDataFeatures != nullptr) {
                // Guest already chained the struct; just force the bit on.
                privateDataFeatures->privateData = VK_TRUE;
            } else {
                // Insert into device create info chain
                forceEnablePrivateData.pNext = const_cast<void*>(createInfoFiltered.pNext);
                createInfoFiltered.pNext = &forceEnablePrivateData;
                privateDataFeatures = &forceEnablePrivateData;
            }
        }

        // A chained VkPhysicalDeviceFeatures2 also carries core features that
        // need the same compressed-texture filtering as pEnabledFeatures.
        if (VkPhysicalDeviceFeatures2* features2 =
                vk_find_struct<VkPhysicalDeviceFeatures2>(&createInfoFiltered)) {
            featuresToFilter.emplace_back(&features2->features);
        }

        VkPhysicalDeviceDiagnosticsConfigFeaturesNV deviceDiagnosticsConfigFeatures = {
            .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV,
            .diagnosticsConfig = VK_TRUE,
        };
        if (m_emu->commandBufferCheckpointsSupportedAndRequested) {
            deviceDiagnosticsConfigFeatures.pNext = const_cast<void*>(createInfoFiltered.pNext);
            createInfoFiltered.pNext = &deviceDiagnosticsConfigFeatures;
        }

        // Formats emulated on the host must not be requested natively from
        // the driver.
        for (VkPhysicalDeviceFeatures* feature : featuresToFilter) {
            if (emulateTextureEtc2) {
                feature->textureCompressionETC2 = VK_FALSE;
            }
            if (emulateTextureAstc) {
                feature->textureCompressionASTC_LDR = VK_FALSE;
            }
        }

        // When Ycbcr conversion is emulated (and the driver lacks native
        // support), do not ask the driver for the feature.
        if (auto* ycbcrFeatures = vk_find_struct<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(
                &createInfoFiltered)) {
            if (m_emu->enableYcbcrEmulation && !m_emu->deviceInfo.supportsSamplerYcbcrConversion) {
                ycbcrFeatures->samplerYcbcrConversion = VK_FALSE;
            }
        }

        if (auto* swapchainMaintenance1Features =
                vk_find_struct<VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT>(
                    &createInfoFiltered)) {
            if (!supportsSwapchainMaintenance1(physicalDevice, vk)) {
                swapchainMaintenance1Features->swapchainMaintenance1 = VK_FALSE;
            }
        }

#ifdef __APPLE__
#ifndef VK_ENABLE_BETA_EXTENSIONS
        // TODO(b/349066492): Update Vulkan headers, stringhelpers and compilation parameters
        // to use this directly from beta extensions and use regular chain append commands
        const VkStructureType VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR =
            (VkStructureType)1000163000;
#endif
        // Enable all portability features supported on the device
        VkPhysicalDevicePortabilitySubsetFeaturesKHR supportedPortabilityFeatures = {
            VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR, nullptr};
        if (m_emu->instanceSupportsMoltenVK) {
            // Query which portability-subset features the device supports,
            // then chain the filled struct into the create info.
            VkPhysicalDeviceFeatures2 features2 = {
                .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
                .pNext = &supportedPortabilityFeatures,
            };
            vk->vkGetPhysicalDeviceFeatures2(physicalDevice, &features2);

            if (mVerbosePrints) {
                fprintf(stderr,
                        "VERBOSE:%s: MoltenVK supportedPortabilityFeatures\n"
                        "constantAlphaColorBlendFactors = %d\n"
                        "events = %d\n"
                        "imageViewFormatReinterpretation = %d\n"
                        "imageViewFormatSwizzle = %d\n"
                        "imageView2DOn3DImage = %d\n"
                        "multisampleArrayImage = %d\n"
                        "mutableComparisonSamplers = %d\n"
                        "pointPolygons = %d\n"
                        "samplerMipLodBias = %d\n"
                        "separateStencilMaskRef = %d\n"
                        "shaderSampleRateInterpolationFunctions = %d\n"
                        "tessellationIsolines = %d\n"
                        "tessellationPointMode = %d\n"
                        "triangleFans = %d\n"
                        "vertexAttributeAccessBeyondStride = %d\n",
                        __func__, supportedPortabilityFeatures.constantAlphaColorBlendFactors,
                        supportedPortabilityFeatures.events,
                        supportedPortabilityFeatures.imageViewFormatReinterpretation,
                        supportedPortabilityFeatures.imageViewFormatSwizzle,
                        supportedPortabilityFeatures.imageView2DOn3DImage,
                        supportedPortabilityFeatures.multisampleArrayImage,
                        supportedPortabilityFeatures.mutableComparisonSamplers,
                        supportedPortabilityFeatures.pointPolygons,
                        supportedPortabilityFeatures.samplerMipLodBias,
                        supportedPortabilityFeatures.separateStencilMaskRef,
                        supportedPortabilityFeatures.shaderSampleRateInterpolationFunctions,
                        supportedPortabilityFeatures.tessellationIsolines,
                        supportedPortabilityFeatures.tessellationPointMode,
                        supportedPortabilityFeatures.triangleFans,
                        supportedPortabilityFeatures.vertexAttributeAccessBeyondStride);
            }

            // Insert into device create info chain
            supportedPortabilityFeatures.pNext = const_cast<void*>(createInfoFiltered.pNext);
            createInfoFiltered.pNext = &supportedPortabilityFeatures;
        }
#endif

        // Filter device memory report as callbacks can not be passed between guest and host.
        vk_struct_chain_filter<VkDeviceDeviceMemoryReportCreateInfoEXT>(&createInfoFiltered);

        // Filter device groups as they are effectively disabled.
        vk_struct_chain_filter<VkDeviceGroupDeviceCreateInfo>(&createInfoFiltered);

        createInfoFiltered.enabledExtensionCount = (uint32_t)updatedDeviceExtensions.size();
        createInfoFiltered.ppEnabledExtensionNames = updatedDeviceExtensions.data();

        // bug: 155795731
        // Swiftshader requires mLock to be held across vkCreateDevice itself;
        // other ICDs only need it for the bookkeeping afterwards.
        bool swiftshader =
            (android::base::getEnvironmentVariable("ANDROID_EMU_VK_ICD").compare("swiftshader") ==
             0);

        std::unique_ptr<std::lock_guard<std::recursive_mutex>> lock = nullptr;

        if (swiftshader) {
            lock = std::make_unique<std::lock_guard<std::recursive_mutex>>(mLock);
        }

        VkResult result =
            vk->vkCreateDevice(physicalDevice, &createInfoFiltered, pAllocator, pDevice);

        if (result != VK_SUCCESS) return result;

        if (!swiftshader) {
            lock = std::make_unique<std::lock_guard<std::recursive_mutex>>(mLock);
        }

        mDeviceToPhysicalDevice[*pDevice] = physicalDevice;

        auto physicalDeviceInfoIt = mPhysdevInfo.find(physicalDevice);
        if (physicalDeviceInfoIt == mPhysdevInfo.end()) return VK_ERROR_INITIALIZATION_FAILED;
        auto& physicalDeviceInfo = physicalDeviceInfoIt->second;

        auto instanceInfoIt = mInstanceInfo.find(physicalDeviceInfo.instance);
        if (instanceInfoIt == mInstanceInfo.end()) return VK_ERROR_INITIALIZATION_FAILED;
        auto& instanceInfo = instanceInfoIt->second;

        // Fill out information about the logical device here.
        VALIDATE_NEW_HANDLE_INFO_ENTRY(mDeviceInfo, *pDevice);
        auto& deviceInfo = mDeviceInfo[*pDevice];
        deviceInfo.physicalDevice = physicalDevice;
        deviceInfo.emulateTextureEtc2 = emulateTextureEtc2;
        deviceInfo.emulateTextureAstc = emulateTextureAstc;
        deviceInfo.useAstcCpuDecompression =
            m_emu->astcLdrEmulationMode == AstcEmulationMode::Cpu &&
            AstcCpuDecompressor::get().available();
        deviceInfo.decompPipelines =
            std::make_unique<GpuDecompressionPipelineManager>(m_vk, *pDevice);
        getSupportedFenceHandleTypes(vk, physicalDevice, &supportedFenceHandleTypes);
        getSupportedSemaphoreHandleTypes(vk, physicalDevice, &supportedBinarySemaphoreHandleTypes);

        deviceInfo.externalFenceInfo.supportedFenceHandleTypes =
            static_cast<VkExternalFenceHandleTypeFlagBits>(supportedFenceHandleTypes);
        deviceInfo.externalFenceInfo.supportedBinarySemaphoreHandleTypes =
            static_cast<VkExternalSemaphoreHandleTypeFlagBits>(supportedBinarySemaphoreHandleTypes);

        INFO("Created VkDevice:%p for application:%s engine:%s ASTC emulation:%s CPU decoding:%s.",
             *pDevice, instanceInfo.applicationName.c_str(), instanceInfo.engineName.c_str(),
             deviceInfo.emulateTextureAstc ? "on" : "off",
             deviceInfo.useAstcCpuDecompression ? "on" : "off");

        // Record the final (filtered) extension list, not the guest's.
        for (uint32_t i = 0; i < createInfoFiltered.enabledExtensionCount; ++i) {
            deviceInfo.enabledExtensionNames.push_back(
                createInfoFiltered.ppEnabledExtensionNames[i]);
        }

        // First, get the dispatch table.
        VkDevice boxedDevice = new_boxed_VkDevice(*pDevice, nullptr, true /* own dispatch */);

        if (mLogging) {
            INFO("%s: init vulkan dispatch from device", __func__);
        }

        VulkanDispatch* dispatch = dispatch_VkDevice(boxedDevice);
        init_vulkan_dispatch_from_device(vk, *pDevice, dispatch);
        if (m_emu->debugUtilsAvailableAndRequested) {
            deviceInfo.debugUtilsHelper = DebugUtilsHelper::withUtilsEnabled(*pDevice, dispatch);
        }

        deviceInfo.externalFencePool =
            std::make_unique<ExternalFencePool<VulkanDispatch>>(dispatch, *pDevice);

        deviceInfo.deviceOpTracker = std::make_shared<DeviceOpTracker>(*pDevice, dispatch);

        if (mLogging) {
            INFO("%s: init vulkan dispatch from device (end)", __func__);
        }

        deviceInfo.boxed = boxedDevice;

        // Associate the device with a virtio-gpu context id: from the
        // snapshot-load map when restoring, otherwise from the current
        // render thread.
        if (mSnapshotState == SnapshotState::Loading) {
            if (!mSnapshotLoadVkDeviceToVirtioCpuContextId) {
                GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
                    << "Missing device to context id map during snapshot load.";
            }
            auto contextIdIt = mSnapshotLoadVkDeviceToVirtioCpuContextId->find(boxedDevice);
            if (contextIdIt == mSnapshotLoadVkDeviceToVirtioCpuContextId->end()) {
                GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
                    << "Missing context id for VkDevice:" << boxedDevice;
            }
            deviceInfo.virtioGpuContextId = contextIdIt->second;
        } else {
            auto* renderThreadInfo = RenderThreadInfoVk::get();
            deviceInfo.virtioGpuContextId = renderThreadInfo->ctx_id;
        }

        // Next, get information about the queue families used by this device.
        std::unordered_map<uint32_t, uint32_t> queueFamilyIndexCounts;
        for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
            const auto& queueCreateInfo = pCreateInfo->pQueueCreateInfos[i];
            // Check only queues created with flags = 0 in VkDeviceQueueCreateInfo.
            auto flags = queueCreateInfo.flags;
            if (flags) continue;
            uint32_t queueFamilyIndex = queueCreateInfo.queueFamilyIndex;
            uint32_t queueCount = queueCreateInfo.queueCount;
            queueFamilyIndexCounts[queueFamilyIndex] = queueCount;
        }

        std::vector<uint64_t> extraHandles;
        for (auto it : queueFamilyIndexCounts) {
            auto index = it.first;
            auto count = it.second;
            // A request for 2 queues on a family may be satisfied with one
            // physical queue plus a "virtual" alias of it (see below).
            auto addVirtualQueue = (count == 2) && physicalDeviceInfo.hasVirtualGraphicsQueues;
            auto& queues = deviceInfo.queues[index];
            for (uint32_t i = 0; i < count; ++i) {
                VkQueue physicalQueue;

                if (mLogging) {
                    INFO("%s: get device queue (begin)", __func__);
                }

                assert(i == 0 || !physicalDeviceInfo.hasVirtualGraphicsQueues);
                vk->vkGetDeviceQueue(*pDevice, index, i, &physicalQueue);

                if (mLogging) {
                    INFO("%s: get device queue (end)", __func__);
                }
                auto boxedQueue =
                    new_boxed_VkQueue(physicalQueue, dispatch, false /* does not own dispatch */);
                extraHandles.push_back((uint64_t)boxedQueue);

                VALIDATE_NEW_HANDLE_INFO_ENTRY(mQueueInfo, physicalQueue);
                QueueInfo& physicalQueueInfo = mQueueInfo[physicalQueue];
                physicalQueueInfo.device = *pDevice;
                physicalQueueInfo.queueFamilyIndex = index;
                physicalQueueInfo.boxed = boxedQueue;
                physicalQueueInfo.physicalQueueLock = std::make_shared<android::base::Lock>();
                queues.push_back(physicalQueue);

                if (addVirtualQueue) {
                    VERBOSE("Creating virtual device queue for physical VkQueue %p", physicalQueue);
                    const uint64_t physicalQueue64 = reinterpret_cast<uint64_t>(physicalQueue);

                    if ((physicalQueue64 & QueueInfo::kVirtualQueueBit) != 0) {
                        // Cannot use queue virtualization on this GPU, where the physical handle
                        // values generated are not 2-byte aligned. This is very unusual, but the
                        // spec is not enforcing handle values to be aligned and the driver is free
                        // to use a similar logic to use the last bit for other purposes.
                        // In this case, we disable the virtual queue support and unboxing will not
                        // remove the last bit coming from the actual driver.
                        ERR("Cannot create virtual queue for handle %p", physicalQueue);
                        mEnableVirtualVkQueue = false;
                    } else {
                        // The virtual queue handle is the physical handle with
                        // the low bit set; both share the same submission lock.
                        uint64_t virtualQueue64 = (physicalQueue64 | QueueInfo::kVirtualQueueBit);
                        VkQueue virtualQueue = reinterpret_cast<VkQueue>(virtualQueue64);

                        auto boxedVirtualQueue = new_boxed_VkQueue(
                            virtualQueue, dispatch, false /* does not own dispatch */);
                        extraHandles.push_back((uint64_t)boxedVirtualQueue);

                        VALIDATE_NEW_HANDLE_INFO_ENTRY(mQueueInfo, virtualQueue);
                        QueueInfo& virtualQueueInfo = mQueueInfo[virtualQueue];
                        virtualQueueInfo.device = physicalQueueInfo.device;
                        virtualQueueInfo.queueFamilyIndex = physicalQueueInfo.queueFamilyIndex;
                        virtualQueueInfo.boxed = boxedVirtualQueue;
                        virtualQueueInfo.physicalQueueLock =
                            physicalQueueInfo.physicalQueueLock;  // Shares the same lock!
                        queues.push_back(virtualQueue);
                    }
                    // The virtual queue satisfies the second requested queue.
                    i++;
                }
            }
        }
        if (snapshotsEnabled()) {
            snapshot()->createExtraHandlesForNextApi(extraHandles.data(), extraHandles.size());
        }

        // Box the device.
        *pDevice = (VkDevice)deviceInfo.boxed;

        if (mLogging) {
            INFO("%s: (end)", __func__);
        }

        return VK_SUCCESS;
    }
2203 
on_vkGetDeviceQueue(android::base::BumpPool * pool,VkDevice boxed_device,uint32_t queueFamilyIndex,uint32_t queueIndex,VkQueue * pQueue)2204     void on_vkGetDeviceQueue(android::base::BumpPool* pool, VkDevice boxed_device,
2205                              uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue) {
2206         auto device = unbox_VkDevice(boxed_device);
2207 
2208         std::lock_guard<std::recursive_mutex> lock(mLock);
2209 
2210         *pQueue = VK_NULL_HANDLE;
2211 
2212         auto* deviceInfo = android::base::find(mDeviceInfo, device);
2213         if (!deviceInfo) return;
2214 
2215         const auto& queues = deviceInfo->queues;
2216 
2217         const auto* queueList = android::base::find(queues, queueFamilyIndex);
2218         if (!queueList) return;
2219         if (queueIndex >= queueList->size()) return;
2220 
2221         VkQueue unboxedQueue = (*queueList)[queueIndex];
2222 
2223         auto* queueInfo = android::base::find(mQueueInfo, unboxedQueue);
2224         if (!queueInfo) {
2225             ERR("vkGetDeviceQueue failed on queue: %p", unboxedQueue);
2226             return;
2227         }
2228 
2229         *pQueue = queueInfo->boxed;
2230     }
2231 
on_vkGetDeviceQueue2(android::base::BumpPool * pool,VkDevice boxed_device,const VkDeviceQueueInfo2 * pQueueInfo,VkQueue * pQueue)2232     void on_vkGetDeviceQueue2(android::base::BumpPool* pool, VkDevice boxed_device,
2233                               const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue) {
2234         // Protected memory is not supported on emulators. So we should
2235         // not return any queue if a client requests a protected device
2236         // queue. See b/328436383.
2237         if (pQueueInfo->flags & VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT) {
2238             *pQueue = VK_NULL_HANDLE;
2239             INFO("%s: Cannot get protected Vulkan device queue", __func__);
2240             return;
2241         }
2242         uint32_t queueFamilyIndex = pQueueInfo->queueFamilyIndex;
2243         uint32_t queueIndex = pQueueInfo->queueIndex;
2244         on_vkGetDeviceQueue(pool, boxed_device, queueFamilyIndex, queueIndex, pQueue);
2245     }
2246 
    // Tears down |device| together with all tracked state that belongs to it.
    // |fenceInfos| and |queueInfos| are the tracking maps to scrub (passed in
    // explicitly rather than read from members); entries whose .device matches
    // are destroyed and erased. The teardown order below is deliberate —
    // see the inline comments.
    void destroyDeviceWithExclusiveInfo(VkDevice device, DeviceInfo& deviceInfo,
                                        std::unordered_map<VkFence, FenceInfo>& fenceInfos,
                                        std::unordered_map<VkQueue, QueueInfo>& queueInfos,
                                        const VkAllocationCallbacks* pAllocator) {
        // Decompression pipelines hold device resources; release them first.
        deviceInfo.decompPipelines->clear();

        // Drop queue tracking entries (and their boxed handles) for this
        // device. Queues themselves are owned by the device.
        auto eraseIt = queueInfos.begin();
        for (; eraseIt != queueInfos.end();) {
            if (eraseIt->second.device == device) {
                eraseIt->second.physicalQueueLock.reset();
                delete_VkQueue(eraseIt->second.boxed);
                eraseIt = queueInfos.erase(eraseIt);
            } else {
                ++eraseIt;
            }
        }

        VulkanDispatch* deviceDispatch = dispatch_VkDevice(deviceInfo.boxed);

        // Destroy every tracked fence created on this device. Recycling into
        // the external fence pool is disallowed since the pool is drained
        // below.
        for (auto fenceInfoIt = fenceInfos.begin(); fenceInfoIt != fenceInfos.end();) {
            auto fence = fenceInfoIt->first;
            auto& fenceInfo = fenceInfoIt->second;
            if (fenceInfo.device == device) {
                destroyFenceWithExclusiveInfo(device, deviceDispatch, deviceInfo, fence, fenceInfo,
                                              nullptr, /*allowExternalFenceRecycling=*/false);
                fenceInfoIt = fenceInfos.erase(fenceInfoIt);
            } else {
                ++fenceInfoIt;
            }
        }

        // Should happen before destroying fences
        deviceInfo.deviceOpTracker->OnDestroyDevice();

        // Destroy pooled external fences
        auto deviceFences = deviceInfo.externalFencePool->popAll();
        for (auto fence : deviceFences) {
            deviceDispatch->vkDestroyFence(device, fence, pAllocator);
            fenceInfos.erase(fence);
        }

        // Run the underlying API call.
        m_vk->vkDestroyDevice(device, pAllocator);

        // Finally release the boxed device handle (and its owned dispatch).
        delete_VkDevice(deviceInfo.boxed);
    }
2293 
destroyDeviceLocked(VkDevice device,const VkAllocationCallbacks * pAllocator)2294     void destroyDeviceLocked(VkDevice device, const VkAllocationCallbacks* pAllocator) {
2295         auto deviceInfoIt = mDeviceInfo.find(device);
2296         if (deviceInfoIt == mDeviceInfo.end()) return;
2297         auto& deviceInfo = deviceInfoIt->second;
2298 
2299         destroyDeviceWithExclusiveInfo(device, deviceInfo, mFenceInfo, mQueueInfo, pAllocator);
2300 
2301         mDeviceInfo.erase(device);
2302         mDeviceToPhysicalDevice.erase(device);
2303     }
2304 
on_vkDestroyDevice(android::base::BumpPool * pool,VkDevice boxed_device,const VkAllocationCallbacks * pAllocator)2305     void on_vkDestroyDevice(android::base::BumpPool* pool, VkDevice boxed_device,
2306                             const VkAllocationCallbacks* pAllocator) {
2307         auto device = unbox_VkDevice(boxed_device);
2308 
2309         std::lock_guard<std::recursive_mutex> lock(mLock);
2310 
2311         sBoxedHandleManager.processDelayedRemovesGlobalStateLocked(device);
2312 
2313         destroyDeviceLocked(device, pAllocator);
2314     }
2315 
    // vkCreateBuffer entry point. May rewrite the create info before calling
    // the driver:
    //  - with snapshots enabled, TRANSFER_DST buffers also get TRANSFER_SRC
    //    so their contents can be read back at snapshot time;
    //  - with VulkanAllocateHostMemory, a VkExternalMemoryBufferCreateInfo
    //    hinting host-allocation is chained in.
    // On success the buffer is tracked in mBufferInfo and *pBuffer is boxed.
    VkResult on_vkCreateBuffer(android::base::BumpPool* pool, VkDevice boxed_device,
                               const VkBufferCreateInfo* pCreateInfo,
                               const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer) {
        auto device = unbox_VkDevice(boxed_device);
        auto vk = dispatch_VkDevice(boxed_device);
        // Local, mutable copy of the create info; pCreateInfo is redirected
        // to it whenever a modification is needed.
        VkBufferCreateInfo localCreateInfo;
        if (snapshotsEnabled()) {
            localCreateInfo = *pCreateInfo;
            // Add transfer src bit for potential device local memories.
            //
            // There are 3 ways to populate buffer content:
            //   a) use host coherent memory and memory mapping;
            //   b) use transfer_dst and vkcmdcopy* (for device local memories);
            //   c) use storage and compute shaders.
            //
            // (a) is covered by memory snapshot. (b) requires an extra vkCmdCopyBuffer
            // command on snapshot, thus we need to add transfer_src for (b) so that
            // they could be loaded back on snapshot save. (c) is still future work.
            if (localCreateInfo.usage & VK_BUFFER_USAGE_TRANSFER_DST_BIT) {
                localCreateInfo.usage |= VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
            }
            pCreateInfo = &localCreateInfo;
        }

        // NOTE: must outlive the vkCreateBuffer call since it may be chained
        // into localCreateInfo.pNext below.
        VkExternalMemoryBufferCreateInfo externalCI = {
            VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO};
        if (m_emu->features.VulkanAllocateHostMemory.enabled) {
            // If the snapshot branch ran, pCreateInfo already points at
            // localCreateInfo and this copy is a benign self-assignment.
            localCreateInfo = *pCreateInfo;
            // Hint that we 'may' use host allocation for this buffer. This will only be used for
            // host visible memory.
            externalCI.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT;

            // Insert the new struct to the chain
            externalCI.pNext = localCreateInfo.pNext;
            localCreateInfo.pNext = &externalCI;

            pCreateInfo = &localCreateInfo;
        }

        VkResult result = vk->vkCreateBuffer(device, pCreateInfo, pAllocator, pBuffer);

        if (result == VK_SUCCESS) {
            std::lock_guard<std::recursive_mutex> lock(mLock);
            VALIDATE_NEW_HANDLE_INFO_ENTRY(mBufferInfo, *pBuffer);
            auto& bufInfo = mBufferInfo[*pBuffer];
            bufInfo.device = device;
            // Record the (possibly augmented) usage and size for snapshotting.
            bufInfo.usage = pCreateInfo->usage;
            bufInfo.size = pCreateInfo->size;
            *pBuffer = new_boxed_non_dispatchable_VkBuffer(*pBuffer);
        }

        return result;
    }
2369 
destroyBufferWithExclusiveInfo(VkDevice device,VulkanDispatch * deviceDispatch,VkBuffer buffer,BufferInfo & bufferInfo,const VkAllocationCallbacks * pAllocator)2370     void destroyBufferWithExclusiveInfo(VkDevice device, VulkanDispatch* deviceDispatch,
2371                                         VkBuffer buffer, BufferInfo& bufferInfo,
2372                                         const VkAllocationCallbacks* pAllocator) {
2373         deviceDispatch->vkDestroyBuffer(device, buffer, pAllocator);
2374     }
2375 
destroyBufferLocked(VkDevice device,VulkanDispatch * deviceDispatch,VkBuffer buffer,const VkAllocationCallbacks * pAllocator)2376     void destroyBufferLocked(VkDevice device, VulkanDispatch* deviceDispatch, VkBuffer buffer,
2377                              const VkAllocationCallbacks* pAllocator) {
2378         auto bufferInfoIt = mBufferInfo.find(buffer);
2379         if (bufferInfoIt == mBufferInfo.end()) return;
2380         auto& bufferInfo = bufferInfoIt->second;
2381 
2382         destroyBufferWithExclusiveInfo(device, deviceDispatch, buffer, bufferInfo, pAllocator);
2383 
2384         mBufferInfo.erase(buffer);
2385     }
2386 
on_vkDestroyBuffer(android::base::BumpPool * pool,VkDevice boxed_device,VkBuffer buffer,const VkAllocationCallbacks * pAllocator)2387     void on_vkDestroyBuffer(android::base::BumpPool* pool, VkDevice boxed_device, VkBuffer buffer,
2388                             const VkAllocationCallbacks* pAllocator) {
2389         auto device = unbox_VkDevice(boxed_device);
2390         auto deviceDispatch = dispatch_VkDevice(boxed_device);
2391 
2392         std::lock_guard<std::recursive_mutex> lock(mLock);
2393         destroyBufferLocked(device, deviceDispatch, buffer, pAllocator);
2394     }
2395 
setBufferMemoryBindInfoLocked(VkDevice device,VkBuffer buffer,VkDeviceMemory memory,VkDeviceSize memoryOffset)2396     void setBufferMemoryBindInfoLocked(VkDevice device, VkBuffer buffer, VkDeviceMemory memory,
2397                                        VkDeviceSize memoryOffset) {
2398         auto* bufferInfo = android::base::find(mBufferInfo, buffer);
2399         if (!bufferInfo) return;
2400         bufferInfo->memory = memory;
2401         bufferInfo->memoryOffset = memoryOffset;
2402 
2403         auto* memoryInfo = android::base::find(mMemoryInfo, memory);
2404         if (memoryInfo && memoryInfo->boundBuffer) {
2405             auto* deviceInfo = android::base::find(mDeviceInfo, device);
2406             if (deviceInfo) {
2407                 deviceInfo->debugUtilsHelper.addDebugLabel(buffer, "Buffer:%d",
2408                                                            *memoryInfo->boundBuffer);
2409             }
2410         }
2411     }
2412 
on_vkBindBufferMemory(android::base::BumpPool * pool,VkDevice boxed_device,VkBuffer buffer,VkDeviceMemory memory,VkDeviceSize memoryOffset)2413     VkResult on_vkBindBufferMemory(android::base::BumpPool* pool, VkDevice boxed_device,
2414                                    VkBuffer buffer, VkDeviceMemory memory,
2415                                    VkDeviceSize memoryOffset) {
2416         auto device = unbox_VkDevice(boxed_device);
2417         auto vk = dispatch_VkDevice(boxed_device);
2418 
2419         VALIDATE_REQUIRED_HANDLE(memory);
2420         VkResult result = vk->vkBindBufferMemory(device, buffer, memory, memoryOffset);
2421 
2422         if (result == VK_SUCCESS) {
2423             std::lock_guard<std::recursive_mutex> lock(mLock);
2424             setBufferMemoryBindInfoLocked(device, buffer, memory, memoryOffset);
2425         }
2426         return result;
2427     }
2428 
on_vkBindBufferMemory2(android::base::BumpPool * pool,VkDevice boxed_device,uint32_t bindInfoCount,const VkBindBufferMemoryInfo * pBindInfos)2429     VkResult on_vkBindBufferMemory2(android::base::BumpPool* pool, VkDevice boxed_device,
2430                                     uint32_t bindInfoCount,
2431                                     const VkBindBufferMemoryInfo* pBindInfos) {
2432         auto device = unbox_VkDevice(boxed_device);
2433         auto vk = dispatch_VkDevice(boxed_device);
2434 
2435         for (uint32_t i = 0; i < bindInfoCount; ++i) {
2436             VALIDATE_REQUIRED_HANDLE(pBindInfos[i].memory);
2437         }
2438         VkResult result = vk->vkBindBufferMemory2(device, bindInfoCount, pBindInfos);
2439 
2440         if (result == VK_SUCCESS) {
2441             std::lock_guard<std::recursive_mutex> lock(mLock);
2442             for (uint32_t i = 0; i < bindInfoCount; ++i) {
2443                 setBufferMemoryBindInfoLocked(device, pBindInfos[i].buffer, pBindInfos[i].memory,
2444                                               pBindInfos[i].memoryOffset);
2445             }
2446         }
2447 
2448         return result;
2449     }
2450 
on_vkBindBufferMemory2KHR(android::base::BumpPool * pool,VkDevice boxed_device,uint32_t bindInfoCount,const VkBindBufferMemoryInfo * pBindInfos)2451     VkResult on_vkBindBufferMemory2KHR(android::base::BumpPool* pool, VkDevice boxed_device,
2452                                        uint32_t bindInfoCount,
2453                                        const VkBindBufferMemoryInfo* pBindInfos) {
2454         auto device = unbox_VkDevice(boxed_device);
2455         auto vk = dispatch_VkDevice(boxed_device);
2456 
2457         for (uint32_t i = 0; i < bindInfoCount; ++i) {
2458             VALIDATE_REQUIRED_HANDLE(pBindInfos[i].memory);
2459         }
2460         VkResult result = vk->vkBindBufferMemory2KHR(device, bindInfoCount, pBindInfos);
2461 
2462         if (result == VK_SUCCESS) {
2463             std::lock_guard<std::recursive_mutex> lock(mLock);
2464             for (uint32_t i = 0; i < bindInfoCount; ++i) {
2465                 setBufferMemoryBindInfoLocked(device, pBindInfos[i].buffer, pBindInfos[i].memory,
2466                                               pBindInfos[i].memoryOffset);
2467             }
2468         }
2469 
2470         return result;
2471     }
2472 
on_vkCreateImage(android::base::BumpPool * pool,VkDevice boxed_device,const VkImageCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkImage * pImage,bool boxImage=true)2473     VkResult on_vkCreateImage(android::base::BumpPool* pool, VkDevice boxed_device,
2474                               const VkImageCreateInfo* pCreateInfo,
2475                               const VkAllocationCallbacks* pAllocator, VkImage* pImage,
2476                               bool boxImage = true) {
2477         auto device = unbox_VkDevice(boxed_device);
2478         auto vk = dispatch_VkDevice(boxed_device);
2479 
2480         if (pCreateInfo->format == VK_FORMAT_UNDEFINED) {
2481             // VUID-VkImageCreateInfo-pNext-01975:
2482             // If the pNext chain does not include a VkExternalFormatANDROID structure, or does
2483             // and its externalFormat member is 0, the format must not be VK_FORMAT_UNDEFINED.
2484             //
2485             // VkExternalFormatANDROID usages should be replaced with Vulkan formats on the guest
2486             // side during image creation. We don't support external formats on the host side and
2487             // format should be valid at this stage. This error indicates usage of an unsupported
2488             // external format, or an old system image.
2489             // We handle this here to better report the error and avoid crashes in the driver.
2490             ERR("vkCreateImage called with VK_FORMAT_UNDEFINED, external format is not supported.");
2491             return VK_ERROR_INITIALIZATION_FAILED;
2492         }
2493 
2494         std::lock_guard<std::recursive_mutex> lock(mLock);
2495 
2496         auto* deviceInfo = android::base::find(mDeviceInfo, device);
2497         if (!deviceInfo) {
2498             return VK_ERROR_OUT_OF_HOST_MEMORY;
2499         }
2500 
2501         if (deviceInfo->imageFormats.find(pCreateInfo->format) == deviceInfo->imageFormats.end()) {
2502             VERBOSE("gfxstream_texture_format_manifest: %s [%d]", string_VkFormat(pCreateInfo->format), pCreateInfo->format);
2503             deviceInfo->imageFormats.insert(pCreateInfo->format);
2504         }
2505 
2506         const bool needDecompression = deviceInfo->needEmulatedDecompression(pCreateInfo->format);
2507         CompressedImageInfo cmpInfo =
2508             needDecompression
2509                 ? CompressedImageInfo(device, *pCreateInfo, deviceInfo->decompPipelines.get())
2510                 : CompressedImageInfo(device);
2511         VkImageCreateInfo decompInfo;
2512         if (needDecompression) {
2513             decompInfo = cmpInfo.getOutputCreateInfo(*pCreateInfo);
2514             pCreateInfo = &decompInfo;
2515         }
2516 
2517         std::unique_ptr<AndroidNativeBufferInfo> anbInfo = nullptr;
2518         const VkNativeBufferANDROID* nativeBufferANDROID =
2519             vk_find_struct<VkNativeBufferANDROID>(pCreateInfo);
2520 
2521         VkResult createRes = VK_SUCCESS;
2522 
2523         if (nativeBufferANDROID) {
2524             auto* physicalDevice = android::base::find(mDeviceToPhysicalDevice, device);
2525             if (!physicalDevice) {
2526                 return VK_ERROR_DEVICE_LOST;
2527             }
2528 
2529             auto* physicalDeviceInfo = android::base::find(mPhysdevInfo, *physicalDevice);
2530             if (!physicalDeviceInfo) {
2531                 return VK_ERROR_DEVICE_LOST;
2532             }
2533 
2534             const VkPhysicalDeviceMemoryProperties& memoryProperties =
2535                 physicalDeviceInfo->memoryPropertiesHelper->getHostMemoryProperties();
2536 
2537             anbInfo = std::make_unique<AndroidNativeBufferInfo>();
2538             createRes =
2539                 prepareAndroidNativeBufferImage(vk, device, *pool, pCreateInfo, nativeBufferANDROID,
2540                                                 pAllocator, &memoryProperties, anbInfo.get());
2541             if (createRes == VK_SUCCESS) {
2542                 *pImage = anbInfo->image;
2543             }
2544         } else {
2545             createRes = vk->vkCreateImage(device, pCreateInfo, pAllocator, pImage);
2546         }
2547 
2548         if (createRes != VK_SUCCESS) return createRes;
2549 
2550         if (needDecompression) {
2551             cmpInfo.setOutputImage(*pImage);
2552             cmpInfo.createCompressedMipmapImages(vk, *pCreateInfo);
2553 
2554             if (cmpInfo.isAstc()) {
2555                 if (deviceInfo->useAstcCpuDecompression) {
2556                     cmpInfo.initAstcCpuDecompression(m_vk, mDeviceInfo[device].physicalDevice);
2557                 }
2558             }
2559         }
2560 
2561         VALIDATE_NEW_HANDLE_INFO_ENTRY(mImageInfo, *pImage);
2562         auto& imageInfo = mImageInfo[*pImage];
2563         imageInfo.device = device;
2564         imageInfo.cmpInfo = std::move(cmpInfo);
2565         imageInfo.imageCreateInfoShallow = vk_make_orphan_copy(*pCreateInfo);
2566         imageInfo.layout = pCreateInfo->initialLayout;
2567         if (nativeBufferANDROID) imageInfo.anbInfo = std::move(anbInfo);
2568 
2569         if (boxImage) {
2570             *pImage = new_boxed_non_dispatchable_VkImage(*pImage);
2571         }
2572         return createRes;
2573     }
2574 
    // Tears down host resources for |image| given exclusive access to its
    // ImageInfo.
    //
    // For Android-native-buffer-backed images (anbInfo set) we do not destroy
    // the VkImage here; only anbInfo is released below (its teardown presumably
    // owns the driver objects — see VkAndroidNativeBuffer.h).
    void destroyImageWithExclusiveInfo(VkDevice device, VulkanDispatch* deviceDispatch,
                                       VkImage image, ImageInfo& imageInfo,
                                       const VkAllocationCallbacks* pAllocator) {
        if (!imageInfo.anbInfo) {
            // Release emulated-decompression resources first.
            imageInfo.cmpInfo.destroy(deviceDispatch);
            // With emulated decompression, |image| can be the same handle as
            // cmpInfo's output image; skip the destroy in that case so the
            // handle is not destroyed twice.
            if (image != imageInfo.cmpInfo.outputImage()) {
                deviceDispatch->vkDestroyImage(device, image, pAllocator);
            }
        }

        imageInfo.anbInfo.reset();
    }
2587 
destroyImageLocked(VkDevice device,VulkanDispatch * deviceDispatch,VkImage image,const VkAllocationCallbacks * pAllocator)2588     void destroyImageLocked(VkDevice device, VulkanDispatch* deviceDispatch, VkImage image,
2589                             const VkAllocationCallbacks* pAllocator) {
2590         auto imageInfoIt = mImageInfo.find(image);
2591         if (imageInfoIt == mImageInfo.end()) return;
2592         auto& imageInfo = imageInfoIt->second;
2593 
2594         destroyImageWithExclusiveInfo(device, deviceDispatch, image, imageInfo, pAllocator);
2595 
2596         mImageInfo.erase(image);
2597     }
2598 
on_vkDestroyImage(android::base::BumpPool * pool,VkDevice boxed_device,VkImage image,const VkAllocationCallbacks * pAllocator)2599     void on_vkDestroyImage(android::base::BumpPool* pool, VkDevice boxed_device, VkImage image,
2600                            const VkAllocationCallbacks* pAllocator) {
2601         auto device = unbox_VkDevice(boxed_device);
2602         auto deviceDispatch = dispatch_VkDevice(boxed_device);
2603 
2604         std::lock_guard<std::recursive_mutex> lock(mLock);
2605         destroyImageLocked(device, deviceDispatch, image, pAllocator);
2606     }
2607 
    // Implements the "deferred AHB bind" path: the guest created a VkImage with
    // no memory and later binds it to an AHardwareBuffer via a
    // VkNativeBufferANDROID chained on the bind info. The native-buffer backing
    // must be supplied at image-creation time on the host, so we recreate the
    // underlying image with the native buffer attached, destroy the old one,
    // and repoint the guest's existing boxed handle at the replacement.
    VkResult performBindImageMemoryDeferredAhb(android::base::BumpPool* pool,
                                               VkDevice boxed_device,
                                               const VkBindImageMemoryInfo* bimi) {
        auto device = unbox_VkDevice(boxed_device);
        auto vk = dispatch_VkDevice(boxed_device);

        auto original_underlying_image = bimi->image;
        auto original_boxed_image = unboxed_to_boxed_non_dispatchable_VkImage(original_underlying_image);

        VkImageCreateInfo ici = {};
        {
            std::lock_guard<std::recursive_mutex> lock(mLock);

            auto* imageInfo = android::base::find(mImageInfo, original_underlying_image);
            if (!imageInfo) {
                ERR("Image for deferred AHB bind does not exist.");
                return VK_ERROR_OUT_OF_HOST_MEMORY;
            }

            // Recreate with the same creation parameters the guest used.
            ici = imageInfo->imageCreateInfoShallow;
        }

        // Chain the native buffer info; this intentionally becomes the entire
        // pNext chain of the recreated image's create info.
        ici.pNext = vk_find_struct<VkNativeBufferANDROID>(bimi);
        if (!ici.pNext) {
            GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
                << "Missing VkNativeBufferANDROID for deferred AHB bind.";
        }

        // boxImage=false: we reuse the guest's existing boxed handle below
        // instead of creating a new one.
        VkImage underlying_replacement_image = VK_NULL_HANDLE;
        VkResult result = on_vkCreateImage(pool, boxed_device, &ici, nullptr,
                                           &underlying_replacement_image, false);
        if (result != VK_SUCCESS) {
            ERR("Failed to create image for deferred AHB bind.");
            return VK_ERROR_OUT_OF_HOST_MEMORY;
        }

        // Drop the old, memory-less underlying image and its tracking state.
        on_vkDestroyImage(pool, boxed_device, original_underlying_image, nullptr);

        {
            std::lock_guard<std::recursive_mutex> lock(mLock);

            // Re-target the guest-visible boxed handle at the replacement, and
            // rewrite the caller's bind info in place so downstream code sees
            // the new image with no memory left to bind.
            set_boxed_non_dispatchable_VkImage(original_boxed_image, underlying_replacement_image);
            const_cast<VkBindImageMemoryInfo*>(bimi)->image = underlying_replacement_image;
            const_cast<VkBindImageMemoryInfo*>(bimi)->memory = nullptr;
        }

        return VK_SUCCESS;
    }
2656 
    // Binds |bimi->memory| to |bimi->image| and updates tracking state.
    // A bind with memory == VK_NULL_HANDLE plus a chained VkNativeBufferANDROID
    // is a deferred AHB bind and takes the image-replacement path instead.
    VkResult performBindImageMemory(android::base::BumpPool* pool, VkDevice boxed_device,
                                    const VkBindImageMemoryInfo* bimi) {
        auto image = bimi->image;
        auto memory = bimi->memory;
        auto memoryOffset = bimi->memoryOffset;

        const auto* anb = vk_find_struct<VkNativeBufferANDROID>(bimi);
        if (memory == VK_NULL_HANDLE && anb != nullptr) {
            return performBindImageMemoryDeferredAhb(pool, boxed_device, bimi);
        }

        auto device = unbox_VkDevice(boxed_device);
        auto vk = dispatch_VkDevice(boxed_device);

        VALIDATE_REQUIRED_HANDLE(memory);
        VkResult result = vk->vkBindImageMemory(device, image, memory, memoryOffset);
        if (result != VK_SUCCESS) {
            return result;
        }

        std::lock_guard<std::recursive_mutex> lock(mLock);

        auto* deviceInfo = android::base::find(mDeviceInfo, device);
        if (!deviceInfo) return VK_ERROR_OUT_OF_HOST_MEMORY;

        auto* memoryInfo = android::base::find(mMemoryInfo, memory);
        if (!memoryInfo) return VK_ERROR_OUT_OF_HOST_MEMORY;

        auto* imageInfo = android::base::find(mImageInfo, image);
        if (!imageInfo) return VK_ERROR_OUT_OF_HOST_MEMORY;
        // Propagate the ColorBuffer association from the memory onto the
        // image, so debug tooling can label the image.
        imageInfo->boundColorBuffer = memoryInfo->boundColorBuffer;
        if (imageInfo->boundColorBuffer) {
            deviceInfo->debugUtilsHelper.addDebugLabel(image, "ColorBuffer:%d",
                                                       *imageInfo->boundColorBuffer);
        }
        imageInfo->memory = memory;

        // Done unless this device emulates compressed texture formats.
        if (!deviceInfo->emulateTextureEtc2 && !deviceInfo->emulateTextureAstc) {
            return VK_SUCCESS;
        }

        CompressedImageInfo& cmpInfo = imageInfo->cmpInfo;
        if (!deviceInfo->needEmulatedDecompression(cmpInfo)) {
            return VK_SUCCESS;
        }
        // The emulated compressed mipmap images need backing from the same
        // allocation as the output image.
        return cmpInfo.bindCompressedMipmapsMemory(vk, memory, memoryOffset);
    }
2704 
on_vkBindImageMemory(android::base::BumpPool * pool,VkDevice boxed_device,VkImage image,VkDeviceMemory memory,VkDeviceSize memoryOffset)2705     VkResult on_vkBindImageMemory(android::base::BumpPool* pool, VkDevice boxed_device,
2706                                   VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset) {
2707         const VkBindImageMemoryInfo bimi = {
2708             .sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
2709             .pNext = nullptr,
2710             .image = image,
2711             .memory = memory,
2712             .memoryOffset = memoryOffset,
2713         };
2714         return performBindImageMemory(pool, boxed_device, &bimi);
2715     }
2716 
on_vkBindImageMemory2(android::base::BumpPool * pool,VkDevice boxed_device,uint32_t bindInfoCount,const VkBindImageMemoryInfo * pBindInfos)2717     VkResult on_vkBindImageMemory2(android::base::BumpPool* pool, VkDevice boxed_device,
2718                                    uint32_t bindInfoCount,
2719                                    const VkBindImageMemoryInfo* pBindInfos) {
2720 #ifdef GFXSTREAM_BUILD_WITH_SNAPSHOT_SUPPORT
2721         if (bindInfoCount > 1 && snapshotsEnabled()) {
2722             if (mVerbosePrints) {
2723                 fprintf(stderr,
2724                     "vkBindImageMemory2 with more than 1 bindInfoCount not supporting snapshot");
2725             }
2726             get_emugl_vm_operations().setSkipSnapshotSave(true);
2727             get_emugl_vm_operations().setSkipSnapshotSaveReason(SNAPSHOT_SKIP_UNSUPPORTED_VK_API);
2728         }
2729 #endif
2730 
2731         auto device = unbox_VkDevice(boxed_device);
2732         auto vk = dispatch_VkDevice(boxed_device);
2733         bool needEmulation = false;
2734 
2735         auto* deviceInfo = android::base::find(mDeviceInfo, device);
2736         if (!deviceInfo) return VK_ERROR_UNKNOWN;
2737 
2738         for (uint32_t i = 0; i < bindInfoCount; i++) {
2739             auto* imageInfo = android::base::find(mImageInfo, pBindInfos[i].image);
2740             if (!imageInfo) return VK_ERROR_UNKNOWN;
2741 
2742             const auto* anb = vk_find_struct<VkNativeBufferANDROID>(&pBindInfos[i]);
2743             if (anb != nullptr) {
2744                 needEmulation = true;
2745                 break;
2746             }
2747 
2748             if (deviceInfo->needEmulatedDecompression(imageInfo->cmpInfo)) {
2749                 needEmulation = true;
2750                 break;
2751             }
2752         }
2753 
2754         if (needEmulation) {
2755             VkResult result;
2756             for (uint32_t i = 0; i < bindInfoCount; i++) {
2757                 result = performBindImageMemory(pool, boxed_device, &pBindInfos[i]);
2758                 if (result != VK_SUCCESS) return result;
2759             }
2760 
2761             return VK_SUCCESS;
2762         }
2763 
2764         VkResult result = vk->vkBindImageMemory2(device, bindInfoCount, pBindInfos);
2765         if (result != VK_SUCCESS) {
2766             return result;
2767         }
2768 
2769         if (deviceInfo->debugUtilsHelper.isEnabled()) {
2770             std::lock_guard<std::recursive_mutex> lock(mLock);
2771             for (uint32_t i = 0; i < bindInfoCount; i++) {
2772                 auto* memoryInfo = android::base::find(mMemoryInfo, pBindInfos[i].memory);
2773                 if (!memoryInfo) return VK_ERROR_OUT_OF_HOST_MEMORY;
2774 
2775                 if (memoryInfo->boundColorBuffer) {
2776                     deviceInfo->debugUtilsHelper.addDebugLabel(
2777                         pBindInfos[i].image, "ColorBuffer:%d", *memoryInfo->boundColorBuffer);
2778                 }
2779             }
2780         }
2781 
2782         return result;
2783     }
2784 
    // Handles vkCreateImageView. For images with emulated decompression the
    // view is redirected either to the decompression output image (when the
    // requested format is one we emulate) or to the per-mip-level compressed
    // mipmap image. Also tracks whether the view needs emulated alpha
    // (RGB emulated as RGBA) and propagates the ColorBuffer debug label.
    VkResult on_vkCreateImageView(android::base::BumpPool* pool, VkDevice boxed_device,
                                  const VkImageViewCreateInfo* pCreateInfo,
                                  const VkAllocationCallbacks* pAllocator, VkImageView* pView) {
        auto device = unbox_VkDevice(boxed_device);
        auto vk = dispatch_VkDevice(boxed_device);
        if (!pCreateInfo) {
            return VK_ERROR_OUT_OF_HOST_MEMORY;
        }

        std::lock_guard<std::recursive_mutex> lock(mLock);
        auto* deviceInfo = android::base::find(mDeviceInfo, device);
        auto* imageInfo = android::base::find(mImageInfo, pCreateInfo->image);
        if (!deviceInfo || !imageInfo) return VK_ERROR_OUT_OF_HOST_MEMORY;
        // Local, possibly-rewritten copy of the create info; pCreateInfo is
        // repointed at it whenever format/image substitution is needed.
        VkImageViewCreateInfo createInfo;
        bool needEmulatedAlpha = false;
        if (deviceInfo->needEmulatedDecompression(pCreateInfo->format)) {
            if (imageInfo->cmpInfo.outputImage()) {
                // View of the decompressed output image, in its output format.
                createInfo = *pCreateInfo;
                createInfo.format = CompressedImageInfo::getOutputFormat(pCreateInfo->format);
                needEmulatedAlpha = CompressedImageInfo::needEmulatedAlpha(pCreateInfo->format);
                createInfo.image = imageInfo->cmpInfo.outputImage();
                pCreateInfo = &createInfo;
            }
        } else if (deviceInfo->needEmulatedDecompression(imageInfo->cmpInfo)) {
            // Image view on the compressed mipmaps
            createInfo = *pCreateInfo;
            createInfo.format =
                CompressedImageInfo::getCompressedMipmapsFormat(pCreateInfo->format);
            needEmulatedAlpha = false;
            // Each mip level lives in its own image, so re-base the view at
            // mip 0 of the corresponding per-level image.
            createInfo.image =
                imageInfo->cmpInfo.compressedMipmap(pCreateInfo->subresourceRange.baseMipLevel);
            createInfo.subresourceRange.baseMipLevel = 0;
            pCreateInfo = &createInfo;
        }
        if (imageInfo->anbInfo && imageInfo->anbInfo->externallyBacked) {
            // NOTE(review): when a branch above already repointed pCreateInfo
            // at createInfo this is a (harmless) self-copy; the apparent intent
            // is to guarantee a local mutable copy for externally-backed ANB
            // images — confirm against VkAndroidNativeBuffer handling.
            createInfo = *pCreateInfo;
            pCreateInfo = &createInfo;
        }

        VkResult result = vk->vkCreateImageView(device, pCreateInfo, pAllocator, pView);
        if (result != VK_SUCCESS) {
            return result;
        }

        VALIDATE_NEW_HANDLE_INFO_ENTRY(mImageViewInfo, *pView);
        auto& imageViewInfo = mImageViewInfo[*pView];
        imageViewInfo.device = device;
        imageViewInfo.needEmulatedAlpha = needEmulatedAlpha;
        imageViewInfo.boundColorBuffer = imageInfo->boundColorBuffer;
        if (imageViewInfo.boundColorBuffer) {
            deviceInfo->debugUtilsHelper.addDebugLabel(*pView, "ColorBuffer:%d",
                                                       *imageViewInfo.boundColorBuffer);
        }

        *pView = new_boxed_non_dispatchable_VkImageView(*pView);
        return result;
    }
2842 
    // Releases the driver-side VkImageView. |imageViewInfo| is accepted for
    // symmetry with the other destroy*WithExclusiveInfo helpers; image views
    // have no extra host-side state to tear down.
    void destroyImageViewWithExclusiveInfo(VkDevice device, VulkanDispatch* deviceDispatch,
                                           VkImageView imageView, ImageViewInfo& imageViewInfo,
                                           const VkAllocationCallbacks* pAllocator) {
        deviceDispatch->vkDestroyImageView(device, imageView, pAllocator);
    }
2848 
destroyImageViewLocked(VkDevice device,VulkanDispatch * deviceDispatch,VkImageView imageView,const VkAllocationCallbacks * pAllocator)2849     void destroyImageViewLocked(VkDevice device, VulkanDispatch* deviceDispatch,
2850                                 VkImageView imageView, const VkAllocationCallbacks* pAllocator) {
2851         auto imageViewInfoIt = mImageViewInfo.find(imageView);
2852         if (imageViewInfoIt == mImageViewInfo.end()) return;
2853         auto& imageViewInfo = imageViewInfoIt->second;
2854 
2855         destroyImageViewWithExclusiveInfo(device, deviceDispatch, imageView, imageViewInfo,
2856                                           pAllocator);
2857 
2858         mImageViewInfo.erase(imageView);
2859     }
2860 
on_vkDestroyImageView(android::base::BumpPool * pool,VkDevice boxed_device,VkImageView imageView,const VkAllocationCallbacks * pAllocator)2861     void on_vkDestroyImageView(android::base::BumpPool* pool, VkDevice boxed_device,
2862                                VkImageView imageView, const VkAllocationCallbacks* pAllocator) {
2863         auto device = unbox_VkDevice(boxed_device);
2864         auto deviceDispatch = dispatch_VkDevice(boxed_device);
2865 
2866         std::lock_guard<std::recursive_mutex> lock(mLock);
2867         destroyImageViewLocked(device, deviceDispatch, imageView, pAllocator);
2868     }
2869 
    // Handles vkCreateSampler: forwards to the driver, then records the create
    // info (deep copy) and whether the sampler needs emulated-alpha handling
    // for border colors on RGB-emulated-as-RGBA formats.
    VkResult on_vkCreateSampler(android::base::BumpPool* pool, VkDevice boxed_device,
                                const VkSamplerCreateInfo* pCreateInfo,
                                const VkAllocationCallbacks* pAllocator, VkSampler* pSampler) {
        auto device = unbox_VkDevice(boxed_device);
        auto vk = dispatch_VkDevice(boxed_device);
        VkResult result = vk->vkCreateSampler(device, pCreateInfo, pAllocator, pSampler);
        if (result != VK_SUCCESS) {
            return result;
        }
        std::lock_guard<std::recursive_mutex> lock(mLock);
        VALIDATE_NEW_HANDLE_INFO_ENTRY(mSamplerInfo, *pSampler);
        auto& samplerInfo = mSamplerInfo[*pSampler];
        samplerInfo.device = device;
        // Deep copy so the tracked create info outlives the caller's pNext chain.
        deepcopy_VkSamplerCreateInfo(pool, VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
                                     pCreateInfo, &samplerInfo.createInfo);
        // We emulate RGB with RGBA for some compressed textures, which does not
        // handle transparent border correctly.
        samplerInfo.needEmulatedAlpha =
            (pCreateInfo->addressModeU == VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER ||
             pCreateInfo->addressModeV == VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER ||
             pCreateInfo->addressModeW == VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER) &&
            (pCreateInfo->borderColor == VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK ||
             pCreateInfo->borderColor == VK_BORDER_COLOR_INT_TRANSPARENT_BLACK ||
             pCreateInfo->borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT ||
             pCreateInfo->borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT);

        *pSampler = new_boxed_non_dispatchable_VkSampler(*pSampler);

        return result;
    }
2900 
    // Destroys |sampler| and, if present, the companion sampler created to
    // emulate border-color behavior for emulated-alpha formats.
    void destroySamplerWithExclusiveInfo(VkDevice device, VulkanDispatch* deviceDispatch,
                                         VkSampler sampler, SamplerInfo& samplerInfo,
                                         const VkAllocationCallbacks* pAllocator) {
        deviceDispatch->vkDestroySampler(device, sampler, pAllocator);

        // The emulated-border sampler is destroyed with a null allocator
        // (presumably created without callbacks — confirm at creation site).
        if (samplerInfo.emulatedborderSampler != VK_NULL_HANDLE) {
            deviceDispatch->vkDestroySampler(device, samplerInfo.emulatedborderSampler, nullptr);
        }
    }
2910 
destroySamplerLocked(VkDevice device,VulkanDispatch * deviceDispatch,VkSampler sampler,const VkAllocationCallbacks * pAllocator)2911     void destroySamplerLocked(VkDevice device, VulkanDispatch* deviceDispatch, VkSampler sampler,
2912                               const VkAllocationCallbacks* pAllocator) {
2913         auto samplerInfoIt = mSamplerInfo.find(sampler);
2914         if (samplerInfoIt == mSamplerInfo.end()) return;
2915         auto& samplerInfo = samplerInfoIt->second;
2916 
2917         destroySamplerWithExclusiveInfo(device, deviceDispatch, sampler, samplerInfo, pAllocator);
2918 
2919         mSamplerInfo.erase(samplerInfoIt);
2920     }
2921 
on_vkDestroySampler(android::base::BumpPool * pool,VkDevice boxed_device,VkSampler sampler,const VkAllocationCallbacks * pAllocator)2922     void on_vkDestroySampler(android::base::BumpPool* pool, VkDevice boxed_device,
2923                              VkSampler sampler, const VkAllocationCallbacks* pAllocator) {
2924         auto device = unbox_VkDevice(boxed_device);
2925         auto deviceDispatch = dispatch_VkDevice(boxed_device);
2926 
2927         std::lock_guard<std::recursive_mutex> lock(mLock);
2928         destroySamplerLocked(device, deviceDispatch, sampler, pAllocator);
2929     }
2930 
    // Exports |semaphore| as a platform external handle into |outHandle|:
    // a Win32 handle on Windows, a file descriptor on Linux. |handleType|
    // overrides the default handle type on the Linux path only; the Win32 path
    // always exports OPAQUE_WIN32. Returns VK_ERROR_OUT_OF_HOST_MEMORY on
    // unsupported platforms or when the needed device extension is missing.
    VkResult exportSemaphore(
        VulkanDispatch* vk, VkDevice device, VkSemaphore semaphore, VK_EXT_SYNC_HANDLE* outHandle,
        std::optional<VkExternalSemaphoreHandleTypeFlagBits> handleType = std::nullopt) {
#if defined(_WIN32)
        VkSemaphoreGetWin32HandleInfoKHR getWin32 = {
            VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR,
            0,
            semaphore,
            VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
        };

        return vk->vkGetSemaphoreWin32HandleKHR(device, &getWin32, outHandle);
#elif defined(__linux__)
        // Default to an opaque fd unless the caller requested another type
        // (e.g. SYNC_FD).
        VkExternalSemaphoreHandleTypeFlagBits handleTypeBits =
            VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT;
        if (handleType) {
            handleTypeBits = *handleType;
        }

        VkSemaphoreGetFdInfoKHR getFd = {
            VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR,
            0,
            semaphore,
            handleTypeBits,
        };

        if (!hasDeviceExtension(device, VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME)) {
            // Note: VK_KHR_external_semaphore_fd might be advertised in the guest,
            // because SYNC_FD handling is performed guest-side only. But we still
            // need to error out here when handling a non-sync, opaque FD.
            return VK_ERROR_OUT_OF_HOST_MEMORY;
        }

        return vk->vkGetSemaphoreFdKHR(device, &getFd, outHandle);
#else
        // No external semaphore handle support on other platforms.
        return VK_ERROR_OUT_OF_HOST_MEMORY;
#endif
    }
2969 
    // Creates a host semaphore for the guest's vkCreateSemaphore call.
    //
    // The guest's create-info chain is copied locally so the host can append
    // structs of its own: the semaphore-type struct (if present) is preserved,
    // and, when the VulkanExternalSync feature is enabled and the semaphore is
    // binary, a VkExportSemaphoreCreateInfo is appended with the first external
    // handle type the device supports. On success the host handle is recorded
    // in mSemaphoreInfo and returned to the guest boxed.
    VkResult on_vkCreateSemaphore(android::base::BumpPool* pool, VkDevice boxed_device,
                                  const VkSemaphoreCreateInfo* pCreateInfo,
                                  const VkAllocationCallbacks* pAllocator,
                                  VkSemaphore* pSemaphore) {
        auto device = unbox_VkDevice(boxed_device);
        auto vk = dispatch_VkDevice(boxed_device);

        // Orphan copy: same fields, detached pNext, so we control the chain below.
        VkSemaphoreCreateInfo localCreateInfo = vk_make_orphan_copy(*pCreateInfo);
        vk_struct_chain_iterator structChainIter = vk_make_chain_iterator(&localCreateInfo);

        bool timelineSemaphore = false;

        // Must outlive the vkCreateSemaphore call below since it is linked into
        // localCreateInfo's chain.
        VkSemaphoreTypeCreateInfoKHR localSemaphoreTypeCreateInfo;
        if (const VkSemaphoreTypeCreateInfoKHR* semaphoreTypeCiPtr =
                vk_find_struct<VkSemaphoreTypeCreateInfoKHR>(pCreateInfo);
            semaphoreTypeCiPtr) {
            localSemaphoreTypeCreateInfo = vk_make_orphan_copy(*semaphoreTypeCiPtr);
            vk_append_struct(&structChainIter, &localSemaphoreTypeCreateInfo);

            if (localSemaphoreTypeCreateInfo.semaphoreType == VK_SEMAPHORE_TYPE_TIMELINE) {
                timelineSemaphore = true;
            }
        }

        VkExportSemaphoreCreateInfoKHR localExportSemaphoreCi = {};

        /* Timeline semaphores are exportable:
         *
         * "Timeline semaphore specific external sharing capabilities can be queried using
         *  vkGetPhysicalDeviceExternalSemaphoreProperties by chaining the new
         *  VkSemaphoreTypeCreateInfoKHR structure to its pExternalSemaphoreInfo structure.
         *  This allows having a different set of external semaphore handle types supported
         *  for timeline semaphores vs. binary semaphores."
         *
         *  We just don't support this here since neither Android or Zink use this feature
         *  with timeline semaphores yet.
         */
        if (m_emu->features.VulkanExternalSync.enabled && !timelineSemaphore) {
            localExportSemaphoreCi.sType = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO;
            localExportSemaphoreCi.pNext = nullptr;

            {
                std::lock_guard<std::recursive_mutex> lock(mLock);
                auto* deviceInfo = android::base::find(mDeviceInfo, device);

                if (!deviceInfo) {
                    return VK_ERROR_DEVICE_LOST;
                }

                // Select the first supported handle type, checked in this
                // order: opaque win32, sync fd, opaque fd.
                if (deviceInfo->externalFenceInfo.supportedBinarySemaphoreHandleTypes &
                    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT) {
                    localExportSemaphoreCi.handleTypes =
                        VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT;
                } else if (deviceInfo->externalFenceInfo.supportedBinarySemaphoreHandleTypes &
                           VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT) {
                    localExportSemaphoreCi.handleTypes =
                        VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
                } else if (deviceInfo->externalFenceInfo.supportedBinarySemaphoreHandleTypes &
                           VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT) {
                    localExportSemaphoreCi.handleTypes =
                        VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT;
                }
            }

            vk_append_struct(&structChainIter, &localExportSemaphoreCi);
        }

        VkResult res = vk->vkCreateSemaphore(device, &localCreateInfo, pAllocator, pSemaphore);

        if (res != VK_SUCCESS) return res;

        std::lock_guard<std::recursive_mutex> lock(mLock);

        VALIDATE_NEW_HANDLE_INFO_ENTRY(mSemaphoreInfo, *pSemaphore);
        auto& semaphoreInfo = mSemaphoreInfo[*pSemaphore];
        semaphoreInfo.device = device;

        // Hand a boxed handle back to the guest; the unboxed handle is the map key.
        *pSemaphore = new_boxed_non_dispatchable_VkSemaphore(*pSemaphore);

        return res;
    }
3051 
    // Creates a host fence for the guest's vkCreateFence call.
    //
    // When the guest asks for a SYNC_FD-exportable fence, the export struct is
    // stripped from the chain (the host does not need an exportable fence on
    // this path) and a recycled fence from the device's external fence pool is
    // reused when available. During snapshot load every fence is created
    // signaled; non-signaled ones are reset later by the snapshot loader.
    VkResult on_vkCreateFence(android::base::BumpPool* pool, VkDevice boxed_device,
                              const VkFenceCreateInfo* pCreateInfo,
                              const VkAllocationCallbacks* pAllocator, VkFence* pFence) {
        VkFenceCreateInfo localCreateInfo;
        if (mSnapshotState == SnapshotState::Loading) {
            // On snapshot load we create all fences as signaled then reset those that are not.
            localCreateInfo = *pCreateInfo;
            pCreateInfo = &localCreateInfo;
            localCreateInfo.flags |= VK_FENCE_CREATE_SIGNALED_BIT;
        }
        auto device = unbox_VkDevice(boxed_device);
        auto vk = dispatch_VkDevice(boxed_device);

        // NOTE(review): outside of snapshot load this aliases the guest-provided
        // create info, so vk_struct_chain_remove below mutates the caller's
        // pNext chain in place — confirm callers tolerate this.
        VkFenceCreateInfo& createInfo = const_cast<VkFenceCreateInfo&>(*pCreateInfo);

        const VkExportFenceCreateInfo* exportFenceInfoPtr =
            vk_find_struct<VkExportFenceCreateInfo>(pCreateInfo);
        bool exportSyncFd = exportFenceInfoPtr && (exportFenceInfoPtr->handleTypes &
                                                   VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT);
        bool fenceReused = false;

        *pFence = VK_NULL_HANDLE;

        if (exportSyncFd) {
            // Remove VkExportFenceCreateInfo, since host doesn't need to create
            // an exportable fence in this case
            ExternalFencePool<VulkanDispatch>* externalFencePool = nullptr;
            vk_struct_chain_remove(exportFenceInfoPtr, &createInfo);
            {
                std::lock_guard<std::recursive_mutex> lock(mLock);
                auto* deviceInfo = android::base::find(mDeviceInfo, device);
                if (!deviceInfo) return VK_ERROR_OUT_OF_HOST_MEMORY;
                externalFencePool = deviceInfo->externalFencePool.get();
            }
            // Prefer reusing a previously-recycled fence over creating a new one.
            *pFence = externalFencePool->pop(pCreateInfo);
            if (*pFence != VK_NULL_HANDLE) {
                fenceReused = true;
            }
        }

        if (*pFence == VK_NULL_HANDLE) {
            VkResult res = vk->vkCreateFence(device, &createInfo, pAllocator, pFence);
            if (res != VK_SUCCESS) {
                return res;
            }
        }

        {
            std::lock_guard<std::recursive_mutex> lock(mLock);

            // Create FenceInfo for *pFence.
            // A reused fence may already have an entry, so only validate
            // newness for freshly created fences.
            if (!fenceReused) {
                VALIDATE_NEW_HANDLE_INFO_ENTRY(mFenceInfo, *pFence);
            }
            auto& fenceInfo = mFenceInfo[*pFence];
            fenceInfo.device = device;
            fenceInfo.vk = vk;

            // Hand a boxed handle back to the guest.
            *pFence = new_boxed_non_dispatchable_VkFence(*pFence);
            fenceInfo.boxed = *pFence;
            fenceInfo.external = exportSyncFd;
            fenceInfo.state = FenceInfo::State::kNotWaitable;
        }

        return VK_SUCCESS;
    }
3118 
on_vkResetFences(android::base::BumpPool * pool,VkDevice boxed_device,uint32_t fenceCount,const VkFence * pFences)3119     VkResult on_vkResetFences(android::base::BumpPool* pool, VkDevice boxed_device,
3120                               uint32_t fenceCount, const VkFence* pFences) {
3121         auto device = unbox_VkDevice(boxed_device);
3122         auto vk = dispatch_VkDevice(boxed_device);
3123 
3124         std::vector<VkFence> cleanedFences;
3125         std::vector<VkFence> externalFences;
3126 
3127         {
3128             std::lock_guard<std::recursive_mutex> lock(mLock);
3129             for (uint32_t i = 0; i < fenceCount; i++) {
3130                 if (pFences[i] == VK_NULL_HANDLE) continue;
3131 
3132                 if (mFenceInfo.find(pFences[i]) == mFenceInfo.end()) {
3133                     ERR("Invalid fence handle: %p!", pFences[i]);
3134                 } else {
3135                     if (mFenceInfo[pFences[i]].external) {
3136                         externalFences.push_back(pFences[i]);
3137                     } else {
3138                         // Reset all fences' states to kNotWaitable.
3139                         cleanedFences.push_back(pFences[i]);
3140                         mFenceInfo[pFences[i]].state = FenceInfo::State::kNotWaitable;
3141                     }
3142                 }
3143             }
3144         }
3145 
3146         if (!cleanedFences.empty()) {
3147             VK_CHECK(vk->vkResetFences(device, (uint32_t)cleanedFences.size(),
3148                                        cleanedFences.data()));
3149         }
3150 
3151         // For external fences, we unilaterally put them in the pool to ensure they finish
3152         // TODO: should store creation info / pNext chain per fence and re-apply?
3153         VkFenceCreateInfo createInfo{
3154             .sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, .pNext = 0, .flags = 0};
3155         auto* deviceInfo = android::base::find(mDeviceInfo, device);
3156         if (!deviceInfo) return VK_ERROR_OUT_OF_DEVICE_MEMORY;
3157         for (auto fence : externalFences) {
3158             VkFence replacement = deviceInfo->externalFencePool->pop(&createInfo);
3159             if (replacement == VK_NULL_HANDLE) {
3160                 VK_CHECK(vk->vkCreateFence(device, &createInfo, 0, &replacement));
3161             }
3162             deviceInfo->externalFencePool->add(fence);
3163 
3164             {
3165                 std::lock_guard<std::recursive_mutex> lock(mLock);
3166                 auto boxed_fence = unboxed_to_boxed_non_dispatchable_VkFence(fence);
3167                 set_boxed_non_dispatchable_VkFence(boxed_fence, replacement);
3168 
3169                 auto& fenceInfo = mFenceInfo[replacement];
3170                 fenceInfo.device = device;
3171                 fenceInfo.vk = vk;
3172                 fenceInfo.boxed = boxed_fence;
3173                 fenceInfo.external = true;
3174                 fenceInfo.state = FenceInfo::State::kNotWaitable;
3175 
3176                 mFenceInfo[fence].boxed = VK_NULL_HANDLE;
3177             }
3178         }
3179 
3180         return VK_SUCCESS;
3181     }
3182 
    // Imports an external semaphore payload the guest presents as an "fd".
    //
    // On Windows the guest-side "fd" is actually a host-generated id (see
    // on_vkGetSemaphoreFdKHR) mapping to a previously-exported semaphore; its
    // Win32 handle is duplicated and imported. On POSIX the fd is dup()'d so
    // the guest keeps ownership of its copy, then imported as-is.
    VkResult on_vkImportSemaphoreFdKHR(android::base::BumpPool* pool, VkDevice boxed_device,
                                       const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo) {
        auto device = unbox_VkDevice(boxed_device);
        auto vk = dispatch_VkDevice(boxed_device);

#ifdef _WIN32
        std::lock_guard<std::recursive_mutex> lock(mLock);

        // The "fd" is a synthetic id; resolve it to the semaphore it was
        // exported from.
        auto* infoPtr = android::base::find(mSemaphoreInfo,
                                            mExternalSemaphoresById[pImportSemaphoreFdInfo->fd]);

        if (!infoPtr) {
            return VK_ERROR_INVALID_EXTERNAL_HANDLE;
        }

        // Duplicate so the stored export handle stays valid after the import.
        VK_EXT_SYNC_HANDLE handle = dupExternalSync(infoPtr->externalHandle);

        VkImportSemaphoreWin32HandleInfoKHR win32ImportInfo = {
            VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR,
            0,
            pImportSemaphoreFdInfo->semaphore,
            pImportSemaphoreFdInfo->flags,
            VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR,
            handle,
            L"",
        };

        return vk->vkImportSemaphoreWin32HandleKHR(device, &win32ImportInfo);
#else
        if (!hasDeviceExtension(device, VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME)) {
            // Note: VK_KHR_external_semaphore_fd might be advertised in the guest,
            // because SYNC_FD handling is performed guest-side only. But we still
            // need to error out here when handling a non-sync, opaque FD.
            return VK_ERROR_OUT_OF_HOST_MEMORY;
        }

        // dup() so the import does not consume the guest's copy of the fd.
        VkImportSemaphoreFdInfoKHR importInfo = *pImportSemaphoreFdInfo;
        importInfo.fd = dup(pImportSemaphoreFdInfo->fd);
        return vk->vkImportSemaphoreFdKHR(device, &importInfo);
#endif
    }
3224 
on_vkGetSemaphoreFdKHR(android::base::BumpPool * pool,VkDevice boxed_device,const VkSemaphoreGetFdInfoKHR * pGetFdInfo,int * pFd)3225     VkResult on_vkGetSemaphoreFdKHR(android::base::BumpPool* pool, VkDevice boxed_device,
3226                                     const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd) {
3227         auto device = unbox_VkDevice(boxed_device);
3228         auto vk = dispatch_VkDevice(boxed_device);
3229         VK_EXT_SYNC_HANDLE handle;
3230 
3231         VkResult result = exportSemaphore(vk, device, pGetFdInfo->semaphore, &handle);
3232         if (result != VK_SUCCESS) {
3233             return result;
3234         }
3235 
3236         std::lock_guard<std::recursive_mutex> lock(mLock);
3237         mSemaphoreInfo[pGetFdInfo->semaphore].externalHandle = handle;
3238 #ifdef _WIN32
3239         int nextId = genSemaphoreId();
3240         mExternalSemaphoresById[nextId] = pGetFdInfo->semaphore;
3241         *pFd = nextId;
3242 #else
3243         // No next id; its already an fd
3244         mSemaphoreInfo[pGetFdInfo->semaphore].externalHandle = handle;
3245 #endif
3246         return result;
3247     }
3248 
    // Exports a semaphore's payload and registers the resulting native handle
    // with the global ExternalObjectManager under (virtio-gpu context id,
    // syncId). Requires the VulkanExternalSync feature.
    VkResult on_vkGetSemaphoreGOOGLE(android::base::BumpPool* pool, VkDevice boxed_device,
                                     VkSemaphore semaphore, uint64_t syncId) {
        if (!m_emu->features.VulkanExternalSync.enabled) {
            return VK_ERROR_FEATURE_NOT_PRESENT;
        }

        auto vk = dispatch_VkDevice(boxed_device);
        auto device = unbox_VkDevice(boxed_device);

        uint32_t virtioGpuContextId = 0;
        VkExternalSemaphoreHandleTypeFlagBits flagBits =
            static_cast<VkExternalSemaphoreHandleTypeFlagBits>(0);
        {
            std::lock_guard<std::recursive_mutex> lock(mLock);
            auto* deviceInfo = android::base::find(mDeviceInfo, device);

            if (!deviceInfo) {
                return VK_ERROR_DEVICE_LOST;
            }

            // Select the first supported handle type, checked in this order:
            // opaque win32, sync fd, opaque fd (same order used when the
            // semaphore's export info was created).
            if (deviceInfo->externalFenceInfo.supportedBinarySemaphoreHandleTypes &
                VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT) {
                flagBits = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT;
            } else if (deviceInfo->externalFenceInfo.supportedBinarySemaphoreHandleTypes &
                       VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT) {
                flagBits = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
            } else if (deviceInfo->externalFenceInfo.supportedBinarySemaphoreHandleTypes &
                       VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT) {
                flagBits = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT;
            }

            // The sync object is published per virtio-gpu context; without a
            // context id there is nowhere to attach it.
            if (!deviceInfo->virtioGpuContextId) {
                ERR("VkDevice:%p is missing virtio gpu context id.", device);
                return VK_ERROR_OUT_OF_HOST_MEMORY;
            }
            virtioGpuContextId = *deviceInfo->virtioGpuContextId;
        }

        VK_EXT_SYNC_HANDLE handle;
        VkResult result =
            exportSemaphore(vk, device, semaphore, &handle,
                            std::make_optional<VkExternalSemaphoreHandleTypeFlagBits>(flagBits));
        if (result != VK_SUCCESS) {
            return result;
        }

        // Ownership of the native handle moves into the descriptor and then
        // into the ExternalObjectManager.
        ManagedDescriptor descriptor(handle);
        ExternalObjectManager::get()->addSyncDescriptorInfo(
            virtioGpuContextId, syncId, std::move(descriptor), /*streamHandleType*/ 0);
        return VK_SUCCESS;
    }
3300 
destroySemaphoreWithExclusiveInfo(VkDevice device,VulkanDispatch * deviceDispatch,VkSemaphore semaphore,SemaphoreInfo & semaphoreInfo,const VkAllocationCallbacks * pAllocator)3301     void destroySemaphoreWithExclusiveInfo(VkDevice device, VulkanDispatch* deviceDispatch,
3302                                            VkSemaphore semaphore, SemaphoreInfo& semaphoreInfo,
3303                                            const VkAllocationCallbacks* pAllocator) {
3304 #ifndef _WIN32
3305         if (semaphoreInfo.externalHandle != VK_EXT_SYNC_HANDLE_INVALID) {
3306             close(semaphoreInfo.externalHandle);
3307         }
3308 #endif
3309 
3310         if (semaphoreInfo.latestUse && !IsDone(*semaphoreInfo.latestUse)) {
3311             auto deviceInfoIt = mDeviceInfo.find(device);
3312             if (deviceInfoIt != mDeviceInfo.end()) {
3313                 auto& deviceInfo = deviceInfoIt->second;
3314                 deviceInfo.deviceOpTracker->AddPendingGarbage(*semaphoreInfo.latestUse, semaphore);
3315                 deviceInfo.deviceOpTracker->PollAndProcessGarbage();
3316             }
3317         } else {
3318             deviceDispatch->vkDestroySemaphore(device, semaphore, pAllocator);
3319         }
3320     }
3321 
destroySemaphoreLocked(VkDevice device,VulkanDispatch * deviceDispatch,VkSemaphore semaphore,const VkAllocationCallbacks * pAllocator)3322     void destroySemaphoreLocked(VkDevice device, VulkanDispatch* deviceDispatch,
3323                                 VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator) {
3324         auto semaphoreInfoIt = mSemaphoreInfo.find(semaphore);
3325         if (semaphoreInfoIt == mSemaphoreInfo.end()) return;
3326         auto& semaphoreInfo = semaphoreInfoIt->second;
3327 
3328         destroySemaphoreWithExclusiveInfo(device, deviceDispatch, semaphore, semaphoreInfo,
3329                                           pAllocator);
3330 
3331         mSemaphoreInfo.erase(semaphoreInfoIt);
3332     }
3333 
on_vkDestroySemaphore(android::base::BumpPool * pool,VkDevice boxed_device,VkSemaphore semaphore,const VkAllocationCallbacks * pAllocator)3334     void on_vkDestroySemaphore(android::base::BumpPool* pool, VkDevice boxed_device,
3335                                VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator) {
3336         auto device = unbox_VkDevice(boxed_device);
3337         auto deviceDispatch = dispatch_VkDevice(boxed_device);
3338 
3339         std::lock_guard<std::recursive_mutex> lock(mLock);
3340         destroySemaphoreLocked(device, deviceDispatch, semaphore, pAllocator);
3341     }
3342 
3343     enum class DestroyFenceStatus { kDestroyed, kRecycled };
3344 
destroyFenceWithExclusiveInfo(VkDevice device,VulkanDispatch * deviceDispatch,DeviceInfo & deviceInfo,VkFence fence,FenceInfo & fenceInfo,const VkAllocationCallbacks * pAllocator,bool allowExternalFenceRecycling)3345     DestroyFenceStatus destroyFenceWithExclusiveInfo(VkDevice device,
3346                                                      VulkanDispatch* deviceDispatch,
3347                                                      DeviceInfo& deviceInfo, VkFence fence,
3348                                                      FenceInfo& fenceInfo,
3349                                                      const VkAllocationCallbacks* pAllocator,
3350                                                      bool allowExternalFenceRecycling) {
3351         fenceInfo.boxed = VK_NULL_HANDLE;
3352 
3353         // External fences are just slated for recycling. This addresses known
3354         // behavior where the guest might destroy the fence prematurely. b/228221208
3355         if (fenceInfo.external) {
3356             if (allowExternalFenceRecycling) {
3357                 deviceInfo.externalFencePool->add(fence);
3358             }
3359             return DestroyFenceStatus::kRecycled;
3360         }
3361 
3362         if (fenceInfo.latestUse && !IsDone(*fenceInfo.latestUse)) {
3363             deviceInfo.deviceOpTracker->AddPendingGarbage(*fenceInfo.latestUse, fence);
3364             deviceInfo.deviceOpTracker->PollAndProcessGarbage();
3365         } else {
3366             deviceDispatch->vkDestroyFence(device, fence, pAllocator);
3367         }
3368 
3369         return DestroyFenceStatus::kDestroyed;
3370     }
3371 
destroyFenceLocked(VkDevice device,VulkanDispatch * deviceDispatch,VkFence fence,const VkAllocationCallbacks * pAllocator,bool allowExternalFenceRecycling)3372     void destroyFenceLocked(VkDevice device, VulkanDispatch* deviceDispatch, VkFence fence,
3373                             const VkAllocationCallbacks* pAllocator,
3374                             bool allowExternalFenceRecycling) {
3375         auto fenceInfoIt = mFenceInfo.find(fence);
3376         if (fenceInfoIt == mFenceInfo.end()) {
3377             ERR("Failed to find fence info for VkFence:%p. Leaking fence!", fence);
3378             return;
3379         }
3380         auto& fenceInfo = fenceInfoIt->second;
3381 
3382         auto deviceInfoIt = mDeviceInfo.find(device);
3383         if (deviceInfoIt == mDeviceInfo.end()) {
3384             ERR("Failed to find device info for VkDevice:%p for VkFence:%p. Leaking fence!", device,
3385                 fence);
3386             return;
3387         }
3388         auto& deviceInfo = deviceInfoIt->second;
3389 
3390         auto destroyStatus =
3391             destroyFenceWithExclusiveInfo(device, deviceDispatch, deviceInfo, fence, fenceInfo,
3392                                           pAllocator, /*allowExternalFenceRecycling=*/true);
3393         if (destroyStatus == DestroyFenceStatus::kDestroyed) {
3394             mFenceInfo.erase(fenceInfoIt);
3395         }
3396     }
3397 
on_vkDestroyFence(android::base::BumpPool * pool,VkDevice boxed_device,VkFence fence,const VkAllocationCallbacks * pAllocator)3398     void on_vkDestroyFence(android::base::BumpPool* pool, VkDevice boxed_device, VkFence fence,
3399                            const VkAllocationCallbacks* pAllocator) {
3400         if (fence == VK_NULL_HANDLE) return;
3401 
3402         auto device = unbox_VkDevice(boxed_device);
3403         auto deviceDispatch = dispatch_VkDevice(boxed_device);
3404 
3405         std::lock_guard<std::recursive_mutex> lock(mLock);
3406         destroyFenceLocked(device, deviceDispatch, fence, pAllocator, true);
3407     }
3408 
on_vkCreateDescriptorSetLayout(android::base::BumpPool * pool,VkDevice boxed_device,const VkDescriptorSetLayoutCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDescriptorSetLayout * pSetLayout)3409     VkResult on_vkCreateDescriptorSetLayout(android::base::BumpPool* pool, VkDevice boxed_device,
3410                                             const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
3411                                             const VkAllocationCallbacks* pAllocator,
3412                                             VkDescriptorSetLayout* pSetLayout) {
3413         auto device = unbox_VkDevice(boxed_device);
3414         auto vk = dispatch_VkDevice(boxed_device);
3415 
3416         auto res = vk->vkCreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout);
3417 
3418         if (res == VK_SUCCESS) {
3419             std::lock_guard<std::recursive_mutex> lock(mLock);
3420             VALIDATE_NEW_HANDLE_INFO_ENTRY(mDescriptorSetLayoutInfo, *pSetLayout);
3421             auto& info = mDescriptorSetLayoutInfo[*pSetLayout];
3422             info.device = device;
3423             *pSetLayout = new_boxed_non_dispatchable_VkDescriptorSetLayout(*pSetLayout);
3424             info.boxed = *pSetLayout;
3425 
3426             info.createInfo = *pCreateInfo;
3427             for (uint32_t i = 0; i < pCreateInfo->bindingCount; ++i) {
3428                 info.bindings.push_back(pCreateInfo->pBindings[i]);
3429             }
3430         }
3431 
3432         return res;
3433     }
3434 
destroyDescriptorSetLayoutWithExclusiveInfo(VkDevice device,VulkanDispatch * deviceDispatch,VkDescriptorSetLayout descriptorSetLayout,DescriptorSetLayoutInfo & descriptorSetLayoutInfo,const VkAllocationCallbacks * pAllocator)3435     void destroyDescriptorSetLayoutWithExclusiveInfo(
3436         VkDevice device, VulkanDispatch* deviceDispatch, VkDescriptorSetLayout descriptorSetLayout,
3437         DescriptorSetLayoutInfo& descriptorSetLayoutInfo, const VkAllocationCallbacks* pAllocator) {
3438         deviceDispatch->vkDestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator);
3439     }
3440 
destroyDescriptorSetLayoutLocked(VkDevice device,VulkanDispatch * deviceDispatch,VkDescriptorSetLayout descriptorSetLayout,const VkAllocationCallbacks * pAllocator)3441     void destroyDescriptorSetLayoutLocked(VkDevice device, VulkanDispatch* deviceDispatch,
3442                                           VkDescriptorSetLayout descriptorSetLayout,
3443                                           const VkAllocationCallbacks* pAllocator) {
3444         auto descriptorSetLayoutInfoIt = mDescriptorSetLayoutInfo.find(descriptorSetLayout);
3445         if (descriptorSetLayoutInfoIt == mDescriptorSetLayoutInfo.end()) return;
3446         auto& descriptorSetLayoutInfo = descriptorSetLayoutInfoIt->second;
3447 
3448         destroyDescriptorSetLayoutWithExclusiveInfo(device, deviceDispatch, descriptorSetLayout,
3449                                                     descriptorSetLayoutInfo, pAllocator);
3450 
3451         mDescriptorSetLayoutInfo.erase(descriptorSetLayoutInfoIt);
3452     }
3453 
on_vkDestroyDescriptorSetLayout(android::base::BumpPool * pool,VkDevice boxed_device,VkDescriptorSetLayout descriptorSetLayout,const VkAllocationCallbacks * pAllocator)3454     void on_vkDestroyDescriptorSetLayout(android::base::BumpPool* pool, VkDevice boxed_device,
3455                                          VkDescriptorSetLayout descriptorSetLayout,
3456                                          const VkAllocationCallbacks* pAllocator) {
3457         auto device = unbox_VkDevice(boxed_device);
3458         auto deviceDispatch = dispatch_VkDevice(boxed_device);
3459 
3460         std::lock_guard<std::recursive_mutex> lock(mLock);
3461         destroyDescriptorSetLayoutLocked(device, deviceDispatch, descriptorSetLayout, pAllocator);
3462     }
3463 
    // Creates a descriptor pool and records its limits (maxSets, per-type
    // counts) for host-side accounting. With the batched descriptor set update
    // feature, one boxed descriptor-set handle is pre-minted per possible set
    // and, when snapshotting, registered with the snapshot.
    VkResult on_vkCreateDescriptorPool(android::base::BumpPool* pool, VkDevice boxed_device,
                                       const VkDescriptorPoolCreateInfo* pCreateInfo,
                                       const VkAllocationCallbacks* pAllocator,
                                       VkDescriptorPool* pDescriptorPool) {
        auto device = unbox_VkDevice(boxed_device);
        auto vk = dispatch_VkDevice(boxed_device);

        auto res = vk->vkCreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool);

        if (res == VK_SUCCESS) {
            std::lock_guard<std::recursive_mutex> lock(mLock);
            VALIDATE_NEW_HANDLE_INFO_ENTRY(mDescriptorPoolInfo, *pDescriptorPool);
            auto& info = mDescriptorPoolInfo[*pDescriptorPool];
            info.device = device;
            // Hand a boxed handle back to the guest.
            *pDescriptorPool = new_boxed_non_dispatchable_VkDescriptorPool(*pDescriptorPool);
            info.boxed = *pDescriptorPool;
            // NOTE(review): shallow copy — pPoolSizes in createInfo will dangle
            // once the guest call returns; the per-type counts the decoder
            // actually uses are deep-copied into info.pools below.
            info.createInfo = *pCreateInfo;
            info.maxSets = pCreateInfo->maxSets;
            info.usedSets = 0;

            // Track per-descriptor-type capacity/usage for host-side accounting.
            for (uint32_t i = 0; i < pCreateInfo->poolSizeCount; ++i) {
                DescriptorPoolInfo::PoolState state;
                state.type = pCreateInfo->pPoolSizes[i].type;
                state.descriptorCount = pCreateInfo->pPoolSizes[i].descriptorCount;
                state.used = 0;
                info.pools.push_back(state);
            }

            if (m_emu->features.VulkanBatchedDescriptorSetUpdate.enabled) {
                // Pre-mint one boxed descriptor-set handle per possible set,
                // each backed by VK_NULL_HANDLE for now.
                for (uint32_t i = 0; i < pCreateInfo->maxSets; ++i) {
                    info.poolIds.push_back(
                        (uint64_t)new_boxed_non_dispatchable_VkDescriptorSet(VK_NULL_HANDLE));
                }
                if (snapshotsEnabled()) {
                    snapshot()->createExtraHandlesForNextApi(info.poolIds.data(),
                                                             info.poolIds.size());
                }
            }
        }

        return res;
    }
3506 
cleanupDescriptorPoolAllocedSetsLocked(DescriptorPoolInfo & descriptorPoolInfo,std::unordered_map<VkDescriptorSet,DescriptorSetInfo> & descriptorSetInfos,bool isDestroy=false)3507     void cleanupDescriptorPoolAllocedSetsLocked(
3508         DescriptorPoolInfo& descriptorPoolInfo,
3509         std::unordered_map<VkDescriptorSet, DescriptorSetInfo>& descriptorSetInfos,
3510         bool isDestroy = false) {
3511         for (auto it : descriptorPoolInfo.allocedSetsToBoxed) {
3512             auto unboxedSet = it.first;
3513             auto boxedSet = it.second;
3514             mDescriptorSetInfo.erase(unboxedSet);
3515             if (!m_emu->features.VulkanBatchedDescriptorSetUpdate.enabled) {
3516                 delete_VkDescriptorSet(boxedSet);
3517             }
3518         }
3519 
3520         if (m_emu->features.VulkanBatchedDescriptorSetUpdate.enabled) {
3521             if (isDestroy) {
3522                 for (auto poolId : descriptorPoolInfo.poolIds) {
3523                     delete_VkDescriptorSet((VkDescriptorSet)poolId);
3524                 }
3525             } else {
3526                 for (auto poolId : descriptorPoolInfo.poolIds) {
3527                     auto handleInfo = sBoxedHandleManager.get(poolId);
3528                     if (handleInfo)
3529                         handleInfo->underlying = reinterpret_cast<uint64_t>(VK_NULL_HANDLE);
3530                 }
3531             }
3532         }
3533 
3534         descriptorPoolInfo.usedSets = 0;
3535         descriptorPoolInfo.allocedSetsToBoxed.clear();
3536 
3537         for (auto& pool : descriptorPoolInfo.pools) {
3538             pool.used = 0;
3539         }
3540     }
3541 
destroyDescriptorPoolWithExclusiveInfo(VkDevice device,VulkanDispatch * deviceDispatch,VkDescriptorPool descriptorPool,DescriptorPoolInfo & descriptorPoolInfo,std::unordered_map<VkDescriptorSet,DescriptorSetInfo> & descriptorSetInfos,const VkAllocationCallbacks * pAllocator)3542     void destroyDescriptorPoolWithExclusiveInfo(
3543         VkDevice device, VulkanDispatch* deviceDispatch, VkDescriptorPool descriptorPool,
3544         DescriptorPoolInfo& descriptorPoolInfo,
3545         std::unordered_map<VkDescriptorSet, DescriptorSetInfo>& descriptorSetInfos,
3546         const VkAllocationCallbacks* pAllocator) {
3547         cleanupDescriptorPoolAllocedSetsLocked(descriptorPoolInfo, descriptorSetInfos,
3548                                                true /* destroy */);
3549 
3550         deviceDispatch->vkDestroyDescriptorPool(device, descriptorPool, pAllocator);
3551     }
3552 
destroyDescriptorPoolLocked(VkDevice device,VulkanDispatch * deviceDispatch,VkDescriptorPool descriptorPool,const VkAllocationCallbacks * pAllocator)3553     void destroyDescriptorPoolLocked(VkDevice device, VulkanDispatch* deviceDispatch,
3554                                      VkDescriptorPool descriptorPool,
3555                                      const VkAllocationCallbacks* pAllocator) {
3556         auto descriptorPoolInfoIt = mDescriptorPoolInfo.find(descriptorPool);
3557         if (descriptorPoolInfoIt == mDescriptorPoolInfo.end()) return;
3558         auto& descriptorPoolInfo = descriptorPoolInfoIt->second;
3559 
3560         destroyDescriptorPoolWithExclusiveInfo(device, deviceDispatch, descriptorPool,
3561                                                descriptorPoolInfo, mDescriptorSetInfo, pAllocator);
3562 
3563         mDescriptorPoolInfo.erase(descriptorPoolInfoIt);
3564     }
3565 
on_vkDestroyDescriptorPool(android::base::BumpPool * pool,VkDevice boxed_device,VkDescriptorPool descriptorPool,const VkAllocationCallbacks * pAllocator)3566     void on_vkDestroyDescriptorPool(android::base::BumpPool* pool, VkDevice boxed_device,
3567                                     VkDescriptorPool descriptorPool,
3568                                     const VkAllocationCallbacks* pAllocator) {
3569         auto device = unbox_VkDevice(boxed_device);
3570         auto deviceDispatch = dispatch_VkDevice(boxed_device);
3571 
3572         std::lock_guard<std::recursive_mutex> lock(mLock);
3573         destroyDescriptorPoolLocked(device, deviceDispatch, descriptorPool, pAllocator);
3574     }
3575 
resetDescriptorPoolInfoLocked(VkDescriptorPool descriptorPool)3576     void resetDescriptorPoolInfoLocked(VkDescriptorPool descriptorPool) {
3577         auto descriptorPoolInfoIt = mDescriptorPoolInfo.find(descriptorPool);
3578         if (descriptorPoolInfoIt == mDescriptorPoolInfo.end()) return;
3579         auto& descriptorPoolInfo = descriptorPoolInfoIt->second;
3580 
3581         cleanupDescriptorPoolAllocedSetsLocked(descriptorPoolInfo, mDescriptorSetInfo,
3582                                                /*isDestroy=*/false);
3583     }
3584 
on_vkResetDescriptorPool(android::base::BumpPool * pool,VkDevice boxed_device,VkDescriptorPool descriptorPool,VkDescriptorPoolResetFlags flags)3585     VkResult on_vkResetDescriptorPool(android::base::BumpPool* pool, VkDevice boxed_device,
3586                                       VkDescriptorPool descriptorPool,
3587                                       VkDescriptorPoolResetFlags flags) {
3588         auto device = unbox_VkDevice(boxed_device);
3589         auto deviceDispatch = dispatch_VkDevice(boxed_device);
3590 
3591         auto result = deviceDispatch->vkResetDescriptorPool(device, descriptorPool, flags);
3592         if (result != VK_SUCCESS) return result;
3593 
3594         std::lock_guard<std::recursive_mutex> lock(mLock);
3595         resetDescriptorPoolInfoLocked(descriptorPool);
3596 
3597         return VK_SUCCESS;
3598     }
3599 
initDescriptorSetInfoLocked(VkDescriptorPool pool,VkDescriptorSetLayout setLayout,uint64_t boxedDescriptorSet,VkDescriptorSet descriptorSet)3600     void initDescriptorSetInfoLocked(VkDescriptorPool pool, VkDescriptorSetLayout setLayout,
3601                                      uint64_t boxedDescriptorSet, VkDescriptorSet descriptorSet) {
3602         auto* poolInfo = android::base::find(mDescriptorPoolInfo, pool);
3603         if (!poolInfo) {
3604             GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER)) << "Cannot find poolInfo";
3605         }
3606 
3607         auto* setLayoutInfo = android::base::find(mDescriptorSetLayoutInfo, setLayout);
3608         if (!setLayoutInfo) {
3609             GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER)) << "Cannot find setLayout";
3610         }
3611 
3612         VALIDATE_NEW_HANDLE_INFO_ENTRY(mDescriptorSetInfo, descriptorSet);
3613         auto& setInfo = mDescriptorSetInfo[descriptorSet];
3614 
3615         setInfo.pool = pool;
3616         setInfo.unboxedLayout = setLayout;
3617         setInfo.bindings = setLayoutInfo->bindings;
3618         for (size_t i = 0; i < setInfo.bindings.size(); i++) {
3619             VkDescriptorSetLayoutBinding dslBinding = setInfo.bindings[i];
3620             int bindingIdx = dslBinding.binding;
3621             if (setInfo.allWrites.size() <= bindingIdx) {
3622                 setInfo.allWrites.resize(bindingIdx + 1);
3623             }
3624             setInfo.allWrites[bindingIdx].resize(dslBinding.descriptorCount);
3625             for (auto& write : setInfo.allWrites[bindingIdx]) {
3626                 write.descriptorType = dslBinding.descriptorType;
3627                 write.dstArrayElement = 0;
3628             }
3629         }
3630 
3631         poolInfo->allocedSetsToBoxed[descriptorSet] = (VkDescriptorSet)boxedDescriptorSet;
3632         applyDescriptorSetAllocationLocked(*poolInfo, setInfo.bindings);
3633     }
3634 
on_vkAllocateDescriptorSets(android::base::BumpPool * pool,VkDevice boxed_device,const VkDescriptorSetAllocateInfo * pAllocateInfo,VkDescriptorSet * pDescriptorSets)3635     VkResult on_vkAllocateDescriptorSets(android::base::BumpPool* pool, VkDevice boxed_device,
3636                                          const VkDescriptorSetAllocateInfo* pAllocateInfo,
3637                                          VkDescriptorSet* pDescriptorSets) {
3638         auto device = unbox_VkDevice(boxed_device);
3639         auto vk = dispatch_VkDevice(boxed_device);
3640 
3641         std::lock_guard<std::recursive_mutex> lock(mLock);
3642 
3643         auto allocValidationRes = validateDescriptorSetAllocLocked(pAllocateInfo);
3644         if (allocValidationRes != VK_SUCCESS) return allocValidationRes;
3645 
3646         auto res = vk->vkAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
3647 
3648         if (res == VK_SUCCESS) {
3649             auto* poolInfo =
3650                 android::base::find(mDescriptorPoolInfo, pAllocateInfo->descriptorPool);
3651             if (!poolInfo) return res;
3652 
3653             for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; ++i) {
3654                 auto unboxed = pDescriptorSets[i];
3655                 pDescriptorSets[i] = new_boxed_non_dispatchable_VkDescriptorSet(pDescriptorSets[i]);
3656                 initDescriptorSetInfoLocked(pAllocateInfo->descriptorPool,
3657                                             pAllocateInfo->pSetLayouts[i],
3658                                             (uint64_t)(pDescriptorSets[i]), unboxed);
3659             }
3660         }
3661 
3662         return res;
3663     }
3664 
    // Decoder entry point for vkFreeDescriptorSets. On driver success, releases
    // each set's tracked allocation back to its pool and tears down the
    // unboxed->boxed handle mapping and set bookkeeping.
    VkResult on_vkFreeDescriptorSets(android::base::BumpPool* pool, VkDevice boxed_device,
                                     VkDescriptorPool descriptorPool, uint32_t descriptorSetCount,
                                     const VkDescriptorSet* pDescriptorSets) {
        auto device = unbox_VkDevice(boxed_device);
        auto vk = dispatch_VkDevice(boxed_device);

        auto res =
            vk->vkFreeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets);

        if (res == VK_SUCCESS) {
            std::lock_guard<std::recursive_mutex> lock(mLock);

            for (uint32_t i = 0; i < descriptorSetCount; ++i) {
                // Sets unknown to the tracker (e.g. never registered) are skipped
                // silently; the driver free above already handled them.
                auto* setInfo = android::base::find(mDescriptorSetInfo, pDescriptorSets[i]);
                if (!setInfo) continue;
                auto* poolInfo = android::base::find(mDescriptorPoolInfo, setInfo->pool);
                if (!poolInfo) continue;

                // Return this set's descriptor counts to the pool's capacity.
                removeDescriptorSetAllocationLocked(*poolInfo, setInfo->bindings);

                auto descSetAllocedEntry =
                    android::base::find(poolInfo->allocedSetsToBoxed, pDescriptorSets[i]);
                if (!descSetAllocedEntry) continue;

                auto handleInfo = sBoxedHandleManager.get((uint64_t)*descSetAllocedEntry);
                if (handleInfo) {
                    if (m_emu->features.VulkanBatchedDescriptorSetUpdate.enabled) {
                        // Batched-update mode: keep the boxed handle entry alive but
                        // point it at VK_NULL_HANDLE — presumably so later batched
                        // updates referencing the boxed handle become no-ops rather
                        // than use-after-free. TODO(review): confirm against the
                        // batched descriptor update path.
                        handleInfo->underlying = reinterpret_cast<uint64_t>(VK_NULL_HANDLE);
                    } else {
                        // Non-batched mode: the boxed handle itself is retired.
                        delete_VkDescriptorSet(*descSetAllocedEntry);
                    }
                }

                poolInfo->allocedSetsToBoxed.erase(pDescriptorSets[i]);

                mDescriptorSetInfo.erase(pDescriptorSets[i]);
            }
        }

        return res;
    }
3706 
on_vkUpdateDescriptorSets(android::base::BumpPool * pool,VkDevice boxed_device,uint32_t descriptorWriteCount,const VkWriteDescriptorSet * pDescriptorWrites,uint32_t descriptorCopyCount,const VkCopyDescriptorSet * pDescriptorCopies)3707     void on_vkUpdateDescriptorSets(android::base::BumpPool* pool, VkDevice boxed_device,
3708                                    uint32_t descriptorWriteCount,
3709                                    const VkWriteDescriptorSet* pDescriptorWrites,
3710                                    uint32_t descriptorCopyCount,
3711                                    const VkCopyDescriptorSet* pDescriptorCopies) {
3712         auto device = unbox_VkDevice(boxed_device);
3713         auto vk = dispatch_VkDevice(boxed_device);
3714 
3715         std::lock_guard<std::recursive_mutex> lock(mLock);
3716         on_vkUpdateDescriptorSetsImpl(pool, vk, device, descriptorWriteCount, pDescriptorWrites,
3717                                       descriptorCopyCount, pDescriptorCopies);
3718     }
3719 
on_vkUpdateDescriptorSetsImpl(android::base::BumpPool * pool,VulkanDispatch * vk,VkDevice device,uint32_t descriptorWriteCount,const VkWriteDescriptorSet * pDescriptorWrites,uint32_t descriptorCopyCount,const VkCopyDescriptorSet * pDescriptorCopies)3720     void on_vkUpdateDescriptorSetsImpl(android::base::BumpPool* pool, VulkanDispatch* vk,
3721                                        VkDevice device, uint32_t descriptorWriteCount,
3722                                        const VkWriteDescriptorSet* pDescriptorWrites,
3723                                        uint32_t descriptorCopyCount,
3724                                        const VkCopyDescriptorSet* pDescriptorCopies) {
3725         for (uint32_t writeIdx = 0; writeIdx < descriptorWriteCount; writeIdx++) {
3726             const VkWriteDescriptorSet& descriptorWrite = pDescriptorWrites[writeIdx];
3727             auto ite = mDescriptorSetInfo.find(descriptorWrite.dstSet);
3728             if (ite == mDescriptorSetInfo.end()) {
3729                 continue;
3730             }
3731             DescriptorSetInfo& descriptorSetInfo = ite->second;
3732             auto& table = descriptorSetInfo.allWrites;
3733             VkDescriptorType descType = descriptorWrite.descriptorType;
3734             uint32_t dstBinding = descriptorWrite.dstBinding;
3735             uint32_t dstArrayElement = descriptorWrite.dstArrayElement;
3736             uint32_t descriptorCount = descriptorWrite.descriptorCount;
3737 
3738             uint32_t arrOffset = dstArrayElement;
3739 
3740             if (isDescriptorTypeImageInfo(descType)) {
3741                 for (uint32_t writeElemIdx = 0; writeElemIdx < descriptorCount;
3742                      ++writeElemIdx, ++arrOffset) {
3743                     // Descriptor writes wrap to the next binding. See
3744                     // https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VkWriteDescriptorSet.html
3745                     if (arrOffset >= table[dstBinding].size()) {
3746                         ++dstBinding;
3747                         arrOffset = 0;
3748                     }
3749                     auto& entry = table[dstBinding][arrOffset];
3750                     entry.imageInfo = descriptorWrite.pImageInfo[writeElemIdx];
3751                     entry.writeType = DescriptorSetInfo::DescriptorWriteType::ImageInfo;
3752                     entry.descriptorType = descType;
3753                     entry.alives.clear();
3754                     entry.boundColorBuffer.reset();
3755                     if (descriptorTypeContainsImage(descType)) {
3756                         auto* imageViewInfo =
3757                             android::base::find(mImageViewInfo, entry.imageInfo.imageView);
3758                         if (imageViewInfo) {
3759                             entry.alives.push_back(imageViewInfo->alive);
3760                             entry.boundColorBuffer = imageViewInfo->boundColorBuffer;
3761                         }
3762                     }
3763                     if (descriptorTypeContainsSampler(descType)) {
3764                         auto* samplerInfo =
3765                             android::base::find(mSamplerInfo, entry.imageInfo.sampler);
3766                         if (samplerInfo) {
3767                             entry.alives.push_back(samplerInfo->alive);
3768                         }
3769                     }
3770                 }
3771             } else if (isDescriptorTypeBufferInfo(descType)) {
3772                 for (uint32_t writeElemIdx = 0; writeElemIdx < descriptorCount;
3773                      ++writeElemIdx, ++arrOffset) {
3774                     if (arrOffset >= table[dstBinding].size()) {
3775                         ++dstBinding;
3776                         arrOffset = 0;
3777                     }
3778                     auto& entry = table[dstBinding][arrOffset];
3779                     entry.bufferInfo = descriptorWrite.pBufferInfo[writeElemIdx];
3780                     entry.writeType = DescriptorSetInfo::DescriptorWriteType::BufferInfo;
3781                     entry.descriptorType = descType;
3782                     entry.alives.clear();
3783                     auto* bufferInfo = android::base::find(mBufferInfo, entry.bufferInfo.buffer);
3784                     if (bufferInfo) {
3785                         entry.alives.push_back(bufferInfo->alive);
3786                     }
3787                 }
3788             } else if (isDescriptorTypeBufferView(descType)) {
3789                 for (uint32_t writeElemIdx = 0; writeElemIdx < descriptorCount;
3790                      ++writeElemIdx, ++arrOffset) {
3791                     if (arrOffset >= table[dstBinding].size()) {
3792                         ++dstBinding;
3793                         arrOffset = 0;
3794                     }
3795                     auto& entry = table[dstBinding][arrOffset];
3796                     entry.bufferView = descriptorWrite.pTexelBufferView[writeElemIdx];
3797                     entry.writeType = DescriptorSetInfo::DescriptorWriteType::BufferView;
3798                     entry.descriptorType = descType;
3799                     if (snapshotsEnabled()) {
3800                         // TODO: check alive
3801                         ERR("%s: Snapshot for texel buffer view is incomplete.\n", __func__);
3802                     }
3803                 }
3804             } else if (isDescriptorTypeInlineUniformBlock(descType)) {
3805                 const VkWriteDescriptorSetInlineUniformBlock* descInlineUniformBlock =
3806                     static_cast<const VkWriteDescriptorSetInlineUniformBlock*>(
3807                         descriptorWrite.pNext);
3808                 while (descInlineUniformBlock &&
3809                        descInlineUniformBlock->sType !=
3810                            VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK) {
3811                     descInlineUniformBlock =
3812                         static_cast<const VkWriteDescriptorSetInlineUniformBlock*>(
3813                             descInlineUniformBlock->pNext);
3814                 }
3815                 if (!descInlineUniformBlock) {
3816                     GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
3817                         << __func__ << ": did not find inline uniform block";
3818                     return;
3819                 }
3820                 auto& entry = table[dstBinding][0];
3821                 entry.inlineUniformBlock = *descInlineUniformBlock;
3822                 entry.inlineUniformBlockBuffer.assign(
3823                     static_cast<const uint8_t*>(descInlineUniformBlock->pData),
3824                     static_cast<const uint8_t*>(descInlineUniformBlock->pData) +
3825                         descInlineUniformBlock->dataSize);
3826                 entry.writeType = DescriptorSetInfo::DescriptorWriteType::InlineUniformBlock;
3827                 entry.descriptorType = descType;
3828                 entry.dstArrayElement = dstArrayElement;
3829             } else if (isDescriptorTypeAccelerationStructure(descType)) {
3830                 // TODO
3831                 // Look for pNext inline uniform block or acceleration structure.
3832                 // Append new DescriptorWrite entry that holds the buffer
3833                 if (snapshotsEnabled()) {
3834                     ERR("%s: Ignoring Snapshot for emulated write for descriptor type 0x%x\n",
3835                         __func__, descType);
3836                 }
3837             }
3838         }
3839         // TODO: bookkeep pDescriptorCopies
3840         // Our primary use case vkQueueCommitDescriptorSetUpdatesGOOGLE does not use
3841         // pDescriptorCopies. Thus skip its implementation for now.
3842         if (descriptorCopyCount && snapshotsEnabled()) {
3843             ERR("%s: Snapshot does not support descriptor copy yet\n");
3844         }
3845         bool needEmulateWriteDescriptor = false;
3846         // c++ seems to allow for 0-size array allocation
3847         std::unique_ptr<bool[]> descriptorWritesNeedDeepCopy(new bool[descriptorWriteCount]);
3848         for (uint32_t i = 0; i < descriptorWriteCount; i++) {
3849             const VkWriteDescriptorSet& descriptorWrite = pDescriptorWrites[i];
3850             auto descriptorSetInfo =
3851                 android::base::find(mDescriptorSetInfo, descriptorWrite.dstSet);
3852             descriptorWritesNeedDeepCopy[i] = false;
3853             if (!vk_util::vk_descriptor_type_has_image_view(descriptorWrite.descriptorType)) {
3854                 continue;
3855             }
3856             for (uint32_t j = 0; j < descriptorWrite.descriptorCount; j++) {
3857                 const VkDescriptorImageInfo& imageInfo = descriptorWrite.pImageInfo[j];
3858                 const auto* imgViewInfo = android::base::find(mImageViewInfo, imageInfo.imageView);
3859                 if (!imgViewInfo) {
3860                     continue;
3861                 }
3862                 if (descriptorWrite.descriptorType != VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) {
3863                     continue;
3864                 }
3865                 const auto* samplerInfo = android::base::find(mSamplerInfo, imageInfo.sampler);
3866                 if (samplerInfo && imgViewInfo->needEmulatedAlpha &&
3867                     samplerInfo->needEmulatedAlpha) {
3868                     needEmulateWriteDescriptor = true;
3869                     descriptorWritesNeedDeepCopy[i] = true;
3870                     break;
3871                 }
3872             }
3873         }
3874         if (!needEmulateWriteDescriptor) {
3875             vk->vkUpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites,
3876                                        descriptorCopyCount, pDescriptorCopies);
3877             return;
3878         }
3879         std::list<std::unique_ptr<VkDescriptorImageInfo[]>> imageInfoPool;
3880         std::unique_ptr<VkWriteDescriptorSet[]> descriptorWrites(
3881             new VkWriteDescriptorSet[descriptorWriteCount]);
3882         for (uint32_t i = 0; i < descriptorWriteCount; i++) {
3883             const VkWriteDescriptorSet& srcDescriptorWrite = pDescriptorWrites[i];
3884             VkWriteDescriptorSet& dstDescriptorWrite = descriptorWrites[i];
3885             // Shallow copy first
3886             dstDescriptorWrite = srcDescriptorWrite;
3887             if (!descriptorWritesNeedDeepCopy[i]) {
3888                 continue;
3889             }
3890             // Deep copy
3891             assert(dstDescriptorWrite.descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
3892             imageInfoPool.emplace_back(
3893                 new VkDescriptorImageInfo[dstDescriptorWrite.descriptorCount]);
3894             VkDescriptorImageInfo* imageInfos = imageInfoPool.back().get();
3895             memcpy(imageInfos, srcDescriptorWrite.pImageInfo,
3896                    dstDescriptorWrite.descriptorCount * sizeof(VkDescriptorImageInfo));
3897             dstDescriptorWrite.pImageInfo = imageInfos;
3898             for (uint32_t j = 0; j < dstDescriptorWrite.descriptorCount; j++) {
3899                 VkDescriptorImageInfo& imageInfo = imageInfos[j];
3900                 const auto* imgViewInfo = android::base::find(mImageViewInfo, imageInfo.imageView);
3901                 auto* samplerInfo = android::base::find(mSamplerInfo, imageInfo.sampler);
3902                 if (!imgViewInfo || !samplerInfo) continue;
3903                 if (imgViewInfo->needEmulatedAlpha && samplerInfo->needEmulatedAlpha) {
3904                     if (samplerInfo->emulatedborderSampler == VK_NULL_HANDLE) {
3905                         // create the emulated sampler
3906                         VkSamplerCreateInfo createInfo;
3907                         deepcopy_VkSamplerCreateInfo(pool, VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
3908                                                      &samplerInfo->createInfo, &createInfo);
3909                         switch (createInfo.borderColor) {
3910                             case VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK:
3911                                 createInfo.borderColor = VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK;
3912                                 break;
3913                             case VK_BORDER_COLOR_INT_TRANSPARENT_BLACK:
3914                                 createInfo.borderColor = VK_BORDER_COLOR_INT_OPAQUE_BLACK;
3915                                 break;
3916                             case VK_BORDER_COLOR_FLOAT_CUSTOM_EXT:
3917                             case VK_BORDER_COLOR_INT_CUSTOM_EXT: {
3918                                 VkSamplerCustomBorderColorCreateInfoEXT*
3919                                     customBorderColorCreateInfo =
3920                                         vk_find_struct<VkSamplerCustomBorderColorCreateInfoEXT>(
3921                                             &createInfo);
3922                                 if (customBorderColorCreateInfo) {
3923                                     switch (createInfo.borderColor) {
3924                                         case VK_BORDER_COLOR_FLOAT_CUSTOM_EXT:
3925                                             customBorderColorCreateInfo->customBorderColor
3926                                                 .float32[3] = 1.0f;
3927                                             break;
3928                                         case VK_BORDER_COLOR_INT_CUSTOM_EXT:
3929                                             customBorderColorCreateInfo->customBorderColor
3930                                                 .int32[3] = 128;
3931                                             break;
3932                                         default:
3933                                             break;
3934                                     }
3935                                 }
3936                                 break;
3937                             }
3938                             default:
3939                                 break;
3940                         }
3941                         vk->vkCreateSampler(device, &createInfo, nullptr,
3942                                             &samplerInfo->emulatedborderSampler);
3943                     }
3944                     imageInfo.sampler = samplerInfo->emulatedborderSampler;
3945                 }
3946             }
3947         }
3948         vk->vkUpdateDescriptorSets(device, descriptorWriteCount, descriptorWrites.get(),
3949                                    descriptorCopyCount, pDescriptorCopies);
3950     }
3951 
on_vkCreateShaderModule(android::base::BumpPool * pool,VkDevice boxed_device,const VkShaderModuleCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkShaderModule * pShaderModule)3952     VkResult on_vkCreateShaderModule(android::base::BumpPool* pool, VkDevice boxed_device,
3953                                      const VkShaderModuleCreateInfo* pCreateInfo,
3954                                      const VkAllocationCallbacks* pAllocator,
3955                                      VkShaderModule* pShaderModule) {
3956         auto device = unbox_VkDevice(boxed_device);
3957         auto deviceDispatch = dispatch_VkDevice(boxed_device);
3958 
3959         VkResult result =
3960             deviceDispatch->vkCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule);
3961         if (result != VK_SUCCESS) {
3962             return result;
3963         }
3964 
3965         std::lock_guard<std::recursive_mutex> lock(mLock);
3966 
3967         VALIDATE_NEW_HANDLE_INFO_ENTRY(mShaderModuleInfo, *pShaderModule);
3968         auto& shaderModuleInfo = mShaderModuleInfo[*pShaderModule];
3969         shaderModuleInfo.device = device;
3970 
3971         *pShaderModule = new_boxed_non_dispatchable_VkShaderModule(*pShaderModule);
3972 
3973         return result;
3974     }
3975 
destroyShaderModuleWithExclusiveInfo(VkDevice device,VulkanDispatch * deviceDispatch,VkShaderModule shaderModule,ShaderModuleInfo &,const VkAllocationCallbacks * pAllocator)3976     void destroyShaderModuleWithExclusiveInfo(VkDevice device, VulkanDispatch* deviceDispatch,
3977                                               VkShaderModule shaderModule, ShaderModuleInfo&,
3978                                               const VkAllocationCallbacks* pAllocator) {
3979         deviceDispatch->vkDestroyShaderModule(device, shaderModule, pAllocator);
3980     }
3981 
destroyShaderModuleLocked(VkDevice device,VulkanDispatch * deviceDispatch,VkShaderModule shaderModule,const VkAllocationCallbacks * pAllocator)3982     void destroyShaderModuleLocked(VkDevice device, VulkanDispatch* deviceDispatch,
3983                                    VkShaderModule shaderModule,
3984                                    const VkAllocationCallbacks* pAllocator) {
3985         auto shaderModuleInfoIt = mShaderModuleInfo.find(shaderModule);
3986         if (shaderModuleInfoIt == mShaderModuleInfo.end()) return;
3987         auto& shaderModuleInfo = shaderModuleInfoIt->second;
3988 
3989         destroyShaderModuleWithExclusiveInfo(device, deviceDispatch, shaderModule, shaderModuleInfo,
3990                                              pAllocator);
3991 
3992         mShaderModuleInfo.erase(shaderModuleInfoIt);
3993     }
3994 
on_vkDestroyShaderModule(android::base::BumpPool * pool,VkDevice boxed_device,VkShaderModule shaderModule,const VkAllocationCallbacks * pAllocator)3995     void on_vkDestroyShaderModule(android::base::BumpPool* pool, VkDevice boxed_device,
3996                                   VkShaderModule shaderModule,
3997                                   const VkAllocationCallbacks* pAllocator) {
3998         auto device = unbox_VkDevice(boxed_device);
3999         auto deviceDispatch = dispatch_VkDevice(boxed_device);
4000 
4001         std::lock_guard<std::recursive_mutex> lock(mLock);
4002         destroyShaderModuleLocked(device, deviceDispatch, shaderModule, pAllocator);
4003     }
4004 
on_vkCreatePipelineCache(android::base::BumpPool * pool,VkDevice boxed_device,const VkPipelineCacheCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkPipelineCache * pPipelineCache)4005     VkResult on_vkCreatePipelineCache(android::base::BumpPool* pool, VkDevice boxed_device,
4006                                       const VkPipelineCacheCreateInfo* pCreateInfo,
4007                                       const VkAllocationCallbacks* pAllocator,
4008                                       VkPipelineCache* pPipelineCache) {
4009         auto device = unbox_VkDevice(boxed_device);
4010         auto deviceDispatch = dispatch_VkDevice(boxed_device);
4011 
4012         VkResult result =
4013             deviceDispatch->vkCreatePipelineCache(device, pCreateInfo, pAllocator, pPipelineCache);
4014         if (result != VK_SUCCESS) {
4015             return result;
4016         }
4017 
4018         std::lock_guard<std::recursive_mutex> lock(mLock);
4019 
4020         VALIDATE_NEW_HANDLE_INFO_ENTRY(mPipelineCacheInfo, *pPipelineCache);
4021         auto& pipelineCacheInfo = mPipelineCacheInfo[*pPipelineCache];
4022         pipelineCacheInfo.device = device;
4023 
4024         *pPipelineCache = new_boxed_non_dispatchable_VkPipelineCache(*pPipelineCache);
4025 
4026         return result;
4027     }
4028 
destroyPipelineCacheWithExclusiveInfo(VkDevice device,VulkanDispatch * deviceDispatch,VkPipelineCache pipelineCache,PipelineCacheInfo & pipelineCacheInfo,const VkAllocationCallbacks * pAllocator)4029     void destroyPipelineCacheWithExclusiveInfo(VkDevice device, VulkanDispatch* deviceDispatch,
4030                                                VkPipelineCache pipelineCache,
4031                                                PipelineCacheInfo& pipelineCacheInfo,
4032                                                const VkAllocationCallbacks* pAllocator) {
4033         deviceDispatch->vkDestroyPipelineCache(device, pipelineCache, pAllocator);
4034     }
4035 
destroyPipelineCacheLocked(VkDevice device,VulkanDispatch * deviceDispatch,VkPipelineCache pipelineCache,const VkAllocationCallbacks * pAllocator)4036     void destroyPipelineCacheLocked(VkDevice device, VulkanDispatch* deviceDispatch,
4037                                     VkPipelineCache pipelineCache,
4038                                     const VkAllocationCallbacks* pAllocator) {
4039         auto pipelineCacheInfoIt = mPipelineCacheInfo.find(pipelineCache);
4040         if (pipelineCacheInfoIt == mPipelineCacheInfo.end()) return;
4041         auto& pipelineCacheInfo = pipelineCacheInfoIt->second;
4042 
4043         destroyPipelineCacheWithExclusiveInfo(device, deviceDispatch, pipelineCache,
4044                                               pipelineCacheInfo, pAllocator);
4045 
4046         mPipelineCacheInfo.erase(pipelineCache);
4047     }
4048 
on_vkDestroyPipelineCache(android::base::BumpPool * pool,VkDevice boxed_device,VkPipelineCache pipelineCache,const VkAllocationCallbacks * pAllocator)4049     void on_vkDestroyPipelineCache(android::base::BumpPool* pool, VkDevice boxed_device,
4050                                    VkPipelineCache pipelineCache,
4051                                    const VkAllocationCallbacks* pAllocator) {
4052         auto device = unbox_VkDevice(boxed_device);
4053         auto deviceDispatch = dispatch_VkDevice(boxed_device);
4054 
4055         std::lock_guard<std::recursive_mutex> lock(mLock);
4056         destroyPipelineCacheLocked(device, deviceDispatch, pipelineCache, pAllocator);
4057     }
4058 
on_vkCreateGraphicsPipelines(android::base::BumpPool * pool,VkDevice boxed_device,VkPipelineCache pipelineCache,uint32_t createInfoCount,const VkGraphicsPipelineCreateInfo * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkPipeline * pPipelines)4059     VkResult on_vkCreateGraphicsPipelines(android::base::BumpPool* pool, VkDevice boxed_device,
4060                                           VkPipelineCache pipelineCache, uint32_t createInfoCount,
4061                                           const VkGraphicsPipelineCreateInfo* pCreateInfos,
4062                                           const VkAllocationCallbacks* pAllocator,
4063                                           VkPipeline* pPipelines) {
4064         auto device = unbox_VkDevice(boxed_device);
4065         auto deviceDispatch = dispatch_VkDevice(boxed_device);
4066 
4067         VkResult result = deviceDispatch->vkCreateGraphicsPipelines(
4068             device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
4069         if (result != VK_SUCCESS && result != VK_PIPELINE_COMPILE_REQUIRED) {
4070             return result;
4071         }
4072 
4073         std::lock_guard<std::recursive_mutex> lock(mLock);
4074 
4075         for (uint32_t i = 0; i < createInfoCount; i++) {
4076             if (!pPipelines[i]) {
4077                 continue;
4078             }
4079             VALIDATE_NEW_HANDLE_INFO_ENTRY(mPipelineInfo, pPipelines[i]);
4080             auto& pipelineInfo = mPipelineInfo[pPipelines[i]];
4081             pipelineInfo.device = device;
4082 
4083             pPipelines[i] = new_boxed_non_dispatchable_VkPipeline(pPipelines[i]);
4084         }
4085 
4086         return result;
4087     }
4088 
on_vkCreateComputePipelines(android::base::BumpPool * pool,VkDevice boxed_device,VkPipelineCache pipelineCache,uint32_t createInfoCount,const VkComputePipelineCreateInfo * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkPipeline * pPipelines)4089     VkResult on_vkCreateComputePipelines(android::base::BumpPool* pool, VkDevice boxed_device,
4090                                           VkPipelineCache pipelineCache, uint32_t createInfoCount,
4091                                           const VkComputePipelineCreateInfo* pCreateInfos,
4092                                           const VkAllocationCallbacks* pAllocator,
4093                                           VkPipeline* pPipelines) {
4094         auto device = unbox_VkDevice(boxed_device);
4095         auto deviceDispatch = dispatch_VkDevice(boxed_device);
4096 
4097         VkResult result = deviceDispatch->vkCreateComputePipelines(
4098             device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
4099         if (result != VK_SUCCESS && result != VK_PIPELINE_COMPILE_REQUIRED) {
4100             return result;
4101         }
4102 
4103         std::lock_guard<std::recursive_mutex> lock(mLock);
4104 
4105         for (uint32_t i = 0; i < createInfoCount; i++) {
4106             if (!pPipelines[i]) {
4107                 continue;
4108             }
4109             VALIDATE_NEW_HANDLE_INFO_ENTRY(mPipelineInfo, pPipelines[i]);
4110             auto& pipelineInfo = mPipelineInfo[pPipelines[i]];
4111             pipelineInfo.device = device;
4112 
4113             pPipelines[i] = new_boxed_non_dispatchable_VkPipeline(pPipelines[i]);
4114         }
4115 
4116         return result;
4117     }
4118 
destroyPipelineWithExclusiveInfo(VkDevice device,VulkanDispatch * deviceDispatch,VkPipeline pipeline,PipelineInfo & pipelineInfo,const VkAllocationCallbacks * pAllocator)4119     void destroyPipelineWithExclusiveInfo(VkDevice device, VulkanDispatch* deviceDispatch,
4120                                           VkPipeline pipeline, PipelineInfo& pipelineInfo,
4121                                           const VkAllocationCallbacks* pAllocator) {
4122         deviceDispatch->vkDestroyPipeline(device, pipeline, pAllocator);
4123     }
4124 
destroyPipelineLocked(VkDevice device,VulkanDispatch * deviceDispatch,VkPipeline pipeline,const VkAllocationCallbacks * pAllocator)4125     void destroyPipelineLocked(VkDevice device, VulkanDispatch* deviceDispatch, VkPipeline pipeline,
4126                                const VkAllocationCallbacks* pAllocator) {
4127         auto pipelineInfoIt = mPipelineInfo.find(pipeline);
4128         if (pipelineInfoIt == mPipelineInfo.end()) return;
4129         auto& pipelineInfo = pipelineInfoIt->second;
4130 
4131         destroyPipelineWithExclusiveInfo(device, deviceDispatch, pipeline, pipelineInfo,
4132                                          pAllocator);
4133 
4134         mPipelineInfo.erase(pipeline);
4135     }
4136 
on_vkDestroyPipeline(android::base::BumpPool * pool,VkDevice boxed_device,VkPipeline pipeline,const VkAllocationCallbacks * pAllocator)4137     void on_vkDestroyPipeline(android::base::BumpPool* pool, VkDevice boxed_device,
4138                               VkPipeline pipeline, const VkAllocationCallbacks* pAllocator) {
4139         auto device = unbox_VkDevice(boxed_device);
4140         auto deviceDispatch = dispatch_VkDevice(boxed_device);
4141 
4142         std::lock_guard<std::recursive_mutex> lock(mLock);
4143         destroyPipelineLocked(device, deviceDispatch, pipeline, pAllocator);
4144     }
4145 
on_vkCmdCopyImage(android::base::BumpPool * pool,VkCommandBuffer boxed_commandBuffer,VkImage srcImage,VkImageLayout srcImageLayout,VkImage dstImage,VkImageLayout dstImageLayout,uint32_t regionCount,const VkImageCopy * pRegions)4146     void on_vkCmdCopyImage(android::base::BumpPool* pool, VkCommandBuffer boxed_commandBuffer,
4147                            VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
4148                            VkImageLayout dstImageLayout, uint32_t regionCount,
4149                            const VkImageCopy* pRegions) {
4150         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
4151         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
4152 
4153         std::lock_guard<std::recursive_mutex> lock(mLock);
4154         auto* srcImg = android::base::find(mImageInfo, srcImage);
4155         auto* dstImg = android::base::find(mImageInfo, dstImage);
4156         if (!srcImg || !dstImg) return;
4157 
4158         VkDevice device = srcImg->cmpInfo.device();
4159         auto* deviceInfo = android::base::find(mDeviceInfo, device);
4160         if (!deviceInfo) return;
4161 
4162         bool needEmulatedSrc = deviceInfo->needEmulatedDecompression(srcImg->cmpInfo);
4163         bool needEmulatedDst = deviceInfo->needEmulatedDecompression(dstImg->cmpInfo);
4164         if (!needEmulatedSrc && !needEmulatedDst) {
4165             vk->vkCmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout,
4166                                regionCount, pRegions);
4167             return;
4168         }
4169         VkImage srcImageMip = srcImage;
4170         VkImage dstImageMip = dstImage;
4171         for (uint32_t r = 0; r < regionCount; r++) {
4172             if (needEmulatedSrc) {
4173                 srcImageMip = srcImg->cmpInfo.compressedMipmap(pRegions[r].srcSubresource.mipLevel);
4174             }
4175             if (needEmulatedDst) {
4176                 dstImageMip = dstImg->cmpInfo.compressedMipmap(pRegions[r].dstSubresource.mipLevel);
4177             }
4178             VkImageCopy region = CompressedImageInfo::getCompressedMipmapsImageCopy(
4179                 pRegions[r], srcImg->cmpInfo, dstImg->cmpInfo, needEmulatedSrc, needEmulatedDst);
4180             vk->vkCmdCopyImage(commandBuffer, srcImageMip, srcImageLayout, dstImageMip,
4181                                dstImageLayout, 1, &region);
4182         }
4183     }
4184 
on_vkCmdCopyImageToBuffer(android::base::BumpPool * pool,VkCommandBuffer boxed_commandBuffer,VkImage srcImage,VkImageLayout srcImageLayout,VkBuffer dstBuffer,uint32_t regionCount,const VkBufferImageCopy * pRegions)4185     void on_vkCmdCopyImageToBuffer(android::base::BumpPool* pool,
4186                                    VkCommandBuffer boxed_commandBuffer, VkImage srcImage,
4187                                    VkImageLayout srcImageLayout, VkBuffer dstBuffer,
4188                                    uint32_t regionCount, const VkBufferImageCopy* pRegions) {
4189         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
4190         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
4191 
4192         std::lock_guard<std::recursive_mutex> lock(mLock);
4193         auto* imageInfo = android::base::find(mImageInfo, srcImage);
4194         auto* bufferInfo = android::base::find(mBufferInfo, dstBuffer);
4195         if (!imageInfo || !bufferInfo) return;
4196         auto* deviceInfo = android::base::find(mDeviceInfo, bufferInfo->device);
4197         if (!deviceInfo) return;
4198         CompressedImageInfo& cmpInfo = imageInfo->cmpInfo;
4199         if (!deviceInfo->needEmulatedDecompression(cmpInfo)) {
4200             vk->vkCmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer,
4201                                        regionCount, pRegions);
4202             return;
4203         }
4204         for (uint32_t r = 0; r < regionCount; r++) {
4205             uint32_t mipLevel = pRegions[r].imageSubresource.mipLevel;
4206             VkBufferImageCopy region = cmpInfo.getBufferImageCopy(pRegions[r]);
4207             vk->vkCmdCopyImageToBuffer(commandBuffer, cmpInfo.compressedMipmap(mipLevel),
4208                                        srcImageLayout, dstBuffer, 1, &region);
4209         }
4210     }
4211 
on_vkCmdCopyImage2(android::base::BumpPool * pool,VkCommandBuffer boxed_commandBuffer,const VkCopyImageInfo2 * pCopyImageInfo)4212     void on_vkCmdCopyImage2(android::base::BumpPool* pool,
4213                            VkCommandBuffer boxed_commandBuffer,
4214                            const VkCopyImageInfo2* pCopyImageInfo) {
4215         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
4216         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
4217 
4218         std::lock_guard<std::recursive_mutex> lock(mLock);
4219         auto* srcImg = android::base::find(mImageInfo, pCopyImageInfo->srcImage);
4220         auto* dstImg = android::base::find(mImageInfo, pCopyImageInfo->dstImage);
4221         if (!srcImg || !dstImg) return;
4222 
4223         VkDevice device = srcImg->cmpInfo.device();
4224         auto* deviceInfo = android::base::find(mDeviceInfo, device);
4225         if (!deviceInfo) return;
4226 
4227         bool needEmulatedSrc = deviceInfo->needEmulatedDecompression(srcImg->cmpInfo);
4228         bool needEmulatedDst = deviceInfo->needEmulatedDecompression(dstImg->cmpInfo);
4229         if (!needEmulatedSrc && !needEmulatedDst) {
4230             vk->vkCmdCopyImage2(commandBuffer, pCopyImageInfo);
4231             return;
4232         }
4233         VkImage srcImageMip = pCopyImageInfo->srcImage;
4234         VkImage dstImageMip = pCopyImageInfo->dstImage;
4235         for (uint32_t r = 0; r < pCopyImageInfo->regionCount; r++) {
4236             if (needEmulatedSrc) {
4237                 srcImageMip = srcImg->cmpInfo.compressedMipmap(pCopyImageInfo->pRegions[r].srcSubresource.mipLevel);
4238             }
4239             if (needEmulatedDst) {
4240                 dstImageMip = dstImg->cmpInfo.compressedMipmap(pCopyImageInfo->pRegions[r].dstSubresource.mipLevel);
4241             }
4242 
4243             VkCopyImageInfo2 inf2 = *pCopyImageInfo;
4244             inf2.regionCount = 1;
4245             inf2.srcImage = srcImageMip;
4246             inf2.dstImage = dstImageMip;
4247 
4248             VkImageCopy2 region = CompressedImageInfo::getCompressedMipmapsImageCopy(
4249                 pCopyImageInfo->pRegions[r], srcImg->cmpInfo, dstImg->cmpInfo, needEmulatedSrc, needEmulatedDst);
4250             inf2.pRegions = &region;
4251 
4252             vk->vkCmdCopyImage2(commandBuffer, &inf2);
4253         }
4254     }
4255 
on_vkCmdCopyImageToBuffer2(android::base::BumpPool * pool,VkCommandBuffer boxed_commandBuffer,const VkCopyImageToBufferInfo2 * pCopyImageToBufferInfo)4256     void on_vkCmdCopyImageToBuffer2(android::base::BumpPool* pool,
4257                                    VkCommandBuffer boxed_commandBuffer,
4258                                    const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo) {
4259         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
4260         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
4261 
4262         std::lock_guard<std::recursive_mutex> lock(mLock);
4263         auto* imageInfo = android::base::find(mImageInfo, pCopyImageToBufferInfo->srcImage);
4264         auto* bufferInfo = android::base::find(mBufferInfo, pCopyImageToBufferInfo->dstBuffer);
4265         if (!imageInfo || !bufferInfo) return;
4266         auto* deviceInfo = android::base::find(mDeviceInfo, bufferInfo->device);
4267         if (!deviceInfo) return;
4268         CompressedImageInfo& cmpInfo = imageInfo->cmpInfo;
4269         if (!deviceInfo->needEmulatedDecompression(cmpInfo)) {
4270             vk->vkCmdCopyImageToBuffer2(commandBuffer, pCopyImageToBufferInfo);
4271             return;
4272         }
4273         for (uint32_t r = 0; r < pCopyImageToBufferInfo->regionCount; r++) {
4274             uint32_t mipLevel = pCopyImageToBufferInfo->pRegions[r].imageSubresource.mipLevel;
4275             VkBufferImageCopy2 region = cmpInfo.getBufferImageCopy(pCopyImageToBufferInfo->pRegions[r]);
4276             VkCopyImageToBufferInfo2 inf = *pCopyImageToBufferInfo;
4277             inf.regionCount = 1;
4278             inf.pRegions = &region;
4279             inf.srcImage = cmpInfo.compressedMipmap(mipLevel);
4280 
4281             vk->vkCmdCopyImageToBuffer2(commandBuffer, &inf);
4282         }
4283     }
4284 
on_vkCmdCopyImage2KHR(android::base::BumpPool * pool,VkCommandBuffer boxed_commandBuffer,const VkCopyImageInfo2KHR * pCopyImageInfo)4285     void on_vkCmdCopyImage2KHR(android::base::BumpPool* pool,
4286                            VkCommandBuffer boxed_commandBuffer,
4287                            const VkCopyImageInfo2KHR* pCopyImageInfo) {
4288         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
4289         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
4290 
4291         std::lock_guard<std::recursive_mutex> lock(mLock);
4292         auto* srcImg = android::base::find(mImageInfo, pCopyImageInfo->srcImage);
4293         auto* dstImg = android::base::find(mImageInfo, pCopyImageInfo->dstImage);
4294         if (!srcImg || !dstImg) return;
4295 
4296         VkDevice device = srcImg->cmpInfo.device();
4297         auto* deviceInfo = android::base::find(mDeviceInfo, device);
4298         if (!deviceInfo) return;
4299 
4300         bool needEmulatedSrc = deviceInfo->needEmulatedDecompression(srcImg->cmpInfo);
4301         bool needEmulatedDst = deviceInfo->needEmulatedDecompression(dstImg->cmpInfo);
4302         if (!needEmulatedSrc && !needEmulatedDst) {
4303             vk->vkCmdCopyImage2KHR(commandBuffer, pCopyImageInfo);
4304             return;
4305         }
4306         VkImage srcImageMip = pCopyImageInfo->srcImage;
4307         VkImage dstImageMip = pCopyImageInfo->dstImage;
4308         for (uint32_t r = 0; r < pCopyImageInfo->regionCount; r++) {
4309             if (needEmulatedSrc) {
4310                 srcImageMip = srcImg->cmpInfo.compressedMipmap(pCopyImageInfo->pRegions[r].srcSubresource.mipLevel);
4311             }
4312             if (needEmulatedDst) {
4313                 dstImageMip = dstImg->cmpInfo.compressedMipmap(pCopyImageInfo->pRegions[r].dstSubresource.mipLevel);
4314             }
4315 
4316             VkCopyImageInfo2KHR inf2 = *pCopyImageInfo;
4317             inf2.regionCount = 1;
4318             inf2.srcImage = srcImageMip;
4319             inf2.dstImage = dstImageMip;
4320 
4321             VkImageCopy2KHR region = CompressedImageInfo::getCompressedMipmapsImageCopy(
4322                 pCopyImageInfo->pRegions[r], srcImg->cmpInfo, dstImg->cmpInfo, needEmulatedSrc, needEmulatedDst);
4323             inf2.pRegions = &region;
4324 
4325             vk->vkCmdCopyImage2KHR(commandBuffer, &inf2);
4326         }
4327     }
4328 
on_vkCmdCopyImageToBuffer2KHR(android::base::BumpPool * pool,VkCommandBuffer boxed_commandBuffer,const VkCopyImageToBufferInfo2KHR * pCopyImageToBufferInfo)4329     void on_vkCmdCopyImageToBuffer2KHR(android::base::BumpPool* pool,
4330                                    VkCommandBuffer boxed_commandBuffer,
4331                                    const VkCopyImageToBufferInfo2KHR* pCopyImageToBufferInfo) {
4332         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
4333         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
4334 
4335         std::lock_guard<std::recursive_mutex> lock(mLock);
4336         auto* imageInfo = android::base::find(mImageInfo, pCopyImageToBufferInfo->srcImage);
4337         auto* bufferInfo = android::base::find(mBufferInfo, pCopyImageToBufferInfo->dstBuffer);
4338         if (!imageInfo || !bufferInfo) return;
4339         auto* deviceInfo = android::base::find(mDeviceInfo, bufferInfo->device);
4340         if (!deviceInfo) return;
4341         CompressedImageInfo& cmpInfo = imageInfo->cmpInfo;
4342         if (!deviceInfo->needEmulatedDecompression(cmpInfo)) {
4343             vk->vkCmdCopyImageToBuffer2KHR(commandBuffer, pCopyImageToBufferInfo);
4344             return;
4345         }
4346         for (uint32_t r = 0; r < pCopyImageToBufferInfo->regionCount; r++) {
4347             uint32_t mipLevel = pCopyImageToBufferInfo->pRegions[r].imageSubresource.mipLevel;
4348             VkBufferImageCopy2KHR region = cmpInfo.getBufferImageCopy(pCopyImageToBufferInfo->pRegions[r]);
4349             VkCopyImageToBufferInfo2KHR inf = *pCopyImageToBufferInfo;
4350             inf.regionCount = 1;
4351             inf.pRegions = &region;
4352             inf.srcImage = cmpInfo.compressedMipmap(mipLevel);
4353 
4354             vk->vkCmdCopyImageToBuffer2KHR(commandBuffer, &inf);
4355         }
4356     }
4357 
on_vkGetImageMemoryRequirements(android::base::BumpPool * pool,VkDevice boxed_device,VkImage image,VkMemoryRequirements * pMemoryRequirements)4358     void on_vkGetImageMemoryRequirements(android::base::BumpPool* pool, VkDevice boxed_device,
4359                                          VkImage image, VkMemoryRequirements* pMemoryRequirements) {
4360         auto device = unbox_VkDevice(boxed_device);
4361         auto vk = dispatch_VkDevice(boxed_device);
4362         vk->vkGetImageMemoryRequirements(device, image, pMemoryRequirements);
4363         std::lock_guard<std::recursive_mutex> lock(mLock);
4364         updateImageMemorySizeLocked(device, image, pMemoryRequirements);
4365 
4366         auto* physicalDevice = android::base::find(mDeviceToPhysicalDevice, device);
4367         if (!physicalDevice) {
4368             ERR("Failed to find physical device for device:%p", device);
4369             return;
4370         }
4371 
4372         auto* physicalDeviceInfo = android::base::find(mPhysdevInfo, *physicalDevice);
4373         if (!physicalDeviceInfo) {
4374             ERR("Failed to find physical device info for physical device:%p", *physicalDevice);
4375             return;
4376         }
4377 
4378         auto& physicalDeviceMemHelper = physicalDeviceInfo->memoryPropertiesHelper;
4379         physicalDeviceMemHelper->transformToGuestMemoryRequirements(pMemoryRequirements);
4380     }
4381 
on_vkGetImageMemoryRequirements2(android::base::BumpPool * pool,VkDevice boxed_device,const VkImageMemoryRequirementsInfo2 * pInfo,VkMemoryRequirements2 * pMemoryRequirements)4382     void on_vkGetImageMemoryRequirements2(android::base::BumpPool* pool, VkDevice boxed_device,
4383                                           const VkImageMemoryRequirementsInfo2* pInfo,
4384                                           VkMemoryRequirements2* pMemoryRequirements) {
4385         auto device = unbox_VkDevice(boxed_device);
4386         auto vk = dispatch_VkDevice(boxed_device);
4387 
4388         std::lock_guard<std::recursive_mutex> lock(mLock);
4389 
4390         auto* physicalDevice = android::base::find(mDeviceToPhysicalDevice, device);
4391         if (!physicalDevice) {
4392             ERR("Failed to find physical device for device:%p", device);
4393             return;
4394         }
4395 
4396         auto* physicalDeviceInfo = android::base::find(mPhysdevInfo, *physicalDevice);
4397         if (!physicalDeviceInfo) {
4398             ERR("Failed to find physical device info for physical device:%p", *physicalDevice);
4399             return;
4400         }
4401 
4402         if ((physicalDeviceInfo->props.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
4403             vk->vkGetImageMemoryRequirements2) {
4404             vk->vkGetImageMemoryRequirements2(device, pInfo, pMemoryRequirements);
4405         } else if (hasDeviceExtension(device, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME)) {
4406             vk->vkGetImageMemoryRequirements2KHR(device, pInfo, pMemoryRequirements);
4407         } else {
4408             if (pInfo->pNext) {
4409                 ERR("Warning: trying to use extension struct in VkMemoryRequirements2 without "
4410                     "having enabled the extension!");
4411             }
4412 
4413             vk->vkGetImageMemoryRequirements(device, pInfo->image,
4414                                              &pMemoryRequirements->memoryRequirements);
4415         }
4416 
4417         updateImageMemorySizeLocked(device, pInfo->image, &pMemoryRequirements->memoryRequirements);
4418 
4419         auto& physicalDeviceMemHelper = physicalDeviceInfo->memoryPropertiesHelper;
4420         physicalDeviceMemHelper->transformToGuestMemoryRequirements(
4421             &pMemoryRequirements->memoryRequirements);
4422     }
4423 
on_vkGetBufferMemoryRequirements(android::base::BumpPool * pool,VkDevice boxed_device,VkBuffer buffer,VkMemoryRequirements * pMemoryRequirements)4424     void on_vkGetBufferMemoryRequirements(android::base::BumpPool* pool, VkDevice boxed_device,
4425                                           VkBuffer buffer,
4426                                           VkMemoryRequirements* pMemoryRequirements) {
4427         auto device = unbox_VkDevice(boxed_device);
4428         auto vk = dispatch_VkDevice(boxed_device);
4429         vk->vkGetBufferMemoryRequirements(device, buffer, pMemoryRequirements);
4430 
4431         std::lock_guard<std::recursive_mutex> lock(mLock);
4432 
4433         auto* physicalDevice = android::base::find(mDeviceToPhysicalDevice, device);
4434         if (!physicalDevice) {
4435             GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
4436                 << "No physical device available for " << device;
4437         }
4438 
4439         auto* physicalDeviceInfo = android::base::find(mPhysdevInfo, *physicalDevice);
4440         if (!physicalDeviceInfo) {
4441             GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
4442                 << "No physical device info available for " << *physicalDevice;
4443         }
4444 
4445         auto& physicalDeviceMemHelper = physicalDeviceInfo->memoryPropertiesHelper;
4446         physicalDeviceMemHelper->transformToGuestMemoryRequirements(pMemoryRequirements);
4447     }
4448 
on_vkGetBufferMemoryRequirements2(android::base::BumpPool * pool,VkDevice boxed_device,const VkBufferMemoryRequirementsInfo2 * pInfo,VkMemoryRequirements2 * pMemoryRequirements)4449     void on_vkGetBufferMemoryRequirements2(android::base::BumpPool* pool, VkDevice boxed_device,
4450                                            const VkBufferMemoryRequirementsInfo2* pInfo,
4451                                            VkMemoryRequirements2* pMemoryRequirements) {
4452         auto device = unbox_VkDevice(boxed_device);
4453         auto vk = dispatch_VkDevice(boxed_device);
4454 
4455         std::lock_guard<std::recursive_mutex> lock(mLock);
4456 
4457         auto* physicalDevice = android::base::find(mDeviceToPhysicalDevice, device);
4458         if (!physicalDevice) {
4459             GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
4460                 << "No physical device available for " << device;
4461         }
4462 
4463         auto* physicalDeviceInfo = android::base::find(mPhysdevInfo, *physicalDevice);
4464         if (!physicalDeviceInfo) {
4465             GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
4466                 << "No physical device info available for " << *physicalDevice;
4467         }
4468 
4469         if ((physicalDeviceInfo->props.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
4470             vk->vkGetBufferMemoryRequirements2) {
4471             vk->vkGetBufferMemoryRequirements2(device, pInfo, pMemoryRequirements);
4472         } else if (hasDeviceExtension(device, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME)) {
4473             vk->vkGetBufferMemoryRequirements2KHR(device, pInfo, pMemoryRequirements);
4474         } else {
4475             if (pInfo->pNext) {
4476                 ERR("Warning: trying to use extension struct in VkMemoryRequirements2 without "
4477                     "having enabled the extension!");
4478             }
4479 
4480             vk->vkGetBufferMemoryRequirements(device, pInfo->buffer,
4481                                               &pMemoryRequirements->memoryRequirements);
4482         }
4483 
4484         auto& physicalDeviceMemHelper = physicalDeviceInfo->memoryPropertiesHelper;
4485         physicalDeviceMemHelper->transformToGuestMemoryRequirements(
4486             &pMemoryRequirements->memoryRequirements);
4487     }
4488 
on_vkCmdCopyBufferToImage(android::base::BumpPool * pool,VkCommandBuffer boxed_commandBuffer,VkBuffer srcBuffer,VkImage dstImage,VkImageLayout dstImageLayout,uint32_t regionCount,const VkBufferImageCopy * pRegions,const VkDecoderContext & context)4489     void on_vkCmdCopyBufferToImage(android::base::BumpPool* pool,
4490                                    VkCommandBuffer boxed_commandBuffer, VkBuffer srcBuffer,
4491                                    VkImage dstImage, VkImageLayout dstImageLayout,
4492                                    uint32_t regionCount, const VkBufferImageCopy* pRegions,
4493                                    const VkDecoderContext& context) {
4494         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
4495         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
4496 
4497         std::lock_guard<std::recursive_mutex> lock(mLock);
4498         auto* imageInfo = android::base::find(mImageInfo, dstImage);
4499         if (!imageInfo) return;
4500         auto* bufferInfo = android::base::find(mBufferInfo, srcBuffer);
4501         if (!bufferInfo) {
4502             return;
4503         }
4504         VkDevice device = bufferInfo->device;
4505         auto* deviceInfo = android::base::find(mDeviceInfo, device);
4506         if (!deviceInfo) {
4507             return;
4508         }
4509         if (!deviceInfo->needEmulatedDecompression(imageInfo->cmpInfo)) {
4510             vk->vkCmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout,
4511                                        regionCount, pRegions);
4512             return;
4513         }
4514         auto* cmdBufferInfo = android::base::find(mCommandBufferInfo, commandBuffer);
4515         if (!cmdBufferInfo) {
4516             return;
4517         }
4518         CompressedImageInfo& cmpInfo = imageInfo->cmpInfo;
4519 
4520         for (uint32_t r = 0; r < regionCount; r++) {
4521             uint32_t mipLevel = pRegions[r].imageSubresource.mipLevel;
4522             VkBufferImageCopy region = cmpInfo.getBufferImageCopy(pRegions[r]);
4523             vk->vkCmdCopyBufferToImage(commandBuffer, srcBuffer, cmpInfo.compressedMipmap(mipLevel),
4524                                        dstImageLayout, 1, &region);
4525         }
4526 
4527         if (cmpInfo.canDecompressOnCpu()) {
4528             // Get a pointer to the compressed image memory
4529             const MemoryInfo* memoryInfo = android::base::find(mMemoryInfo, bufferInfo->memory);
4530             if (!memoryInfo) {
4531                 WARN("ASTC CPU decompression: couldn't find mapped memory info");
4532                 return;
4533             }
4534             if (!memoryInfo->ptr) {
4535                 WARN("ASTC CPU decompression: VkBuffer memory isn't host-visible");
4536                 return;
4537             }
4538             uint8_t* astcData = (uint8_t*)(memoryInfo->ptr) + bufferInfo->memoryOffset;
4539             cmpInfo.decompressOnCpu(commandBuffer, astcData, bufferInfo->size, dstImage,
4540                                     dstImageLayout, regionCount, pRegions, context);
4541         }
4542     }
4543 
on_vkCmdCopyBufferToImage2(android::base::BumpPool * pool,VkCommandBuffer boxed_commandBuffer,const VkCopyBufferToImageInfo2 * pCopyBufferToImageInfo,const VkDecoderContext & context)4544     void on_vkCmdCopyBufferToImage2(android::base::BumpPool* pool,
4545                                     VkCommandBuffer boxed_commandBuffer,
4546                                     const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo,
4547                                     const VkDecoderContext& context) {
4548         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
4549         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
4550 
4551         std::lock_guard<std::recursive_mutex> lock(mLock);
4552         auto* imageInfo = android::base::find(mImageInfo, pCopyBufferToImageInfo->dstImage);
4553         if (!imageInfo) return;
4554         auto* bufferInfo = android::base::find(mBufferInfo, pCopyBufferToImageInfo->srcBuffer);
4555         if (!bufferInfo) {
4556             return;
4557         }
4558         VkDevice device = bufferInfo->device;
4559         auto* deviceInfo = android::base::find(mDeviceInfo, device);
4560         if (!deviceInfo) {
4561             return;
4562         }
4563         if (!deviceInfo->needEmulatedDecompression(imageInfo->cmpInfo)) {
4564             vk->vkCmdCopyBufferToImage2(commandBuffer, pCopyBufferToImageInfo);
4565             return;
4566         }
4567         auto* cmdBufferInfo = android::base::find(mCommandBufferInfo, commandBuffer);
4568         if (!cmdBufferInfo) {
4569             return;
4570         }
4571         CompressedImageInfo& cmpInfo = imageInfo->cmpInfo;
4572 
4573         for (uint32_t r = 0; r < pCopyBufferToImageInfo->regionCount; r++) {
4574             VkCopyBufferToImageInfo2 inf;
4575             uint32_t mipLevel = pCopyBufferToImageInfo->pRegions[r].imageSubresource.mipLevel;
4576             inf.dstImage = cmpInfo.compressedMipmap(mipLevel);
4577             VkBufferImageCopy2 region = cmpInfo.getBufferImageCopy(pCopyBufferToImageInfo->pRegions[r]);
4578             inf.regionCount = 1;
4579             inf.pRegions = &region;
4580 
4581             vk->vkCmdCopyBufferToImage2(commandBuffer, &inf);
4582         }
4583 
4584         if (cmpInfo.canDecompressOnCpu()) {
4585             // Get a pointer to the compressed image memory
4586             const MemoryInfo* memoryInfo = android::base::find(mMemoryInfo, bufferInfo->memory);
4587             if (!memoryInfo) {
4588                 WARN("ASTC CPU decompression: couldn't find mapped memory info");
4589                 return;
4590             }
4591             if (!memoryInfo->ptr) {
4592                 WARN("ASTC CPU decompression: VkBuffer memory isn't host-visible");
4593                 return;
4594             }
4595             uint8_t* astcData = (uint8_t*)(memoryInfo->ptr) + bufferInfo->memoryOffset;
4596 
4597             cmpInfo.decompressOnCpu(commandBuffer, astcData, bufferInfo->size, pCopyBufferToImageInfo, context);
4598         }
4599     }
4600 
on_vkCmdCopyBufferToImage2KHR(android::base::BumpPool * pool,VkCommandBuffer boxed_commandBuffer,const VkCopyBufferToImageInfo2KHR * pCopyBufferToImageInfo,const VkDecoderContext & context)4601     void on_vkCmdCopyBufferToImage2KHR(android::base::BumpPool* pool,
4602                                     VkCommandBuffer boxed_commandBuffer,
4603                                     const VkCopyBufferToImageInfo2KHR* pCopyBufferToImageInfo,
4604                                     const VkDecoderContext& context) {
4605         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
4606         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
4607 
4608         std::lock_guard<std::recursive_mutex> lock(mLock);
4609         auto* imageInfo = android::base::find(mImageInfo, pCopyBufferToImageInfo->dstImage);
4610         if (!imageInfo) return;
4611         auto* bufferInfo = android::base::find(mBufferInfo, pCopyBufferToImageInfo->srcBuffer);
4612         if (!bufferInfo) {
4613             return;
4614         }
4615         VkDevice device = bufferInfo->device;
4616         auto* deviceInfo = android::base::find(mDeviceInfo, device);
4617         if (!deviceInfo) {
4618             return;
4619         }
4620         if (!deviceInfo->needEmulatedDecompression(imageInfo->cmpInfo)) {
4621             vk->vkCmdCopyBufferToImage2KHR(commandBuffer, pCopyBufferToImageInfo);
4622             return;
4623         }
4624         auto* cmdBufferInfo = android::base::find(mCommandBufferInfo, commandBuffer);
4625         if (!cmdBufferInfo) {
4626             return;
4627         }
4628         CompressedImageInfo& cmpInfo = imageInfo->cmpInfo;
4629 
4630         for (uint32_t r = 0; r < pCopyBufferToImageInfo->regionCount; r++) {
4631             VkCopyBufferToImageInfo2KHR inf;
4632             uint32_t mipLevel = pCopyBufferToImageInfo->pRegions[r].imageSubresource.mipLevel;
4633             inf.dstImage = cmpInfo.compressedMipmap(mipLevel);
4634             VkBufferImageCopy2KHR region = cmpInfo.getBufferImageCopy(pCopyBufferToImageInfo->pRegions[r]);
4635             inf.regionCount = 1;
4636             inf.pRegions = &region;
4637 
4638             vk->vkCmdCopyBufferToImage2KHR(commandBuffer, &inf);
4639         }
4640 
4641         if (cmpInfo.canDecompressOnCpu()) {
4642             // Get a pointer to the compressed image memory
4643             const MemoryInfo* memoryInfo = android::base::find(mMemoryInfo, bufferInfo->memory);
4644             if (!memoryInfo) {
4645                 WARN("ASTC CPU decompression: couldn't find mapped memory info");
4646                 return;
4647             }
4648             if (!memoryInfo->ptr) {
4649                 WARN("ASTC CPU decompression: VkBuffer memory isn't host-visible");
4650                 return;
4651             }
4652             uint8_t* astcData = (uint8_t*)(memoryInfo->ptr) + bufferInfo->memoryOffset;
4653 
4654             cmpInfo.decompressOnCpu(commandBuffer, astcData, bufferInfo->size, pCopyBufferToImageInfo, context);
4655         }
4656     }
4657 
convertQueueFamilyForeignToExternal(uint32_t * queueFamilyIndexPtr)4658     inline void convertQueueFamilyForeignToExternal(uint32_t* queueFamilyIndexPtr) {
4659         if (*queueFamilyIndexPtr == VK_QUEUE_FAMILY_FOREIGN_EXT) {
4660             *queueFamilyIndexPtr = VK_QUEUE_FAMILY_EXTERNAL;
4661         }
4662     }
4663 
convertQueueFamilyForeignToExternal_VkBufferMemoryBarrier(VkBufferMemoryBarrier * barrier)4664     inline void convertQueueFamilyForeignToExternal_VkBufferMemoryBarrier(
4665         VkBufferMemoryBarrier* barrier) {
4666         convertQueueFamilyForeignToExternal(&barrier->srcQueueFamilyIndex);
4667         convertQueueFamilyForeignToExternal(&barrier->dstQueueFamilyIndex);
4668     }
4669 
convertQueueFamilyForeignToExternal_VkImageMemoryBarrier(VkImageMemoryBarrier * barrier)4670     inline void convertQueueFamilyForeignToExternal_VkImageMemoryBarrier(
4671         VkImageMemoryBarrier* barrier) {
4672         convertQueueFamilyForeignToExternal(&barrier->srcQueueFamilyIndex);
4673         convertQueueFamilyForeignToExternal(&barrier->dstQueueFamilyIndex);
4674     }
4675 
    // Field accessors overloaded for both VkImageMemoryBarrier and
    // VkImageMemoryBarrier2, so that processImageMemoryBarrier() below can be
    // written once as a template over either barrier type.
    inline VkImage getIMBImage(const VkImageMemoryBarrier& imb) { return imb.image; }
    inline VkImage getIMBImage(const VkImageMemoryBarrier2& imb) { return imb.image; }

    inline VkImageLayout getIMBNewLayout(const VkImageMemoryBarrier& imb) { return imb.newLayout; }
    inline VkImageLayout getIMBNewLayout(const VkImageMemoryBarrier2& imb) { return imb.newLayout; }

    inline uint32_t getIMBSrcQueueFamilyIndex(const VkImageMemoryBarrier& imb) {
        return imb.srcQueueFamilyIndex;
    }
    inline uint32_t getIMBSrcQueueFamilyIndex(const VkImageMemoryBarrier2& imb) {
        return imb.srcQueueFamilyIndex;
    }
    inline uint32_t getIMBDstQueueFamilyIndex(const VkImageMemoryBarrier& imb) {
        return imb.dstQueueFamilyIndex;
    }
    inline uint32_t getIMBDstQueueFamilyIndex(const VkImageMemoryBarrier2& imb) {
        return imb.dstQueueFamilyIndex;
    }
4694 
4695     template <typename VkImageMemoryBarrierType>
processImageMemoryBarrier(VkCommandBuffer commandBuffer,uint32_t imageMemoryBarrierCount,const VkImageMemoryBarrierType * pImageMemoryBarriers)4696     void processImageMemoryBarrier(VkCommandBuffer commandBuffer, uint32_t imageMemoryBarrierCount,
4697                                    const VkImageMemoryBarrierType* pImageMemoryBarriers) {
4698         std::lock_guard<std::recursive_mutex> lock(mLock);
4699         CommandBufferInfo* cmdBufferInfo = android::base::find(mCommandBufferInfo, commandBuffer);
4700         if (!cmdBufferInfo) return;
4701 
4702         // TODO: update image layout in ImageInfo
4703         for (uint32_t i = 0; i < imageMemoryBarrierCount; i++) {
4704             auto* imageInfo = android::base::find(mImageInfo, getIMBImage(pImageMemoryBarriers[i]));
4705             if (!imageInfo) {
4706                 continue;
4707             }
4708             cmdBufferInfo->imageLayouts[getIMBImage(pImageMemoryBarriers[i])] =
4709                 getIMBNewLayout(pImageMemoryBarriers[i]);
4710             if (!imageInfo->boundColorBuffer.has_value()) {
4711                 continue;
4712             }
4713             HandleType cb = imageInfo->boundColorBuffer.value();
4714             if (getIMBSrcQueueFamilyIndex(pImageMemoryBarriers[i]) == VK_QUEUE_FAMILY_EXTERNAL) {
4715                 cmdBufferInfo->acquiredColorBuffers.insert(cb);
4716             }
4717             if (getIMBDstQueueFamilyIndex(pImageMemoryBarriers[i]) == VK_QUEUE_FAMILY_EXTERNAL) {
4718                 cmdBufferInfo->releasedColorBuffers.insert(cb);
4719             }
4720             cmdBufferInfo->cbLayouts[cb] = getIMBNewLayout(pImageMemoryBarriers[i]);
4721             // Insert unconditionally to this list, regardless of whether or not
4722             // there is a queue family ownership transfer
4723             cmdBufferInfo->imageBarrierColorBuffers.insert(cb);
4724         }
4725     }
4726 
    // Handles vkCmdPipelineBarrier. Rewrites VK_QUEUE_FAMILY_FOREIGN_EXT queue
    // family indices to VK_QUEUE_FAMILY_EXTERNAL (mutating the guest-provided
    // barrier arrays in place, via a cast that drops const), records color
    // buffer acquire/release and layout state, and — for images using emulated
    // compressed formats — performs GPU decompression before forwarding the
    // barrier to the host driver.
    void on_vkCmdPipelineBarrier(android::base::BumpPool* pool, VkCommandBuffer boxed_commandBuffer,
                                 VkPipelineStageFlags srcStageMask,
                                 VkPipelineStageFlags dstStageMask,
                                 VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount,
                                 const VkMemoryBarrier* pMemoryBarriers,
                                 uint32_t bufferMemoryBarrierCount,
                                 const VkBufferMemoryBarrier* pBufferMemoryBarriers,
                                 uint32_t imageMemoryBarrierCount,
                                 const VkImageMemoryBarrier* pImageMemoryBarriers) {
        auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
        auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);

        // In-place FOREIGN -> EXTERNAL rewrite on both barrier arrays.
        for (uint32_t i = 0; i < bufferMemoryBarrierCount; ++i) {
            convertQueueFamilyForeignToExternal_VkBufferMemoryBarrier(
                ((VkBufferMemoryBarrier*)pBufferMemoryBarriers) + i);
        }

        for (uint32_t i = 0; i < imageMemoryBarrierCount; ++i) {
            convertQueueFamilyForeignToExternal_VkImageMemoryBarrier(
                ((VkImageMemoryBarrier*)pImageMemoryBarriers) + i);
        }

        // Fast path: no image barriers means no layout/color-buffer tracking
        // and no decompression work — forward directly.
        if (imageMemoryBarrierCount == 0) {
            vk->vkCmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags,
                                     memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
                                     pBufferMemoryBarriers, imageMemoryBarrierCount,
                                     pImageMemoryBarriers);
            return;
        }
        std::lock_guard<std::recursive_mutex> lock(mLock);
        CommandBufferInfo* cmdBufferInfo = android::base::find(mCommandBufferInfo, commandBuffer);
        if (!cmdBufferInfo) return;

        DeviceInfo* deviceInfo = android::base::find(mDeviceInfo, cmdBufferInfo->device);
        if (!deviceInfo) return;

        // Track layout transitions and color buffer ownership transfers.
        processImageMemoryBarrier(commandBuffer, imageMemoryBarrierCount, pImageMemoryBarriers);

        if (!deviceInfo->emulateTextureEtc2 && !deviceInfo->emulateTextureAstc) {
            // No compressed-texture emulation on this device: forward as-is.
            vk->vkCmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags,
                                     memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
                                     pBufferMemoryBarriers, imageMemoryBarrierCount,
                                     pImageMemoryBarriers);
            return;
        }

        // This is a compressed image. Handle decompression before calling vkCmdPipelineBarrier

        // Barriers for non-emulated images pass through unchanged; barriers
        // for emulated images are replaced by whatever decompressIfNeeded
        // appends to imageBarriers.
        std::vector<VkImageMemoryBarrier> imageBarriers;
        bool needRebind = false;

        for (uint32_t i = 0; i < imageMemoryBarrierCount; i++) {
            const VkImageMemoryBarrier& srcBarrier = pImageMemoryBarriers[i];
            auto* imageInfo = android::base::find(mImageInfo, srcBarrier.image);

            // If the image doesn't need GPU decompression, nothing to do.
            if (!imageInfo || !deviceInfo->needGpuDecompression(imageInfo->cmpInfo)) {
                imageBarriers.push_back(srcBarrier);
                continue;
            }

            // Otherwise, decompress the image, if we're going to read from it.
            needRebind |= imageInfo->cmpInfo.decompressIfNeeded(
                vk, commandBuffer, srcStageMask, dstStageMask, srcBarrier, imageBarriers);
        }

        // Decompression dispatches its own compute pipeline, clobbering the
        // command buffer's compute bindings; restore the guest's state.
        if (needRebind && cmdBufferInfo->computePipeline) {
            // Recover pipeline bindings
            // TODO(gregschlom): instead of doing this here again and again after each image we
            // decompress, could we do it once before calling vkCmdDispatch?
            vk->vkCmdBindPipeline(commandBuffer, VK_PIPELINE_BIND_POINT_COMPUTE,
                                  cmdBufferInfo->computePipeline);
            if (!cmdBufferInfo->currentDescriptorSets.empty()) {
                vk->vkCmdBindDescriptorSets(
                    commandBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, cmdBufferInfo->descriptorLayout,
                    cmdBufferInfo->firstSet, cmdBufferInfo->currentDescriptorSets.size(),
                    cmdBufferInfo->currentDescriptorSets.data(),
                    cmdBufferInfo->dynamicOffsets.size(), cmdBufferInfo->dynamicOffsets.data());
            }
        }

        // Apply the remaining barriers
        if (memoryBarrierCount || bufferMemoryBarrierCount || !imageBarriers.empty()) {
            vk->vkCmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags,
                                     memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
                                     pBufferMemoryBarriers, imageBarriers.size(),
                                     imageBarriers.data());
        }
    }
4816 
on_vkCmdPipelineBarrier2(android::base::BumpPool * pool,VkCommandBuffer boxed_commandBuffer,const VkDependencyInfo * pDependencyInfo)4817     void on_vkCmdPipelineBarrier2(android::base::BumpPool* pool,
4818                                   VkCommandBuffer boxed_commandBuffer,
4819                                   const VkDependencyInfo* pDependencyInfo) {
4820         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
4821         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
4822 
4823         for (uint32_t i = 0; i < pDependencyInfo->bufferMemoryBarrierCount; ++i) {
4824             convertQueueFamilyForeignToExternal_VkBufferMemoryBarrier(
4825                 ((VkBufferMemoryBarrier*)pDependencyInfo->pBufferMemoryBarriers) + i);
4826         }
4827 
4828         for (uint32_t i = 0; i < pDependencyInfo->imageMemoryBarrierCount; ++i) {
4829             convertQueueFamilyForeignToExternal_VkImageMemoryBarrier(
4830                 ((VkImageMemoryBarrier*)pDependencyInfo->pImageMemoryBarriers) + i);
4831         }
4832 
4833         std::lock_guard<std::recursive_mutex> lock(mLock);
4834         CommandBufferInfo* cmdBufferInfo = android::base::find(mCommandBufferInfo, commandBuffer);
4835         if (!cmdBufferInfo) return;
4836 
4837         DeviceInfo* deviceInfo = android::base::find(mDeviceInfo, cmdBufferInfo->device);
4838         if (!deviceInfo) return;
4839 
4840         processImageMemoryBarrier(commandBuffer, pDependencyInfo->imageMemoryBarrierCount,
4841                                   pDependencyInfo->pImageMemoryBarriers);
4842 
4843         // TODO: If this is a decompressed image, handle decompression before calling
4844         // VkCmdvkCmdPipelineBarrier2 i.e. match on_vkCmdPipelineBarrier implementation
4845         vk->vkCmdPipelineBarrier2(commandBuffer, pDependencyInfo);
4846     }
4847 
    // Maps the host-visible allocation backing |memory| into guest physical
    // address space at |physAddr|, page-aligning the host virtual address and
    // rounding the size up to whole pages. Replaces any mapping already
    // registered at that GPA. Returns false only when |memory| is unknown to
    // this decoder. Caller must hold mLock (per the "Locked" suffix).
    bool mapHostVisibleMemoryToGuestPhysicalAddressLocked(VulkanDispatch* vk, VkDevice device,
                                                          VkDeviceMemory memory,
                                                          uint64_t physAddr) {
        if (!m_emu->features.GlDirectMem.enabled &&
            !m_emu->features.VirtioGpuNext.enabled) {
            // NOTE(review): this guard is currently a no-op (the log below is
            // commented out and there is no early return), so direct mapping
            // proceeds even when neither feature is enabled — confirm intent.
            // INFO("%s: Tried to use direct mapping "
            // "while GlDirectMem is not enabled!");
        }

        auto* info = android::base::find(mMemoryInfo, memory);
        if (!info) return false;

        info->guestPhysAddr = physAddr;

        // Assumes 4 KiB pages (1 << 12) — TODO confirm for all host targets.
        constexpr size_t kPageBits = 12;
        constexpr size_t kPageSize = 1u << kPageBits;
        constexpr size_t kPageOffsetMask = kPageSize - 1;

        uintptr_t addr = reinterpret_cast<uintptr_t>(info->ptr);
        uintptr_t pageOffset = addr & kPageOffsetMask;

        // Round the HVA down to a page boundary and the size up so the mapped
        // window covers [ptr, ptr + size) in whole pages.
        info->pageAlignedHva = reinterpret_cast<void*>(addr - pageOffset);
        info->sizeToPage = ((info->size + pageOffset + kPageSize - 1) >> kPageBits) << kPageBits;

        if (mLogging) {
            INFO("%s: map: %p, %p -> [0x%llx 0x%llx]", __func__, info->ptr,
                    info->pageAlignedHva, (unsigned long long)info->guestPhysAddr,
                    (unsigned long long)info->guestPhysAddr + info->sizeToPage);
        }

        info->directMapped = true;
        uint64_t gpa = info->guestPhysAddr;
        void* hva = info->pageAlignedHva;
        size_t sizeToPage = info->sizeToPage;

        AutoLock occupiedGpasLock(mOccupiedGpasLock);

        // If something is already mapped at this GPA, tear it down first so
        // the VM never sees two user-backed RAM regions at the same address.
        auto* existingMemoryInfo = android::base::find(mOccupiedGpas, gpa);
        if (existingMemoryInfo) {
            INFO("%s: WARNING: already mapped gpa 0x%llx, replacing", __func__,
                    (unsigned long long)gpa);

            get_emugl_vm_operations().unmapUserBackedRam(existingMemoryInfo->gpa,
                                                         existingMemoryInfo->sizeToPage);

            mOccupiedGpas.erase(gpa);
        }

        get_emugl_vm_operations().mapUserBackedRam(gpa, hva, sizeToPage);

        if (mVerbosePrints) {
            INFO("VERBOSE:%s: registering gpa 0x%llx to mOccupiedGpas", __func__,
                    (unsigned long long)gpa);
        }

        mOccupiedGpas[gpa] = {
            vk, device, memory, gpa, sizeToPage,
        };

        if (!mUseOldMemoryCleanupPath) {
            // Unmap automatically when the guest address space device frees
            // this GPA; the callback may outlive this call, so it goes through
            // `this` rather than capturing locals.
            get_emugl_address_space_device_control_ops().register_deallocation_callback(
                this, gpa, [](void* thisPtr, uint64_t gpa) {
                    Impl* implPtr = (Impl*)thisPtr;
                    implPtr->unmapMemoryAtGpaIfExists(gpa);
                });
        }

        return true;
    }
4917 
4918     // Only call this from the address space device deallocation operation's
4919     // context, or it's possible that the guest/host view of which gpa's are
4920     // occupied goes out of sync.
unmapMemoryAtGpaIfExists(uint64_t gpa)4921     void unmapMemoryAtGpaIfExists(uint64_t gpa) {
4922         AutoLock lock(mOccupiedGpasLock);
4923 
4924         if (mVerbosePrints) {
4925             INFO("VERBOSE:%s: deallocation callback for gpa 0x%llx", __func__,
4926                     (unsigned long long)gpa);
4927         }
4928 
4929         auto* existingMemoryInfo = android::base::find(mOccupiedGpas, gpa);
4930         if (!existingMemoryInfo) return;
4931 
4932         get_emugl_vm_operations().unmapUserBackedRam(existingMemoryInfo->gpa,
4933                                                      existingMemoryInfo->sizeToPage);
4934 
4935         mOccupiedGpas.erase(gpa);
4936     }
4937 
on_vkAllocateMemory(android::base::BumpPool * pool,VkDevice boxed_device,const VkMemoryAllocateInfo * pAllocateInfo,const VkAllocationCallbacks * pAllocator,VkDeviceMemory * pMemory)4938     VkResult on_vkAllocateMemory(android::base::BumpPool* pool, VkDevice boxed_device,
4939                                  const VkMemoryAllocateInfo* pAllocateInfo,
4940                                  const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory) {
4941         auto device = unbox_VkDevice(boxed_device);
4942         auto vk = dispatch_VkDevice(boxed_device);
4943         auto* tInfo = RenderThreadInfoVk::get();
4944 
4945         if (!pAllocateInfo) return VK_ERROR_INITIALIZATION_FAILED;
4946 
4947         VkMemoryAllocateInfo localAllocInfo = vk_make_orphan_copy(*pAllocateInfo);
4948         vk_struct_chain_iterator structChainIter = vk_make_chain_iterator(&localAllocInfo);
4949 
4950         VkMemoryAllocateFlagsInfo allocFlagsInfo;
4951         const VkMemoryAllocateFlagsInfo* allocFlagsInfoPtr =
4952             vk_find_struct<VkMemoryAllocateFlagsInfo>(pAllocateInfo);
4953         if (allocFlagsInfoPtr) {
4954             allocFlagsInfo = *allocFlagsInfoPtr;
4955             vk_append_struct(&structChainIter, &allocFlagsInfo);
4956         }
4957 
4958         VkMemoryOpaqueCaptureAddressAllocateInfo opaqueCaptureAddressAllocInfo;
4959         const VkMemoryOpaqueCaptureAddressAllocateInfo* opaqueCaptureAddressAllocInfoPtr =
4960             vk_find_struct<VkMemoryOpaqueCaptureAddressAllocateInfo>(pAllocateInfo);
4961         if (opaqueCaptureAddressAllocInfoPtr) {
4962             opaqueCaptureAddressAllocInfo = *opaqueCaptureAddressAllocInfoPtr;
4963             vk_append_struct(&structChainIter, &opaqueCaptureAddressAllocInfo);
4964         }
4965 
4966         const VkMemoryDedicatedAllocateInfo* dedicatedAllocInfoPtr =
4967             vk_find_struct<VkMemoryDedicatedAllocateInfo>(pAllocateInfo);
4968         VkMemoryDedicatedAllocateInfo localDedicatedAllocInfo = {};
4969 
4970         if (dedicatedAllocInfoPtr) {
4971             localDedicatedAllocInfo = vk_make_orphan_copy(*dedicatedAllocInfoPtr);
4972         }
4973         if (!usingDirectMapping()) {
4974             // We copy bytes 1 page at a time from the guest to the host
4975             // if we are not using direct mapping. This means we can end up
4976             // writing over memory we did not intend.
4977             // E.g. swiftshader just allocated with malloc, which can have
4978             // data stored between allocations.
4979         #ifdef PAGE_SIZE
4980             localAllocInfo.allocationSize += static_cast<VkDeviceSize>(PAGE_SIZE);
4981             localAllocInfo.allocationSize &= ~static_cast<VkDeviceSize>(PAGE_SIZE - 1);
4982         #elif defined(_WIN32)
4983             localAllocInfo.allocationSize += static_cast<VkDeviceSize>(4096);
4984             localAllocInfo.allocationSize &= ~static_cast<VkDeviceSize>(4095);
4985         #else
4986             localAllocInfo.allocationSize += static_cast<VkDeviceSize>(getpagesize());
4987             localAllocInfo.allocationSize &= ~static_cast<VkDeviceSize>(getpagesize() - 1);
4988         #endif
4989         }
4990         // Note for AHardwareBuffers, the Vulkan spec states:
4991         //
4992         //     Android hardware buffers have intrinsic width, height, format, and usage
4993         //     properties, so Vulkan images bound to memory imported from an Android
4994         //     hardware buffer must use dedicated allocations
4995         //
4996         // so any allocation requests with a VkImportAndroidHardwareBufferInfoANDROID
4997         // will necessarily have a VkMemoryDedicatedAllocateInfo. However, the host
4998         // may or may not actually use a dedicated allocations during Buffer/ColorBuffer
4999         // setup. Below checks if the underlying Buffer/ColorBuffer backing memory was
5000         // originally created with a dedicated allocation.
5001         bool shouldUseDedicatedAllocInfo = dedicatedAllocInfoPtr != nullptr;
5002 
5003         const VkImportColorBufferGOOGLE* importCbInfoPtr =
5004             vk_find_struct<VkImportColorBufferGOOGLE>(pAllocateInfo);
5005         const VkImportBufferGOOGLE* importBufferInfoPtr =
5006             vk_find_struct<VkImportBufferGOOGLE>(pAllocateInfo);
5007 
5008         const VkCreateBlobGOOGLE* createBlobInfoPtr =
5009             vk_find_struct<VkCreateBlobGOOGLE>(pAllocateInfo);
5010 
5011 #ifdef _WIN32
5012         VkImportMemoryWin32HandleInfoKHR importInfo{
5013             VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR,
5014             0,
5015             VK_EXT_MEMORY_HANDLE_TYPE_BIT,
5016             VK_EXT_MEMORY_HANDLE_INVALID,
5017             L"",
5018         };
5019 #elif defined(__QNX__)
5020         VkImportScreenBufferInfoQNX importInfo{
5021             VK_STRUCTURE_TYPE_IMPORT_SCREEN_BUFFER_INFO_QNX,
5022             0,
5023             VK_EXT_MEMORY_HANDLE_INVALID,
5024         };
5025 #else
5026         VkImportMemoryFdInfoKHR importInfo{
5027             VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
5028             0,
5029             VK_EXT_MEMORY_HANDLE_TYPE_BIT,
5030             VK_EXT_MEMORY_HANDLE_INVALID,
5031         };
5032 
5033 #if defined(__APPLE__)
5034         VkImportMemoryMetalHandleInfoEXT importInfoMetalHandle = {
5035             VK_STRUCTURE_TYPE_IMPORT_MEMORY_METAL_HANDLE_INFO_EXT,
5036             0,
5037             VK_EXTERNAL_MEMORY_HANDLE_TYPE_MTLHEAP_BIT_EXT,
5038             nullptr,
5039         };
5040 #endif
5041 
5042 #endif
5043 
5044         void* mappedPtr = nullptr;
5045         ManagedDescriptor externalMemoryHandle;
5046         if (importCbInfoPtr) {
5047             bool colorBufferMemoryUsesDedicatedAlloc = false;
5048             if (!getColorBufferAllocationInfo(importCbInfoPtr->colorBuffer,
5049                                               &localAllocInfo.allocationSize,
5050                                               &localAllocInfo.memoryTypeIndex,
5051                                               &colorBufferMemoryUsesDedicatedAlloc, &mappedPtr)) {
5052                 if (mSnapshotState != SnapshotState::Loading) {
5053                     GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
5054                         << "Failed to get allocation info for ColorBuffer:"
5055                         << importCbInfoPtr->colorBuffer;
5056                 }
5057                 // During snapshot load there could be invalidated references to
5058                 // color buffers.
5059                 // Here we just create a placeholder for it, as it is not suppoed
5060                 // to be used.
5061                 importCbInfoPtr = nullptr;
5062             } else {
5063                 shouldUseDedicatedAllocInfo &= colorBufferMemoryUsesDedicatedAlloc;
5064 
5065                 if (!m_emu->features.GuestVulkanOnly.enabled) {
5066                     m_emu->callbacks.invalidateColorBuffer(importCbInfoPtr->colorBuffer);
5067                 }
5068 
5069                 bool opaqueFd = true;
5070 
5071 #if defined(__APPLE__)
5072                 // Use metal object extension on MoltenVK mode for color buffer import,
5073                 // non-moltenVK path on MacOS will use FD handles
5074                 if (m_emu->instanceSupportsMoltenVK) {
5075 
5076                     extern VkImage getColorBufferVkImage(uint32_t colorBufferHandle);
5077                     if (dedicatedAllocInfoPtr == nullptr || localDedicatedAllocInfo.image == VK_NULL_HANDLE) {
5078                         // TODO(b/351765838): This should not happen, but somehow the guest
5079                         // is not providing us the necessary information for video rendering.
5080                         localDedicatedAllocInfo = {
5081                         .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
5082                         .pNext = nullptr,
5083                         .image = getColorBufferVkImage(importCbInfoPtr->colorBuffer),
5084                         .buffer = VK_NULL_HANDLE,
5085                         };
5086 
5087                         shouldUseDedicatedAllocInfo = true;
5088                     }
5089 
5090                     MTLResource_id cbExtMemoryHandle =
5091                         getColorBufferMetalMemoryHandle(importCbInfoPtr->colorBuffer);
5092 
5093                     if (cbExtMemoryHandle == nullptr) {
5094                         fprintf(stderr,
5095                                 "%s: VK_ERROR_OUT_OF_DEVICE_MEMORY: "
5096                                 "colorBuffer 0x%x does not have Vulkan external memory backing\n",
5097                                 __func__, importCbInfoPtr->colorBuffer);
5098                         return VK_ERROR_OUT_OF_DEVICE_MEMORY;
5099                     }
5100                     importInfoMetalHandle.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_MTLHEAP_BIT_EXT;
5101                     importInfoMetalHandle.handle = cbExtMemoryHandle;
5102 
5103                     vk_append_struct(&structChainIter, &importInfoMetalHandle);
5104                     opaqueFd = false;
5105                 }
5106 #endif
5107 
5108                 if (opaqueFd && m_emu->deviceInfo.supportsExternalMemoryImport) {
5109                     VK_EXT_MEMORY_HANDLE cbExtMemoryHandle =
5110                         getColorBufferExtMemoryHandle(importCbInfoPtr->colorBuffer);
5111 
5112                     if (cbExtMemoryHandle == VK_EXT_MEMORY_HANDLE_INVALID) {
5113                         fprintf(stderr,
5114                                 "%s: VK_ERROR_OUT_OF_DEVICE_MEMORY: "
5115                                 "colorBuffer 0x%x does not have Vulkan external memory backing\n",
5116                                 __func__, importCbInfoPtr->colorBuffer);
5117                         return VK_ERROR_OUT_OF_DEVICE_MEMORY;
5118                     }
5119 
5120 #if defined(__QNX__)
5121                     importInfo.buffer = cbExtMemoryHandle;
5122 #else
5123                     externalMemoryHandle = ManagedDescriptor(dupExternalMemory(cbExtMemoryHandle));
5124 
5125 #ifdef _WIN32
5126                     importInfo.handle =
5127                         externalMemoryHandle.get().value_or(static_cast<HANDLE>(NULL));
5128 #else
5129                     importInfo.fd = externalMemoryHandle.get().value_or(-1);
5130 #endif
5131 #endif
5132                     vk_append_struct(&structChainIter, &importInfo);
5133                 }
5134             }
5135         } else if (importBufferInfoPtr) {
5136             bool bufferMemoryUsesDedicatedAlloc = false;
5137             if (!getBufferAllocationInfo(
5138                     importBufferInfoPtr->buffer, &localAllocInfo.allocationSize,
5139                     &localAllocInfo.memoryTypeIndex, &bufferMemoryUsesDedicatedAlloc)) {
5140                 ERR("Failed to get Buffer:%d allocation info.", importBufferInfoPtr->buffer);
5141                 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
5142             }
5143 
5144             shouldUseDedicatedAllocInfo &= bufferMemoryUsesDedicatedAlloc;
5145 
5146             bool opaqueFd = true;
5147 #ifdef __APPLE__
5148             if (m_emu->instanceSupportsMoltenVK) {
5149                 MTLResource_id bufferMetalMemoryHandle =
5150                     getBufferMetalMemoryHandle(importBufferInfoPtr->buffer);
5151 
5152                 if (bufferMetalMemoryHandle == nullptr) {
5153                     fprintf(stderr,
5154                             "%s: VK_ERROR_OUT_OF_DEVICE_MEMORY: "
5155                             "buffer 0x%x does not have Vulkan external memory "
5156                             "backing\n",
5157                             __func__, importBufferInfoPtr->buffer);
5158                     return VK_ERROR_OUT_OF_DEVICE_MEMORY;
5159                 }
5160 
5161                 importInfoMetalHandle.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_MTLBUFFER_BIT_EXT;
5162                 importInfoMetalHandle.handle = bufferMetalMemoryHandle;
5163 
5164                 vk_append_struct(&structChainIter, &importInfoMetalHandle);
5165 
5166                 opaqueFd = false;
5167             }
5168 #endif
5169 
5170             if (opaqueFd && m_emu->deviceInfo.supportsExternalMemoryImport) {
5171                 uint32_t outStreamHandleType;
5172                 VK_EXT_MEMORY_HANDLE bufferExtMemoryHandle =
5173                     getBufferExtMemoryHandle(importBufferInfoPtr->buffer, &outStreamHandleType);
5174 
5175                 if (bufferExtMemoryHandle == VK_EXT_MEMORY_HANDLE_INVALID) {
5176                     fprintf(stderr,
5177                             "%s: VK_ERROR_OUT_OF_DEVICE_MEMORY: "
5178                             "buffer 0x%x does not have Vulkan external memory "
5179                             "backing\n",
5180                             __func__, importBufferInfoPtr->buffer);
5181                     return VK_ERROR_OUT_OF_DEVICE_MEMORY;
5182                 }
5183 
5184 #if defined(__QNX__)
5185                 importInfo.buffer = bufferExtMemoryHandle;
5186 #else
5187                 bufferExtMemoryHandle = dupExternalMemory(bufferExtMemoryHandle);
5188 
5189 #ifdef _WIN32
5190                 importInfo.handle = bufferExtMemoryHandle;
5191 #else
5192                 importInfo.fd = bufferExtMemoryHandle;
5193 #endif
5194 #endif
5195                 vk_append_struct(&structChainIter, &importInfo);
5196             }
5197         }
5198 
5199         uint32_t virtioGpuContextId = 0;
5200         VkMemoryPropertyFlags memoryPropertyFlags;
5201 
5202         // Map guest memory index to host memory index and lookup memory properties:
5203         {
5204             std::lock_guard<std::recursive_mutex> lock(mLock);
5205 
5206             auto* physicalDevice = android::base::find(mDeviceToPhysicalDevice, device);
5207             if (!physicalDevice) {
5208                 // User app gave an invalid VkDevice, but we don't really want to crash here.
5209                 // We should allow invalid apps.
5210                 return VK_ERROR_DEVICE_LOST;
5211             }
5212             auto* physicalDeviceInfo = android::base::find(mPhysdevInfo, *physicalDevice);
5213             if (!physicalDeviceInfo) {
5214                 GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
5215                     << "No physical device info available for " << *physicalDevice;
5216             }
5217 
5218             const auto hostMemoryInfoOpt =
5219                 physicalDeviceInfo->memoryPropertiesHelper
5220                     ->getHostMemoryInfoFromGuestMemoryTypeIndex(localAllocInfo.memoryTypeIndex);
5221             if (!hostMemoryInfoOpt) {
5222                 return VK_ERROR_INCOMPATIBLE_DRIVER;
5223             }
5224             const auto& hostMemoryInfo = *hostMemoryInfoOpt;
5225 
5226             localAllocInfo.memoryTypeIndex = hostMemoryInfo.index;
5227             memoryPropertyFlags = hostMemoryInfo.memoryType.propertyFlags;
5228 
5229             auto virtioGpuContextIdOpt = getContextIdForDeviceLocked(device);
5230             if (!virtioGpuContextIdOpt) {
5231                 ERR("VkDevice:%p missing context id for vkAllocateMemory().");
5232                 return VK_ERROR_DEVICE_LOST;
5233             }
5234             virtioGpuContextId = *virtioGpuContextIdOpt;
5235         }
5236 
5237         if (shouldUseDedicatedAllocInfo) {
5238             vk_append_struct(&structChainIter, &localDedicatedAllocInfo);
5239         }
5240 
5241         const bool hostVisible = memoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
5242 
5243         if (createBlobInfoPtr && createBlobInfoPtr->blobMem == STREAM_BLOB_MEM_GUEST &&
5244             (createBlobInfoPtr->blobFlags & STREAM_BLOB_FLAG_CREATE_GUEST_HANDLE)) {
5245             DescriptorType rawDescriptor;
5246             auto descriptorInfoOpt = ExternalObjectManager::get()->removeBlobDescriptorInfo(
5247                 virtioGpuContextId, createBlobInfoPtr->blobId);
5248             if (descriptorInfoOpt) {
5249                 auto rawDescriptorOpt = (*descriptorInfoOpt).descriptor.release();
5250                 if (rawDescriptorOpt) {
5251                     rawDescriptor = *rawDescriptorOpt;
5252                 } else {
5253                     ERR("Failed vkAllocateMemory: missing raw descriptor.");
5254                     return VK_ERROR_OUT_OF_DEVICE_MEMORY;
5255                 }
5256             } else {
5257                 ERR("Failed vkAllocateMemory: missing descriptor info.");
5258                 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
5259             }
5260 #if defined(__linux__)
5261             importInfo.fd = rawDescriptor;
5262 #endif
5263 
5264 #ifdef __linux__
5265             if (m_emu->deviceInfo.supportsDmaBuf &&
5266                 hasDeviceExtension(device, VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME)) {
5267                 importInfo.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
5268             }
5269 #endif
5270             vk_append_struct(&structChainIter, &importInfo);
5271         }
5272 
5273         const bool isImport = importCbInfoPtr || importBufferInfoPtr;
5274         const bool isExport = !isImport;
5275 
5276         std::optional<VkImportMemoryHostPointerInfoEXT> importHostInfo;
5277         std::optional<VkExportMemoryAllocateInfo> exportAllocateInfo;
5278 
5279         std::optional<SharedMemory> sharedMemory = std::nullopt;
5280         std::shared_ptr<PrivateMemory> privateMemory = {};
5281 
5282         if (isExport && hostVisible) {
5283             if (m_emu->features.SystemBlob.enabled) {
5284                 // Ensure size is page-aligned.
5285                 VkDeviceSize alignedSize = __ALIGN(localAllocInfo.allocationSize, kPageSizeforBlob);
5286                 if (alignedSize != localAllocInfo.allocationSize) {
5287                     ERR("Warning: Aligning allocation size from %llu to %llu",
5288                         static_cast<unsigned long long>(localAllocInfo.allocationSize),
5289                         static_cast<unsigned long long>(alignedSize));
5290                 }
5291                 localAllocInfo.allocationSize = alignedSize;
5292 
5293                 static std::atomic<uint64_t> uniqueShmemId = 0;
5294                 sharedMemory = SharedMemory("shared-memory-vk-" + std::to_string(uniqueShmemId++),
5295                                             localAllocInfo.allocationSize);
5296                 int ret = sharedMemory->create(0600);
5297                 if (ret) {
5298                     ERR("Failed to create system-blob host-visible memory, error: %d", ret);
5299                     return VK_ERROR_OUT_OF_HOST_MEMORY;
5300                 }
5301                 mappedPtr = sharedMemory->get();
5302                 int mappedPtrAlignment = reinterpret_cast<uintptr_t>(mappedPtr) % kPageSizeforBlob;
5303                 if (mappedPtrAlignment != 0) {
5304                     ERR("Warning: Mapped shared memory pointer is not aligned to page size, "
5305                         "alignment "
5306                         "is: %d",
5307                         mappedPtrAlignment);
5308                 }
5309                 importHostInfo = {
5310                     .sType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT,
5311                     .pNext = NULL,
5312                     .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT,
5313                     .pHostPointer = mappedPtr,
5314                 };
5315                 vk_append_struct(&structChainIter, &*importHostInfo);
5316             } else if (m_emu->features.ExternalBlob.enabled) {
5317                 VkExternalMemoryHandleTypeFlags handleTypes;
5318 
5319 #if defined(__APPLE__)
5320                 if (m_emu->instanceSupportsMoltenVK) {
5321                     // Using a different handle type when in MoltenVK mode
5322                     handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_MTLBUFFER_BIT_EXT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_MTLTEXTURE_BIT_EXT;
5323                 }
5324                 else {
5325                     handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT;
5326                 }
5327 #elif defined(_WIN32)
5328                 handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT;
5329 #elif defined(__unix__)
5330                 handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT;
5331 #endif
5332 
5333 #ifdef __linux__
5334                 if (m_emu->deviceInfo.supportsDmaBuf &&
5335                     hasDeviceExtension(device, VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME)) {
5336                     handleTypes |= VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
5337                 }
5338 #endif
5339 
5340                 exportAllocateInfo = {
5341                     .sType = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO,
5342                     .pNext = NULL,
5343                     .handleTypes = handleTypes,
5344                 };
5345                 vk_append_struct(&structChainIter, &*exportAllocateInfo);
5346             } else if (m_emu->features.VulkanAllocateHostMemory.enabled &&
5347                        localAllocInfo.pNext == nullptr) {
5348                 if (!m_emu || !m_emu->deviceInfo.supportsExternalMemoryHostProps) {
5349                     ERR("VK_EXT_EXTERNAL_MEMORY_HOST is not supported, cannot use "
5350                         "VulkanAllocateHostMemory");
5351                     return VK_ERROR_INCOMPATIBLE_DRIVER;
5352                 }
5353                 VkDeviceSize alignmentSize =
5354                     m_emu->deviceInfo.externalMemoryHostProps.minImportedHostPointerAlignment;
5355                 VkDeviceSize alignedSize = __ALIGN(localAllocInfo.allocationSize, alignmentSize);
5356                 localAllocInfo.allocationSize = alignedSize;
5357                 privateMemory =
5358                     std::make_shared<PrivateMemory>(alignmentSize, localAllocInfo.allocationSize);
5359                 mappedPtr = privateMemory->getAddr();
5360                 importHostInfo = {
5361                     .sType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT,
5362                     .pNext = NULL,
5363                     .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT,
5364                     .pHostPointer = mappedPtr,
5365                 };
5366 
5367                 VkMemoryHostPointerPropertiesEXT memoryHostPointerProperties = {
5368                     .sType = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT,
5369                     .pNext = NULL,
5370                     .memoryTypeBits = 0,
5371                 };
5372 
5373                 vk->vkGetMemoryHostPointerPropertiesEXT(
5374                     device, VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT, mappedPtr,
5375                     &memoryHostPointerProperties);
5376 
5377                 if (memoryHostPointerProperties.memoryTypeBits == 0) {
5378                     ERR("Cannot find suitable memory type for VulkanAllocateHostMemory");
5379                     return VK_ERROR_INCOMPATIBLE_DRIVER;
5380                 }
5381 
5382                 if (((1u << localAllocInfo.memoryTypeIndex) &
5383                      memoryHostPointerProperties.memoryTypeBits) == 0) {
5384                     // TODO Consider assigning the correct memory index earlier, instead of
5385                     // switching right before allocation.
5386 
5387                     // Look for the first available supported memory index and assign it.
5388                     for (uint32_t i = 0; i <= 31; ++i) {
5389                         if ((memoryHostPointerProperties.memoryTypeBits & (1u << i)) == 0) {
5390                             continue;
5391                         }
5392                         localAllocInfo.memoryTypeIndex = i;
5393                         break;
5394                     }
5395                     VERBOSE(
5396                         "Detected memoryTypeIndex violation on requested host memory import. "
5397                         "Switching "
5398                         "to a supported memory index %d",
5399                         localAllocInfo.memoryTypeIndex);
5400                 }
5401 
5402                 vk_append_struct(&structChainIter, &*importHostInfo);
5403             }
5404         }
5405 
5406         VkResult result = vk->vkAllocateMemory(device, &localAllocInfo, pAllocator, pMemory);
5407         if (result != VK_SUCCESS) {
5408             return result;
5409         }
5410 
5411 #ifdef _WIN32
5412         // Let ManagedDescriptor to close the underlying HANDLE when going out of scope. From the
5413         // VkImportMemoryWin32HandleInfoKHR spec: Importing memory object payloads from Windows
5414         // handles does not transfer ownership of the handle to the Vulkan implementation. For
5415         // handle types defined as NT handles, the application must release handle ownership using
5416         // the CloseHandle system call when the handle is no longer needed. For handle types defined
5417         // as NT handles, the imported memory object holds a reference to its payload.
5418 #else
5419         // Tell ManagedDescriptor not to close the underlying fd, because the ownership has already
5420         // been transferred to the Vulkan implementation. From VkImportMemoryFdInfoKHR spec:
5421         // Importing memory from a file descriptor transfers ownership of the file descriptor from
5422         // the application to the Vulkan implementation. The application must not perform any
5423         // operations on the file descriptor after a successful import. The imported memory object
5424         // holds a reference to its payload.
5425         externalMemoryHandle.release();
5426 #endif
5427 
5428         std::lock_guard<std::recursive_mutex> lock(mLock);
5429 
5430         VALIDATE_NEW_HANDLE_INFO_ENTRY(mMemoryInfo, *pMemory);
5431         mMemoryInfo[*pMemory] = MemoryInfo();
5432         auto& memoryInfo = mMemoryInfo[*pMemory];
5433         memoryInfo.size = localAllocInfo.allocationSize;
5434         memoryInfo.device = device;
5435         memoryInfo.memoryIndex = localAllocInfo.memoryTypeIndex;
5436 
5437         if (importCbInfoPtr) {
5438             memoryInfo.boundColorBuffer = importCbInfoPtr->colorBuffer;
5439         }
5440 
5441         if (!hostVisible) {
5442             *pMemory = new_boxed_non_dispatchable_VkDeviceMemory(*pMemory);
5443             return result;
5444         }
5445 
5446         if (memoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) {
5447             memoryInfo.caching = MAP_CACHE_CACHED;
5448         } else if (memoryPropertyFlags & VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD) {
5449             memoryInfo.caching = MAP_CACHE_UNCACHED;
5450         } else if (memoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) {
5451             memoryInfo.caching = MAP_CACHE_WC;
5452         }
5453 
5454         auto* deviceInfo = android::base::find(mDeviceInfo, device);
5455         if (!deviceInfo) return VK_ERROR_OUT_OF_HOST_MEMORY;
5456 
5457         // If gfxstream needs to be able to read from this memory, needToMap should be true.
5458         // When external blobs are off, we always want to map HOST_VISIBLE memory. Because, we run
5459         // in the same process as the guest.
5460         // When external blobs are on, we want to map memory only if a workaround is using it in
5461         // the gfxstream process. This happens when ASTC CPU emulation is on.
5462         bool needToMap =
5463             (!m_emu->features.ExternalBlob.enabled ||
5464              (deviceInfo->useAstcCpuDecompression && deviceInfo->emulateTextureAstc)) &&
5465             !createBlobInfoPtr;
5466 
5467         // Some cases provide a mappedPtr, so we only map if we still don't have a pointer here.
5468         if (!mappedPtr && needToMap) {
5469             memoryInfo.needUnmap = true;
5470             VkResult mapResult =
5471                 vk->vkMapMemory(device, *pMemory, 0, memoryInfo.size, 0, &memoryInfo.ptr);
5472             if (mapResult != VK_SUCCESS) {
5473                 freeMemoryLocked(device, vk, *pMemory, pAllocator);
5474                 *pMemory = VK_NULL_HANDLE;
5475                 return VK_ERROR_OUT_OF_HOST_MEMORY;
5476             }
5477         } else {
5478             // Since we didn't call vkMapMemory, unmapping is not needed (don't own mappedPtr).
5479             memoryInfo.needUnmap = false;
5480             memoryInfo.ptr = mappedPtr;
5481 
5482             if (createBlobInfoPtr) {
5483                 memoryInfo.blobId = createBlobInfoPtr->blobId;
5484             }
5485 
5486             // Always assign the shared memory into memoryInfo. If it was used, then it will have
5487             // ownership transferred.
5488             memoryInfo.sharedMemory = std::exchange(sharedMemory, std::nullopt);
5489 
5490             memoryInfo.privateMemory = privateMemory;
5491         }
5492 
5493         *pMemory = new_boxed_non_dispatchable_VkDeviceMemory(*pMemory);
5494 
5495         return result;
5496     }
5497 
destroyMemoryWithExclusiveInfo(VkDevice device,VulkanDispatch * deviceDispatch,VkDeviceMemory memory,MemoryInfo & memoryInfo,const VkAllocationCallbacks * pAllocator)5498     void destroyMemoryWithExclusiveInfo(VkDevice device, VulkanDispatch* deviceDispatch,
5499                                         VkDeviceMemory memory, MemoryInfo& memoryInfo,
5500                                         const VkAllocationCallbacks* pAllocator) {
5501         if (memoryInfo.directMapped) {
5502             // if direct mapped, we leave it up to the guest address space driver
5503             // to control the unmapping of kvm slot on the host side
5504             // in order to avoid situations where
5505             //
5506             // 1. we try to unmap here and deadlock
5507             //
5508             // 2. unmapping at the wrong time (possibility of a parallel call
5509             // to unmap vs. address space allocate and mapMemory leading to
5510             // mapping the same gpa twice)
5511             if (mUseOldMemoryCleanupPath) {
5512                 unmapMemoryAtGpaIfExists(memoryInfo.guestPhysAddr);
5513             }
5514         }
5515 
5516         if (memoryInfo.needUnmap && memoryInfo.ptr) {
5517             deviceDispatch->vkUnmapMemory(device, memory);
5518         }
5519 
5520         deviceDispatch->vkFreeMemory(device, memory, pAllocator);
5521     }
5522 
freeMemoryLocked(VkDevice device,VulkanDispatch * deviceDispatch,VkDeviceMemory memory,const VkAllocationCallbacks * pAllocator)5523     void freeMemoryLocked(VkDevice device, VulkanDispatch* deviceDispatch, VkDeviceMemory memory,
5524                           const VkAllocationCallbacks* pAllocator) {
5525         auto memoryInfoIt = mMemoryInfo.find(memory);
5526         if (memoryInfoIt == mMemoryInfo.end()) return;
5527         auto& memoryInfo = memoryInfoIt->second;
5528 
5529         destroyMemoryWithExclusiveInfo(device, deviceDispatch, memory, memoryInfo, pAllocator);
5530 
5531         mMemoryInfo.erase(memoryInfoIt);
5532     }
5533 
on_vkFreeMemory(android::base::BumpPool * pool,VkDevice boxed_device,VkDeviceMemory memory,const VkAllocationCallbacks * pAllocator)5534     void on_vkFreeMemory(android::base::BumpPool* pool, VkDevice boxed_device,
5535                          VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator) {
5536         auto device = unbox_VkDevice(boxed_device);
5537         auto deviceDispatch = dispatch_VkDevice(boxed_device);
5538         if (!device || !deviceDispatch) return;
5539 
5540         std::lock_guard<std::recursive_mutex> lock(mLock);
5541         freeMemoryLocked(device, deviceDispatch, memory, pAllocator);
5542     }
5543 
on_vkMapMemory(android::base::BumpPool * pool,VkDevice,VkDeviceMemory memory,VkDeviceSize offset,VkDeviceSize size,VkMemoryMapFlags flags,void ** ppData)5544     VkResult on_vkMapMemory(android::base::BumpPool* pool, VkDevice, VkDeviceMemory memory,
5545                             VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags,
5546                             void** ppData) {
5547         std::lock_guard<std::recursive_mutex> lock(mLock);
5548         return on_vkMapMemoryLocked(0, memory, offset, size, flags, ppData);
5549     }
on_vkMapMemoryLocked(VkDevice,VkDeviceMemory memory,VkDeviceSize offset,VkDeviceSize size,VkMemoryMapFlags flags,void ** ppData)5550     VkResult on_vkMapMemoryLocked(VkDevice, VkDeviceMemory memory, VkDeviceSize offset,
5551                                   VkDeviceSize size, VkMemoryMapFlags flags, void** ppData) {
5552         auto* info = android::base::find(mMemoryInfo, memory);
5553         if (!info || !info->ptr) return VK_ERROR_MEMORY_MAP_FAILED;  // Invalid usage.
5554 
5555         *ppData = (void*)((uint8_t*)info->ptr + offset);
5556         return VK_SUCCESS;
5557     }
5558 
on_vkUnmapMemory(android::base::BumpPool * pool,VkDevice,VkDeviceMemory)5559     void on_vkUnmapMemory(android::base::BumpPool* pool, VkDevice, VkDeviceMemory) {
5560         // no-op; user-level mapping does not correspond
5561         // to any operation here.
5562     }
5563 
getMappedHostPointer(VkDeviceMemory memory)5564     uint8_t* getMappedHostPointer(VkDeviceMemory memory) {
5565         std::lock_guard<std::recursive_mutex> lock(mLock);
5566 
5567         auto* info = android::base::find(mMemoryInfo, memory);
5568         if (!info) return nullptr;
5569 
5570         return (uint8_t*)(info->ptr);
5571     }
5572 
getDeviceMemorySize(VkDeviceMemory memory)5573     VkDeviceSize getDeviceMemorySize(VkDeviceMemory memory) {
5574         std::lock_guard<std::recursive_mutex> lock(mLock);
5575 
5576         auto* info = android::base::find(mMemoryInfo, memory);
5577         if (!info) return 0;
5578 
5579         return info->size;
5580     }
5581 
usingDirectMapping() const5582     bool usingDirectMapping() const {
5583         return m_emu->features.GlDirectMem.enabled ||
5584                m_emu->features.VirtioGpuNext.enabled;
5585     }
5586 
getHostFeatureSupport() const5587     HostFeatureSupport getHostFeatureSupport() const {
5588         HostFeatureSupport res;
5589 
5590         if (!m_vk) return res;
5591 
5592         auto emu = getGlobalVkEmulation();
5593 
5594         res.supportsVulkan = emu && emu->live;
5595 
5596         if (!res.supportsVulkan) return res;
5597 
5598         const auto& props = emu->deviceInfo.physdevProps;
5599 
5600         res.supportsVulkan1_1 = props.apiVersion >= VK_API_VERSION_1_1;
5601         res.useDeferredCommands = emu->useDeferredCommands;
5602         res.useCreateResourcesWithRequirements = emu->useCreateResourcesWithRequirements;
5603 
5604         res.apiVersion = props.apiVersion;
5605         res.driverVersion = props.driverVersion;
5606         res.deviceID = props.deviceID;
5607         res.vendorID = props.vendorID;
5608         return res;
5609     }
5610 
hasInstanceExtension(VkInstance instance,const std::string & name)5611     bool hasInstanceExtension(VkInstance instance, const std::string& name) {
5612         auto* info = android::base::find(mInstanceInfo, instance);
5613         if (!info) return false;
5614 
5615         for (const auto& enabledName : info->enabledExtensionNames) {
5616             if (name == enabledName) return true;
5617         }
5618 
5619         return false;
5620     }
5621 
hasDeviceExtension(VkDevice device,const std::string & name)5622     bool hasDeviceExtension(VkDevice device, const std::string& name) {
5623         auto* info = android::base::find(mDeviceInfo, device);
5624         if (!info) return false;
5625 
5626         for (const auto& enabledName : info->enabledExtensionNames) {
5627             if (name == enabledName) return true;
5628         }
5629 
5630         return false;
5631     }
5632 
5633     // Returns whether a vector of VkExtensionProperties contains a particular extension
hasDeviceExtension(const std::vector<VkExtensionProperties> & properties,const char * name)5634     bool hasDeviceExtension(const std::vector<VkExtensionProperties>& properties,
5635                             const char* name) {
5636         for (const auto& prop : properties) {
5637             if (strcmp(prop.extensionName, name) == 0) return true;
5638         }
5639         return false;
5640     }
5641 
5642     // Convenience function to call vkEnumerateDeviceExtensionProperties and get the results as an
5643     // std::vector
enumerateDeviceExtensionProperties(VulkanDispatch * vk,VkPhysicalDevice physicalDevice,const char * pLayerName,std::vector<VkExtensionProperties> & properties)5644     VkResult enumerateDeviceExtensionProperties(VulkanDispatch* vk, VkPhysicalDevice physicalDevice,
5645                                                 const char* pLayerName,
5646                                                 std::vector<VkExtensionProperties>& properties) {
5647         uint32_t propertyCount = 0;
5648         VkResult result = vk->vkEnumerateDeviceExtensionProperties(physicalDevice, pLayerName,
5649                                                                    &propertyCount, nullptr);
5650         if (result != VK_SUCCESS) return result;
5651 
5652         properties.resize(propertyCount);
5653         return vk->vkEnumerateDeviceExtensionProperties(physicalDevice, pLayerName, &propertyCount,
5654                                                         properties.data());
5655     }
5656 
    // VK_ANDROID_native_buffer
    // Translates a swapchain image's format/usage into gralloc0 usage bits via
    // getGralloc0Usage. The VkDevice is unused; always reports VK_SUCCESS.
    VkResult on_vkGetSwapchainGrallocUsageANDROID(android::base::BumpPool* pool, VkDevice,
                                                  VkFormat format, VkImageUsageFlags imageUsage,
                                                  int* grallocUsage) {
        getGralloc0Usage(format, imageUsage, grallocUsage);
        return VK_SUCCESS;
    }
5664 
    // Translates a swapchain image's format/usage (plus swapchain-specific
    // usage flags) into gralloc1 producer and consumer usage bits via
    // getGralloc1Usage. The VkDevice is unused; always reports VK_SUCCESS.
    VkResult on_vkGetSwapchainGrallocUsage2ANDROID(
        android::base::BumpPool* pool, VkDevice, VkFormat format, VkImageUsageFlags imageUsage,
        VkSwapchainImageUsageFlagsANDROID swapchainImageUsage, uint64_t* grallocConsumerUsage,
        uint64_t* grallocProducerUsage) {
        getGralloc1Usage(format, imageUsage, swapchainImageUsage, grallocConsumerUsage,
                         grallocProducerUsage);
        return VK_SUCCESS;
    }
5673 
    // Implements vkAcquireImageANDROID (VK_ANDROID_native_buffer): submits work
    // on the device's default queue that signals |semaphore| and/or |fence| for
    // the acquired native-buffer image, and records a completion waitable on
    // both objects so their later destruction can be deferred until the signal
    // work finishes. NOTE(review): nativeFenceFd is not consumed in this body —
    // presumably synchronization with it happens elsewhere; confirm.
    VkResult on_vkAcquireImageANDROID(android::base::BumpPool* pool, VkDevice boxed_device,
                                      VkImage image, int nativeFenceFd, VkSemaphore semaphore,
                                      VkFence fence) {
        auto device = unbox_VkDevice(boxed_device);
        auto vk = dispatch_VkDevice(boxed_device);

        std::lock_guard<std::recursive_mutex> lock(mLock);

        auto* deviceInfo = android::base::find(mDeviceInfo, device);
        if (!deviceInfo) return VK_ERROR_INITIALIZATION_FAILED;

        auto* imageInfo = android::base::find(mImageInfo, image);
        if (!imageInfo) return VK_ERROR_INITIALIZATION_FAILED;

        // The signal submission goes to the device's default queue.
        VkQueue defaultQueue;
        uint32_t defaultQueueFamilyIndex;
        Lock* defaultQueueLock;
        if (!getDefaultQueueForDeviceLocked(device, &defaultQueue, &defaultQueueFamilyIndex,
                                            &defaultQueueLock)) {
            INFO("%s: can't get the default q", __func__);
            return VK_ERROR_INITIALIZATION_FAILED;
        }

        DeviceOpBuilder builder(*deviceInfo->deviceOpTracker);

        // If the caller gave no fence, create one anyway so completion of this
        // operation can still be tracked through the op tracker.
        VkFence usedFence = fence;
        if (usedFence == VK_NULL_HANDLE) {
            usedFence = builder.CreateFenceForOp();
        }

        AndroidNativeBufferInfo* anbInfo = imageInfo->anbInfo.get();

        VkResult result = setAndroidNativeImageSemaphoreSignaled(
            vk, device, defaultQueue, defaultQueueFamilyIndex, defaultQueueLock, semaphore,
            usedFence, anbInfo);
        if (result != VK_SUCCESS) {
            return result;
        }

        DeviceOpWaitable aniCompletedWaitable = builder.OnQueueSubmittedWithFence(usedFence);

        // Remember the latest use of the semaphore/fence so that destroying
        // them can wait for this submission to complete.
        if (semaphore != VK_NULL_HANDLE) {
            auto semaphoreInfo = android::base::find(mSemaphoreInfo, semaphore);
            if (semaphoreInfo != nullptr) {
                semaphoreInfo->latestUse = aniCompletedWaitable;
            }
        }
        if (fence != VK_NULL_HANDLE) {
            auto fenceInfo = android::base::find(mFenceInfo, fence);
            if (fenceInfo != nullptr) {
                fenceInfo->latestUse = aniCompletedWaitable;
            }
        }

        // Opportunistically reclaim any deferred work that has completed.
        deviceInfo->deviceOpTracker->PollAndProcessGarbage();

        return VK_SUCCESS;
    }
5732 
on_vkQueueSignalReleaseImageANDROID(android::base::BumpPool * pool,VkQueue boxed_queue,uint32_t waitSemaphoreCount,const VkSemaphore * pWaitSemaphores,VkImage image,int * pNativeFenceFd)5733     VkResult on_vkQueueSignalReleaseImageANDROID(android::base::BumpPool* pool, VkQueue boxed_queue,
5734                                                  uint32_t waitSemaphoreCount,
5735                                                  const VkSemaphore* pWaitSemaphores, VkImage image,
5736                                                  int* pNativeFenceFd) {
5737         auto queue = unbox_VkQueue(boxed_queue);
5738         auto vk = dispatch_VkQueue(boxed_queue);
5739 
5740         std::lock_guard<std::recursive_mutex> lock(mLock);
5741 
5742         auto* queueInfo = android::base::find(mQueueInfo, queue);
5743         if (!queueInfo) return VK_ERROR_INITIALIZATION_FAILED;
5744 
5745         if (mRenderDocWithMultipleVkInstances) {
5746             VkPhysicalDevice vkPhysicalDevice = mDeviceToPhysicalDevice.at(queueInfo->device);
5747             VkInstance vkInstance = mPhysicalDeviceToInstance.at(vkPhysicalDevice);
5748             mRenderDocWithMultipleVkInstances->onFrameDelimiter(vkInstance);
5749         }
5750 
5751         auto* imageInfo = android::base::find(mImageInfo, image);
5752         if (!imageInfo) return VK_ERROR_INITIALIZATION_FAILED;
5753 
5754         auto* anbInfo = imageInfo->anbInfo.get();
5755         if (anbInfo->useVulkanNativeImage) {
5756             // vkQueueSignalReleaseImageANDROID() is only called by the Android framework's
5757             // implementation of vkQueuePresentKHR(). The guest application is responsible for
5758             // transitioning the image layout of the image passed to vkQueuePresentKHR() to
5759             // VK_IMAGE_LAYOUT_PRESENT_SRC_KHR before the call. If the host is using native
5760             // Vulkan images where `image` is backed with the same memory as its ColorBuffer,
5761             // then we need to update the tracked layout for that ColorBuffer.
5762             setColorBufferCurrentLayout(anbInfo->colorBufferHandle,
5763                                         VK_IMAGE_LAYOUT_PRESENT_SRC_KHR);
5764         }
5765 
5766         return syncImageToColorBuffer(m_emu->callbacks, vk, queueInfo->queueFamilyIndex, queue,
5767                                       queueInfo->physicalQueueLock.get(), waitSemaphoreCount, pWaitSemaphores,
5768                                       pNativeFenceFd, anbInfo);
5769     }
5770 
on_vkMapMemoryIntoAddressSpaceGOOGLE(android::base::BumpPool * pool,VkDevice boxed_device,VkDeviceMemory memory,uint64_t * pAddress)5771     VkResult on_vkMapMemoryIntoAddressSpaceGOOGLE(android::base::BumpPool* pool,
5772                                                   VkDevice boxed_device, VkDeviceMemory memory,
5773                                                   uint64_t* pAddress) {
5774         auto device = unbox_VkDevice(boxed_device);
5775         auto vk = dispatch_VkDevice(boxed_device);
5776 
5777         if (!m_emu->features.GlDirectMem.enabled) {
5778             fprintf(stderr,
5779                     "FATAL: Tried to use direct mapping "
5780                     "while GlDirectMem is not enabled!\n");
5781         }
5782 
5783         std::lock_guard<std::recursive_mutex> lock(mLock);
5784 
5785         if (mLogging) {
5786             INFO("%s: deviceMemory: 0x%llx pAddress: 0x%llx", __func__,
5787                     (unsigned long long)memory, (unsigned long long)(*pAddress));
5788         }
5789 
5790         if (!mapHostVisibleMemoryToGuestPhysicalAddressLocked(vk, device, memory, *pAddress)) {
5791             return VK_ERROR_OUT_OF_HOST_MEMORY;
5792         }
5793 
5794         auto* info = android::base::find(mMemoryInfo, memory);
5795         if (!info) return VK_ERROR_INITIALIZATION_FAILED;
5796 
5797         *pAddress = (uint64_t)(uintptr_t)info->ptr;
5798 
5799         return VK_SUCCESS;
5800     }
5801 
vkGetBlobInternal(VkDevice boxed_device,VkDeviceMemory memory,uint64_t hostBlobId)5802     VkResult vkGetBlobInternal(VkDevice boxed_device, VkDeviceMemory memory, uint64_t hostBlobId) {
5803         auto device = unbox_VkDevice(boxed_device);
5804         auto vk = dispatch_VkDevice(boxed_device);
5805 
5806         std::lock_guard<std::recursive_mutex> lock(mLock);
5807 
5808         auto virtioGpuContextIdOpt = getContextIdForDeviceLocked(device);
5809         if (!virtioGpuContextIdOpt) {
5810             ERR("VkDevice:%p missing context id for vkAllocateMemory().");
5811             return VK_ERROR_OUT_OF_HOST_MEMORY;
5812         }
5813         const uint32_t virtioGpuContextId = *virtioGpuContextIdOpt;
5814 
5815         auto* info = android::base::find(mMemoryInfo, memory);
5816         if (!info) return VK_ERROR_OUT_OF_HOST_MEMORY;
5817 
5818         hostBlobId = (info->blobId && !hostBlobId) ? info->blobId : hostBlobId;
5819 
5820         if (m_emu->features.SystemBlob.enabled && info->sharedMemory.has_value()) {
5821             uint32_t handleType = STREAM_MEM_HANDLE_TYPE_SHM;
5822             // We transfer ownership of the shared memory handle to the descriptor info.
5823             // The memory itself is destroyed only when all processes unmap / release their
5824             // handles.
5825             ExternalObjectManager::get()->addBlobDescriptorInfo(
5826                 virtioGpuContextId, hostBlobId, info->sharedMemory->releaseHandle(), handleType,
5827                 info->caching, std::nullopt);
5828         } else if (m_emu->features.ExternalBlob.enabled) {
5829             VkResult result;
5830 
5831             DescriptorType handle;
5832             uint32_t handleType;
5833             struct VulkanInfo vulkanInfo = {
5834                 .memoryIndex = info->memoryIndex,
5835             };
5836             memcpy(vulkanInfo.deviceUUID, m_emu->deviceInfo.idProps.deviceUUID,
5837                    sizeof(vulkanInfo.deviceUUID));
5838             memcpy(vulkanInfo.driverUUID, m_emu->deviceInfo.idProps.driverUUID,
5839                    sizeof(vulkanInfo.driverUUID));
5840 
5841             if (snapshotsEnabled()) {
5842                 VkResult mapResult = vk->vkMapMemory(device, memory, 0, info->size, 0, &info->ptr);
5843                 if (mapResult != VK_SUCCESS) {
5844                     return VK_ERROR_OUT_OF_HOST_MEMORY;
5845                 }
5846 
5847                 info->needUnmap = true;
5848             }
5849 
5850 #ifdef __unix__
5851             VkMemoryGetFdInfoKHR getFd = {
5852                 .sType = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR,
5853                 .pNext = nullptr,
5854                 .memory = memory,
5855                 .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT,
5856             };
5857 
5858             handleType = STREAM_MEM_HANDLE_TYPE_OPAQUE_FD;
5859 #endif
5860 
5861 #ifdef __linux__
5862             if (m_emu->deviceInfo.supportsDmaBuf &&
5863                 hasDeviceExtension(device, VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME)) {
5864                 getFd.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
5865                 handleType = STREAM_MEM_HANDLE_TYPE_DMABUF;
5866             }
5867 #endif
5868 
5869 #ifdef __unix__
5870             result = m_emu->deviceInfo.getMemoryHandleFunc(device, &getFd, &handle);
5871             if (result != VK_SUCCESS) {
5872                 return result;
5873             }
5874 #endif
5875 
5876 #ifdef _WIN32
5877             VkMemoryGetWin32HandleInfoKHR getHandle = {
5878                 .sType = VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR,
5879                 .pNext = nullptr,
5880                 .memory = memory,
5881                 .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT,
5882             };
5883 
5884             handleType = STREAM_MEM_HANDLE_TYPE_OPAQUE_WIN32;
5885 
5886             result = m_emu->deviceInfo.getMemoryHandleFunc(device, &getHandle, &handle);
5887             if (result != VK_SUCCESS) {
5888                 return result;
5889             }
5890 #endif
5891 
5892 #ifdef __APPLE__
5893             if (m_emu->instanceSupportsMoltenVK) {
5894                 GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
5895                     << "ExternalBlob feature is not supported with MoltenVK";
5896             }
5897 #endif
5898 
5899             ManagedDescriptor managedHandle(handle);
5900             ExternalObjectManager::get()->addBlobDescriptorInfo(
5901                 virtioGpuContextId, hostBlobId, std::move(managedHandle), handleType, info->caching,
5902                 std::optional<VulkanInfo>(vulkanInfo));
5903         } else if (!info->needUnmap) {
5904             auto device = unbox_VkDevice(boxed_device);
5905             auto vk = dispatch_VkDevice(boxed_device);
5906             VkResult mapResult = vk->vkMapMemory(device, memory, 0, info->size, 0, &info->ptr);
5907             if (mapResult != VK_SUCCESS) {
5908                 return VK_ERROR_OUT_OF_HOST_MEMORY;
5909             }
5910 
5911             info->needUnmap = true;
5912         }
5913 
5914         if (info->needUnmap) {
5915             uint64_t hva = (uint64_t)(uintptr_t)(info->ptr);
5916             uint64_t alignedHva = hva & kPageMaskForBlob;
5917 
5918             if (hva != alignedHva) {
5919                 ERR("Mapping non page-size (0x%" PRIx64
5920                     ") aligned host virtual address:%p "
5921                     "using the aligned host virtual address:%p. The underlying resources "
5922                     "using this blob may be corrupted/offset.",
5923                     kPageSizeforBlob, hva, alignedHva);
5924             }
5925             ExternalObjectManager::get()->addMapping(virtioGpuContextId, hostBlobId,
5926                                                      (void*)(uintptr_t)alignedHva, info->caching);
5927             info->virtioGpuMapped = true;
5928             info->hostmemId = hostBlobId;
5929         }
5930 
5931         return VK_SUCCESS;
5932     }
5933 
on_vkGetBlobGOOGLE(android::base::BumpPool * pool,VkDevice boxed_device,VkDeviceMemory memory)5934     VkResult on_vkGetBlobGOOGLE(android::base::BumpPool* pool, VkDevice boxed_device,
5935                                 VkDeviceMemory memory) {
5936         return vkGetBlobInternal(boxed_device, memory, 0);
5937     }
5938 
on_vkGetMemoryHostAddressInfoGOOGLE(android::base::BumpPool * pool,VkDevice boxed_device,VkDeviceMemory memory,uint64_t * pAddress,uint64_t * pSize,uint64_t * pHostmemId)5939     VkResult on_vkGetMemoryHostAddressInfoGOOGLE(android::base::BumpPool* pool,
5940                                                  VkDevice boxed_device, VkDeviceMemory memory,
5941                                                  uint64_t* pAddress, uint64_t* pSize,
5942                                                  uint64_t* pHostmemId) {
5943         hostBlobId++;
5944         *pHostmemId = hostBlobId;
5945         return vkGetBlobInternal(boxed_device, memory, hostBlobId);
5946     }
5947 
on_vkFreeMemorySyncGOOGLE(android::base::BumpPool * pool,VkDevice boxed_device,VkDeviceMemory memory,const VkAllocationCallbacks * pAllocator)5948     VkResult on_vkFreeMemorySyncGOOGLE(android::base::BumpPool* pool, VkDevice boxed_device,
5949                                        VkDeviceMemory memory,
5950                                        const VkAllocationCallbacks* pAllocator) {
5951         on_vkFreeMemory(pool, boxed_device, memory, pAllocator);
5952 
5953         return VK_SUCCESS;
5954     }
5955 
on_vkAllocateCommandBuffers(android::base::BumpPool * pool,VkDevice boxed_device,const VkCommandBufferAllocateInfo * pAllocateInfo,VkCommandBuffer * pCommandBuffers)5956     VkResult on_vkAllocateCommandBuffers(android::base::BumpPool* pool, VkDevice boxed_device,
5957                                          const VkCommandBufferAllocateInfo* pAllocateInfo,
5958                                          VkCommandBuffer* pCommandBuffers) {
5959         auto device = unbox_VkDevice(boxed_device);
5960         auto vk = dispatch_VkDevice(boxed_device);
5961 
5962         VkResult result = vk->vkAllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers);
5963 
5964         if (result != VK_SUCCESS) {
5965             return result;
5966         }
5967 
5968         std::lock_guard<std::recursive_mutex> lock(mLock);
5969 
5970         auto* deviceInfo = android::base::find(mDeviceInfo, device);
5971         auto* commandPoolInfo = android::base::find(mCommandPoolInfo, pAllocateInfo->commandPool);
5972         if (!deviceInfo || !commandPoolInfo) {
5973             ERR("Cannot allocate command buffers, dependency not found! (%p, %p)", deviceInfo,
5974                 commandPoolInfo);
5975             return VK_ERROR_UNKNOWN;
5976         }
5977 
5978         for (uint32_t i = 0; i < pAllocateInfo->commandBufferCount; i++) {
5979             VALIDATE_NEW_HANDLE_INFO_ENTRY(mCommandBufferInfo, pCommandBuffers[i]);
5980             mCommandBufferInfo[pCommandBuffers[i]] = CommandBufferInfo();
5981             mCommandBufferInfo[pCommandBuffers[i]].device = device;
5982             mCommandBufferInfo[pCommandBuffers[i]].debugUtilsHelper = deviceInfo->debugUtilsHelper;
5983             mCommandBufferInfo[pCommandBuffers[i]].cmdPool = pAllocateInfo->commandPool;
5984             auto boxed = new_boxed_VkCommandBuffer(pCommandBuffers[i], vk,
5985                                                    false /* does not own dispatch */);
5986             mCommandBufferInfo[pCommandBuffers[i]].boxed = boxed;
5987 
5988             commandPoolInfo->cmdBuffers.insert(pCommandBuffers[i]);
5989 
5990             pCommandBuffers[i] = (VkCommandBuffer)boxed;
5991         }
5992         return result;
5993     }
5994 
on_vkCreateCommandPool(android::base::BumpPool * pool,VkDevice boxed_device,const VkCommandPoolCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkCommandPool * pCommandPool)5995     VkResult on_vkCreateCommandPool(android::base::BumpPool* pool, VkDevice boxed_device,
5996                                     const VkCommandPoolCreateInfo* pCreateInfo,
5997                                     const VkAllocationCallbacks* pAllocator,
5998                                     VkCommandPool* pCommandPool) {
5999         auto device = unbox_VkDevice(boxed_device);
6000         auto vk = dispatch_VkDevice(boxed_device);
6001 
6002         VkResult result = vk->vkCreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool);
6003         if (result != VK_SUCCESS) {
6004             return result;
6005         }
6006         std::lock_guard<std::recursive_mutex> lock(mLock);
6007         VALIDATE_NEW_HANDLE_INFO_ENTRY(mCommandPoolInfo, *pCommandPool);
6008         mCommandPoolInfo[*pCommandPool] = CommandPoolInfo();
6009         auto& cmdPoolInfo = mCommandPoolInfo[*pCommandPool];
6010         cmdPoolInfo.device = device;
6011 
6012         *pCommandPool = new_boxed_non_dispatchable_VkCommandPool(*pCommandPool);
6013         cmdPoolInfo.boxed = *pCommandPool;
6014 
6015         return result;
6016     }
6017 
destroyCommandPoolWithExclusiveInfo(VkDevice device,VulkanDispatch * deviceDispatch,VkCommandPool commandPool,CommandPoolInfo & commandPoolInfo,std::unordered_map<VkCommandBuffer,CommandBufferInfo> & commandBufferInfos,const VkAllocationCallbacks * pAllocator)6018     void destroyCommandPoolWithExclusiveInfo(
6019         VkDevice device, VulkanDispatch* deviceDispatch, VkCommandPool commandPool,
6020         CommandPoolInfo& commandPoolInfo,
6021         std::unordered_map<VkCommandBuffer, CommandBufferInfo>& commandBufferInfos,
6022         const VkAllocationCallbacks* pAllocator) {
6023         for (const VkCommandBuffer commandBuffer : commandPoolInfo.cmdBuffers) {
6024             auto iterInInfos = commandBufferInfos.find(commandBuffer);
6025             if (iterInInfos != commandBufferInfos.end()) {
6026                 commandBufferInfos.erase(iterInInfos);
6027             } else {
6028                 ERR("Cannot find command buffer reference (%p).",
6029                     commandBuffer);
6030             }
6031         }
6032 
6033         deviceDispatch->vkDestroyCommandPool(device, commandPool, pAllocator);
6034     }
6035 
destroyCommandPoolLocked(VkDevice device,VulkanDispatch * deviceDispatch,VkCommandPool commandPool,const VkAllocationCallbacks * pAllocator)6036     void destroyCommandPoolLocked(VkDevice device, VulkanDispatch* deviceDispatch,
6037                                   VkCommandPool commandPool,
6038                                   const VkAllocationCallbacks* pAllocator) {
6039         auto commandPoolInfoIt = mCommandPoolInfo.find(commandPool);
6040         if (commandPoolInfoIt == mCommandPoolInfo.end()) return;
6041         auto& commandPoolInfo = commandPoolInfoIt->second;
6042 
6043         destroyCommandPoolWithExclusiveInfo(device, deviceDispatch, commandPool, commandPoolInfo,
6044                                             mCommandBufferInfo, pAllocator);
6045 
6046         mCommandPoolInfo.erase(commandPoolInfoIt);
6047     }
6048 
on_vkDestroyCommandPool(android::base::BumpPool * pool,VkDevice boxed_device,VkCommandPool commandPool,const VkAllocationCallbacks * pAllocator)6049     void on_vkDestroyCommandPool(android::base::BumpPool* pool, VkDevice boxed_device,
6050                                  VkCommandPool commandPool,
6051                                  const VkAllocationCallbacks* pAllocator) {
6052         auto device = unbox_VkDevice(boxed_device);
6053         auto deviceDispatch = dispatch_VkDevice(boxed_device);
6054 
6055         std::lock_guard<std::recursive_mutex> lock(mLock);
6056         destroyCommandPoolLocked(device, deviceDispatch, commandPool, pAllocator);
6057     }
6058 
on_vkResetCommandPool(android::base::BumpPool * pool,VkDevice boxed_device,VkCommandPool commandPool,VkCommandPoolResetFlags flags)6059     VkResult on_vkResetCommandPool(android::base::BumpPool* pool, VkDevice boxed_device,
6060                                    VkCommandPool commandPool, VkCommandPoolResetFlags flags) {
6061         auto device = unbox_VkDevice(boxed_device);
6062         auto vk = dispatch_VkDevice(boxed_device);
6063 
6064         VkResult result = vk->vkResetCommandPool(device, commandPool, flags);
6065         if (result != VK_SUCCESS) {
6066             return result;
6067         }
6068         return result;
6069     }
6070 
on_vkCmdExecuteCommands(android::base::BumpPool * pool,VkCommandBuffer boxed_commandBuffer,uint32_t commandBufferCount,const VkCommandBuffer * pCommandBuffers)6071     void on_vkCmdExecuteCommands(android::base::BumpPool* pool, VkCommandBuffer boxed_commandBuffer,
6072                                  uint32_t commandBufferCount,
6073                                  const VkCommandBuffer* pCommandBuffers) {
6074         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
6075         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
6076 
6077         vk->vkCmdExecuteCommands(commandBuffer, commandBufferCount, pCommandBuffers);
6078         std::lock_guard<std::recursive_mutex> lock(mLock);
6079         CommandBufferInfo& cmdBuffer = mCommandBufferInfo[commandBuffer];
6080         cmdBuffer.subCmds.insert(cmdBuffer.subCmds.end(), pCommandBuffers,
6081                                  pCommandBuffers + commandBufferCount);
6082     }
6083 
    // Overload pair that lets the templated on_vkQueueSubmit dispatch to the
    // matching driver entry point for the two submit-info flavors.
    // VkSubmitInfo -> vkQueueSubmit.
    VkResult dispatchVkQueueSubmit(VulkanDispatch* vk, VkQueue unboxed_queue, uint32_t submitCount,
                                   const VkSubmitInfo* pSubmits, VkFence fence) {
        return vk->vkQueueSubmit(unboxed_queue, submitCount, pSubmits, fence);
    }

    // VkSubmitInfo2 -> vkQueueSubmit2.
    VkResult dispatchVkQueueSubmit(VulkanDispatch* vk, VkQueue unboxed_queue, uint32_t submitCount,
                                   const VkSubmitInfo2* pSubmits, VkFence fence) {
        return vk->vkQueueSubmit2(unboxed_queue, submitCount, pSubmits, fence);
    }
6093 
    // Accessor overloads that give the templated on_vkQueueSubmit a uniform
    // view of VkSubmitInfo and VkSubmitInfo2: command buffers and wait/signal
    // semaphores are stored in differently-named (and differently-shaped)
    // fields in the two structs.
    int getCommandBufferCount(const VkSubmitInfo& submitInfo) {
        return submitInfo.commandBufferCount;
    }

    VkCommandBuffer getCommandBuffer(const VkSubmitInfo& submitInfo, int idx) {
        return submitInfo.pCommandBuffers[idx];
    }

    int getCommandBufferCount(const VkSubmitInfo2& submitInfo) {
        return submitInfo.commandBufferInfoCount;
    }

    VkCommandBuffer getCommandBuffer(const VkSubmitInfo2& submitInfo, int idx) {
        // VkSubmitInfo2 wraps each command buffer in a VkCommandBufferSubmitInfo.
        return submitInfo.pCommandBufferInfos[idx].commandBuffer;
    }

    uint32_t getWaitSemaphoreCount(const VkSubmitInfo& pSubmit) {
        return pSubmit.waitSemaphoreCount;
    }
    uint32_t getWaitSemaphoreCount(const VkSubmitInfo2& pSubmit) {
        return pSubmit.waitSemaphoreInfoCount;
    }
    VkSemaphore getWaitSemaphore(const VkSubmitInfo& pSubmit, int i) {
        return pSubmit.pWaitSemaphores[i];
    }
    VkSemaphore getWaitSemaphore(const VkSubmitInfo2& pSubmit, int i) {
        // VkSubmitInfo2 wraps each semaphore in a VkSemaphoreSubmitInfo.
        return pSubmit.pWaitSemaphoreInfos[i].semaphore;
    }

    uint32_t getSignalSemaphoreCount(const VkSubmitInfo& pSubmit) {
        return pSubmit.signalSemaphoreCount;
    }
    uint32_t getSignalSemaphoreCount(const VkSubmitInfo2& pSubmit) {
        return pSubmit.signalSemaphoreInfoCount;
    }
    VkSemaphore getSignalSemaphore(const VkSubmitInfo& pSubmit, int i) {
        return pSubmit.pSignalSemaphores[i];
    }
    VkSemaphore getSignalSemaphore(const VkSubmitInfo2& pSubmit, int i) {
        return pSubmit.pSignalSemaphoreInfos[i].semaphore;
    }
6135 
6136     template <typename VkSubmitInfoType>
on_vkQueueSubmit(android::base::BumpPool * pool,VkQueue boxed_queue,uint32_t submitCount,const VkSubmitInfoType * pSubmits,VkFence fence)6137     VkResult on_vkQueueSubmit(android::base::BumpPool* pool, VkQueue boxed_queue,
6138                               uint32_t submitCount, const VkSubmitInfoType* pSubmits,
6139                               VkFence fence) {
6140         auto queue = unbox_VkQueue(boxed_queue);
6141         auto vk = dispatch_VkQueue(boxed_queue);
6142 
6143         std::unordered_set<HandleType> acquiredColorBuffers;
6144         std::unordered_set<HandleType> releasedColorBuffers;
6145         if (!m_emu->features.GuestVulkanOnly.enabled) {
6146             {
6147                 std::lock_guard<std::recursive_mutex> lock(mLock);
6148                 for (int i = 0; i < submitCount; i++) {
6149                     for (int j = 0; j < getCommandBufferCount(pSubmits[i]); j++) {
6150                         VkCommandBuffer cmdBuffer = getCommandBuffer(pSubmits[i], j);
6151                         CommandBufferInfo* cmdBufferInfo =
6152                             android::base::find(mCommandBufferInfo, cmdBuffer);
6153                         if (!cmdBufferInfo) {
6154                             continue;
6155                         }
6156                         for (auto descriptorSet : cmdBufferInfo->allDescriptorSets) {
6157                             auto descriptorSetInfo =
6158                                 android::base::find(mDescriptorSetInfo, descriptorSet);
6159                             if (!descriptorSetInfo) {
6160                                 continue;
6161                             }
6162                             for (auto& writes : descriptorSetInfo->allWrites) {
6163                                 for (const auto& write : writes) {
6164                                     bool isValid = true;
6165                                     for (const auto& alive : write.alives) {
6166                                         isValid &= !alive.expired();
6167                                     }
6168                                     if (isValid && write.boundColorBuffer.has_value()) {
6169                                         acquiredColorBuffers.insert(write.boundColorBuffer.value());
6170                                     }
6171                                 }
6172                             }
6173                         }
6174 
6175                         acquiredColorBuffers.merge(cmdBufferInfo->acquiredColorBuffers);
6176                         releasedColorBuffers.merge(cmdBufferInfo->releasedColorBuffers);
6177                         for (const auto& ite : cmdBufferInfo->cbLayouts) {
6178                             setColorBufferCurrentLayout(ite.first, ite.second);
6179                         }
6180                     }
6181                 }
6182             }
6183 
6184             for (HandleType cb : acquiredColorBuffers) {
6185                 m_emu->callbacks.invalidateColorBuffer(cb);
6186             }
6187         }
6188 
6189         VkDevice device = VK_NULL_HANDLE;
6190         Lock* ql = nullptr;
6191         DeviceOpTrackerPtr opTracker = nullptr;
6192         VkFence usedFence = fence;
6193         DeviceOpWaitable queueCompletedWaitable;
6194         {
6195             std::lock_guard<std::recursive_mutex> lock(mLock);
6196             auto* queueInfo = android::base::find(mQueueInfo, queue);
6197             if (!queueInfo) {
6198                 ERR("vkQueueSubmit cannot find queue info for %p", queue);
6199                 return VK_ERROR_INITIALIZATION_FAILED;
6200             }
6201             device = queueInfo->device;
6202             ql = queueInfo->physicalQueueLock.get();
6203 
6204             auto* deviceInfo = android::base::find(mDeviceInfo, device);
6205             if (!deviceInfo) {
6206                 ERR("vkQueueSubmit cannot find device info for %p", device);
6207                 return VK_ERROR_INITIALIZATION_FAILED;
6208             }
6209             opTracker = deviceInfo->deviceOpTracker;
6210 
6211             // Unsafe to release when snapshot enabled.
6212             // Snapshot load might fail to find the shader modules if we release them here.
6213             if (!snapshotsEnabled()) {
6214                 sBoxedHandleManager.processDelayedRemovesGlobalStateLocked(device);
6215             }
6216 
6217             for (uint32_t i = 0; i < submitCount; i++) {
6218                 executePreprocessRecursive(pSubmits[i]);
6219             }
6220             DeviceOpBuilder builder = DeviceOpBuilder(*opTracker);
6221             if (VK_NULL_HANDLE == usedFence) {
6222                 // Note: This fence will be managed by the DeviceOpTracker after the
6223                 // OnQueueSubmittedWithFence call, so it does not need to be destroyed in the scope
6224                 // of this queueSubmit
6225                 usedFence = builder.CreateFenceForOp();
6226             }
6227             queueCompletedWaitable = builder.OnQueueSubmittedWithFence(usedFence);
6228         }
6229 
6230         AutoLock qlock(*ql);
6231         auto result = dispatchVkQueueSubmit(vk, queue, submitCount, pSubmits, usedFence);
6232 
6233         if (result != VK_SUCCESS) {
6234             WARN("dispatchVkQueueSubmit failed: %s [%d]", string_VkResult(result), result);
6235             return result;
6236         }
6237 
6238         {
6239             std::lock_guard<std::recursive_mutex> lock(mLock);
6240 
6241             std::unordered_set<HandleType> imageBarrierColorBuffers;
6242             for (int i = 0; i < submitCount; i++) {
6243                 for (int j = 0; j < getCommandBufferCount(pSubmits[i]); j++) {
6244                     VkCommandBuffer cmdBuffer = getCommandBuffer(pSubmits[i], j);
6245                     CommandBufferInfo* cmdBufferInfo =
6246                         android::base::find(mCommandBufferInfo, cmdBuffer);
6247 
6248                     if (cmdBufferInfo) {
6249                         imageBarrierColorBuffers.merge(cmdBufferInfo->imageBarrierColorBuffers);
6250 
6251                         // Update image layouts
6252                         for (const auto& ite : cmdBufferInfo->imageLayouts) {
6253                             auto imageIte = mImageInfo.find(ite.first);
6254                             if (imageIte == mImageInfo.end()) {
6255                                 continue;
6256                             }
6257                             imageIte->second.layout = ite.second;
6258                         }
6259                     }
6260 
6261                     // Update latestUse for all wait/signal semaphores, to ensure that they
6262                     // are never asynchronously destroyed before the queue submissions referencing
6263                     // them have completed
6264                     for (int j = 0; j < getWaitSemaphoreCount(pSubmits[i]); j++) {
6265                         SemaphoreInfo* semaphoreInfo =
6266                             android::base::find(mSemaphoreInfo, getWaitSemaphore(pSubmits[i], j));
6267                         if (semaphoreInfo) {
6268                             semaphoreInfo->latestUse = queueCompletedWaitable;
6269                         }
6270                     }
6271                     for (int j = 0; j < getSignalSemaphoreCount(pSubmits[i]); j++) {
6272                         SemaphoreInfo* semaphoreInfo =
6273                             android::base::find(mSemaphoreInfo, getSignalSemaphore(pSubmits[i], j));
6274                         if (semaphoreInfo) {
6275                             semaphoreInfo->latestUse = queueCompletedWaitable;
6276                         }
6277                     }
6278                 }
6279             }
6280 
6281             // Update latest use for color buffers
6282             for (const auto& colorBuffer : imageBarrierColorBuffers) {
6283                 setColorBufferLatestUse(colorBuffer, queueCompletedWaitable, opTracker);
6284             }
6285 
6286             // After vkQueueSubmit is called, we can signal the conditional variable
6287             // in FenceInfo, so that other threads (e.g. SyncThread) can call
6288             // waitForFence() on this fence.
6289             auto* fenceInfo = android::base::find(mFenceInfo, fence);
6290             if (fenceInfo) {
6291                 fenceInfo->state = FenceInfo::State::kWaitable;
6292                 fenceInfo->lock.lock();
6293                 fenceInfo->cv.signalAndUnlock(&fenceInfo->lock);
6294                 // Also update the latestUse waitable for this fence, to ensure
6295                 // it is not asynchronously destroyed before all the waitables
6296                 // referencing it
6297                 fenceInfo->latestUse = queueCompletedWaitable;
6298             }
6299 
6300             opTracker->PollAndProcessGarbage();
6301         }
6302 
6303         if (!releasedColorBuffers.empty()) {
6304             result = vk->vkWaitForFences(device, 1, &usedFence, VK_TRUE, /* 1 sec */ 1000000000L);
6305             if (result != VK_SUCCESS) {
6306                 ERR("vkWaitForFences failed: %s [%d]", string_VkResult(result), result);
6307                 return result;
6308             }
6309 
6310             for (HandleType cb : releasedColorBuffers) {
6311                 m_emu->callbacks.flushColorBuffer(cb);
6312             }
6313         }
6314 
6315         return result;
6316     }
6317 
on_vkQueueWaitIdle(android::base::BumpPool * pool,VkQueue boxed_queue)6318     VkResult on_vkQueueWaitIdle(android::base::BumpPool* pool, VkQueue boxed_queue) {
6319         auto queue = unbox_VkQueue(boxed_queue);
6320         auto vk = dispatch_VkQueue(boxed_queue);
6321 
6322         if (!queue) return VK_SUCCESS;
6323 
6324         Lock* ql;
6325         {
6326             std::lock_guard<std::recursive_mutex> lock(mLock);
6327             auto* queueInfo = android::base::find(mQueueInfo, queue);
6328             if (!queueInfo) return VK_SUCCESS;
6329             ql = queueInfo->physicalQueueLock.get();
6330         }
6331 
6332         if (mEnableVirtualVkQueue) {
6333             // TODO(b/379862480): register and track gpu workload to wait only for them here, ie.
6334             // not any other fences/work. It should not hold the queue lock/ql while waiting to
6335             // allow submissions and other operations on the virtualized queue
6336         }
6337 
6338         AutoLock qlock(*ql);
6339         return vk->vkQueueWaitIdle(queue);
6340     }
6341 
on_vkResetCommandBuffer(android::base::BumpPool * pool,VkCommandBuffer boxed_commandBuffer,VkCommandBufferResetFlags flags)6342     VkResult on_vkResetCommandBuffer(android::base::BumpPool* pool,
6343                                      VkCommandBuffer boxed_commandBuffer,
6344                                      VkCommandBufferResetFlags flags) {
6345         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
6346         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
6347 
6348         m_emu->deviceLostHelper.onResetCommandBuffer(commandBuffer);
6349 
6350         VkResult result = vk->vkResetCommandBuffer(commandBuffer, flags);
6351         if (VK_SUCCESS == result) {
6352             std::lock_guard<std::recursive_mutex> lock(mLock);
6353             auto& bufferInfo = mCommandBufferInfo[commandBuffer];
6354             bufferInfo.reset();
6355         }
6356         return result;
6357     }
6358 
freeCommandBufferWithExclusiveInfos(VkDevice device,VulkanDispatch * deviceDispatch,VkCommandBuffer commandBuffer,CommandBufferInfo & commandBufferInfo,std::unordered_map<VkCommandPool,CommandPoolInfo> & commandPoolInfos)6359     void freeCommandBufferWithExclusiveInfos(
6360         VkDevice device, VulkanDispatch* deviceDispatch, VkCommandBuffer commandBuffer,
6361         CommandBufferInfo& commandBufferInfo,
6362         std::unordered_map<VkCommandPool, CommandPoolInfo>& commandPoolInfos) {
6363         auto commandPool = commandBufferInfo.cmdPool;
6364 
6365         auto commandPoolInfoIt = commandPoolInfos.find(commandPool);
6366         if (commandPoolInfoIt == commandPoolInfos.end()) return;
6367         auto& commandPoolInfo = commandPoolInfoIt->second;
6368 
6369         auto iterInPool = commandPoolInfo.cmdBuffers.find(commandBuffer);
6370         if (iterInPool != commandPoolInfo.cmdBuffers.end()) {
6371             commandPoolInfo.cmdBuffers.erase(iterInPool);
6372         } else {
6373             ERR("Cannot find command buffer reference (%p) in the pool.", commandBuffer);
6374         }
6375 
6376         // Note delete_VkCommandBuffer(cmdBufferInfoIt->second.boxed); currently done in decoder.
6377 
6378         deviceDispatch->vkFreeCommandBuffers(device, commandPool, 1, &commandBuffer);
6379     }
6380 
freeCommandBufferLocked(VkDevice device,VulkanDispatch * deviceDispatch,VkCommandPool commandPool,VkCommandBuffer commandBuffer)6381     void freeCommandBufferLocked(VkDevice device, VulkanDispatch* deviceDispatch,
6382                                  VkCommandPool commandPool, VkCommandBuffer commandBuffer) {
6383         auto commandBufferInfoIt = mCommandBufferInfo.find(commandBuffer);
6384         if (commandBufferInfoIt == mCommandBufferInfo.end()) {
6385             WARN("freeCommandBufferLocked cannot find %p", commandBuffer);
6386             return;
6387         }
6388         auto& commandBufferInfo = commandBufferInfoIt->second;
6389 
6390         freeCommandBufferWithExclusiveInfos(device, deviceDispatch, commandBuffer,
6391                                             commandBufferInfo, mCommandPoolInfo);
6392 
6393         mCommandBufferInfo.erase(commandBufferInfoIt);
6394     }
6395 
on_vkFreeCommandBuffers(android::base::BumpPool * pool,VkDevice boxed_device,VkCommandPool commandPool,uint32_t commandBufferCount,const VkCommandBuffer * pCommandBuffers)6396     void on_vkFreeCommandBuffers(android::base::BumpPool* pool, VkDevice boxed_device,
6397                                  VkCommandPool commandPool, uint32_t commandBufferCount,
6398                                  const VkCommandBuffer* pCommandBuffers) {
6399         auto device = unbox_VkDevice(boxed_device);
6400         auto deviceDispatch = dispatch_VkDevice(boxed_device);
6401         if (!device || !deviceDispatch) return;
6402 
6403         for (uint32_t i = 0; i < commandBufferCount; i++) {
6404             m_emu->deviceLostHelper.onFreeCommandBuffer(pCommandBuffers[i]);
6405         }
6406 
6407         std::lock_guard<std::recursive_mutex> lock(mLock);
6408         for (uint32_t i = 0; i < commandBufferCount; i++) {
6409             freeCommandBufferLocked(device, deviceDispatch, commandPool, pCommandBuffers[i]);
6410         }
6411     }
6412 
on_vkGetPhysicalDeviceExternalSemaphoreProperties(android::base::BumpPool * pool,VkPhysicalDevice boxed_physicalDevice,const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,VkExternalSemaphoreProperties * pExternalSemaphoreProperties)6413     void on_vkGetPhysicalDeviceExternalSemaphoreProperties(
6414         android::base::BumpPool* pool, VkPhysicalDevice boxed_physicalDevice,
6415         const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
6416         VkExternalSemaphoreProperties* pExternalSemaphoreProperties) {
6417         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
6418 
6419         if (!physicalDevice) {
6420             return;
6421         }
6422 
6423         if (m_emu->features.VulkanExternalSync.enabled) {
6424             // Cannot forward this call to driver because nVidia linux driver crahses on it.
6425             switch (pExternalSemaphoreInfo->handleType) {
6426                 case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT:
6427                     pExternalSemaphoreProperties->exportFromImportedHandleTypes =
6428                         VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT;
6429                     pExternalSemaphoreProperties->compatibleHandleTypes =
6430                         VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT;
6431                     pExternalSemaphoreProperties->externalSemaphoreFeatures =
6432                         VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT |
6433                         VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT;
6434                     return;
6435                 case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT:
6436                     pExternalSemaphoreProperties->exportFromImportedHandleTypes =
6437                         VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
6438                     pExternalSemaphoreProperties->compatibleHandleTypes =
6439                         VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
6440                     pExternalSemaphoreProperties->externalSemaphoreFeatures =
6441                         VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT |
6442                         VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT;
6443                     return;
6444                 default:
6445                     break;
6446             }
6447         }
6448 
6449         pExternalSemaphoreProperties->exportFromImportedHandleTypes = 0;
6450         pExternalSemaphoreProperties->compatibleHandleTypes = 0;
6451         pExternalSemaphoreProperties->externalSemaphoreFeatures = 0;
6452     }
6453 
on_vkCreateDescriptorUpdateTemplate(android::base::BumpPool * pool,VkDevice boxed_device,const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate)6454     VkResult on_vkCreateDescriptorUpdateTemplate(
6455         android::base::BumpPool* pool, VkDevice boxed_device,
6456         const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
6457         const VkAllocationCallbacks* pAllocator,
6458         VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
6459         auto device = unbox_VkDevice(boxed_device);
6460         auto vk = dispatch_VkDevice(boxed_device);
6461 
6462         auto descriptorUpdateTemplateInfo = calcLinearizedDescriptorUpdateTemplateInfo(pCreateInfo);
6463 
6464         VkResult res =
6465             vk->vkCreateDescriptorUpdateTemplate(device, &descriptorUpdateTemplateInfo.createInfo,
6466                                                  pAllocator, pDescriptorUpdateTemplate);
6467 
6468         if (res == VK_SUCCESS) {
6469             registerDescriptorUpdateTemplate(*pDescriptorUpdateTemplate,
6470                                              descriptorUpdateTemplateInfo);
6471             *pDescriptorUpdateTemplate =
6472                 new_boxed_non_dispatchable_VkDescriptorUpdateTemplate(*pDescriptorUpdateTemplate);
6473         }
6474 
6475         return res;
6476     }
6477 
on_vkCreateDescriptorUpdateTemplateKHR(android::base::BumpPool * pool,VkDevice boxed_device,const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate)6478     VkResult on_vkCreateDescriptorUpdateTemplateKHR(
6479         android::base::BumpPool* pool, VkDevice boxed_device,
6480         const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
6481         const VkAllocationCallbacks* pAllocator,
6482         VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
6483         auto device = unbox_VkDevice(boxed_device);
6484         auto vk = dispatch_VkDevice(boxed_device);
6485 
6486         auto descriptorUpdateTemplateInfo = calcLinearizedDescriptorUpdateTemplateInfo(pCreateInfo);
6487 
6488         VkResult res = vk->vkCreateDescriptorUpdateTemplateKHR(
6489             device, &descriptorUpdateTemplateInfo.createInfo, pAllocator,
6490             pDescriptorUpdateTemplate);
6491 
6492         if (res == VK_SUCCESS) {
6493             registerDescriptorUpdateTemplate(*pDescriptorUpdateTemplate,
6494                                              descriptorUpdateTemplateInfo);
6495             *pDescriptorUpdateTemplate =
6496                 new_boxed_non_dispatchable_VkDescriptorUpdateTemplate(*pDescriptorUpdateTemplate);
6497         }
6498 
6499         return res;
6500     }
6501 
on_vkDestroyDescriptorUpdateTemplate(android::base::BumpPool * pool,VkDevice boxed_device,VkDescriptorUpdateTemplate descriptorUpdateTemplate,const VkAllocationCallbacks * pAllocator)6502     void on_vkDestroyDescriptorUpdateTemplate(android::base::BumpPool* pool, VkDevice boxed_device,
6503                                               VkDescriptorUpdateTemplate descriptorUpdateTemplate,
6504                                               const VkAllocationCallbacks* pAllocator) {
6505         auto device = unbox_VkDevice(boxed_device);
6506         auto vk = dispatch_VkDevice(boxed_device);
6507 
6508         vk->vkDestroyDescriptorUpdateTemplate(device, descriptorUpdateTemplate, pAllocator);
6509 
6510         unregisterDescriptorUpdateTemplate(descriptorUpdateTemplate);
6511     }
6512 
on_vkDestroyDescriptorUpdateTemplateKHR(android::base::BumpPool * pool,VkDevice boxed_device,VkDescriptorUpdateTemplate descriptorUpdateTemplate,const VkAllocationCallbacks * pAllocator)6513     void on_vkDestroyDescriptorUpdateTemplateKHR(
6514         android::base::BumpPool* pool, VkDevice boxed_device,
6515         VkDescriptorUpdateTemplate descriptorUpdateTemplate,
6516         const VkAllocationCallbacks* pAllocator) {
6517         auto device = unbox_VkDevice(boxed_device);
6518         auto vk = dispatch_VkDevice(boxed_device);
6519 
6520         vk->vkDestroyDescriptorUpdateTemplateKHR(device, descriptorUpdateTemplate, pAllocator);
6521 
6522         unregisterDescriptorUpdateTemplate(descriptorUpdateTemplate);
6523     }
6524 
on_vkUpdateDescriptorSetWithTemplateSizedGOOGLE(android::base::BumpPool * pool,VkDevice boxed_device,VkDescriptorSet descriptorSet,VkDescriptorUpdateTemplate descriptorUpdateTemplate,uint32_t imageInfoCount,uint32_t bufferInfoCount,uint32_t bufferViewCount,const uint32_t * pImageInfoEntryIndices,const uint32_t * pBufferInfoEntryIndices,const uint32_t * pBufferViewEntryIndices,const VkDescriptorImageInfo * pImageInfos,const VkDescriptorBufferInfo * pBufferInfos,const VkBufferView * pBufferViews)6525     void on_vkUpdateDescriptorSetWithTemplateSizedGOOGLE(
6526         android::base::BumpPool* pool, VkDevice boxed_device, VkDescriptorSet descriptorSet,
6527         VkDescriptorUpdateTemplate descriptorUpdateTemplate, uint32_t imageInfoCount,
6528         uint32_t bufferInfoCount, uint32_t bufferViewCount, const uint32_t* pImageInfoEntryIndices,
6529         const uint32_t* pBufferInfoEntryIndices, const uint32_t* pBufferViewEntryIndices,
6530         const VkDescriptorImageInfo* pImageInfos, const VkDescriptorBufferInfo* pBufferInfos,
6531         const VkBufferView* pBufferViews) {
6532         auto device = unbox_VkDevice(boxed_device);
6533         auto vk = dispatch_VkDevice(boxed_device);
6534 
6535         std::lock_guard<std::recursive_mutex> lock(mLock);
6536         auto* info = android::base::find(mDescriptorUpdateTemplateInfo, descriptorUpdateTemplate);
6537         if (!info) return;
6538 
6539         memcpy(info->data.data() + info->imageInfoStart, pImageInfos,
6540                imageInfoCount * sizeof(VkDescriptorImageInfo));
6541         memcpy(info->data.data() + info->bufferInfoStart, pBufferInfos,
6542                bufferInfoCount * sizeof(VkDescriptorBufferInfo));
6543         memcpy(info->data.data() + info->bufferViewStart, pBufferViews,
6544                bufferViewCount * sizeof(VkBufferView));
6545 
6546         vk->vkUpdateDescriptorSetWithTemplate(device, descriptorSet, descriptorUpdateTemplate,
6547                                               info->data.data());
6548     }
6549 
on_vkUpdateDescriptorSetWithTemplateSized2GOOGLE(android::base::BumpPool * pool,VkDevice boxed_device,VkDescriptorSet descriptorSet,VkDescriptorUpdateTemplate descriptorUpdateTemplate,uint32_t imageInfoCount,uint32_t bufferInfoCount,uint32_t bufferViewCount,uint32_t inlineUniformBlockCount,const uint32_t * pImageInfoEntryIndices,const uint32_t * pBufferInfoEntryIndices,const uint32_t * pBufferViewEntryIndices,const VkDescriptorImageInfo * pImageInfos,const VkDescriptorBufferInfo * pBufferInfos,const VkBufferView * pBufferViews,const uint8_t * pInlineUniformBlockData)6550     void on_vkUpdateDescriptorSetWithTemplateSized2GOOGLE(
6551         android::base::BumpPool* pool, VkDevice boxed_device, VkDescriptorSet descriptorSet,
6552         VkDescriptorUpdateTemplate descriptorUpdateTemplate, uint32_t imageInfoCount,
6553         uint32_t bufferInfoCount, uint32_t bufferViewCount, uint32_t inlineUniformBlockCount,
6554         const uint32_t* pImageInfoEntryIndices, const uint32_t* pBufferInfoEntryIndices,
6555         const uint32_t* pBufferViewEntryIndices, const VkDescriptorImageInfo* pImageInfos,
6556         const VkDescriptorBufferInfo* pBufferInfos, const VkBufferView* pBufferViews,
6557         const uint8_t* pInlineUniformBlockData) {
6558         auto device = unbox_VkDevice(boxed_device);
6559         auto vk = dispatch_VkDevice(boxed_device);
6560 
6561         std::lock_guard<std::recursive_mutex> lock(mLock);
6562         auto* info = android::base::find(mDescriptorUpdateTemplateInfo, descriptorUpdateTemplate);
6563         if (!info) return;
6564 
6565         memcpy(info->data.data() + info->imageInfoStart, pImageInfos,
6566                imageInfoCount * sizeof(VkDescriptorImageInfo));
6567         memcpy(info->data.data() + info->bufferInfoStart, pBufferInfos,
6568                bufferInfoCount * sizeof(VkDescriptorBufferInfo));
6569         memcpy(info->data.data() + info->bufferViewStart, pBufferViews,
6570                bufferViewCount * sizeof(VkBufferView));
6571         memcpy(info->data.data() + info->inlineUniformBlockStart, pInlineUniformBlockData,
6572                inlineUniformBlockCount);
6573 
6574         vk->vkUpdateDescriptorSetWithTemplate(device, descriptorSet, descriptorUpdateTemplate,
6575                                               info->data.data());
6576     }
6577 
    // Sequences guest command-buffer traffic on the host. When |needHostSync|
    // is set, waits (polling every 10 ms, for at most ~5 s) until this call's
    // |sequenceNumber| is exactly one past the currently published number,
    // then publishes it and wakes any other waiters.
    // NOTE(review): |tag| is unused in the body — presumably kept for
    // call-site labeling; confirm before removing.
    void hostSyncCommandBuffer(const char* tag, VkCommandBuffer boxed_commandBuffer,
                               uint32_t needHostSync, uint32_t sequenceNumber) {
        // Per-iteration wake-up deadline: recheck the sequence every 10 ms.
        auto nextDeadline = []() {
            return android::base::getUnixTimeUs() + 10000;  // 10 ms
        };

        // Absolute cap on the whole wait; after this we publish regardless.
        auto timeoutDeadline = android::base::getUnixTimeUs() + 5000000;  // 5 s

        OrderMaintenanceInfo* order = ordmaint_VkCommandBuffer(boxed_commandBuffer);
        if (!order) return;

        AutoLock lock(order->lock);

        if (needHostSync) {
            // Unsigned subtraction tolerates wraparound: proceed only when we
            // are the immediate successor of the published sequence number.
            while (
                (sequenceNumber - __atomic_load_n(&order->sequenceNumber, __ATOMIC_ACQUIRE) != 1)) {
                auto waitUntilUs = nextDeadline();
                order->cv.timedWait(&order->lock, waitUntilUs);

                if (timeoutDeadline < android::base::getUnixTimeUs()) {
                    // Timed out: give up waiting and publish anyway.
                    break;
                }
            }
        }

        // Publish our sequence number and wake threads waiting on it.
        __atomic_store_n(&order->sequenceNumber, sequenceNumber, __ATOMIC_RELEASE);
        order->cv.signal();
        // Balances the acquisition via ordmaint_VkCommandBuffer above.
        releaseOrderMaintInfo(order);
    }
6607 
on_vkCommandBufferHostSyncGOOGLE(android::base::BumpPool * pool,VkCommandBuffer commandBuffer,uint32_t needHostSync,uint32_t sequenceNumber)6608     void on_vkCommandBufferHostSyncGOOGLE(android::base::BumpPool* pool,
6609                                           VkCommandBuffer commandBuffer, uint32_t needHostSync,
6610                                           uint32_t sequenceNumber) {
6611         this->hostSyncCommandBuffer("hostSync", commandBuffer, needHostSync, sequenceNumber);
6612     }
6613 
    // Sequences guest queue traffic on the host. When |needHostSync| is set,
    // waits (polling every 10 ms, for at most ~5 s) until this call's
    // |sequenceNumber| is exactly one past the currently published number,
    // then publishes it and wakes any other waiters.
    // NOTE(review): |tag| is unused in the body — presumably kept for
    // call-site labeling; confirm before removing.
    void hostSyncQueue(const char* tag, VkQueue boxed_queue, uint32_t needHostSync,
                       uint32_t sequenceNumber) {
        // Per-iteration wake-up deadline: recheck the sequence every 10 ms.
        auto nextDeadline = []() {
            return android::base::getUnixTimeUs() + 10000;  // 10 ms
        };

        // Absolute cap on the whole wait; after this we publish regardless.
        auto timeoutDeadline = android::base::getUnixTimeUs() + 5000000;  // 5 s

        OrderMaintenanceInfo* order = ordmaint_VkQueue(boxed_queue);
        if (!order) return;

        AutoLock lock(order->lock);

        if (needHostSync) {
            // Unsigned subtraction tolerates wraparound: proceed only when we
            // are the immediate successor of the published sequence number.
            while (
                (sequenceNumber - __atomic_load_n(&order->sequenceNumber, __ATOMIC_ACQUIRE) != 1)) {
                auto waitUntilUs = nextDeadline();
                order->cv.timedWait(&order->lock, waitUntilUs);

                if (timeoutDeadline < android::base::getUnixTimeUs()) {
                    // Timed out: give up waiting and publish anyway.
                    break;
                }
            }
        }

        // Publish our sequence number and wake threads waiting on it.
        __atomic_store_n(&order->sequenceNumber, sequenceNumber, __ATOMIC_RELEASE);
        order->cv.signal();
        // Balances the acquisition via ordmaint_VkQueue above.
        releaseOrderMaintInfo(order);
    }
6643 
on_vkQueueHostSyncGOOGLE(android::base::BumpPool * pool,VkQueue queue,uint32_t needHostSync,uint32_t sequenceNumber)6644     void on_vkQueueHostSyncGOOGLE(android::base::BumpPool* pool, VkQueue queue,
6645                                   uint32_t needHostSync, uint32_t sequenceNumber) {
6646         this->hostSyncQueue("hostSyncQueue", queue, needHostSync, sequenceNumber);
6647     }
6648 
on_vkCreateImageWithRequirementsGOOGLE(android::base::BumpPool * pool,VkDevice boxed_device,const VkImageCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkImage * pImage,VkMemoryRequirements * pMemoryRequirements)6649     VkResult on_vkCreateImageWithRequirementsGOOGLE(android::base::BumpPool* pool,
6650                                                     VkDevice boxed_device,
6651                                                     const VkImageCreateInfo* pCreateInfo,
6652                                                     const VkAllocationCallbacks* pAllocator,
6653                                                     VkImage* pImage,
6654                                                     VkMemoryRequirements* pMemoryRequirements) {
6655         if (pMemoryRequirements) {
6656             memset(pMemoryRequirements, 0, sizeof(*pMemoryRequirements));
6657         }
6658 
6659         VkResult imageCreateRes =
6660             on_vkCreateImage(pool, boxed_device, pCreateInfo, pAllocator, pImage);
6661 
6662         if (imageCreateRes != VK_SUCCESS) {
6663             return imageCreateRes;
6664         }
6665 
6666         on_vkGetImageMemoryRequirements(pool, boxed_device, unbox_VkImage(*pImage),
6667                                         pMemoryRequirements);
6668 
6669         return imageCreateRes;
6670     }
6671 
on_vkCreateBufferWithRequirementsGOOGLE(android::base::BumpPool * pool,VkDevice boxed_device,const VkBufferCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkBuffer * pBuffer,VkMemoryRequirements * pMemoryRequirements)6672     VkResult on_vkCreateBufferWithRequirementsGOOGLE(android::base::BumpPool* pool,
6673                                                      VkDevice boxed_device,
6674                                                      const VkBufferCreateInfo* pCreateInfo,
6675                                                      const VkAllocationCallbacks* pAllocator,
6676                                                      VkBuffer* pBuffer,
6677                                                      VkMemoryRequirements* pMemoryRequirements) {
6678         if (pMemoryRequirements) {
6679             memset(pMemoryRequirements, 0, sizeof(*pMemoryRequirements));
6680         }
6681 
6682         VkResult bufferCreateRes =
6683             on_vkCreateBuffer(pool, boxed_device, pCreateInfo, pAllocator, pBuffer);
6684 
6685         if (bufferCreateRes != VK_SUCCESS) {
6686             return bufferCreateRes;
6687         }
6688 
6689         on_vkGetBufferMemoryRequirements(pool, boxed_device, unbox_VkBuffer(*pBuffer),
6690                                          pMemoryRequirements);
6691 
6692         return bufferCreateRes;
6693     }
6694 
on_vkBeginCommandBuffer(android::base::BumpPool * pool,VkCommandBuffer boxed_commandBuffer,const VkCommandBufferBeginInfo * pBeginInfo,const VkDecoderContext & context)6695     VkResult on_vkBeginCommandBuffer(android::base::BumpPool* pool,
6696                                      VkCommandBuffer boxed_commandBuffer,
6697                                      const VkCommandBufferBeginInfo* pBeginInfo,
6698                                      const VkDecoderContext& context) {
6699         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
6700         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
6701         VkResult result = vk->vkBeginCommandBuffer(commandBuffer, pBeginInfo);
6702 
6703         if (result != VK_SUCCESS) {
6704             return result;
6705         }
6706 
6707         m_emu->deviceLostHelper.onBeginCommandBuffer(commandBuffer, vk);
6708 
6709         std::lock_guard<std::recursive_mutex> lock(mLock);
6710 
6711         auto* commandBufferInfo = android::base::find(mCommandBufferInfo, commandBuffer);
6712         if (!commandBufferInfo) return VK_ERROR_UNKNOWN;
6713         commandBufferInfo->reset();
6714 
6715         if (context.processName) {
6716             commandBufferInfo->debugUtilsHelper.cmdBeginDebugLabel(commandBuffer, "Process %s",
6717                                                                    context.processName);
6718         }
6719 
6720         return VK_SUCCESS;
6721     }
6722 
on_vkBeginCommandBufferAsyncGOOGLE(android::base::BumpPool * pool,VkCommandBuffer boxed_commandBuffer,const VkCommandBufferBeginInfo * pBeginInfo,const VkDecoderContext & context)6723     VkResult on_vkBeginCommandBufferAsyncGOOGLE(android::base::BumpPool* pool,
6724                                                 VkCommandBuffer boxed_commandBuffer,
6725                                                 const VkCommandBufferBeginInfo* pBeginInfo,
6726                                                 const VkDecoderContext& context) {
6727         return this->on_vkBeginCommandBuffer(pool, boxed_commandBuffer, pBeginInfo, context);
6728     }
6729 
on_vkEndCommandBuffer(android::base::BumpPool * pool,VkCommandBuffer boxed_commandBuffer,const VkDecoderContext & context)6730     VkResult on_vkEndCommandBuffer(android::base::BumpPool* pool,
6731                                    VkCommandBuffer boxed_commandBuffer,
6732                                    const VkDecoderContext& context) {
6733         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
6734         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
6735 
6736         m_emu->deviceLostHelper.onEndCommandBuffer(commandBuffer, vk);
6737 
6738         std::lock_guard<std::recursive_mutex> lock(mLock);
6739 
6740         auto* commandBufferInfo = android::base::find(mCommandBufferInfo, commandBuffer);
6741         if (!commandBufferInfo) return VK_ERROR_UNKNOWN;
6742 
6743         if (context.processName) {
6744             commandBufferInfo->debugUtilsHelper.cmdEndDebugLabel(commandBuffer);
6745         }
6746 
6747         return vk->vkEndCommandBuffer(commandBuffer);
6748     }
6749 
on_vkEndCommandBufferAsyncGOOGLE(android::base::BumpPool * pool,VkCommandBuffer boxed_commandBuffer,const VkDecoderContext & context)6750     void on_vkEndCommandBufferAsyncGOOGLE(android::base::BumpPool* pool,
6751                                           VkCommandBuffer boxed_commandBuffer,
6752                                           const VkDecoderContext& context) {
6753         on_vkEndCommandBuffer(pool, boxed_commandBuffer, context);
6754     }
6755 
on_vkResetCommandBufferAsyncGOOGLE(android::base::BumpPool * pool,VkCommandBuffer boxed_commandBuffer,VkCommandBufferResetFlags flags)6756     void on_vkResetCommandBufferAsyncGOOGLE(android::base::BumpPool* pool,
6757                                             VkCommandBuffer boxed_commandBuffer,
6758                                             VkCommandBufferResetFlags flags) {
6759         on_vkResetCommandBuffer(pool, boxed_commandBuffer, flags);
6760     }
6761 
on_vkCmdBindPipeline(android::base::BumpPool * pool,VkCommandBuffer boxed_commandBuffer,VkPipelineBindPoint pipelineBindPoint,VkPipeline pipeline)6762     void on_vkCmdBindPipeline(android::base::BumpPool* pool, VkCommandBuffer boxed_commandBuffer,
6763                               VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) {
6764         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
6765         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
6766         vk->vkCmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
6767         if (pipelineBindPoint == VK_PIPELINE_BIND_POINT_COMPUTE) {
6768             std::lock_guard<std::recursive_mutex> lock(mLock);
6769             auto* cmdBufferInfo = android::base::find(mCommandBufferInfo, commandBuffer);
6770             if (cmdBufferInfo) {
6771                 cmdBufferInfo->computePipeline = pipeline;
6772             }
6773         }
6774     }
6775 
on_vkCmdBindDescriptorSets(android::base::BumpPool * pool,VkCommandBuffer boxed_commandBuffer,VkPipelineBindPoint pipelineBindPoint,VkPipelineLayout layout,uint32_t firstSet,uint32_t descriptorSetCount,const VkDescriptorSet * pDescriptorSets,uint32_t dynamicOffsetCount,const uint32_t * pDynamicOffsets)6776     void on_vkCmdBindDescriptorSets(android::base::BumpPool* pool,
6777                                     VkCommandBuffer boxed_commandBuffer,
6778                                     VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
6779                                     uint32_t firstSet, uint32_t descriptorSetCount,
6780                                     const VkDescriptorSet* pDescriptorSets,
6781                                     uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) {
6782         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
6783         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
6784         vk->vkCmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet,
6785                                     descriptorSetCount, pDescriptorSets, dynamicOffsetCount,
6786                                     pDynamicOffsets);
6787         if (descriptorSetCount) {
6788             std::lock_guard<std::recursive_mutex> lock(mLock);
6789             auto* cmdBufferInfo = android::base::find(mCommandBufferInfo, commandBuffer);
6790             if (cmdBufferInfo) {
6791                 cmdBufferInfo->descriptorLayout = layout;
6792 
6793                 cmdBufferInfo->allDescriptorSets.insert(pDescriptorSets,
6794                                                         pDescriptorSets + descriptorSetCount);
6795                 cmdBufferInfo->firstSet = firstSet;
6796                 cmdBufferInfo->currentDescriptorSets.assign(pDescriptorSets,
6797                                                             pDescriptorSets + descriptorSetCount);
6798                 cmdBufferInfo->dynamicOffsets.assign(pDynamicOffsets,
6799                                                      pDynamicOffsets + dynamicOffsetCount);
6800             }
6801         }
6802     }
6803 
    // Creates a render pass, rewriting compressed attachment formats to their
    // decompressed output formats when the device emulates ETC2/ASTC, and
    // returns a boxed handle to the guest.
    VkResult on_vkCreateRenderPass(android::base::BumpPool* pool, VkDevice boxed_device,
                                   const VkRenderPassCreateInfo* pCreateInfo,
                                   const VkAllocationCallbacks* pAllocator,
                                   VkRenderPass* pRenderPass) {
        auto device = unbox_VkDevice(boxed_device);
        auto vk = dispatch_VkDevice(boxed_device);
        // Shallow copy of *pCreateInfo; only initialized when reformatting.
        VkRenderPassCreateInfo createInfo;
        bool needReformat = false;
        std::lock_guard<std::recursive_mutex> lock(mLock);

        auto* deviceInfo = android::base::find(mDeviceInfo, device);
        if (!deviceInfo) return VK_ERROR_OUT_OF_HOST_MEMORY;
        // Reformat only if at least one attachment uses a format this device
        // decompresses in software.
        if (deviceInfo->emulateTextureEtc2 || deviceInfo->emulateTextureAstc) {
            for (uint32_t i = 0; i < pCreateInfo->attachmentCount; i++) {
                if (deviceInfo->needEmulatedDecompression(pCreateInfo->pAttachments[i].format)) {
                    needReformat = true;
                    break;
                }
            }
        }
        // Declared at this scope so it outlives the vkCreateRenderPass call:
        // createInfo.pAttachments points into this vector when reformatting.
        std::vector<VkAttachmentDescription> attachments;
        if (needReformat) {
            createInfo = *pCreateInfo;
            attachments.assign(pCreateInfo->pAttachments,
                               pCreateInfo->pAttachments + pCreateInfo->attachmentCount);
            createInfo.pAttachments = attachments.data();
            for (auto& attachment : attachments) {
                attachment.format = CompressedImageInfo::getOutputFormat(attachment.format);
            }
            // Redirect the call below to the patched copy.
            pCreateInfo = &createInfo;
        }
        VkResult res = vk->vkCreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass);
        if (res != VK_SUCCESS) {
            return res;
        }

        // Track the new handle, then hand a boxed handle back to the guest.
        VALIDATE_NEW_HANDLE_INFO_ENTRY(mRenderPassInfo, *pRenderPass);
        auto& renderPassInfo = mRenderPassInfo[*pRenderPass];
        renderPassInfo.device = device;

        *pRenderPass = new_boxed_non_dispatchable_VkRenderPass(*pRenderPass);

        return res;
    }
6848 
on_vkCreateRenderPass2(android::base::BumpPool * pool,VkDevice boxed_device,const VkRenderPassCreateInfo2 * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkRenderPass * pRenderPass)6849     VkResult on_vkCreateRenderPass2(android::base::BumpPool* pool, VkDevice boxed_device,
6850                                     const VkRenderPassCreateInfo2* pCreateInfo,
6851                                     const VkAllocationCallbacks* pAllocator,
6852                                     VkRenderPass* pRenderPass) {
6853         auto device = unbox_VkDevice(boxed_device);
6854         auto vk = dispatch_VkDevice(boxed_device);
6855         std::lock_guard<std::recursive_mutex> lock(mLock);
6856 
6857         VkResult res = vk->vkCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass);
6858         if (res != VK_SUCCESS) {
6859             return res;
6860         }
6861 
6862         VALIDATE_NEW_HANDLE_INFO_ENTRY(mRenderPassInfo, *pRenderPass);
6863         auto& renderPassInfo = mRenderPassInfo[*pRenderPass];
6864         renderPassInfo.device = device;
6865 
6866         *pRenderPass = new_boxed_non_dispatchable_VkRenderPass(*pRenderPass);
6867 
6868         return res;
6869     }
6870 
destroyRenderPassWithExclusiveInfo(VkDevice device,VulkanDispatch * deviceDispatch,VkRenderPass renderPass,RenderPassInfo & renderPassInfo,const VkAllocationCallbacks * pAllocator)6871     void destroyRenderPassWithExclusiveInfo(VkDevice device, VulkanDispatch* deviceDispatch,
6872                                             VkRenderPass renderPass, RenderPassInfo& renderPassInfo,
6873                                             const VkAllocationCallbacks* pAllocator) {
6874         deviceDispatch->vkDestroyRenderPass(device, renderPass, pAllocator);
6875     }
6876 
destroyRenderPassLocked(VkDevice device,VulkanDispatch * deviceDispatch,VkRenderPass renderPass,const VkAllocationCallbacks * pAllocator)6877     void destroyRenderPassLocked(VkDevice device, VulkanDispatch* deviceDispatch,
6878                                  VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator) {
6879         auto renderPassInfoIt = mRenderPassInfo.find(renderPass);
6880         if (renderPassInfoIt == mRenderPassInfo.end()) return;
6881         auto& renderPassInfo = renderPassInfoIt->second;
6882 
6883         destroyRenderPassWithExclusiveInfo(device, deviceDispatch, renderPass, renderPassInfo,
6884                                            pAllocator);
6885 
6886         mRenderPassInfo.erase(renderPass);
6887     }
6888 
on_vkDestroyRenderPass(android::base::BumpPool * pool,VkDevice boxed_device,VkRenderPass renderPass,const VkAllocationCallbacks * pAllocator)6889     void on_vkDestroyRenderPass(android::base::BumpPool* pool, VkDevice boxed_device,
6890                                 VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator) {
6891         auto device = unbox_VkDevice(boxed_device);
6892         auto deviceDispatch = dispatch_VkDevice(boxed_device);
6893 
6894         std::lock_guard<std::recursive_mutex> lock(mLock);
6895         destroyRenderPassLocked(device, deviceDispatch, renderPass, pAllocator);
6896     }
6897 
registerRenderPassBeginInfo(VkCommandBuffer commandBuffer,const VkRenderPassBeginInfo * pRenderPassBegin)6898     bool registerRenderPassBeginInfo(VkCommandBuffer commandBuffer,
6899                                      const VkRenderPassBeginInfo* pRenderPassBegin) {
6900         if (!pRenderPassBegin) {
6901             ERR("pRenderPassBegin is null");
6902             return false;
6903         }
6904 
6905         std::lock_guard<std::recursive_mutex> lock(mLock);
6906         CommandBufferInfo* cmdBufferInfo = android::base::find(mCommandBufferInfo, commandBuffer);
6907         if (!cmdBufferInfo) {
6908             ERR("VkCommandBuffer=%p not found in mCommandBufferInfo", commandBuffer);
6909             return false;
6910         }
6911 
6912         FramebufferInfo* fbInfo =
6913             android::base::find(mFramebufferInfo, pRenderPassBegin->framebuffer);
6914         if (!fbInfo) {
6915             ERR("pRenderPassBegin->framebuffer=%p not found in mFbInfo",
6916                 pRenderPassBegin->framebuffer);
6917             return false;
6918         }
6919 
6920         cmdBufferInfo->releasedColorBuffers.insert(fbInfo->attachedColorBuffers.begin(),
6921                                                    fbInfo->attachedColorBuffers.end());
6922         return true;
6923     }
6924 
on_vkCmdBeginRenderPass(android::base::BumpPool * pool,VkCommandBuffer boxed_commandBuffer,const VkRenderPassBeginInfo * pRenderPassBegin,VkSubpassContents contents)6925     void on_vkCmdBeginRenderPass(android::base::BumpPool* pool, VkCommandBuffer boxed_commandBuffer,
6926                                  const VkRenderPassBeginInfo* pRenderPassBegin,
6927                                  VkSubpassContents contents) {
6928         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
6929         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
6930         if (registerRenderPassBeginInfo(commandBuffer, pRenderPassBegin)) {
6931             vk->vkCmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
6932         }
6933     }
6934 
on_vkCmdBeginRenderPass2(android::base::BumpPool * pool,VkCommandBuffer boxed_commandBuffer,const VkRenderPassBeginInfo * pRenderPassBegin,const VkSubpassBeginInfo * pSubpassBeginInfo)6935     void on_vkCmdBeginRenderPass2(android::base::BumpPool* pool,
6936                                   VkCommandBuffer boxed_commandBuffer,
6937                                   const VkRenderPassBeginInfo* pRenderPassBegin,
6938                                   const VkSubpassBeginInfo* pSubpassBeginInfo) {
6939         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
6940         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
6941         if (registerRenderPassBeginInfo(commandBuffer, pRenderPassBegin)) {
6942             vk->vkCmdBeginRenderPass2(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
6943         }
6944     }
6945 
on_vkCmdBeginRenderPass2KHR(android::base::BumpPool * pool,VkCommandBuffer boxed_commandBuffer,const VkRenderPassBeginInfo * pRenderPassBegin,const VkSubpassBeginInfo * pSubpassBeginInfo)6946     void on_vkCmdBeginRenderPass2KHR(android::base::BumpPool* pool,
6947                                      VkCommandBuffer boxed_commandBuffer,
6948                                      const VkRenderPassBeginInfo* pRenderPassBegin,
6949                                      const VkSubpassBeginInfo* pSubpassBeginInfo) {
6950         on_vkCmdBeginRenderPass2(pool, boxed_commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
6951     }
6952 
on_vkCmdCopyQueryPoolResults(android::base::BumpPool * pool,VkCommandBuffer boxed_commandBuffer,VkQueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,VkBuffer dstBuffer,VkDeviceSize dstOffset,VkDeviceSize stride,VkQueryResultFlags flags)6953     void on_vkCmdCopyQueryPoolResults(android::base::BumpPool* pool,
6954                                       VkCommandBuffer boxed_commandBuffer, VkQueryPool queryPool,
6955                                       uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
6956                                       VkDeviceSize dstOffset, VkDeviceSize stride,
6957                                       VkQueryResultFlags flags) {
6958         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
6959         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
6960         if (queryCount == 1 && stride == 0) {
6961             // Some drivers don't seem to handle stride==0 very well.
6962             // In fact, the spec does not say what should happen with stride==0.
6963             // So we just use the largest stride possible.
6964             stride = mBufferInfo[dstBuffer].size - dstOffset;
6965         }
6966         vk->vkCmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer,
6967                                       dstOffset, stride, flags);
6968     }
6969 
on_vkCreateFramebuffer(android::base::BumpPool * pool,VkDevice boxed_device,const VkFramebufferCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkFramebuffer * pFramebuffer)6970     VkResult on_vkCreateFramebuffer(android::base::BumpPool* pool, VkDevice boxed_device,
6971                                     const VkFramebufferCreateInfo* pCreateInfo,
6972                                     const VkAllocationCallbacks* pAllocator,
6973                                     VkFramebuffer* pFramebuffer) {
6974         auto device = unbox_VkDevice(boxed_device);
6975         auto deviceDispatch = dispatch_VkDevice(boxed_device);
6976 
6977         VkResult result =
6978             deviceDispatch->vkCreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer);
6979         if (result != VK_SUCCESS) {
6980             return result;
6981         }
6982 
6983         std::lock_guard<std::recursive_mutex> lock(mLock);
6984 
6985         VALIDATE_NEW_HANDLE_INFO_ENTRY(mFramebufferInfo, *pFramebuffer);
6986         auto& framebufferInfo = mFramebufferInfo[*pFramebuffer];
6987         framebufferInfo.device = device;
6988 
6989         if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) == 0) {
6990             // b/327522469
6991             // Track the Colorbuffers that would be written to.
6992             // It might be better to check for VK_QUEUE_FAMILY_EXTERNAL in pipeline barrier.
6993             // But the guest does not always add it to pipeline barrier.
6994             for (int i = 0; i < pCreateInfo->attachmentCount; i++) {
6995                 auto* imageViewInfo = android::base::find(mImageViewInfo, pCreateInfo->pAttachments[i]);
6996                 if (imageViewInfo->boundColorBuffer.has_value()) {
6997                     framebufferInfo.attachedColorBuffers.push_back(
6998                         imageViewInfo->boundColorBuffer.value());
6999                 }
7000             }
7001         }
7002 
7003         *pFramebuffer = new_boxed_non_dispatchable_VkFramebuffer(*pFramebuffer);
7004 
7005         return result;
7006     }
7007 
destroyFramebufferWithExclusiveInfo(VkDevice device,VulkanDispatch * deviceDispatch,VkFramebuffer framebuffer,FramebufferInfo & framebufferInfo,const VkAllocationCallbacks * pAllocator)7008     void destroyFramebufferWithExclusiveInfo(VkDevice device, VulkanDispatch* deviceDispatch,
7009                                              VkFramebuffer framebuffer,
7010                                              FramebufferInfo& framebufferInfo,
7011                                              const VkAllocationCallbacks* pAllocator) {
7012         deviceDispatch->vkDestroyFramebuffer(device, framebuffer, pAllocator);
7013     }
7014 
destroyFramebufferLocked(VkDevice device,VulkanDispatch * deviceDispatch,VkFramebuffer framebuffer,const VkAllocationCallbacks * pAllocator)7015     void destroyFramebufferLocked(VkDevice device, VulkanDispatch* deviceDispatch,
7016                                   VkFramebuffer framebuffer,
7017                                   const VkAllocationCallbacks* pAllocator) {
7018         auto framebufferInfoIt = mFramebufferInfo.find(framebuffer);
7019         if (framebufferInfoIt == mFramebufferInfo.end()) return;
7020         auto& framebufferInfo = framebufferInfoIt->second;
7021 
7022         destroyFramebufferWithExclusiveInfo(device, deviceDispatch, framebuffer, framebufferInfo,
7023                                             pAllocator);
7024 
7025         mFramebufferInfo.erase(framebuffer);
7026     }
7027 
on_vkDestroyFramebuffer(android::base::BumpPool * pool,VkDevice boxed_device,VkFramebuffer framebuffer,const VkAllocationCallbacks * pAllocator)7028     void on_vkDestroyFramebuffer(android::base::BumpPool* pool, VkDevice boxed_device,
7029                                  VkFramebuffer framebuffer,
7030                                  const VkAllocationCallbacks* pAllocator) {
7031         auto device = unbox_VkDevice(boxed_device);
7032         auto deviceDispatch = dispatch_VkDevice(boxed_device);
7033 
7034         std::lock_guard<std::recursive_mutex> lock(mLock);
7035         destroyFramebufferLocked(device, deviceDispatch, framebuffer, pAllocator);
7036     }
7037 
    // Forwards sparse binding operations to the driver, working around
    // drivers that may not honor VkTimelineSemaphoreSubmitInfo chained into
    // VkBindSparseInfo: each such bind is bracketed by two empty
    // vkQueueSubmits that carry the timeline wait/signal values instead.
    VkResult on_vkQueueBindSparse(android::base::BumpPool* pool, VkQueue boxed_queue,
                                  uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo,
                                  VkFence fence) {
        // If pBindInfo contains VkTimelineSemaphoreSubmitInfo, then it's
        // possible the host driver isn't equipped to deal with them yet.  To
        // work around this, send empty vkQueueSubmits before and after the
        // call to vkQueueBindSparse that contain the right values for
        // wait/signal semaphores and contains the user's
        // VkTimelineSemaphoreSubmitInfo structure, following the *submission
        // order* implied by the indices of pBindInfo.

        // TODO: Detect if we are running on a driver that supports timeline
        // semaphore signal/wait operations in vkQueueBindSparse
        const bool needTimelineSubmitInfoWorkaround = true;
        (void)needTimelineSubmitInfoWorkaround;

        // Scan once up front: the workaround path is only taken when at least
        // one bind info chains a timeline-semaphore struct.
        bool hasTimelineSemaphoreSubmitInfo = false;

        for (uint32_t i = 0; i < bindInfoCount; ++i) {
            const VkTimelineSemaphoreSubmitInfoKHR* tsSi =
                vk_find_struct<VkTimelineSemaphoreSubmitInfoKHR>(pBindInfo + i);
            if (tsSi) {
                hasTimelineSemaphoreSubmitInfo = true;
            }
        }

        auto queue = unbox_VkQueue(boxed_queue);
        auto vk = dispatch_VkQueue(boxed_queue);

        if (!hasTimelineSemaphoreSubmitInfo) {
            // Fast path: no timeline semaphores involved, pass straight through.
            (void)pool;
            return vk->vkQueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
        } else {
            // Reusable scaffolding for the empty submits that carry the
            // timeline values; currTsSi/currSi are re-filled per bind info.
            std::vector<VkPipelineStageFlags> waitDstStageMasks;
            VkTimelineSemaphoreSubmitInfoKHR currTsSi = {
                VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO, 0, 0, nullptr, 0, nullptr,
            };

            VkSubmitInfo currSi = {
                VK_STRUCTURE_TYPE_SUBMIT_INFO,
                &currTsSi,
                0,
                nullptr,
                nullptr,
                0,
                nullptr,  // No commands
                0,
                nullptr,
            };

            VkBindSparseInfo currBi;

            VkResult res;

            for (uint32_t i = 0; i < bindInfoCount; ++i) {
                const VkTimelineSemaphoreSubmitInfoKHR* tsSi =
                    vk_find_struct<VkTimelineSemaphoreSubmitInfoKHR>(pBindInfo + i);
                if (!tsSi) {
                    // This bind info has no timeline payload; submit it as-is
                    // to preserve submission order.
                    res = vk->vkQueueBindSparse(queue, 1, pBindInfo + i, fence);
                    if (VK_SUCCESS != res) return res;
                    continue;
                }

                // 1) Empty submit that performs the *waits* (binary + timeline)
                //    on behalf of the sparse bind.
                currTsSi.waitSemaphoreValueCount = tsSi->waitSemaphoreValueCount;
                currTsSi.pWaitSemaphoreValues = tsSi->pWaitSemaphoreValues;
                currTsSi.signalSemaphoreValueCount = 0;
                currTsSi.pSignalSemaphoreValues = nullptr;

                currSi.waitSemaphoreCount = pBindInfo[i].waitSemaphoreCount;
                currSi.pWaitSemaphores = pBindInfo[i].pWaitSemaphores;
                waitDstStageMasks.resize(pBindInfo[i].waitSemaphoreCount,
                                         VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT);
                currSi.pWaitDstStageMask = waitDstStageMasks.data();

                currSi.signalSemaphoreCount = 0;
                currSi.pSignalSemaphores = nullptr;

                res = vk->vkQueueSubmit(queue, 1, &currSi, nullptr);
                if (VK_SUCCESS != res) return res;

                // 2) The sparse bind itself, with the timeline struct removed
                //    from its pNext chain and all semaphore operations
                //    stripped (they are handled by the bracketing submits).
                currBi = pBindInfo[i];

                vk_struct_chain_remove(tsSi, &currBi);

                currBi.waitSemaphoreCount = 0;
                currBi.pWaitSemaphores = nullptr;
                currBi.signalSemaphoreCount = 0;
                currBi.pSignalSemaphores = nullptr;

                res = vk->vkQueueBindSparse(queue, 1, &currBi, nullptr);
                if (VK_SUCCESS != res) return res;

                // 3) Empty submit that performs the *signals*; the caller's
                //    fence is attached only to the very last submission.
                currTsSi.waitSemaphoreValueCount = 0;
                currTsSi.pWaitSemaphoreValues = nullptr;
                currTsSi.signalSemaphoreValueCount = tsSi->signalSemaphoreValueCount;
                currTsSi.pSignalSemaphoreValues = tsSi->pSignalSemaphoreValues;

                currSi.waitSemaphoreCount = 0;
                currSi.pWaitSemaphores = nullptr;
                currSi.signalSemaphoreCount = pBindInfo[i].signalSemaphoreCount;
                currSi.pSignalSemaphores = pBindInfo[i].pSignalSemaphores;

                res =
                    vk->vkQueueSubmit(queue, 1, &currSi, i == bindInfoCount - 1 ? fence : nullptr);
                if (VK_SUCCESS != res) return res;
            }

            return VK_SUCCESS;
        }
    }
7148 
on_vkQueuePresentKHR(android::base::BumpPool * pool,VkQueue boxed_queue,const VkPresentInfoKHR * pPresentInfo)7149     VkResult on_vkQueuePresentKHR(android::base::BumpPool* pool, VkQueue boxed_queue,
7150                                   const VkPresentInfoKHR* pPresentInfo) {
7151         // Note that on Android guests, this call will actually be handled
7152         // with vkQueueSignalReleaseImageANDROID
7153         auto queue = unbox_VkQueue(boxed_queue);
7154         auto vk = dispatch_VkQueue(boxed_queue);
7155 
7156         return vk->vkQueuePresentKHR(queue, pPresentInfo);
7157     }
7158 
on_vkGetLinearImageLayoutGOOGLE(android::base::BumpPool * pool,VkDevice boxed_device,VkFormat format,VkDeviceSize * pOffset,VkDeviceSize * pRowPitchAlignment)7159     void on_vkGetLinearImageLayoutGOOGLE(android::base::BumpPool* pool, VkDevice boxed_device,
7160                                          VkFormat format, VkDeviceSize* pOffset,
7161                                          VkDeviceSize* pRowPitchAlignment) {
7162         if (mPerFormatLinearImageProperties.find(format) == mPerFormatLinearImageProperties.end()) {
7163             VkDeviceSize offset = 0u;
7164             VkDeviceSize rowPitchAlignment = UINT_MAX;
7165 
7166             for (uint32_t width = 64; width <= 256; width++) {
7167                 LinearImageCreateInfo linearImageCreateInfo = {
7168                     .extent =
7169                         {
7170                             .width = width,
7171                             .height = 64,
7172                             .depth = 1,
7173                         },
7174                     .format = format,
7175                     .usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
7176                 };
7177 
7178                 VkDeviceSize currOffset = 0u;
7179                 VkDeviceSize currRowPitchAlignment = UINT_MAX;
7180 
7181                 VkImageCreateInfo defaultVkImageCreateInfo = linearImageCreateInfo.toDefaultVk();
7182                 on_vkGetLinearImageLayout2GOOGLE(pool, boxed_device, &defaultVkImageCreateInfo,
7183                                                  &currOffset, &currRowPitchAlignment);
7184 
7185                 offset = currOffset;
7186                 rowPitchAlignment = std::min(currRowPitchAlignment, rowPitchAlignment);
7187             }
7188             mPerFormatLinearImageProperties[format] = LinearImageProperties{
7189                 .offset = offset,
7190                 .rowPitchAlignment = rowPitchAlignment,
7191             };
7192         }
7193 
7194         if (pOffset) {
7195             *pOffset = mPerFormatLinearImageProperties[format].offset;
7196         }
7197         if (pRowPitchAlignment) {
7198             *pRowPitchAlignment = mPerFormatLinearImageProperties[format].rowPitchAlignment;
7199         }
7200     }
7201 
on_vkGetLinearImageLayout2GOOGLE(android::base::BumpPool * pool,VkDevice boxed_device,const VkImageCreateInfo * pCreateInfo,VkDeviceSize * pOffset,VkDeviceSize * pRowPitchAlignment)7202     void on_vkGetLinearImageLayout2GOOGLE(android::base::BumpPool* pool, VkDevice boxed_device,
7203                                           const VkImageCreateInfo* pCreateInfo,
7204                                           VkDeviceSize* pOffset, VkDeviceSize* pRowPitchAlignment) {
7205         LinearImageCreateInfo linearImageCreateInfo = {
7206             .extent = pCreateInfo->extent,
7207             .format = pCreateInfo->format,
7208             .usage = pCreateInfo->usage,
7209         };
7210         if (mLinearImageProperties.find(linearImageCreateInfo) == mLinearImageProperties.end()) {
7211             auto device = unbox_VkDevice(boxed_device);
7212             auto vk = dispatch_VkDevice(boxed_device);
7213 
7214             VkImageSubresource subresource = {
7215                 .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
7216                 .mipLevel = 0,
7217                 .arrayLayer = 0,
7218             };
7219 
7220             VkImage image;
7221             VkSubresourceLayout subresourceLayout;
7222 
7223             VkImageCreateInfo defaultVkImageCreateInfo = linearImageCreateInfo.toDefaultVk();
7224             VkResult result = vk->vkCreateImage(device, &defaultVkImageCreateInfo, nullptr, &image);
7225             if (result != VK_SUCCESS) {
7226                 INFO("vkCreateImage failed. size: (%u x %u) result: %d",
7227                         linearImageCreateInfo.extent.width, linearImageCreateInfo.extent.height,
7228                         result);
7229                 return;
7230             }
7231             vk->vkGetImageSubresourceLayout(device, image, &subresource, &subresourceLayout);
7232             vk->vkDestroyImage(device, image, nullptr);
7233 
7234             VkDeviceSize offset = subresourceLayout.offset;
7235             uint64_t rowPitch = subresourceLayout.rowPitch;
7236             VkDeviceSize rowPitchAlignment = rowPitch & (~rowPitch + 1);
7237 
7238             mLinearImageProperties[linearImageCreateInfo] = {
7239                 .offset = offset,
7240                 .rowPitchAlignment = rowPitchAlignment,
7241             };
7242         }
7243 
7244         if (pOffset != nullptr) {
7245             *pOffset = mLinearImageProperties[linearImageCreateInfo].offset;
7246         }
7247         if (pRowPitchAlignment != nullptr) {
7248             *pRowPitchAlignment = mLinearImageProperties[linearImageCreateInfo].rowPitchAlignment;
7249         }
7250     }
7251 
7252 #include "VkSubDecoder.cpp"
7253 
on_vkQueueFlushCommandsGOOGLE(android::base::BumpPool * pool,VkQueue queue,VkCommandBuffer boxed_commandBuffer,VkDeviceSize dataSize,const void * pData,const VkDecoderContext & context)7254     void on_vkQueueFlushCommandsGOOGLE(android::base::BumpPool* pool, VkQueue queue,
7255                                        VkCommandBuffer boxed_commandBuffer, VkDeviceSize dataSize,
7256                                        const void* pData, const VkDecoderContext& context) {
7257         (void)queue;
7258 
7259         VkCommandBuffer commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
7260         VulkanDispatch* vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
7261         VulkanMemReadingStream* readStream = readstream_VkCommandBuffer(boxed_commandBuffer);
7262         subDecode(readStream, vk, boxed_commandBuffer, commandBuffer, dataSize, pData, context);
7263     }
7264 
    // Unimplemented stub: intended to replay guest-streamed commands read out
    // of |deviceMemory| at |dataOffset|..|dataOffset + dataSize| rather than
    // from an inline buffer. Currently a no-op; all parameters are ignored.
    void on_vkQueueFlushCommandsFromAuxMemoryGOOGLE(android::base::BumpPool* pool, VkQueue queue,
                                                    VkCommandBuffer commandBuffer,
                                                    VkDeviceMemory deviceMemory,
                                                    VkDeviceSize dataOffset, VkDeviceSize dataSize,
                                                    const VkDecoderContext& context) {
        // TODO : implement
    }
getOrAllocateDescriptorSetFromPoolAndId(VulkanDispatch * vk,VkDevice device,VkDescriptorPool pool,VkDescriptorSetLayout setLayout,uint64_t poolId,uint32_t pendingAlloc,bool * didAlloc)7272     VkDescriptorSet getOrAllocateDescriptorSetFromPoolAndId(VulkanDispatch* vk, VkDevice device,
7273                                                             VkDescriptorPool pool,
7274                                                             VkDescriptorSetLayout setLayout,
7275                                                             uint64_t poolId, uint32_t pendingAlloc,
7276                                                             bool* didAlloc) {
7277         auto* poolInfo = android::base::find(mDescriptorPoolInfo, pool);
7278         if (!poolInfo) {
7279             GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
7280                 << "descriptor pool " << pool << " not found ";
7281         }
7282 
7283         DispatchableHandleInfo<uint64_t>* setHandleInfo = sBoxedHandleManager.get(poolId);
7284 
7285         if (setHandleInfo->underlying) {
7286             if (pendingAlloc) {
7287                 VkDescriptorSet allocedSet;
7288                 vk->vkFreeDescriptorSets(device, pool, 1,
7289                                          (VkDescriptorSet*)(&setHandleInfo->underlying));
7290                 VkDescriptorSetAllocateInfo dsAi = {
7291                     VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, 0, pool, 1, &setLayout,
7292                 };
7293                 vk->vkAllocateDescriptorSets(device, &dsAi, &allocedSet);
7294                 setHandleInfo->underlying = (uint64_t)allocedSet;
7295                 initDescriptorSetInfoLocked(pool, setLayout, poolId, allocedSet);
7296                 *didAlloc = true;
7297                 return allocedSet;
7298             } else {
7299                 *didAlloc = false;
7300                 return (VkDescriptorSet)(setHandleInfo->underlying);
7301             }
7302         } else {
7303             if (pendingAlloc) {
7304                 VkDescriptorSet allocedSet;
7305                 VkDescriptorSetAllocateInfo dsAi = {
7306                     VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, 0, pool, 1, &setLayout,
7307                 };
7308                 vk->vkAllocateDescriptorSets(device, &dsAi, &allocedSet);
7309                 setHandleInfo->underlying = (uint64_t)allocedSet;
7310                 initDescriptorSetInfoLocked(pool, setLayout, poolId, allocedSet);
7311                 *didAlloc = true;
7312                 return allocedSet;
7313             } else {
7314                 GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
7315                     << "descriptor pool " << pool << " wanted to get set with id 0x" << std::hex
7316                     << poolId;
7317                 return nullptr;
7318             }
7319         }
7320     }
7321 
on_vkQueueCommitDescriptorSetUpdatesGOOGLE(android::base::BumpPool * pool,VkQueue boxed_queue,uint32_t descriptorPoolCount,const VkDescriptorPool * pDescriptorPools,uint32_t descriptorSetCount,const VkDescriptorSetLayout * pDescriptorSetLayouts,const uint64_t * pDescriptorSetPoolIds,const uint32_t * pDescriptorSetWhichPool,const uint32_t * pDescriptorSetPendingAllocation,const uint32_t * pDescriptorWriteStartingIndices,uint32_t pendingDescriptorWriteCount,const VkWriteDescriptorSet * pPendingDescriptorWrites)7322     void on_vkQueueCommitDescriptorSetUpdatesGOOGLE(
7323         android::base::BumpPool* pool, VkQueue boxed_queue, uint32_t descriptorPoolCount,
7324         const VkDescriptorPool* pDescriptorPools, uint32_t descriptorSetCount,
7325         const VkDescriptorSetLayout* pDescriptorSetLayouts, const uint64_t* pDescriptorSetPoolIds,
7326         const uint32_t* pDescriptorSetWhichPool, const uint32_t* pDescriptorSetPendingAllocation,
7327         const uint32_t* pDescriptorWriteStartingIndices, uint32_t pendingDescriptorWriteCount,
7328         const VkWriteDescriptorSet* pPendingDescriptorWrites) {
7329         std::lock_guard<std::recursive_mutex> lock(mLock);
7330 
7331         VkDevice device;
7332 
7333         auto queue = unbox_VkQueue(boxed_queue);
7334         auto vk = dispatch_VkQueue(boxed_queue);
7335 
7336         auto* queueInfo = android::base::find(mQueueInfo, queue);
7337         if (queueInfo) {
7338             device = queueInfo->device;
7339         } else {
7340             GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
7341                 << "queue " << queue << "(boxed: " << boxed_queue << ") with no device registered";
7342         }
7343         on_vkQueueCommitDescriptorSetUpdatesGOOGLE(
7344             pool, vk, device, descriptorPoolCount, pDescriptorPools, descriptorSetCount,
7345             pDescriptorSetLayouts, pDescriptorSetPoolIds, pDescriptorSetWhichPool,
7346             pDescriptorSetPendingAllocation, pDescriptorWriteStartingIndices,
7347             pendingDescriptorWriteCount, pPendingDescriptorWrites);
7348     }
7349 
on_vkQueueCommitDescriptorSetUpdatesGOOGLE(android::base::BumpPool * pool,VulkanDispatch * vk,VkDevice device,uint32_t descriptorPoolCount,const VkDescriptorPool * pDescriptorPools,uint32_t descriptorSetCount,const VkDescriptorSetLayout * pDescriptorSetLayouts,const uint64_t * pDescriptorSetPoolIds,const uint32_t * pDescriptorSetWhichPool,const uint32_t * pDescriptorSetPendingAllocation,const uint32_t * pDescriptorWriteStartingIndices,uint32_t pendingDescriptorWriteCount,const VkWriteDescriptorSet * pPendingDescriptorWrites)7350     void on_vkQueueCommitDescriptorSetUpdatesGOOGLE(
7351         android::base::BumpPool* pool, VulkanDispatch* vk, VkDevice device,
7352         uint32_t descriptorPoolCount, const VkDescriptorPool* pDescriptorPools,
7353         uint32_t descriptorSetCount, const VkDescriptorSetLayout* pDescriptorSetLayouts,
7354         const uint64_t* pDescriptorSetPoolIds, const uint32_t* pDescriptorSetWhichPool,
7355         const uint32_t* pDescriptorSetPendingAllocation,
7356         const uint32_t* pDescriptorWriteStartingIndices, uint32_t pendingDescriptorWriteCount,
7357         const VkWriteDescriptorSet* pPendingDescriptorWrites) {
7358         std::vector<VkDescriptorSet> setsToUpdate(descriptorSetCount, nullptr);
7359 
7360         bool didAlloc = false;
7361 
7362         for (uint32_t i = 0; i < descriptorSetCount; ++i) {
7363             uint64_t poolId = pDescriptorSetPoolIds[i];
7364             uint32_t whichPool = pDescriptorSetWhichPool[i];
7365             uint32_t pendingAlloc = pDescriptorSetPendingAllocation[i];
7366             bool didAllocThisTime;
7367             setsToUpdate[i] = getOrAllocateDescriptorSetFromPoolAndId(
7368                 vk, device, pDescriptorPools[whichPool], pDescriptorSetLayouts[i], poolId,
7369                 pendingAlloc, &didAllocThisTime);
7370 
7371             if (didAllocThisTime) didAlloc = true;
7372         }
7373 
7374         if (didAlloc) {
7375             std::vector<VkWriteDescriptorSet> writeDescriptorSetsForHostDriver(
7376                 pendingDescriptorWriteCount);
7377             memcpy(writeDescriptorSetsForHostDriver.data(), pPendingDescriptorWrites,
7378                    pendingDescriptorWriteCount * sizeof(VkWriteDescriptorSet));
7379 
7380             for (uint32_t i = 0; i < descriptorSetCount; ++i) {
7381                 uint32_t writeStartIndex = pDescriptorWriteStartingIndices[i];
7382                 uint32_t writeEndIndex;
7383                 if (i == descriptorSetCount - 1) {
7384                     writeEndIndex = pendingDescriptorWriteCount;
7385                 } else {
7386                     writeEndIndex = pDescriptorWriteStartingIndices[i + 1];
7387                 }
7388                 for (uint32_t j = writeStartIndex; j < writeEndIndex; ++j) {
7389                     writeDescriptorSetsForHostDriver[j].dstSet = setsToUpdate[i];
7390                 }
7391             }
7392             this->on_vkUpdateDescriptorSetsImpl(
7393                 pool, vk, device, (uint32_t)writeDescriptorSetsForHostDriver.size(),
7394                 writeDescriptorSetsForHostDriver.data(), 0, nullptr);
7395         } else {
7396             this->on_vkUpdateDescriptorSetsImpl(pool, vk, device, pendingDescriptorWriteCount,
7397                                                 pPendingDescriptorWrites, 0, nullptr);
7398         }
7399     }
7400 
on_vkCollectDescriptorPoolIdsGOOGLE(android::base::BumpPool * pool,VkDevice device,VkDescriptorPool descriptorPool,uint32_t * pPoolIdCount,uint64_t * pPoolIds)7401     void on_vkCollectDescriptorPoolIdsGOOGLE(android::base::BumpPool* pool, VkDevice device,
7402                                              VkDescriptorPool descriptorPool,
7403                                              uint32_t* pPoolIdCount, uint64_t* pPoolIds) {
7404         std::lock_guard<std::recursive_mutex> lock(mLock);
7405         auto& info = mDescriptorPoolInfo[descriptorPool];
7406         *pPoolIdCount = (uint32_t)info.poolIds.size();
7407 
7408         if (pPoolIds) {
7409             for (uint32_t i = 0; i < info.poolIds.size(); ++i) {
7410                 pPoolIds[i] = info.poolIds[i];
7411             }
7412         }
7413     }
7414 
on_vkCreateSamplerYcbcrConversion(android::base::BumpPool *,VkDevice boxed_device,const VkSamplerYcbcrConversionCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSamplerYcbcrConversion * pYcbcrConversion)7415     VkResult on_vkCreateSamplerYcbcrConversion(
7416         android::base::BumpPool*, VkDevice boxed_device,
7417         const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
7418         const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion) {
7419         if (m_emu->enableYcbcrEmulation && !m_emu->deviceInfo.supportsSamplerYcbcrConversion) {
7420             *pYcbcrConversion = new_boxed_non_dispatchable_VkSamplerYcbcrConversion(
7421                 (VkSamplerYcbcrConversion)((uintptr_t)0xffff0000ull));
7422             return VK_SUCCESS;
7423         }
7424         auto device = unbox_VkDevice(boxed_device);
7425         auto vk = dispatch_VkDevice(boxed_device);
7426         VkResult res =
7427             vk->vkCreateSamplerYcbcrConversion(device, pCreateInfo, pAllocator, pYcbcrConversion);
7428         if (res != VK_SUCCESS) {
7429             return res;
7430         }
7431         *pYcbcrConversion = new_boxed_non_dispatchable_VkSamplerYcbcrConversion(*pYcbcrConversion);
7432         return VK_SUCCESS;
7433     }
7434 
on_vkDestroySamplerYcbcrConversion(android::base::BumpPool * pool,VkDevice boxed_device,VkSamplerYcbcrConversion ycbcrConversion,const VkAllocationCallbacks * pAllocator)7435     void on_vkDestroySamplerYcbcrConversion(android::base::BumpPool* pool, VkDevice boxed_device,
7436                                             VkSamplerYcbcrConversion ycbcrConversion,
7437                                             const VkAllocationCallbacks* pAllocator) {
7438         if (m_emu->enableYcbcrEmulation && !m_emu->deviceInfo.supportsSamplerYcbcrConversion) {
7439             return;
7440         }
7441         auto device = unbox_VkDevice(boxed_device);
7442         auto vk = dispatch_VkDevice(boxed_device);
7443         vk->vkDestroySamplerYcbcrConversion(device, ycbcrConversion, pAllocator);
7444         return;
7445     }
7446 
on_vkEnumeratePhysicalDeviceGroups(android::base::BumpPool * pool,VkInstance boxed_instance,uint32_t * pPhysicalDeviceGroupCount,VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties)7447     VkResult on_vkEnumeratePhysicalDeviceGroups(
7448         android::base::BumpPool* pool, VkInstance boxed_instance,
7449         uint32_t* pPhysicalDeviceGroupCount,
7450         VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties) {
7451         auto instance = unbox_VkInstance(boxed_instance);
7452         auto vk = dispatch_VkInstance(boxed_instance);
7453 
7454         std::vector<VkPhysicalDevice> physicalDevices;
7455         auto res = GetPhysicalDevices(instance, vk, physicalDevices);
7456         if (res != VK_SUCCESS) {
7457             return res;
7458         }
7459 
7460         {
7461             std::lock_guard<std::recursive_mutex> lock(mLock);
7462             FilterPhysicalDevicesLocked(instance, vk, physicalDevices);
7463         }
7464 
7465         const uint32_t requestedCount = pPhysicalDeviceGroupCount ? *pPhysicalDeviceGroupCount : 0;
7466         const uint32_t availableCount = static_cast<uint32_t>(physicalDevices.size());
7467 
7468         if (pPhysicalDeviceGroupCount) {
7469             *pPhysicalDeviceGroupCount = availableCount;
7470         }
7471         if (pPhysicalDeviceGroupCount && pPhysicalDeviceGroupProperties) {
7472             for (uint32_t i = 0; i < std::min(requestedCount, availableCount); ++i) {
7473                 pPhysicalDeviceGroupProperties[i] = VkPhysicalDeviceGroupProperties{
7474                     .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES,
7475                     .pNext = nullptr,
7476                     .physicalDeviceCount = 1,
7477                     .physicalDevices =
7478                         {
7479                             unboxed_to_boxed_VkPhysicalDevice(physicalDevices[i]),
7480                         },
7481                     .subsetAllocation = VK_FALSE,
7482                 };
7483             }
7484             if (requestedCount < availableCount) {
7485                 return VK_INCOMPLETE;
7486             }
7487         }
7488 
7489         return VK_SUCCESS;
7490     }
7491 
on_DeviceLost()7492     void on_DeviceLost() {
7493         {
7494             std::lock_guard<std::recursive_mutex> lock(mLock);
7495 
7496             std::vector<DeviceLostHelper::DeviceWithQueues> devicesToQueues;
7497             for (const auto& [device, deviceInfo] : mDeviceInfo) {
7498                 auto& deviceToQueues = devicesToQueues.emplace_back();
7499                 deviceToQueues.device = device;
7500                 deviceToQueues.deviceDispatch = dispatch_VkDevice(deviceInfo.boxed);
7501                 for (const auto& [queueIndex, queues] : deviceInfo.queues) {
7502                     deviceToQueues.queues.insert(deviceToQueues.queues.end(), queues.begin(),
7503                                                  queues.end());
7504                 }
7505             }
7506             m_emu->deviceLostHelper.onDeviceLost(devicesToQueues);
7507         }
7508 
7509         GFXSTREAM_ABORT(FatalError(VK_ERROR_DEVICE_LOST));
7510     }
7511 
on_CheckOutOfMemory(VkResult result,uint32_t opCode,const VkDecoderContext & context,std::optional<uint64_t> allocationSize=std::nullopt)7512     void on_CheckOutOfMemory(VkResult result, uint32_t opCode, const VkDecoderContext& context,
7513                              std::optional<uint64_t> allocationSize = std::nullopt) {
7514         if (result == VK_ERROR_OUT_OF_HOST_MEMORY || result == VK_ERROR_OUT_OF_DEVICE_MEMORY ||
7515             result == VK_ERROR_OUT_OF_POOL_MEMORY) {
7516             context.metricsLogger->logMetricEvent(
7517                 MetricEventVulkanOutOfMemory{.vkResultCode = result,
7518                                              .opCode = std::make_optional(opCode),
7519                                              .allocationSize = allocationSize});
7520         }
7521     }
7522 
    // Blocks until the boxed fence becomes waitable (i.e. a vkQueueSubmit
    // using it has happened), then waits on it through the host driver.
    // Returns VK_SUCCESS immediately for unknown handles (may be a semaphore,
    // see TODO). Aborts if the fence is destroyed while this thread waits.
    VkResult waitForFence(VkFence boxed_fence, uint64_t timeout) {
        VkFence fence = unbox_VkFence(boxed_fence);
        VkDevice device;
        VulkanDispatch* vk;
        StaticLock* fenceLock;
        ConditionVariable* cv;
        {
            std::lock_guard<std::recursive_mutex> lock(mLock);
            if (fence == VK_NULL_HANDLE || mFenceInfo.find(fence) == mFenceInfo.end()) {
                // No fence, could be a semaphore.
                // TODO: Async wait for semaphores
                return VK_SUCCESS;
            }

            // Vulkan specs require fences of vkQueueSubmit to be *externally
            // synchronized*, i.e. we cannot submit a queue while waiting for the
            // fence in another thread. For threads that call this function, they
            // have to wait until a vkQueueSubmit() using this fence is called
            // before calling vkWaitForFences(). So we use a conditional variable
            // and mutex for thread synchronization.
            //
            // See:
            // https://www.khronos.org/registry/vulkan/specs/1.2/html/vkspec.html#fundamentals-threadingbehavior
            // https://github.com/KhronosGroup/Vulkan-LoaderAndValidationLayers/issues/519

            // Copy out what we need so the driver wait below runs without
            // holding mLock.
            device = mFenceInfo[fence].device;
            vk = mFenceInfo[fence].vk;
            fenceLock = &mFenceInfo[fence].lock;
            cv = &mFenceInfo[fence].cv;
        }

        // Wait until the fence transitions to kWaitable, then claim it by
        // flipping the state to kWaiting. The predicate re-takes mLock
        // because the fence state is guarded by it, not by fenceLock.
        fenceLock->lock();
        cv->wait(fenceLock, [this, fence] {
            std::lock_guard<std::recursive_mutex> lock(mLock);
            if (mFenceInfo[fence].state == FenceInfo::State::kWaitable) {
                mFenceInfo[fence].state = FenceInfo::State::kWaiting;
                return true;
            }
            return false;
        });
        fenceLock->unlock();

        {
            std::lock_guard<std::recursive_mutex> lock(mLock);
            if (mFenceInfo.find(fence) == mFenceInfo.end()) {
                GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
                    << "Fence was destroyed before vkWaitForFences call.";
            }
        }

        return vk->vkWaitForFences(device, /* fenceCount */ 1u, &fence,
                                   /* waitAll */ false, timeout);
    }
7576 
getFenceStatus(VkFence boxed_fence)7577     VkResult getFenceStatus(VkFence boxed_fence) {
7578         VkFence fence = unbox_VkFence(boxed_fence);
7579         VkDevice device;
7580         VulkanDispatch* vk;
7581         {
7582             std::lock_guard<std::recursive_mutex> lock(mLock);
7583             if (fence == VK_NULL_HANDLE || mFenceInfo.find(fence) == mFenceInfo.end()) {
7584                 // No fence, could be a semaphore.
7585                 // TODO: Async get status for semaphores
7586                 return VK_SUCCESS;
7587             }
7588 
7589             device = mFenceInfo[fence].device;
7590             vk = mFenceInfo[fence].vk;
7591         }
7592 
7593         return vk->vkGetFenceStatus(device, fence);
7594     }
7595 
registerQsriCallback(VkImage boxed_image,VkQsriTimeline::Callback callback)7596     AsyncResult registerQsriCallback(VkImage boxed_image, VkQsriTimeline::Callback callback) {
7597         std::lock_guard<std::recursive_mutex> lock(mLock);
7598 
7599         VkImage image = try_unbox_VkImage(boxed_image);
7600         if (image == VK_NULL_HANDLE) return AsyncResult::FAIL_AND_CALLBACK_NOT_SCHEDULED;
7601 
7602         auto imageInfoIt = mImageInfo.find(image);
7603         if (imageInfoIt == mImageInfo.end()) return AsyncResult::FAIL_AND_CALLBACK_NOT_SCHEDULED;
7604         auto& imageInfo = imageInfoIt->second;
7605 
7606         auto* anbInfo = imageInfo.anbInfo.get();
7607         if (!anbInfo) {
7608             ERR("Attempted to register QSRI callback on VkImage:%p without ANB info.", image);
7609             return AsyncResult::FAIL_AND_CALLBACK_NOT_SCHEDULED;
7610         }
7611         if (!anbInfo->vk) {
7612             ERR("Attempted to register QSRI callback on VkImage:%p with uninitialized ANB info.",
7613                 image);
7614             return AsyncResult::FAIL_AND_CALLBACK_NOT_SCHEDULED;
7615         }
7616         // Could be null or mismatched image, check later
7617         if (image != anbInfo->image) {
7618             ERR("Attempted on register QSRI callback on VkImage:%p with wrong image %p.", image,
7619                 anbInfo->image);
7620             return AsyncResult::FAIL_AND_CALLBACK_NOT_SCHEDULED;
7621         }
7622 
7623         anbInfo->qsriTimeline->registerCallbackForNextPresentAndPoll(std::move(callback));
7624         return AsyncResult::OK_AND_CALLBACK_SCHEDULED;
7625     }
7626 
// External-memory handle types a guest may see/request; host handle types are
// mapped into (or filtered against) this mask by the transforms below.
#define GUEST_EXTERNAL_MEMORY_HANDLE_TYPES                                \
    (VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID | \
     VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA)

    // Transforms
    // If adding a new transform here, please check if it needs to be used in VkDecoderTestDispatch
7633 
transformImpl_VkExternalMemoryProperties_tohost(const VkExternalMemoryProperties * props,uint32_t count)7634     void transformImpl_VkExternalMemoryProperties_tohost(const VkExternalMemoryProperties* props,
7635                                                          uint32_t count) {
7636         VkExternalMemoryProperties* mut = (VkExternalMemoryProperties*)props;
7637         for (uint32_t i = 0; i < count; ++i) {
7638             mut[i] = transformExternalMemoryProperties_tohost(mut[i]);
7639         }
7640     }
transformImpl_VkExternalMemoryProperties_fromhost(const VkExternalMemoryProperties * props,uint32_t count)7641     void transformImpl_VkExternalMemoryProperties_fromhost(const VkExternalMemoryProperties* props,
7642                                                            uint32_t count) {
7643         VkExternalMemoryProperties* mut = (VkExternalMemoryProperties*)props;
7644         for (uint32_t i = 0; i < count; ++i) {
7645             mut[i] = transformExternalMemoryProperties_fromhost(mut[i],
7646                                                                 GUEST_EXTERNAL_MEMORY_HANDLE_TYPES);
7647         }
7648     }
7649 
    // Rewrites guest VkImageCreateInfos, in place, so that images which will
    // alias a ColorBuffer (imported via AHardwareBuffer or bound via
    // VkNativeBufferANDROID) are created with parameters compatible with the
    // ColorBuffer's backing VkImage. SRGB formats are resolved to UNORM, and
    // flags/usage are widened to the ColorBuffer's; remaining mismatches are
    // logged via ERR() but not corrected.
    void transformImpl_VkImageCreateInfo_tohost(const VkImageCreateInfo* pImageCreateInfos,
                                                uint32_t count) {
        for (uint32_t i = 0; i < count; i++) {
            // The decoder owns these structs; casting away const is intentional.
            VkImageCreateInfo& imageCreateInfo =
                const_cast<VkImageCreateInfo&>(pImageCreateInfos[i]);
            const VkExternalMemoryImageCreateInfo* pExternalMemoryImageCi =
                vk_find_struct<VkExternalMemoryImageCreateInfo>(&imageCreateInfo);
            bool importAndroidHardwareBuffer =
                pExternalMemoryImageCi &&
                (pExternalMemoryImageCi->handleTypes &
                 VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID);
            const VkNativeBufferANDROID* pNativeBufferANDROID =
                vk_find_struct<VkNativeBufferANDROID>(&imageCreateInfo);

            // If the VkImage is going to bind to a ColorBuffer, we have to make sure the VkImage
            // that backs the ColorBuffer is created with identical parameters. From the spec: If
            // two aliases are both images that were created with identical creation parameters,
            // both were created with the VK_IMAGE_CREATE_ALIAS_BIT flag set, and both are bound
            // identically to memory except for VkBindImageMemoryDeviceGroupInfo::pDeviceIndices and
            // VkBindImageMemoryDeviceGroupInfo::pSplitInstanceBindRegions, then they interpret the
            // contents of the memory in consistent ways, and data written to one alias can be read
            // by the other alias. ... Aliases created by binding the same memory to resources in
            // multiple Vulkan instances or external APIs using external memory handle export and
            // import mechanisms interpret the contents of the memory in consistent ways, and data
            // written to one alias can be read by the other alias. Otherwise, the aliases interpret
            // the contents of the memory differently, ...
            std::unique_ptr<VkImageCreateInfo> colorBufferVkImageCi = nullptr;
            const char* importSourceDebug = "";
            VkFormat resolvedFormat = VK_FORMAT_UNDEFINED;
            // Use UNORM formats for SRGB format requests.
            switch (imageCreateInfo.format) {
                case VK_FORMAT_R8G8B8A8_SRGB:
                    resolvedFormat = VK_FORMAT_R8G8B8A8_UNORM;
                    break;
                case VK_FORMAT_R8G8B8_SRGB:
                    resolvedFormat = VK_FORMAT_R8G8B8_UNORM;
                    break;
                case VK_FORMAT_B8G8R8A8_SRGB:
                    resolvedFormat = VK_FORMAT_B8G8R8A8_UNORM;
                    break;
                case VK_FORMAT_R8_SRGB:
                    resolvedFormat = VK_FORMAT_R8_UNORM;
                    break;
                default:
                    resolvedFormat = imageCreateInfo.format;
            }
            if (importAndroidHardwareBuffer) {
                // For AHardwareBufferImage binding, we can't know which ColorBuffer this
                // to-be-created VkImage will bind to, so we try our best to infer the creation
                // parameters.
                colorBufferVkImageCi = generateColorBufferVkImageCreateInfo(
                    resolvedFormat, imageCreateInfo.extent.width, imageCreateInfo.extent.height,
                    imageCreateInfo.tiling);
                importSourceDebug = "AHardwareBuffer";
            } else if (pNativeBufferANDROID) {
                // For native buffer binding, we can query the creation parameters from handle.
                uint32_t cbHandle = *static_cast<const uint32_t*>(pNativeBufferANDROID->handle);
                auto colorBufferInfo = getColorBufferInfo(cbHandle);
                if (colorBufferInfo.handle == cbHandle) {
                    colorBufferVkImageCi =
                        std::make_unique<VkImageCreateInfo>(colorBufferInfo.imageCreateInfoShallow);
                } else {
                    ERR("Unknown ColorBuffer handle: %" PRIu32 ".", cbHandle);
                }
                importSourceDebug = "NativeBufferANDROID";
            }
            // Not a ColorBuffer-aliasing image: leave this create-info as-is.
            if (!colorBufferVkImageCi) {
                continue;
            }
            imageCreateInfo.format = resolvedFormat;
            // Widen flags/usage to the ColorBuffer's, logging any bits the
            // ColorBuffer image was not created with.
            if (imageCreateInfo.flags & (~colorBufferVkImageCi->flags)) {
                ERR("The VkImageCreateInfo to import %s contains unsupported VkImageCreateFlags. "
                    "All supported VkImageCreateFlags are %s, the input VkImageCreateInfo requires "
                    "support for %s.",
                    importSourceDebug,
                    string_VkImageCreateFlags(colorBufferVkImageCi->flags).c_str()?:"",
                    string_VkImageCreateFlags(imageCreateInfo.flags).c_str()?:"");
            }
            imageCreateInfo.flags |= colorBufferVkImageCi->flags;
            if (imageCreateInfo.imageType != colorBufferVkImageCi->imageType) {
                ERR("The VkImageCreateInfo to import %s has an unexpected VkImageType: %s, %s "
                    "expected.",
                    importSourceDebug, string_VkImageType(imageCreateInfo.imageType),
                    string_VkImageType(colorBufferVkImageCi->imageType));
            }
            if (imageCreateInfo.extent.depth != colorBufferVkImageCi->extent.depth) {
                ERR("The VkImageCreateInfo to import %s has an unexpected VkExtent::depth: %" PRIu32
                    ", %" PRIu32 " expected.",
                    importSourceDebug, imageCreateInfo.extent.depth,
                    colorBufferVkImageCi->extent.depth);
            }
            if (imageCreateInfo.mipLevels != colorBufferVkImageCi->mipLevels) {
                ERR("The VkImageCreateInfo to import %s has an unexpected mipLevels: %" PRIu32
                    ", %" PRIu32 " expected.",
                    importSourceDebug, imageCreateInfo.mipLevels,
                    colorBufferVkImageCi->mipLevels);
            }
            if (imageCreateInfo.arrayLayers != colorBufferVkImageCi->arrayLayers) {
                ERR("The VkImageCreateInfo to import %s has an unexpected arrayLayers: %" PRIu32
                    ", %" PRIu32 " expected.",
                    importSourceDebug, imageCreateInfo.arrayLayers,
                    colorBufferVkImageCi->arrayLayers);
            }
            if (imageCreateInfo.samples != colorBufferVkImageCi->samples) {
                ERR("The VkImageCreateInfo to import %s has an unexpected VkSampleCountFlagBits: "
                    "%s, %s expected.",
                    importSourceDebug, string_VkSampleCountFlagBits(imageCreateInfo.samples),
                    string_VkSampleCountFlagBits(colorBufferVkImageCi->samples));
            }
            if (imageCreateInfo.usage & (~colorBufferVkImageCi->usage)) {
                ERR("The VkImageCreateInfo to import %s contains unsupported VkImageUsageFlags. "
                    "All supported VkImageUsageFlags are %s, the input VkImageCreateInfo requires "
                    "support for %s.",
                    importSourceDebug,
                    string_VkImageUsageFlags(colorBufferVkImageCi->usage).c_str()?:"",
                    string_VkImageUsageFlags(imageCreateInfo.usage).c_str()?:"");
            }
            imageCreateInfo.usage |= colorBufferVkImageCi->usage;
            // For the AndroidHardwareBuffer binding case VkImageCreateInfo::sharingMode isn't
            // filled in generateColorBufferVkImageCreateInfo, and
            // VkImageCreateInfo::{format,extent::{width, height}, tiling} are guaranteed to match.
            if (importAndroidHardwareBuffer) {
                continue;
            }
            // NativeBufferANDROID path only: cross-check the remaining fields
            // against the actual ColorBuffer image parameters.
            if (resolvedFormat != colorBufferVkImageCi->format) {
                ERR("The VkImageCreateInfo to import %s contains unexpected VkFormat:"
                    "%s [%d]. %s [%d] expected.",
                    importSourceDebug, string_VkFormat(imageCreateInfo.format),
                    imageCreateInfo.format, string_VkFormat(colorBufferVkImageCi->format),
                    colorBufferVkImageCi->format);
            }
            if (imageCreateInfo.extent.width != colorBufferVkImageCi->extent.width) {
                ERR("The VkImageCreateInfo to import %s contains unexpected VkExtent::width: "
                    "%" PRIu32 ". %" PRIu32 " expected.",
                    importSourceDebug, imageCreateInfo.extent.width,
                    colorBufferVkImageCi->extent.width);
            }
            if (imageCreateInfo.extent.height != colorBufferVkImageCi->extent.height) {
                ERR("The VkImageCreateInfo to import %s contains unexpected VkExtent::height: "
                    "%" PRIu32 ". %" PRIu32 " expected.",
                    importSourceDebug, imageCreateInfo.extent.height,
                    colorBufferVkImageCi->extent.height);
            }
            if (imageCreateInfo.tiling != colorBufferVkImageCi->tiling) {
                ERR("The VkImageCreateInfo to import %s contains unexpected VkImageTiling: %s. %s "
                    "expected.",
                    importSourceDebug, string_VkImageTiling(imageCreateInfo.tiling),
                    string_VkImageTiling(colorBufferVkImageCi->tiling));
            }
            if (imageCreateInfo.sharingMode != colorBufferVkImageCi->sharingMode) {
                ERR("The VkImageCreateInfo to import %s contains unexpected VkSharingMode: %s. %s "
                    "expected.",
                    importSourceDebug, string_VkSharingMode(imageCreateInfo.sharingMode),
                    string_VkSharingMode(colorBufferVkImageCi->sharingMode));
            }
        }
    }
7807 
    // Reverse transform (host -> guest) for VkImageCreateInfo is never needed
    // by current callers; reaching it is a hard error.
    void transformImpl_VkImageCreateInfo_fromhost(const VkImageCreateInfo*, uint32_t) {
        GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER)) << "Not yet implemented.";
    }
7811 
// Generates to-host/from-host transforms for structs carrying a single
// external-memory handle-type field (`field`); the from-host direction
// restricts results to GUEST_EXTERNAL_MEMORY_HANDLE_TYPES.
#define DEFINE_EXTERNAL_HANDLE_TYPE_TRANSFORM(type, field)                                         \
    void transformImpl_##type##_tohost(const type* props, uint32_t count) {                        \
        type* mut = (type*)props;                                                                  \
        for (uint32_t i = 0; i < count; ++i) {                                                     \
            mut[i].field =                                                                         \
                (VkExternalMemoryHandleTypeFlagBits)transformExternalMemoryHandleTypeFlags_tohost( \
                    mut[i].field);                                                                 \
        }                                                                                          \
    }                                                                                              \
    void transformImpl_##type##_fromhost(const type* props, uint32_t count) {                      \
        type* mut = (type*)props;                                                                  \
        for (uint32_t i = 0; i < count; ++i) {                                                     \
            mut[i].field = (VkExternalMemoryHandleTypeFlagBits)                                    \
                transformExternalMemoryHandleTypeFlags_fromhost(                                   \
                    mut[i].field, GUEST_EXTERNAL_MEMORY_HANDLE_TYPES);                             \
        }                                                                                          \
    }

// Same as above, but for structs embedding a whole VkExternalMemoryProperties.
#define DEFINE_EXTERNAL_MEMORY_PROPERTIES_TRANSFORM(type)                                  \
    void transformImpl_##type##_tohost(const type* props, uint32_t count) {                \
        type* mut = (type*)props;                                                          \
        for (uint32_t i = 0; i < count; ++i) {                                             \
            mut[i].externalMemoryProperties =                                              \
                transformExternalMemoryProperties_tohost(mut[i].externalMemoryProperties); \
        }                                                                                  \
    }                                                                                      \
    void transformImpl_##type##_fromhost(const type* props, uint32_t count) {              \
        type* mut = (type*)props;                                                          \
        for (uint32_t i = 0; i < count; ++i) {                                             \
            mut[i].externalMemoryProperties = transformExternalMemoryProperties_fromhost(  \
                mut[i].externalMemoryProperties, GUEST_EXTERNAL_MEMORY_HANDLE_TYPES);      \
        }                                                                                  \
    }

    // Instantiate the transforms for every struct type the decoder rewrites.
    DEFINE_EXTERNAL_HANDLE_TYPE_TRANSFORM(VkPhysicalDeviceExternalImageFormatInfo, handleType)
    DEFINE_EXTERNAL_HANDLE_TYPE_TRANSFORM(VkPhysicalDeviceExternalBufferInfo, handleType)
    DEFINE_EXTERNAL_HANDLE_TYPE_TRANSFORM(VkExternalMemoryImageCreateInfo, handleTypes)
    DEFINE_EXTERNAL_HANDLE_TYPE_TRANSFORM(VkExternalMemoryBufferCreateInfo, handleTypes)
    DEFINE_EXTERNAL_HANDLE_TYPE_TRANSFORM(VkExportMemoryAllocateInfo, handleTypes)
    DEFINE_EXTERNAL_MEMORY_PROPERTIES_TRANSFORM(VkExternalImageFormatProperties)
    DEFINE_EXTERNAL_MEMORY_PROPERTIES_TRANSFORM(VkExternalBufferProperties)
7853 
7854     uint64_t newGlobalHandle(const DispatchableHandleInfo<uint64_t>& item,
7855                              BoxedHandleTypeTag typeTag) {
7856         if (!mCreatedHandlesForSnapshotLoad.empty() &&
7857             (mCreatedHandlesForSnapshotLoad.size() - mCreatedHandlesForSnapshotLoadIndex > 0)) {
7858             auto handle = mCreatedHandlesForSnapshotLoad[mCreatedHandlesForSnapshotLoadIndex];
7859             VKDGS_LOG("use handle: 0x%lx underlying 0x%lx", handle, item.underlying);
7860             ++mCreatedHandlesForSnapshotLoadIndex;
7861             auto res = sBoxedHandleManager.addFixed(handle, item, typeTag);
7862 
7863             return res;
7864         } else {
7865             return sBoxedHandleManager.add(item, typeTag);
7866         }
7867     }
7868 
// Generates the boxed-handle API for a dispatchable Vulkan handle type:
// new_boxed_<type> (wraps an underlying handle plus its dispatch table and
// per-handle ordering/stream state), delete_<type> (releases that state),
// ordmaint_<type> (acquires the ordering info), readstream_<type> (lazily
// attaches a read stream), and dispatch_<type> (looks up the dispatch table).
#define DEFINE_BOXED_DISPATCHABLE_HANDLE_API_IMPL(type)                                           \
    type new_boxed_##type(type underlying, VulkanDispatch* dispatch, bool ownDispatch) {          \
        DispatchableHandleInfo<uint64_t> item;                                                    \
        item.underlying = (uint64_t)underlying;                                                   \
        item.dispatch = dispatch ? dispatch : new VulkanDispatch;                                 \
        item.ownDispatch = ownDispatch;                                                           \
        item.ordMaintInfo = new OrderMaintenanceInfo;                                             \
        item.readStream = nullptr;                                                                \
        auto res = (type)newGlobalHandle(item, Tag_##type);                                       \
        return res;                                                                               \
    }                                                                                             \
    void delete_##type(type boxed) {                                                              \
        if (!boxed) return;                                                                       \
        auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
        if (!elt) return;                                                                         \
        releaseOrderMaintInfo(elt->ordMaintInfo);                                                 \
        if (elt->readStream) {                                                                    \
            sReadStreamRegistry.push(elt->readStream);                                            \
            elt->readStream = nullptr;                                                            \
        }                                                                                         \
        sBoxedHandleManager.remove((uint64_t)boxed);                                              \
    }                                                                                             \
    OrderMaintenanceInfo* ordmaint_##type(type boxed) {                                           \
        auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
        if (!elt) return 0;                                                                       \
        auto info = elt->ordMaintInfo;                                                            \
        if (!info) return 0;                                                                      \
        acquireOrderMaintInfo(info);                                                              \
        return info;                                                                              \
    }                                                                                             \
    VulkanMemReadingStream* readstream_##type(type boxed) {                                       \
        auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
        if (!elt) return 0;                                                                       \
        auto stream = elt->readStream;                                                            \
        if (!stream) {                                                                            \
            stream = sReadStreamRegistry.pop(getFeatures());                                      \
            elt->readStream = stream;                                                             \
        }                                                                                         \
        return stream;                                                                            \
    }                                                                                             \
    VulkanDispatch* dispatch_##type(type boxed) {                                                 \
        auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
        if (!elt) {                                                                               \
            ERR("%s: Failed to unbox %p", __func__, boxed);                                       \
            return nullptr;                                                                       \
        }                                                                                         \
        return elt->dispatch;                                                                     \
    }
7917 
// Generates the boxed-handle API for a non-dispatchable Vulkan handle |type|.
// Boxed handles are host-side wrappers tracked by sBoxedHandleManager; these
// helpers create/destroy the wrappers and translate boxed <-> underlying
// (driver) handles.
//  - new_boxed_non_dispatchable_##type: allocates a fresh boxed handle that
//    records |underlying| (no dispatch table for non-dispatchable types).
//  - delayed_delete_##type: defers removal via removeDelayed, running
//    |callback| when the manager actually drops the entry.
//  - set_boxed_non_dispatchable_##type: rebinds an existing boxed handle to a
//    new underlying handle.
//  - unbox_##type: aborts on lookup failure — except for VkFence, which is
//    special-cased (compile-time check) to return VK_NULL_HANDLE instead.
//  - try_unbox_##type: never aborts; warns and returns VK_NULL_HANDLE.
// NOTE(review): only the unbox/getBoxedFromUnboxed paths take
// sBoxedHandleManager.lock here; the create/update/remove paths presumably
// synchronize inside the manager — confirm against BoxedHandleManager.
#define DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_API_IMPL(type)                                       \
    type new_boxed_non_dispatchable_##type(type underlying) {                                     \
        DispatchableHandleInfo<uint64_t> item;                                                    \
        item.underlying = (uint64_t)underlying;                                                   \
        auto res = (type)newGlobalHandle(item, Tag_##type);                                       \
        return res;                                                                               \
    }                                                                                             \
    void delayed_delete_##type(type boxed, VkDevice device, std::function<void()> callback) {     \
        sBoxedHandleManager.removeDelayed((uint64_t)boxed, device, callback);                     \
    }                                                                                             \
    void delete_##type(type boxed) { sBoxedHandleManager.remove((uint64_t)boxed); }               \
    void set_boxed_non_dispatchable_##type(type boxed, type underlying) {                         \
        DispatchableHandleInfo<uint64_t> item;                                                    \
        item.underlying = (uint64_t)underlying;                                                   \
        sBoxedHandleManager.update((uint64_t)boxed, item, Tag_##type);                            \
    }                                                                                             \
    type unboxed_to_boxed_non_dispatchable_##type(type unboxed) {                                 \
        AutoLock lock(sBoxedHandleManager.lock);                                                  \
        return (type)sBoxedHandleManager.getBoxedFromUnboxedLocked((uint64_t)(uintptr_t)unboxed); \
    }                                                                                             \
    type unbox_##type(type boxed) {                                                               \
        AutoLock lock(sBoxedHandleManager.lock);                                                  \
        auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
        if (!elt) {                                                                               \
            if constexpr (!std::is_same_v<type, VkFence>) {                                       \
                GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))                                   \
                    << "Unbox " << boxed << " failed, not found.";                                \
            }                                                                                     \
            return VK_NULL_HANDLE;                                                                \
        }                                                                                         \
        return (type)elt->underlying;                                                             \
    }                                                                                             \
    type try_unbox_##type(type boxed) {                                                           \
        AutoLock lock(sBoxedHandleManager.lock);                                                  \
        auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
        if (!elt) {                                                                               \
            WARN("%s: Failed to unbox %p", __func__, boxed);                                      \
            return VK_NULL_HANDLE;                                                                \
        }                                                                                         \
        return (type)elt->underlying;                                                             \
    }
7959 
    // Instantiate the boxed-handle API for every dispatchable and
    // non-dispatchable Vulkan handle type known to the decoder.
    GOLDFISH_VK_LIST_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_DISPATCHABLE_HANDLE_API_IMPL)
    GOLDFISH_VK_LIST_NON_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_API_IMPL)
7962 
// Generates the "regular" unbox API for dispatchable handle |type|s that do
// not need custom translation:
//  - unbox_##type / try_unbox_##type: look up the underlying driver handle;
//    on failure they log (ERR vs WARN) and return VK_NULL_HANDLE — unlike the
//    non-dispatchable variant, they never abort.
//  - unboxed_to_boxed_##type: reverse lookup under the manager's lock.
// NOTE(review): the unbox paths here call sBoxedHandleManager.get() without
// taking sBoxedHandleManager.lock; presumably get() is internally
// synchronized — confirm against BoxedHandleManager.
#define DEFINE_BOXED_DISPATCHABLE_HANDLE_API_REGULAR_UNBOX_IMPL(type)                             \
    type unbox_##type(type boxed) {                                                               \
        auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
        if (!elt){                                                                                \
            ERR("%s: Failed to unbox %p", __func__, boxed);                                       \
            return VK_NULL_HANDLE;                                                                \
        }                                                                                         \
        return (type)elt->underlying;                                                             \
    }                                                                                             \
    type try_unbox_##type(type boxed) {                                                           \
        auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
        if (!elt){                                                                                \
            WARN("%s: Failed to unbox %p", __func__, boxed);                                      \
            return VK_NULL_HANDLE;                                                                \
        }                                                                                         \
        return (type)elt->underlying;                                                             \
    }                                                                                             \
    type unboxed_to_boxed_##type(type unboxed) {                                                  \
        AutoLock lock(sBoxedHandleManager.lock);                                                  \
        return (type)sBoxedHandleManager.getBoxedFromUnboxedLocked((uint64_t)(uintptr_t)unboxed); \
    }
7984 
    // Instantiate the regular unbox API for dispatchable handle types that do
    // not require custom unboxing (VkQueue below is handled manually).
    GOLDFISH_VK_LIST_DISPATCHABLE_REGULAR_UNBOX_HANDLE_TYPES(DEFINE_BOXED_DISPATCHABLE_HANDLE_API_REGULAR_UNBOX_IMPL)
7986 
7987     // Custom unbox_* functions or GOLDFISH_VK_LIST_DISPATCHABLE_CUSTOM_UNBOX_HANDLE_TYPES
7988     // VkQueue objects can be virtual, meaning that multiple boxed queues can map into a single
7989     // physical queue on the host GPU. Some conversion is needed for unboxing to physical.
7990     VkQueue unbox_VkQueueImp(VkQueue boxed) {
7991         auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);
7992         if (!elt) {
7993             return VK_NULL_HANDLE;
7994         }
7995         const uint64_t unboxedQueue64 = elt->underlying;
7996         if (mEnableVirtualVkQueue) {
7997             // Clear virtual bit and unbox into the actual physical queue handle
7998             return (VkQueue)(unboxedQueue64 & ~QueueInfo::kVirtualQueueBit);
7999         }
8000         return (VkQueue)(unboxedQueue64);
8001     }
unbox_VkQueue(VkQueue boxed)8002     VkQueue unbox_VkQueue(VkQueue boxed) {
8003         VkQueue unboxed = unbox_VkQueueImp(boxed);
8004         if (unboxed == VK_NULL_HANDLE) {
8005             ERR("%s: Failed to unbox %p", __func__, boxed);
8006         }
8007         return unboxed;
8008     }
try_unbox_VkQueue(VkQueue boxed)8009     VkQueue try_unbox_VkQueue(VkQueue boxed) {
8010         VkQueue unboxed = unbox_VkQueueImp(boxed);
8011         if (unboxed == VK_NULL_HANDLE) {
8012             WARN("%s: Failed to unbox %p", __func__, boxed);
8013         }
8014         return unboxed;
8015     }
8016 
snapshot()8017     VkDecoderSnapshot* snapshot() { return &mSnapshot; }
getSnapshotState()8018     SnapshotState getSnapshotState() { return mSnapshotState; }
8019 
8020    private:
isEmulatedInstanceExtension(const char * name) const8021     bool isEmulatedInstanceExtension(const char* name) const {
8022         for (auto emulatedExt : kEmulatedInstanceExtensions) {
8023             if (!strcmp(emulatedExt, name)) return true;
8024         }
8025         return false;
8026     }
8027 
isEmulatedDeviceExtension(const char * name) const8028     bool isEmulatedDeviceExtension(const char* name) const {
8029         for (auto emulatedExt : kEmulatedDeviceExtensions) {
8030             if (!strcmp(emulatedExt, name)) return true;
8031         }
8032         return false;
8033     }
8034 
supportEmulatedCompressedImageFormatProperty(VkFormat compressedFormat,VkImageType type,VkImageTiling tiling,VkImageUsageFlags usage,VkImageCreateFlags flags)8035     bool supportEmulatedCompressedImageFormatProperty(VkFormat compressedFormat, VkImageType type,
8036                                                       VkImageTiling tiling, VkImageUsageFlags usage,
8037                                                       VkImageCreateFlags flags) {
8038         // BUG: 139193497
8039         return !(usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) && !(type == VK_IMAGE_TYPE_1D);
8040     }
8041 
filteredDeviceExtensionNames(VulkanDispatch * vk,VkPhysicalDevice physicalDevice,uint32_t count,const char * const * extNames)8042     std::vector<const char*> filteredDeviceExtensionNames(VulkanDispatch* vk,
8043                                                           VkPhysicalDevice physicalDevice,
8044                                                           uint32_t count,
8045                                                           const char* const* extNames) {
8046         std::vector<const char*> res;
8047         std::vector<VkExtensionProperties> properties;
8048         VkResult result;
8049 
8050         for (uint32_t i = 0; i < count; ++i) {
8051             auto extName = extNames[i];
8052             if (!isEmulatedDeviceExtension(extName)) {
8053                 res.push_back(extName);
8054                 continue;
8055             }
8056         }
8057 
8058         result = enumerateDeviceExtensionProperties(vk, physicalDevice, nullptr, properties);
8059         if (result != VK_SUCCESS) {
8060             VKDGS_LOG("failed to enumerate device extensions");
8061             return res;
8062         }
8063 
8064         if (hasDeviceExtension(properties, VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME)) {
8065             res.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
8066         }
8067 
8068         if (hasDeviceExtension(properties, VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME)) {
8069             res.push_back(VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME);
8070         }
8071 
8072         if (hasDeviceExtension(properties, VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME)) {
8073             res.push_back(VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME);
8074         }
8075 
8076         if (hasDeviceExtension(properties, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME)) {
8077             res.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
8078         }
8079 
8080         if (hasDeviceExtension(properties, VK_KHR_SWAPCHAIN_EXTENSION_NAME)) {
8081             res.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
8082         }
8083 
8084 #ifdef _WIN32
8085         if (hasDeviceExtension(properties, VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME)) {
8086             res.push_back(VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME);
8087         }
8088 
8089         if (hasDeviceExtension(properties, VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME)) {
8090             res.push_back(VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME);
8091         }
8092 #elif defined(__QNX__)
8093         // Note: VK_QNX_external_memory_screen_buffer is not supported in API translation,
8094         // decoding, etc. However, push name to indicate external memory support to guest
8095         if (hasDeviceExtension(properties, VK_QNX_EXTERNAL_MEMORY_SCREEN_BUFFER_EXTENSION_NAME)) {
8096             res.push_back(VK_QNX_EXTERNAL_MEMORY_SCREEN_BUFFER_EXTENSION_NAME);
8097             // EXT_queue_family_foreign is a pre-requisite for QNX_external_memory_screen_buffer
8098             if (hasDeviceExtension(properties, VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME)) {
8099                 res.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
8100             }
8101         }
8102 
8103         if (hasDeviceExtension(properties, VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME)) {
8104             res.push_back(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME);
8105         }
8106 #elif __unix__
8107         if (hasDeviceExtension(properties, VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME)) {
8108             res.push_back(VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME);
8109         }
8110 
8111         if (hasDeviceExtension(properties, VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME)) {
8112             res.push_back(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME);
8113         }
8114 #elif defined(__APPLE__)
8115         if (m_emu->instanceSupportsMoltenVK) {
8116             if (hasDeviceExtension(properties, VK_KHR_PORTABILITY_SUBSET_EXTENSION_NAME)) {
8117                 res.push_back(VK_KHR_PORTABILITY_SUBSET_EXTENSION_NAME);
8118             }
8119             if (hasDeviceExtension(properties, VK_EXT_METAL_OBJECTS_EXTENSION_NAME)) {
8120                 res.push_back(VK_EXT_METAL_OBJECTS_EXTENSION_NAME);
8121             }
8122             if (hasDeviceExtension(properties, VK_EXT_EXTERNAL_MEMORY_METAL_EXTENSION_NAME)) {
8123                 res.push_back(VK_EXT_EXTERNAL_MEMORY_METAL_EXTENSION_NAME);
8124             }
8125         } else {
8126             // Non-MoltenVK path, use memory_fd
8127             if (hasDeviceExtension(properties, VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME)) {
8128                 res.push_back(VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME);
8129             }
8130         }
8131 #endif
8132 
8133 #ifdef __linux__
8134         // A dma-buf is a Linux kernel construct, commonly used with open-source DRM drivers.
8135         // See https://docs.kernel.org/driver-api/dma-buf.html for details.
8136         if (m_emu->deviceInfo.supportsDmaBuf &&
8137             hasDeviceExtension(properties, VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME)) {
8138             res.push_back(VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME);
8139         }
8140 
8141         if (hasDeviceExtension(properties, VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME)) {
8142             // Mesa Vulkan Wayland WSI needs vkGetImageDrmFormatModifierPropertiesEXT. On some Intel
8143             // GPUs, this extension is exposed by the driver only if
8144             // VK_EXT_image_drm_format_modifier extension is requested via
8145             // VkDeviceCreateInfo::ppEnabledExtensionNames. vkcube-wayland does not request it,
8146             // which makes the host attempt to call a null function pointer unless we force-enable
8147             // it regardless of the client's wishes.
8148             res.push_back(VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME);
8149         }
8150 
8151 #endif
8152 
8153         if (hasDeviceExtension(properties, VK_EXT_PRIVATE_DATA_EXTENSION_NAME)) {
8154             //TODO(b/378686769): Enable private data extension where available to
8155             // mitigate the issues with duplicated vulkan handles. This should be
8156             // removed once the issue is properly resolved.
8157             res.push_back(VK_EXT_PRIVATE_DATA_EXTENSION_NAME);
8158         }
8159 
8160         return res;
8161     }
8162 
filteredInstanceExtensionNames(uint32_t count,const char * const * extNames)8163     std::vector<const char*> filteredInstanceExtensionNames(uint32_t count,
8164                                                             const char* const* extNames) {
8165         std::vector<const char*> res;
8166         for (uint32_t i = 0; i < count; ++i) {
8167             auto extName = extNames[i];
8168             if (!isEmulatedInstanceExtension(extName)) {
8169                 res.push_back(extName);
8170             }
8171         }
8172 
8173         if (m_emu->instanceSupportsExternalMemoryCapabilities) {
8174             res.push_back(VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME);
8175         }
8176 
8177         if (m_emu->instanceSupportsExternalSemaphoreCapabilities) {
8178             res.push_back(VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME);
8179         }
8180 
8181         if (m_emu->instanceSupportsExternalFenceCapabilities) {
8182             res.push_back(VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME);
8183         }
8184 
8185         if (m_emu->debugUtilsAvailableAndRequested) {
8186             res.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
8187         }
8188 
8189         if (m_emu->instanceSupportsSurface) {
8190             res.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
8191         }
8192 
8193 #if defined(__APPLE__)
8194         if (m_emu->instanceSupportsMoltenVK) {
8195             res.push_back(VK_MVK_MACOS_SURFACE_EXTENSION_NAME);
8196             res.push_back(VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME);
8197         }
8198 #endif
8199 
8200         return res;
8201     }
8202 
getDefaultQueueForDeviceLocked(VkDevice device,VkQueue * queue,uint32_t * queueFamilyIndex,Lock ** queueLock)8203     bool getDefaultQueueForDeviceLocked(VkDevice device, VkQueue* queue, uint32_t* queueFamilyIndex,
8204                                         Lock** queueLock) {
8205         auto* deviceInfo = android::base::find(mDeviceInfo, device);
8206         if (!deviceInfo) return false;
8207 
8208         auto zeroIt = deviceInfo->queues.find(0);
8209         if (zeroIt == deviceInfo->queues.end() || zeroIt->second.empty()) {
8210             // Get the first queue / queueFamilyIndex
8211             // that does show up.
8212             for (const auto& it : deviceInfo->queues) {
8213                 auto index = it.first;
8214                 for (auto& deviceQueue : it.second) {
8215                     *queue = deviceQueue;
8216                     *queueFamilyIndex = index;
8217                     *queueLock = mQueueInfo.at(deviceQueue).physicalQueueLock.get();
8218                     return true;
8219                 }
8220             }
8221             // Didn't find anything, fail.
8222             return false;
8223         } else {
8224             // Use queue family index 0.
8225             *queue = zeroIt->second[0];
8226             *queueFamilyIndex = 0;
8227             *queueLock = mQueueInfo.at(zeroIt->second[0]).physicalQueueLock.get();
8228             return true;
8229         }
8230 
8231         return false;
8232     }
8233 
updateImageMemorySizeLocked(VkDevice device,VkImage image,VkMemoryRequirements * pMemoryRequirements)8234     void updateImageMemorySizeLocked(VkDevice device, VkImage image,
8235                                      VkMemoryRequirements* pMemoryRequirements) {
8236         auto* deviceInfo = android::base::find(mDeviceInfo, device);
8237         if (!deviceInfo->emulateTextureEtc2 && !deviceInfo->emulateTextureAstc) {
8238             return;
8239         }
8240         auto* imageInfo = android::base::find(mImageInfo, image);
8241         if (!imageInfo) return;
8242         CompressedImageInfo& cmpInfo = imageInfo->cmpInfo;
8243         if (!deviceInfo->needEmulatedDecompression(cmpInfo)) {
8244             return;
8245         }
8246         *pMemoryRequirements = cmpInfo.getMemoryRequirements();
8247     }
8248 
8249     // Whether the VkInstance associated with this physical device was created by ANGLE
isAngleInstance(VkPhysicalDevice physicalDevice,VulkanDispatch * vk)8250     bool isAngleInstance(VkPhysicalDevice physicalDevice, VulkanDispatch* vk) {
8251         std::lock_guard<std::recursive_mutex> lock(mLock);
8252         VkInstance* instance = android::base::find(mPhysicalDeviceToInstance, physicalDevice);
8253         if (!instance) return false;
8254         InstanceInfo* instanceInfo = android::base::find(mInstanceInfo, *instance);
8255         if (!instanceInfo) return false;
8256         return instanceInfo->isAngle;
8257     }
8258 
enableEmulatedEtc2(VkPhysicalDevice physicalDevice,VulkanDispatch * vk)8259     bool enableEmulatedEtc2(VkPhysicalDevice physicalDevice, VulkanDispatch* vk) {
8260         if (!m_emu->enableEtc2Emulation) return false;
8261 
8262         // Don't enable ETC2 emulation for ANGLE, let it do its own emulation.
8263         return !isAngleInstance(physicalDevice, vk);
8264     }
8265 
enableEmulatedAstc(VkPhysicalDevice physicalDevice,VulkanDispatch * vk)8266     bool enableEmulatedAstc(VkPhysicalDevice physicalDevice, VulkanDispatch* vk) {
8267         if (m_emu->astcLdrEmulationMode == AstcEmulationMode::Disabled) {
8268             return false;
8269         }
8270 
8271         // Don't enable ASTC emulation for ANGLE, let it do its own emulation.
8272         return !isAngleInstance(physicalDevice, vk);
8273     }
8274 
needEmulatedEtc2(VkPhysicalDevice physicalDevice,VulkanDispatch * vk)8275     bool needEmulatedEtc2(VkPhysicalDevice physicalDevice, VulkanDispatch* vk) {
8276         if (!enableEmulatedEtc2(physicalDevice, vk)) {
8277             return false;
8278         }
8279         VkPhysicalDeviceFeatures feature;
8280         vk->vkGetPhysicalDeviceFeatures(physicalDevice, &feature);
8281         return !feature.textureCompressionETC2;
8282     }
8283 
needEmulatedAstc(VkPhysicalDevice physicalDevice,VulkanDispatch * vk)8284     bool needEmulatedAstc(VkPhysicalDevice physicalDevice, VulkanDispatch* vk) {
8285         if (!enableEmulatedAstc(physicalDevice, vk)) {
8286             return false;
8287         }
8288         VkPhysicalDeviceFeatures feature;
8289         vk->vkGetPhysicalDeviceFeatures(physicalDevice, &feature);
8290         return !feature.textureCompressionASTC_LDR;
8291     }
8292 
getSupportedFenceHandleTypes(VulkanDispatch * vk,VkPhysicalDevice physicalDevice,uint32_t * supportedFenceHandleTypes)8293     void getSupportedFenceHandleTypes(VulkanDispatch* vk, VkPhysicalDevice physicalDevice,
8294                                       uint32_t* supportedFenceHandleTypes) {
8295         if (!m_emu->instanceSupportsExternalFenceCapabilities) {
8296             return;
8297         }
8298 
8299         VkExternalFenceHandleTypeFlagBits handleTypes[] = {
8300             VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR,
8301             VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT,
8302             VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT,
8303         };
8304 
8305         for (auto handleType : handleTypes) {
8306             VkExternalFenceProperties externalFenceProps;
8307             externalFenceProps.sType = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES;
8308             externalFenceProps.pNext = nullptr;
8309 
8310             VkPhysicalDeviceExternalFenceInfo externalFenceInfo = {
8311                 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO, nullptr, handleType};
8312 
8313             vk->vkGetPhysicalDeviceExternalFenceProperties(physicalDevice, &externalFenceInfo,
8314                                                            &externalFenceProps);
8315 
8316             if ((externalFenceProps.externalFenceFeatures &
8317                  (VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT)) == 0) {
8318                 continue;
8319             }
8320 
8321             if ((externalFenceProps.externalFenceFeatures &
8322                  (VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT)) == 0) {
8323                 continue;
8324             }
8325 
8326             *supportedFenceHandleTypes |= handleType;
8327         }
8328     }
8329 
getSupportedSemaphoreHandleTypes(VulkanDispatch * vk,VkPhysicalDevice physicalDevice,uint32_t * supportedBinarySemaphoreHandleTypes)8330     void getSupportedSemaphoreHandleTypes(VulkanDispatch* vk, VkPhysicalDevice physicalDevice,
8331                                           uint32_t* supportedBinarySemaphoreHandleTypes) {
8332         if (!m_emu->instanceSupportsExternalSemaphoreCapabilities) {
8333             return;
8334         }
8335 
8336         VkExternalSemaphoreHandleTypeFlagBits handleTypes[] = {
8337             VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR,
8338             VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
8339             VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
8340         };
8341 
8342         for (auto handleType : handleTypes) {
8343             VkExternalSemaphoreProperties externalSemaphoreProps;
8344             externalSemaphoreProps.sType = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES;
8345             externalSemaphoreProps.pNext = nullptr;
8346 
8347             VkPhysicalDeviceExternalSemaphoreInfo externalSemaphoreInfo = {
8348                 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO, nullptr, handleType};
8349 
8350             vk->vkGetPhysicalDeviceExternalSemaphoreProperties(
8351                 physicalDevice, &externalSemaphoreInfo, &externalSemaphoreProps);
8352 
8353             if ((externalSemaphoreProps.externalSemaphoreFeatures &
8354                  (VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT)) == 0) {
8355                 continue;
8356             }
8357 
8358             if ((externalSemaphoreProps.externalSemaphoreFeatures &
8359                  (VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT)) == 0) {
8360                 continue;
8361             }
8362 
8363             *supportedBinarySemaphoreHandleTypes |= handleType;
8364         }
8365     }
8366 
supportsSwapchainMaintenance1(VkPhysicalDevice physicalDevice,VulkanDispatch * vk)8367     bool supportsSwapchainMaintenance1(VkPhysicalDevice physicalDevice, VulkanDispatch* vk) {
8368         bool hasGetPhysicalDeviceFeatures2 = false;
8369         bool hasGetPhysicalDeviceFeatures2KHR = false;
8370 
8371         {
8372             std::lock_guard<std::recursive_mutex> lock(mLock);
8373 
8374             auto* physdevInfo = android::base::find(mPhysdevInfo, physicalDevice);
8375             if (!physdevInfo) {
8376                 return false;
8377             }
8378 
8379             auto instance = mPhysicalDeviceToInstance[physicalDevice];
8380             auto* instanceInfo = android::base::find(mInstanceInfo, instance);
8381             if (!instanceInfo) {
8382                 return false;
8383             }
8384 
8385             if (instanceInfo->apiVersion >= VK_MAKE_VERSION(1, 1, 0) &&
8386                 physdevInfo->props.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) {
8387                 hasGetPhysicalDeviceFeatures2 = true;
8388             } else if (hasInstanceExtension(instance,
8389                                             VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
8390                 hasGetPhysicalDeviceFeatures2KHR = true;
8391             } else {
8392                 return false;
8393             }
8394         }
8395 
8396         VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT swapchainMaintenance1Features = {
8397             .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SWAPCHAIN_MAINTENANCE_1_FEATURES_EXT,
8398             .pNext = nullptr,
8399             .swapchainMaintenance1 = VK_FALSE,
8400         };
8401         VkPhysicalDeviceFeatures2 features2 = {
8402             .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
8403             .pNext = &swapchainMaintenance1Features,
8404         };
8405         if (hasGetPhysicalDeviceFeatures2) {
8406             vk->vkGetPhysicalDeviceFeatures2(physicalDevice, &features2);
8407         } else if (hasGetPhysicalDeviceFeatures2KHR) {
8408             vk->vkGetPhysicalDeviceFeatures2KHR(physicalDevice, &features2);
8409         } else {
8410             return false;
8411         }
8412 
8413         return swapchainMaintenance1Features.swapchainMaintenance1 == VK_TRUE;
8414     }
8415 
isEmulatedCompressedTexture(VkFormat format,VkPhysicalDevice physicalDevice,VulkanDispatch * vk)8416     bool isEmulatedCompressedTexture(VkFormat format, VkPhysicalDevice physicalDevice,
8417                                      VulkanDispatch* vk) {
8418         return (gfxstream::vk::isEtc2(format) && needEmulatedEtc2(physicalDevice, vk)) ||
8419                (gfxstream::vk::isAstc(format) && needEmulatedAstc(physicalDevice, vk));
8420     }
8421 
    // Format features advertised for emulated compressed textures: only
    // transfer/blit-src/sampled-image features are retained by the masking
    // helpers below.
    static const VkFormatFeatureFlags kEmulatedTextureBufferFeatureMask =
        VK_FORMAT_FEATURE_TRANSFER_SRC_BIT | VK_FORMAT_FEATURE_TRANSFER_DST_BIT |
        VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT;

    // Same as above, plus linear-filtered sampling for optimal-tiling images.
    static const VkFormatFeatureFlags kEmulatedTextureOptimalTilingMask =
        VK_FORMAT_FEATURE_TRANSFER_SRC_BIT | VK_FORMAT_FEATURE_TRANSFER_DST_BIT |
        VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT |
        VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT;
8430 
maskFormatPropertiesForEmulatedTextures(VkFormatProperties * pFormatProp)8431     void maskFormatPropertiesForEmulatedTextures(VkFormatProperties* pFormatProp) {
8432         pFormatProp->linearTilingFeatures &= kEmulatedTextureBufferFeatureMask;
8433         pFormatProp->optimalTilingFeatures &= kEmulatedTextureOptimalTilingMask;
8434         pFormatProp->bufferFeatures &= kEmulatedTextureBufferFeatureMask;
8435     }
8436 
maskFormatPropertiesForEmulatedTextures(VkFormatProperties2 * pFormatProp)8437     void maskFormatPropertiesForEmulatedTextures(VkFormatProperties2* pFormatProp) {
8438         pFormatProp->formatProperties.linearTilingFeatures &= kEmulatedTextureBufferFeatureMask;
8439         pFormatProp->formatProperties.optimalTilingFeatures &= kEmulatedTextureOptimalTilingMask;
8440         pFormatProp->formatProperties.bufferFeatures &= kEmulatedTextureBufferFeatureMask;
8441     }
8442 
    // Restricts reported image format properties for emulated compressed
    // textures: only single-sample images are supported by the emulation.
    void maskImageFormatPropertiesForEmulatedTextures(VkImageFormatProperties* pProperties) {
        // dEQP-VK.api.info.image_format_properties.2d.optimal#etc2_r8g8b8_unorm_block
        pProperties->sampleCounts &= VK_SAMPLE_COUNT_1_BIT;
    }
8447 
8448     template <class VkFormatProperties1or2>
getPhysicalDeviceFormatPropertiesCore(std::function<void (VkPhysicalDevice,VkFormat,VkFormatProperties1or2 *)> getPhysicalDeviceFormatPropertiesFunc,VulkanDispatch * vk,VkPhysicalDevice physicalDevice,VkFormat format,VkFormatProperties1or2 * pFormatProperties)8449     void getPhysicalDeviceFormatPropertiesCore(
8450         std::function<void(VkPhysicalDevice, VkFormat, VkFormatProperties1or2*)>
8451             getPhysicalDeviceFormatPropertiesFunc,
8452         VulkanDispatch* vk, VkPhysicalDevice physicalDevice, VkFormat format,
8453         VkFormatProperties1or2* pFormatProperties) {
8454         if (isEmulatedCompressedTexture(format, physicalDevice, vk)) {
8455             getPhysicalDeviceFormatPropertiesFunc(
8456                 physicalDevice, CompressedImageInfo::getOutputFormat(format),
8457                 pFormatProperties);
8458             maskFormatPropertiesForEmulatedTextures(pFormatProperties);
8459             return;
8460         }
8461         getPhysicalDeviceFormatPropertiesFunc(physicalDevice, format, pFormatProperties);
8462     }
8463 
executePreprocessRecursive(int level,VkCommandBuffer cmdBuffer)8464     void executePreprocessRecursive(int level, VkCommandBuffer cmdBuffer) {
8465         auto* cmdBufferInfo = android::base::find(mCommandBufferInfo, cmdBuffer);
8466         if (!cmdBufferInfo) return;
8467         for (const auto& func : cmdBufferInfo->preprocessFuncs) {
8468             func();
8469         }
8470         // TODO: fix
8471         // for (const auto& subCmd : cmdBufferInfo->subCmds) {
8472         // executePreprocessRecursive(level + 1, subCmd);
8473         // }
8474     }
8475 
executePreprocessRecursive(const VkSubmitInfo & submit)8476     void executePreprocessRecursive(const VkSubmitInfo& submit) {
8477         for (uint32_t c = 0; c < submit.commandBufferCount; c++) {
8478             executePreprocessRecursive(0, submit.pCommandBuffers[c]);
8479         }
8480     }
8481 
executePreprocessRecursive(const VkSubmitInfo2 & submit)8482     void executePreprocessRecursive(const VkSubmitInfo2& submit) {
8483         for (uint32_t c = 0; c < submit.commandBufferInfoCount; c++) {
8484             executePreprocessRecursive(0, submit.pCommandBufferInfos[c].commandBuffer);
8485         }
8486     }
8487 
    // Moves every entry of `inputMap` whose info records the given `device`
    // into `outputMap`, removing it from `inputMap`. Used to gather all
    // per-device tracking state when tearing down a device/instance.
    template <typename VkHandleToInfoMap,
              typename VkHandleType = typename std::decay_t<VkHandleToInfoMap>::key_type>
    void extractInfosWithDeviceInto(VkDevice device, VkHandleToInfoMap& inputMap,
                                    VkHandleToInfoMap& outputMap) {
        for (auto it = inputMap.begin(); it != inputMap.end();) {
            // "Extracting a node invalidates only the iterators to the extracted element ..."
            auto current = it++;

            auto& info = current->second;
            if (info.device == device) {
                outputMap.insert(inputMap.extract(current));
            }
        }
    }
8502 
extractInstanceAndDependenciesLocked(VkInstance instance,InstanceObjects & objects)8503     void extractInstanceAndDependenciesLocked(VkInstance instance, InstanceObjects& objects) {
8504         auto instanceInfoIt = mInstanceInfo.find(instance);
8505         if (instanceInfoIt == mInstanceInfo.end()) return;
8506         auto& instanceInfo = instanceInfoIt->second;
8507 
8508         objects.instance = mInstanceInfo.extract(instanceInfoIt);
8509 
8510         for (auto [device, physicalDevice] : mDeviceToPhysicalDevice) {
8511             auto physicalDeviceInstanceIt = mPhysicalDeviceToInstance.find(physicalDevice);
8512             if (physicalDeviceInstanceIt == mPhysicalDeviceToInstance.end()) continue;
8513             auto physicalDeviceInstance = physicalDeviceInstanceIt->second;
8514 
8515             if (physicalDeviceInstance != instance) continue;
8516             mPhysicalDeviceToInstance.erase(physicalDeviceInstanceIt);
8517 
8518             mPhysdevInfo.erase(physicalDevice);
8519 
8520             auto deviceInfoIt = mDeviceInfo.find(device);
8521             if (deviceInfoIt == mDeviceInfo.end()) continue;
8522 
8523             InstanceObjects::DeviceObjects& deviceObjects = objects.devices.emplace_back();
8524             deviceObjects.device = mDeviceInfo.extract(deviceInfoIt);
8525 
8526             extractInfosWithDeviceInto(device, mBufferInfo, deviceObjects.buffers);
8527             extractInfosWithDeviceInto(device, mCommandBufferInfo, deviceObjects.commandBuffers);
8528             extractInfosWithDeviceInto(device, mCommandPoolInfo, deviceObjects.commandPools);
8529             extractInfosWithDeviceInto(device, mDescriptorPoolInfo, deviceObjects.descriptorPools);
8530             extractInfosWithDeviceInto(device, mDescriptorSetLayoutInfo,
8531                                        deviceObjects.descriptorSetLayouts);
8532             extractInfosWithDeviceInto(device, mFenceInfo, deviceObjects.fences);
8533             extractInfosWithDeviceInto(device, mFramebufferInfo, deviceObjects.framebuffers);
8534             extractInfosWithDeviceInto(device, mImageInfo, deviceObjects.images);
8535             extractInfosWithDeviceInto(device, mImageViewInfo, deviceObjects.imageViews);
8536             extractInfosWithDeviceInto(device, mMemoryInfo, deviceObjects.memories);
8537             extractInfosWithDeviceInto(device, mPipelineCacheInfo, deviceObjects.pipelineCaches);
8538             extractInfosWithDeviceInto(device, mQueueInfo, deviceObjects.queues);
8539             extractInfosWithDeviceInto(device, mPipelineInfo, deviceObjects.pipelines);
8540             extractInfosWithDeviceInto(device, mRenderPassInfo, deviceObjects.renderPasses);
8541             extractInfosWithDeviceInto(device, mSemaphoreInfo, deviceObjects.semaphores);
8542             extractInfosWithDeviceInto(device, mShaderModuleInfo, deviceObjects.shaderModules);
8543         }
8544 
8545         for (InstanceObjects::DeviceObjects& deviceObjects : objects.devices) {
8546             mDeviceToPhysicalDevice.erase(deviceObjects.device.key());
8547         }
8548 
8549         for (auto it = mPhysicalDeviceToInstance.begin(); it != mPhysicalDeviceToInstance.end();) {
8550             auto current = it++;
8551             auto physicalDevice = current->first;
8552             auto& physicalDeviceInstance = current->second;
8553             if (physicalDeviceInstance != instance) continue;
8554             mPhysicalDeviceToInstance.erase(current);
8555             mPhysdevInfo.erase(physicalDevice);
8556         }
8557     }
8558 
    // Destroys all Vulkan objects previously extracted by
    // extractInstanceAndDependenciesLocked(): per device, child objects are
    // destroyed in dependency order (views before images, command buffers
    // before pools, sets' pools before layouts), then the device itself, and
    // finally the instance.
    void destroyInstanceObjects(InstanceObjects& objects) {
        VkInstance instance = objects.instance.key();
        InstanceInfo& instanceInfo = objects.instance.mapped();

        for (InstanceObjects::DeviceObjects& deviceObjects : objects.devices) {
            VkDevice device = deviceObjects.device.key();
            DeviceInfo& deviceInfo = deviceObjects.device.mapped();
            VulkanDispatch* deviceDispatch = dispatch_VkDevice(deviceInfo.boxed);

            // https://bugs.chromium.org/p/chromium/issues/detail?id=1074600
            // it's important to idle the device before destroying it!
            deviceDispatch->vkDeviceWaitIdle(device);

            for (auto& [semaphore, semaphoreInfo] : deviceObjects.semaphores) {
                destroySemaphoreWithExclusiveInfo(device, deviceDispatch, semaphore, semaphoreInfo,
                                                  nullptr);
            }

            for (auto& [sampler, samplerInfo] : deviceObjects.samplers) {
                destroySamplerWithExclusiveInfo(device, deviceDispatch, sampler, samplerInfo,
                                                nullptr);
            }

            for (auto& [buffer, bufferInfo] : deviceObjects.buffers) {
                destroyBufferWithExclusiveInfo(device, deviceDispatch, buffer, bufferInfo, nullptr);
            }

            // Image views must go before the images they reference.
            for (auto& [imageView, imageViewInfo] : deviceObjects.imageViews) {
                destroyImageViewWithExclusiveInfo(device, deviceDispatch, imageView, imageViewInfo,
                                                  nullptr);
            }

            for (auto& [image, imageInfo] : deviceObjects.images) {
                destroyImageWithExclusiveInfo(device, deviceDispatch, image, imageInfo, nullptr);
            }

            // Memory is freed after the buffers/images that were bound to it.
            for (auto& [memory, memoryInfo] : deviceObjects.memories) {
                destroyMemoryWithExclusiveInfo(device, deviceDispatch, memory, memoryInfo, nullptr);
            }

            for (auto& [commandBuffer, commandBufferInfo] : deviceObjects.commandBuffers) {
                freeCommandBufferWithExclusiveInfos(device, deviceDispatch, commandBuffer,
                                                       commandBufferInfo,
                                                       deviceObjects.commandPools);
            }

            for (auto& [commandPool, commandPoolInfo] : deviceObjects.commandPools) {
                destroyCommandPoolWithExclusiveInfo(device, deviceDispatch, commandPool,
                                                    commandPoolInfo, deviceObjects.commandBuffers,
                                                    nullptr);
            }

            // Destroying a pool implicitly frees its descriptor sets.
            for (auto& [descriptorPool, descriptorPoolInfo] : deviceObjects.descriptorPools) {
                destroyDescriptorPoolWithExclusiveInfo(device, deviceDispatch, descriptorPool,
                                                       descriptorPoolInfo,
                                                       deviceObjects.descriptorSets, nullptr);
            }

            for (auto& [descriptorSetLayout, descriptorSetLayoutInfo] :
                 deviceObjects.descriptorSetLayouts) {
                destroyDescriptorSetLayoutWithExclusiveInfo(
                    device, deviceDispatch, descriptorSetLayout, descriptorSetLayoutInfo, nullptr);
            }

            for (auto& [shaderModule, shaderModuleInfo] : deviceObjects.shaderModules) {
                destroyShaderModuleWithExclusiveInfo(device, deviceDispatch, shaderModule,
                                                     shaderModuleInfo, nullptr);
            }

            for (auto& [pipeline, pipelineInfo] : deviceObjects.pipelines) {
                destroyPipelineWithExclusiveInfo(device, deviceDispatch, pipeline, pipelineInfo,
                                                 nullptr);
            }

            for (auto& [pipelineCache, pipelineCacheInfo] : deviceObjects.pipelineCaches) {
                destroyPipelineCacheWithExclusiveInfo(device, deviceDispatch, pipelineCache,
                                                      pipelineCacheInfo, nullptr);
            }

            for (auto& [framebuffer, framebufferInfo] : deviceObjects.framebuffers) {
                destroyFramebufferWithExclusiveInfo(device, deviceDispatch, framebuffer,
                                                    framebufferInfo, nullptr);
            }

            for (auto& [renderPass, renderPassInfo] : deviceObjects.renderPasses) {
                destroyRenderPassWithExclusiveInfo(device, deviceDispatch, renderPass,
                                                   renderPassInfo, nullptr);
            }

            // Destroys the VkDevice and releases fences/queues tracking.
            destroyDeviceWithExclusiveInfo(device, deviceObjects.device.mapped(),
                                           deviceObjects.fences, deviceObjects.queues, nullptr);
        }

        m_vk->vkDestroyInstance(instance, nullptr);
        delete_VkInstance(instanceInfo.boxed);
    }
8655 
isDescriptorTypeImageInfo(VkDescriptorType descType)8656     bool isDescriptorTypeImageInfo(VkDescriptorType descType) {
8657         return (descType == VK_DESCRIPTOR_TYPE_SAMPLER) ||
8658                (descType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) ||
8659                (descType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) ||
8660                (descType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) ||
8661                (descType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
8662     }
8663 
descriptorTypeContainsImage(VkDescriptorType descType)8664     bool descriptorTypeContainsImage(VkDescriptorType descType) {
8665         return (descType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) ||
8666                (descType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) ||
8667                (descType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) ||
8668                (descType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
8669     }
8670 
descriptorTypeContainsSampler(VkDescriptorType descType)8671     bool descriptorTypeContainsSampler(VkDescriptorType descType) {
8672         return (descType == VK_DESCRIPTOR_TYPE_SAMPLER) ||
8673                (descType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
8674     }
8675 
isDescriptorTypeBufferInfo(VkDescriptorType descType)8676     bool isDescriptorTypeBufferInfo(VkDescriptorType descType) {
8677         return (descType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) ||
8678                (descType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) ||
8679                (descType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER) ||
8680                (descType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC);
8681     }
8682 
isDescriptorTypeBufferView(VkDescriptorType descType)8683     bool isDescriptorTypeBufferView(VkDescriptorType descType) {
8684         return (descType == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER) ||
8685                (descType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);
8686     }
8687 
isDescriptorTypeInlineUniformBlock(VkDescriptorType descType)8688     bool isDescriptorTypeInlineUniformBlock(VkDescriptorType descType) {
8689         return descType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT;
8690     }
8691 
isDescriptorTypeAccelerationStructure(VkDescriptorType descType)8692     bool isDescriptorTypeAccelerationStructure(VkDescriptorType descType) {
8693         return descType == VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR;
8694     }
8695 
descriptorDependencyObjectCount(VkDescriptorType descType)8696     int descriptorDependencyObjectCount(VkDescriptorType descType) {
8697         switch (descType) {
8698             case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
8699                 return 2;
8700             case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
8701             case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
8702             case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
8703             case VK_DESCRIPTOR_TYPE_SAMPLER:
8704             case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
8705             case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
8706             case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
8707             case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
8708             case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
8709             case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
8710                 return 1;
8711             default:
8712                 return 0;
8713         }
8714     }
8715 
    // Host-side representation of a descriptor update template. Guest entries
    // are re-linearized into one preallocated buffer (`data`), laid out as:
    // image infos, then buffer infos, then buffer views, then inline uniform
    // block bytes (see calcLinearizedDescriptorUpdateTemplateInfo()).
    struct DescriptorUpdateTemplateInfo {
        VkDescriptorUpdateTemplateCreateInfo createInfo;
        // Entries rewritten with offsets/strides pointing into `data`.
        std::vector<VkDescriptorUpdateTemplateEntry> linearizedTemplateEntries;
        // Preallocated pData
        std::vector<uint8_t> data;
        // Byte offsets of each section within `data`.
        size_t imageInfoStart;
        size_t bufferInfoStart;
        size_t bufferViewStart;
        size_t inlineUniformBlockStart;
    };
8726 
    // Builds a host-side DescriptorUpdateTemplateInfo from the guest's create
    // info: counts how many image infos / buffer infos / buffer views /
    // inline-uniform-block bytes the template needs, preallocates one flat
    // buffer for all of them, and rewrites each entry's offset/stride to index
    // into that buffer so descriptor data can be streamed contiguously.
    DescriptorUpdateTemplateInfo calcLinearizedDescriptorUpdateTemplateInfo(
        const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo) {
        DescriptorUpdateTemplateInfo res;
        // NOTE(review): shallow copy — pNext (if any) still points at caller
        // memory; pDescriptorUpdateEntries is re-pointed at the end.
        res.createInfo = *pCreateInfo;

        // First pass: tally storage per descriptor category. For inline
        // uniform blocks, descriptorCount is a byte count per the Vulkan spec.
        size_t numImageInfos = 0;
        size_t numBufferInfos = 0;
        size_t numBufferViews = 0;
        size_t numInlineUniformBlocks = 0;

        for (uint32_t i = 0; i < pCreateInfo->descriptorUpdateEntryCount; ++i) {
            const auto& entry = pCreateInfo->pDescriptorUpdateEntries[i];
            auto type = entry.descriptorType;
            auto count = entry.descriptorCount;
            if (isDescriptorTypeImageInfo(type)) {
                numImageInfos += count;
            } else if (isDescriptorTypeBufferInfo(type)) {
                numBufferInfos += count;
            } else if (isDescriptorTypeBufferView(type)) {
                numBufferViews += count;
            } else if (type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
                numInlineUniformBlocks += count;
            } else {
                GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
                    << "unknown descriptor type 0x" << std::hex << type;
            }
        }

        // Lay the categories out back to back in one buffer.
        size_t imageInfoBytes = numImageInfos * sizeof(VkDescriptorImageInfo);
        size_t bufferInfoBytes = numBufferInfos * sizeof(VkDescriptorBufferInfo);
        size_t bufferViewBytes = numBufferViews * sizeof(VkBufferView);
        size_t inlineUniformBlockBytes = numInlineUniformBlocks;

        res.data.resize(imageInfoBytes + bufferInfoBytes + bufferViewBytes +
                        inlineUniformBlockBytes);
        res.imageInfoStart = 0;
        res.bufferInfoStart = imageInfoBytes;
        res.bufferViewStart = imageInfoBytes + bufferInfoBytes;
        res.inlineUniformBlockStart = imageInfoBytes + bufferInfoBytes + bufferViewBytes;

        // Second pass: rewrite each entry so its offset/stride index into
        // res.data instead of the guest's original layout.
        //
        // NOTE(review): the typed branches advance their running counter by
        // one element per *entry* (++imageInfoCount etc.) while the first pass
        // reserved descriptorCount elements per entry — entries with
        // descriptorCount > 1 would produce overlapping offsets. Verify
        // whether the guest ever sends multi-descriptor entries here.
        size_t imageInfoCount = 0;
        size_t bufferInfoCount = 0;
        size_t bufferViewCount = 0;
        size_t inlineUniformBlockCount = 0;

        for (uint32_t i = 0; i < pCreateInfo->descriptorUpdateEntryCount; ++i) {
            const auto& entry = pCreateInfo->pDescriptorUpdateEntries[i];
            VkDescriptorUpdateTemplateEntry entryForHost = entry;

            auto type = entry.descriptorType;

            if (isDescriptorTypeImageInfo(type)) {
                entryForHost.offset =
                    res.imageInfoStart + imageInfoCount * sizeof(VkDescriptorImageInfo);
                entryForHost.stride = sizeof(VkDescriptorImageInfo);
                ++imageInfoCount;
            } else if (isDescriptorTypeBufferInfo(type)) {
                entryForHost.offset =
                    res.bufferInfoStart + bufferInfoCount * sizeof(VkDescriptorBufferInfo);
                entryForHost.stride = sizeof(VkDescriptorBufferInfo);
                ++bufferInfoCount;
            } else if (isDescriptorTypeBufferView(type)) {
                entryForHost.offset = res.bufferViewStart + bufferViewCount * sizeof(VkBufferView);
                entryForHost.stride = sizeof(VkBufferView);
                ++bufferViewCount;
            } else if (type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
                // Inline uniform block data is raw bytes; stride is unused.
                entryForHost.offset = res.inlineUniformBlockStart + inlineUniformBlockCount;
                entryForHost.stride = 0;
                inlineUniformBlockCount += entryForHost.descriptorCount;
            } else {
                GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
                    << "unknown descriptor type 0x" << std::hex << type;
            }

            res.linearizedTemplateEntries.push_back(entryForHost);
        }

        res.createInfo.pDescriptorUpdateEntries = res.linearizedTemplateEntries.data();

        return res;
    }
8808 
registerDescriptorUpdateTemplate(VkDescriptorUpdateTemplate descriptorUpdateTemplate,const DescriptorUpdateTemplateInfo & info)8809     void registerDescriptorUpdateTemplate(VkDescriptorUpdateTemplate descriptorUpdateTemplate,
8810                                           const DescriptorUpdateTemplateInfo& info) {
8811         std::lock_guard<std::recursive_mutex> lock(mLock);
8812         mDescriptorUpdateTemplateInfo[descriptorUpdateTemplate] = info;
8813     }
8814 
unregisterDescriptorUpdateTemplate(VkDescriptorUpdateTemplate descriptorUpdateTemplate)8815     void unregisterDescriptorUpdateTemplate(VkDescriptorUpdateTemplate descriptorUpdateTemplate) {
8816         std::lock_guard<std::recursive_mutex> lock(mLock);
8817         mDescriptorUpdateTemplateInfo.erase(descriptorUpdateTemplate);
8818     }
8819 
8820     // Returns the VkInstance associated with a VkDevice, or null if it's not found
deviceToInstanceLocked(VkDevice device)8821     VkInstance* deviceToInstanceLocked(VkDevice device) {
8822         auto* physicalDevice = android::base::find(mDeviceToPhysicalDevice, device);
8823         if (!physicalDevice) return nullptr;
8824         return android::base::find(mPhysicalDeviceToInstance, *physicalDevice);
8825     }
8826 
    // Host Vulkan dispatch table and global emulation state.
    VulkanDispatch* m_vk;
    VkEmulation* m_emu;
    emugl::RenderDocWithMultipleVkInstances* mRenderDocWithMultipleVkInstances = nullptr;
    // Feature/behavior toggles (set during initialization; see the accessors
    // forwarded from VkDecoderGlobalState).
    bool mSnapshotsEnabled = false;
    bool mBatchedDescriptorSetUpdateEnabled = false;
    bool mVkCleanupEnabled = true;
    bool mLogging = false;
    bool mVerbosePrints = false;
    bool mUseOldMemoryCleanupPath = false;
    bool mEnableVirtualVkQueue = false;

    // Guards the handle-tracking maps below. Recursive, so helpers may be
    // called with the lock already held.
    std::recursive_mutex mLock;
8839 
isBindingFeasibleForAlloc(const DescriptorPoolInfo::PoolState & poolState,const VkDescriptorSetLayoutBinding & binding)8840     bool isBindingFeasibleForAlloc(const DescriptorPoolInfo::PoolState& poolState,
8841                                    const VkDescriptorSetLayoutBinding& binding) {
8842         if (binding.descriptorCount && (poolState.type != binding.descriptorType)) {
8843             return false;
8844         }
8845 
8846         uint32_t availDescriptorCount = poolState.descriptorCount - poolState.used;
8847 
8848         if (availDescriptorCount < binding.descriptorCount) {
8849             return false;
8850         }
8851 
8852         return true;
8853     }
8854 
isBindingFeasibleForFree(const DescriptorPoolInfo::PoolState & poolState,const VkDescriptorSetLayoutBinding & binding)8855     bool isBindingFeasibleForFree(const DescriptorPoolInfo::PoolState& poolState,
8856                                   const VkDescriptorSetLayoutBinding& binding) {
8857         if (poolState.type != binding.descriptorType) return false;
8858         if (poolState.used < binding.descriptorCount) return false;
8859         return true;
8860     }
8861 
allocBindingFeasible(const VkDescriptorSetLayoutBinding & binding,DescriptorPoolInfo::PoolState & poolState)8862     void allocBindingFeasible(const VkDescriptorSetLayoutBinding& binding,
8863                               DescriptorPoolInfo::PoolState& poolState) {
8864         poolState.used += binding.descriptorCount;
8865     }
8866 
freeBindingFeasible(const VkDescriptorSetLayoutBinding & binding,DescriptorPoolInfo::PoolState & poolState)8867     void freeBindingFeasible(const VkDescriptorSetLayoutBinding& binding,
8868                              DescriptorPoolInfo::PoolState& poolState) {
8869         poolState.used -= binding.descriptorCount;
8870     }
8871 
validateDescriptorSetAllocLocked(const VkDescriptorSetAllocateInfo * pAllocateInfo)8872     VkResult validateDescriptorSetAllocLocked(const VkDescriptorSetAllocateInfo* pAllocateInfo) {
8873         auto* poolInfo = android::base::find(mDescriptorPoolInfo, pAllocateInfo->descriptorPool);
8874         if (!poolInfo) return VK_ERROR_INITIALIZATION_FAILED;
8875 
8876         // Check the number of sets available.
8877         auto setsAvailable = poolInfo->maxSets - poolInfo->usedSets;
8878 
8879         if (setsAvailable < pAllocateInfo->descriptorSetCount) {
8880             return VK_ERROR_OUT_OF_POOL_MEMORY;
8881         }
8882 
8883         // Perform simulated allocation and error out with
8884         // VK_ERROR_OUT_OF_POOL_MEMORY if it fails.
8885         std::vector<DescriptorPoolInfo::PoolState> poolCopy = poolInfo->pools;
8886 
8887         for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; ++i) {
8888             auto setLayoutInfo =
8889                 android::base::find(mDescriptorSetLayoutInfo, pAllocateInfo->pSetLayouts[i]);
8890             if (!setLayoutInfo) return VK_ERROR_INITIALIZATION_FAILED;
8891 
8892             for (const auto& binding : setLayoutInfo->bindings) {
8893                 bool success = false;
8894                 for (auto& pool : poolCopy) {
8895                     if (!isBindingFeasibleForAlloc(pool, binding)) continue;
8896 
8897                     success = true;
8898                     allocBindingFeasible(binding, pool);
8899                     break;
8900                 }
8901 
8902                 if (!success) {
8903                     return VK_ERROR_OUT_OF_POOL_MEMORY;
8904                 }
8905             }
8906         }
8907         return VK_SUCCESS;
8908     }
8909 
applyDescriptorSetAllocationLocked(DescriptorPoolInfo & poolInfo,const std::vector<VkDescriptorSetLayoutBinding> & bindings)8910     void applyDescriptorSetAllocationLocked(
8911         DescriptorPoolInfo& poolInfo, const std::vector<VkDescriptorSetLayoutBinding>& bindings) {
8912         ++poolInfo.usedSets;
8913         for (const auto& binding : bindings) {
8914             for (auto& pool : poolInfo.pools) {
8915                 if (!isBindingFeasibleForAlloc(pool, binding)) continue;
8916                 allocBindingFeasible(binding, pool);
8917                 break;
8918             }
8919         }
8920     }
8921 
removeDescriptorSetAllocationLocked(DescriptorPoolInfo & poolInfo,const std::vector<VkDescriptorSetLayoutBinding> & bindings)8922     void removeDescriptorSetAllocationLocked(
8923         DescriptorPoolInfo& poolInfo, const std::vector<VkDescriptorSetLayoutBinding>& bindings) {
8924         --poolInfo.usedSets;
8925         for (const auto& binding : bindings) {
8926             for (auto& pool : poolInfo.pools) {
8927                 if (!isBindingFeasibleForFree(pool, binding)) continue;
8928                 freeBindingFeasible(binding, pool);
8929                 break;
8930             }
8931         }
8932     }
8933 
    // Tracking info for each live Vulkan handle the decoder manages,
    // guarded by mLock.
    std::unordered_map<VkInstance, InstanceInfo> mInstanceInfo;
    std::unordered_map<VkPhysicalDevice, PhysicalDeviceInfo> mPhysdevInfo;
    std::unordered_map<VkDevice, DeviceInfo> mDeviceInfo;
    std::unordered_map<VkImage, ImageInfo> mImageInfo;
    std::unordered_map<VkImageView, ImageViewInfo> mImageViewInfo;
    std::unordered_map<VkSampler, SamplerInfo> mSamplerInfo;
    std::unordered_map<VkCommandBuffer, CommandBufferInfo> mCommandBufferInfo;
    std::unordered_map<VkCommandPool, CommandPoolInfo> mCommandPoolInfo;
    // TODO: release CommandBufferInfo when a command pool is reset/released
    std::unordered_map<VkQueue, QueueInfo> mQueueInfo;
    std::unordered_map<VkBuffer, BufferInfo> mBufferInfo;
    std::unordered_map<VkDeviceMemory, MemoryInfo> mMemoryInfo;
    std::unordered_map<VkShaderModule, ShaderModuleInfo> mShaderModuleInfo;
    std::unordered_map<VkPipelineCache, PipelineCacheInfo> mPipelineCacheInfo;
    std::unordered_map<VkPipeline, PipelineInfo> mPipelineInfo;
    std::unordered_map<VkRenderPass, RenderPassInfo> mRenderPassInfo;
    std::unordered_map<VkFramebuffer, FramebufferInfo> mFramebufferInfo;
    std::unordered_map<VkSemaphore, SemaphoreInfo> mSemaphoreInfo;
    std::unordered_map<VkFence, FenceInfo> mFenceInfo;
    std::unordered_map<VkDescriptorSetLayout, DescriptorSetLayoutInfo> mDescriptorSetLayoutInfo;
    std::unordered_map<VkDescriptorPool, DescriptorPoolInfo> mDescriptorPoolInfo;
    std::unordered_map<VkDescriptorSet, DescriptorSetInfo> mDescriptorSetInfo;

    // Back-reference to the physical device associated with a particular
    // VkDevice, and the VkDevice corresponding to a VkQueue.
    std::unordered_map<VkDevice, VkPhysicalDevice> mDeviceToPhysicalDevice;
    std::unordered_map<VkPhysicalDevice, VkInstance> mPhysicalDeviceToInstance;

#ifdef _WIN32
    // Next id to hand out for externally shared semaphores on Windows;
    // see genSemaphoreId().
    int mSemaphoreId = 1;
genSemaphoreId()8964     int genSemaphoreId() {
8965         if (mSemaphoreId == -1) {
8966             mSemaphoreId = 1;
8967         }
8968         int res = mSemaphoreId;
8969         ++mSemaphoreId;
8970         return res;
8971     }
    // Maps generated semaphore ids back to their semaphore handles.
    std::unordered_map<int, VkSemaphore> mExternalSemaphoresById;
#endif
    // Linearized descriptor update templates, keyed by template handle.
    std::unordered_map<VkDescriptorUpdateTemplate, DescriptorUpdateTemplateInfo>
        mDescriptorUpdateTemplateInfo;

    // Decoder-wide snapshot/restore machinery.
    VkDecoderSnapshot mSnapshot;

    // Handles recorded for snapshot load (see
    // setCreatedHandlesForSnapshotLoad()) and the index of the next one to
    // consume.
    std::vector<uint64_t> mCreatedHandlesForSnapshotLoad;
    size_t mCreatedHandlesForSnapshotLoadIndex = 0;

    // NOTE: Only present during snapshot loading. This is needed to associate
    // `VkDevice`s with Virtio GPU context ids because API calls are not currently
    // replayed on the "same" RenderThread which originally made the API call so
    // RenderThreadInfoVk::ctx_id is not available.
    std::optional<std::unordered_map<VkDevice, uint32_t>> mSnapshotLoadVkDeviceToVirtioCpuContextId;

    // Guards mOccupiedGpas (separate from mLock).
    Lock mOccupiedGpasLock;
    // Back-reference to the VkDeviceMemory that is occupying a particular
    // guest physical address
    struct OccupiedGpaInfo {
        VulkanDispatch* vk;
        VkDevice device;
        VkDeviceMemory memory;
        uint64_t gpa;       // guest physical address of the mapping
        size_t sizeToPage;  // mapping size, page-aligned
    };
    std::unordered_map<uint64_t, OccupiedGpaInfo> mOccupiedGpas;
8999 
9000     struct LinearImageCreateInfo {
9001         VkExtent3D extent;
9002         VkFormat format;
9003         VkImageUsageFlags usage;
9004 
toDefaultVkgfxstream::vk::VkDecoderGlobalState::Impl::LinearImageCreateInfo9005         VkImageCreateInfo toDefaultVk() const {
9006             return VkImageCreateInfo{
9007                 .sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
9008                 .pNext = nullptr,
9009                 .flags = {},
9010                 .imageType = VK_IMAGE_TYPE_2D,
9011                 .format = format,
9012                 .extent = extent,
9013                 .mipLevels = 1,
9014                 .arrayLayers = 1,
9015                 .samples = VK_SAMPLE_COUNT_1_BIT,
9016                 .tiling = VK_IMAGE_TILING_LINEAR,
9017                 .usage = usage,
9018                 .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
9019                 .queueFamilyIndexCount = 0,
9020                 .pQueueFamilyIndices = nullptr,
9021                 .initialLayout = VK_IMAGE_LAYOUT_UNDEFINED,
9022             };
9023         }
9024 
9025         struct Hash {
operator ()gfxstream::vk::VkDecoderGlobalState::Impl::LinearImageCreateInfo::Hash9026             std::size_t operator()(const LinearImageCreateInfo& ci) const {
9027                 std::size_t s = 0;
9028                 // Magic number used in boost::hash_combine().
9029                 constexpr size_t kHashMagic = 0x9e3779b9;
9030                 s ^= std::hash<uint32_t>{}(ci.extent.width) + kHashMagic + (s << 6) + (s >> 2);
9031                 s ^= std::hash<uint32_t>{}(ci.extent.height) + kHashMagic + (s << 6) + (s >> 2);
9032                 s ^= std::hash<uint32_t>{}(ci.extent.depth) + kHashMagic + (s << 6) + (s >> 2);
9033                 s ^= std::hash<VkFormat>{}(ci.format) + kHashMagic + (s << 6) + (s >> 2);
9034                 s ^= std::hash<VkImageUsageFlags>{}(ci.usage) + kHashMagic + (s << 6) + (s >> 2);
9035                 return s;
9036             }
9037         };
9038     };
9039 
operator ==(const LinearImageCreateInfo & a,const LinearImageCreateInfo & b)9040     friend bool operator==(const LinearImageCreateInfo& a, const LinearImageCreateInfo& b) {
9041         return a.extent.width == b.extent.width && a.extent.height == b.extent.height &&
9042                a.extent.depth == b.extent.depth && a.format == b.format && a.usage == b.usage;
9043     }
9044 
    // Cached vkGetImageSubresourceLayout-style results for linear images.
    struct LinearImageProperties {
        VkDeviceSize offset;
        VkDeviceSize rowPitchAlignment;
    };

    // TODO(liyl): Remove after removing the old vkGetLinearImageLayoutGOOGLE.
    std::unordered_map<VkFormat, LinearImageProperties> mPerFormatLinearImageProperties;

    // Layout properties keyed by the full (extent, format, usage) tuple.
    std::unordered_map<LinearImageCreateInfo, LinearImageProperties, LinearImageCreateInfo::Hash>
        mLinearImageProperties;

    // Whether a snapshot save/load is currently in progress.
    SnapshotState mSnapshotState = SnapshotState::Normal;
9057 };
9058 
VkDecoderGlobalState()9059 VkDecoderGlobalState::VkDecoderGlobalState() : mImpl(new VkDecoderGlobalState::Impl()) {}
9060 
9061 VkDecoderGlobalState::~VkDecoderGlobalState() = default;
9062 
// Process-wide singleton instance.
static VkDecoderGlobalState* sGlobalDecoderState = nullptr;

// static
// Lazily creates the singleton. NOTE: the check-then-create below is not
// synchronized; the first call must happen before any concurrent use.
VkDecoderGlobalState* VkDecoderGlobalState::get() {
    if (sGlobalDecoderState) return sGlobalDecoderState;
    sGlobalDecoderState = new VkDecoderGlobalState;
    return sGlobalDecoderState;
}

// static
// Destroys the singleton; a subsequent get() creates a fresh instance.
void VkDecoderGlobalState::reset() {
    delete sGlobalDecoderState;
    sGlobalDecoderState = nullptr;
}
9077 
// Snapshots
// Thin forwarders to the pimpl.
bool VkDecoderGlobalState::snapshotsEnabled() const { return mImpl->snapshotsEnabled(); }
bool VkDecoderGlobalState::batchedDescriptorSetUpdateEnabled() const { return mImpl->batchedDescriptorSetUpdateEnabled(); }
9081 
newGlobalVkGenericHandle()9082 uint64_t VkDecoderGlobalState::newGlobalVkGenericHandle() {
9083     DispatchableHandleInfo<uint64_t> item;                                                    \
9084     return mImpl->newGlobalHandle(item, Tag_VkGeneric);
9085 }
9086 
// Snapshot phase / feature-set / cleanup-policy accessors, plus snapshot
// serialization entry points — all forwarded to Impl.
VkDecoderGlobalState::SnapshotState VkDecoderGlobalState::getSnapshotState() const {
    return mImpl->getSnapshotState();
}

const gfxstream::host::FeatureSet& VkDecoderGlobalState::getFeatures() const { return mImpl->getFeatures(); }

bool VkDecoderGlobalState::vkCleanupEnabled() const { return mImpl->vkCleanupEnabled(); }

// Serializes decoder state to the stream (snapshot save).
void VkDecoderGlobalState::save(android::base::Stream* stream) { mImpl->save(stream); }

// Restores decoder state from the stream (snapshot load).
void VkDecoderGlobalState::load(android::base::Stream* stream, GfxApiLogger& gfxLogger,
                                HealthMonitor<>* healthMonitor) {
    mImpl->load(stream, gfxLogger, healthMonitor);
}
9101 
// Global decoder lock and snapshot-load handle bookkeeping, forwarded to Impl.
void VkDecoderGlobalState::lock() { mImpl->lock(); }

void VkDecoderGlobalState::unlock() { mImpl->unlock(); }

// Returns the number of bytes consumed from the buffer.
size_t VkDecoderGlobalState::setCreatedHandlesForSnapshotLoad(const unsigned char* buffer) {
    return mImpl->setCreatedHandlesForSnapshotLoad(buffer);
}

void VkDecoderGlobalState::clearCreatedHandlesForSnapshotLoad() {
    mImpl->clearCreatedHandlesForSnapshotLoad();
}
9113 
// Instance lifecycle and physical-device feature queries. Each shim forwards
// to Impl; the *2KHR variant reuses the core *2 implementation.
VkResult VkDecoderGlobalState::on_vkEnumerateInstanceVersion(android::base::BumpPool* pool,
                                                             uint32_t* pApiVersion) {
    return mImpl->on_vkEnumerateInstanceVersion(pool, pApiVersion);
}

VkResult VkDecoderGlobalState::on_vkCreateInstance(android::base::BumpPool* pool,
                                                   const VkInstanceCreateInfo* pCreateInfo,
                                                   const VkAllocationCallbacks* pAllocator,
                                                   VkInstance* pInstance) {
    return mImpl->on_vkCreateInstance(pool, pCreateInfo, pAllocator, pInstance);
}

void VkDecoderGlobalState::on_vkDestroyInstance(android::base::BumpPool* pool, VkInstance instance,
                                                const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroyInstance(pool, instance, pAllocator);
}

VkResult VkDecoderGlobalState::on_vkEnumeratePhysicalDevices(android::base::BumpPool* pool,
                                                             VkInstance instance,
                                                             uint32_t* physicalDeviceCount,
                                                             VkPhysicalDevice* physicalDevices) {
    return mImpl->on_vkEnumeratePhysicalDevices(pool, instance, physicalDeviceCount,
                                                physicalDevices);
}

void VkDecoderGlobalState::on_vkGetPhysicalDeviceFeatures(android::base::BumpPool* pool,
                                                          VkPhysicalDevice physicalDevice,
                                                          VkPhysicalDeviceFeatures* pFeatures) {
    mImpl->on_vkGetPhysicalDeviceFeatures(pool, physicalDevice, pFeatures);
}

void VkDecoderGlobalState::on_vkGetPhysicalDeviceFeatures2(android::base::BumpPool* pool,
                                                           VkPhysicalDevice physicalDevice,
                                                           VkPhysicalDeviceFeatures2* pFeatures) {
    mImpl->on_vkGetPhysicalDeviceFeatures2(pool, physicalDevice, pFeatures);
}

// KHR alias: VkPhysicalDeviceFeatures2KHR is the same struct as the core
// type, so this forwards to the core *2 handler.
void VkDecoderGlobalState::on_vkGetPhysicalDeviceFeatures2KHR(
    android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
    VkPhysicalDeviceFeatures2KHR* pFeatures) {
    mImpl->on_vkGetPhysicalDeviceFeatures2(pool, physicalDevice, pFeatures);
}
9156 
// Physical-device format / image-format / properties queries. KHR-suffixed
// variants forward to the same Impl method as their core *2 counterparts.
VkResult VkDecoderGlobalState::on_vkGetPhysicalDeviceImageFormatProperties(
    android::base::BumpPool* pool, VkPhysicalDevice physicalDevice, VkFormat format,
    VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags,
    VkImageFormatProperties* pImageFormatProperties) {
    return mImpl->on_vkGetPhysicalDeviceImageFormatProperties(
        pool, physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties);
}
VkResult VkDecoderGlobalState::on_vkGetPhysicalDeviceImageFormatProperties2(
    android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
    const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
    VkImageFormatProperties2* pImageFormatProperties) {
    return mImpl->on_vkGetPhysicalDeviceImageFormatProperties2(
        pool, physicalDevice, pImageFormatInfo, pImageFormatProperties);
}
VkResult VkDecoderGlobalState::on_vkGetPhysicalDeviceImageFormatProperties2KHR(
    android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
    const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
    VkImageFormatProperties2* pImageFormatProperties) {
    return mImpl->on_vkGetPhysicalDeviceImageFormatProperties2(
        pool, physicalDevice, pImageFormatInfo, pImageFormatProperties);
}

void VkDecoderGlobalState::on_vkGetPhysicalDeviceFormatProperties(
    android::base::BumpPool* pool, VkPhysicalDevice physicalDevice, VkFormat format,
    VkFormatProperties* pFormatProperties) {
    mImpl->on_vkGetPhysicalDeviceFormatProperties(pool, physicalDevice, format, pFormatProperties);
}

void VkDecoderGlobalState::on_vkGetPhysicalDeviceFormatProperties2(
    android::base::BumpPool* pool, VkPhysicalDevice physicalDevice, VkFormat format,
    VkFormatProperties2* pFormatProperties) {
    mImpl->on_vkGetPhysicalDeviceFormatProperties2(pool, physicalDevice, format, pFormatProperties);
}

void VkDecoderGlobalState::on_vkGetPhysicalDeviceFormatProperties2KHR(
    android::base::BumpPool* pool, VkPhysicalDevice physicalDevice, VkFormat format,
    VkFormatProperties2* pFormatProperties) {
    mImpl->on_vkGetPhysicalDeviceFormatProperties2(pool, physicalDevice, format, pFormatProperties);
}

void VkDecoderGlobalState::on_vkGetPhysicalDeviceProperties(
    android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
    VkPhysicalDeviceProperties* pProperties) {
    mImpl->on_vkGetPhysicalDeviceProperties(pool, physicalDevice, pProperties);
}

void VkDecoderGlobalState::on_vkGetPhysicalDeviceProperties2(
    android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
    VkPhysicalDeviceProperties2* pProperties) {
    mImpl->on_vkGetPhysicalDeviceProperties2(pool, physicalDevice, pProperties);
}
9208 
// Queue-family, presentation, memory-property, and device-extension queries.
// NOTE(review): on_vkQueuePresentKHR sits in the middle of this run in the
// original source order; kept in place.
void VkDecoderGlobalState::on_vkGetPhysicalDeviceQueueFamilyProperties(
    android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
    uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties) {
    mImpl->on_vkGetPhysicalDeviceQueueFamilyProperties(
        pool, physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
}

void VkDecoderGlobalState::on_vkGetPhysicalDeviceQueueFamilyProperties2(
    android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
    uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties) {
    mImpl->on_vkGetPhysicalDeviceQueueFamilyProperties2(
        pool, physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
}

VkResult VkDecoderGlobalState::on_vkQueuePresentKHR(android::base::BumpPool* pool, VkQueue queue,
                                                 const VkPresentInfoKHR* pPresentInfo) {
    return mImpl->on_vkQueuePresentKHR(pool, queue, pPresentInfo);
}

// KHR alias of vkGetPhysicalDeviceProperties2; shares the core handler.
void VkDecoderGlobalState::on_vkGetPhysicalDeviceProperties2KHR(
    android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
    VkPhysicalDeviceProperties2* pProperties) {
    mImpl->on_vkGetPhysicalDeviceProperties2(pool, physicalDevice, pProperties);
}

void VkDecoderGlobalState::on_vkGetPhysicalDeviceMemoryProperties(
    android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
    VkPhysicalDeviceMemoryProperties* pMemoryProperties) {
    mImpl->on_vkGetPhysicalDeviceMemoryProperties(pool, physicalDevice, pMemoryProperties);
}

void VkDecoderGlobalState::on_vkGetPhysicalDeviceMemoryProperties2(
    android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
    VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {
    mImpl->on_vkGetPhysicalDeviceMemoryProperties2(pool, physicalDevice, pMemoryProperties);
}

void VkDecoderGlobalState::on_vkGetPhysicalDeviceMemoryProperties2KHR(
    android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
    VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {
    mImpl->on_vkGetPhysicalDeviceMemoryProperties2(pool, physicalDevice, pMemoryProperties);
}

VkResult VkDecoderGlobalState::on_vkEnumerateDeviceExtensionProperties(
    android::base::BumpPool* pool, VkPhysicalDevice physicalDevice, const char* pLayerName,
    uint32_t* pPropertyCount, VkExtensionProperties* pProperties) {
    return mImpl->on_vkEnumerateDeviceExtensionProperties(pool, physicalDevice, pLayerName,
                                                          pPropertyCount, pProperties);
}
9258 
// Logical-device lifecycle and queue retrieval, forwarded to Impl.
VkResult VkDecoderGlobalState::on_vkCreateDevice(android::base::BumpPool* pool,
                                                 VkPhysicalDevice physicalDevice,
                                                 const VkDeviceCreateInfo* pCreateInfo,
                                                 const VkAllocationCallbacks* pAllocator,
                                                 VkDevice* pDevice) {
    return mImpl->on_vkCreateDevice(pool, physicalDevice, pCreateInfo, pAllocator, pDevice);
}

void VkDecoderGlobalState::on_vkGetDeviceQueue(android::base::BumpPool* pool, VkDevice device,
                                               uint32_t queueFamilyIndex, uint32_t queueIndex,
                                               VkQueue* pQueue) {
    mImpl->on_vkGetDeviceQueue(pool, device, queueFamilyIndex, queueIndex, pQueue);
}

void VkDecoderGlobalState::on_vkGetDeviceQueue2(android::base::BumpPool* pool, VkDevice device,
                                                const VkDeviceQueueInfo2* pQueueInfo,
                                                VkQueue* pQueue) {
    mImpl->on_vkGetDeviceQueue2(pool, device, pQueueInfo, pQueue);
}

void VkDecoderGlobalState::on_vkDestroyDevice(android::base::BumpPool* pool, VkDevice device,
                                              const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroyDevice(pool, device, pAllocator);
}
9283 
// Buffer lifecycle and memory binding, forwarded to Impl.
VkResult VkDecoderGlobalState::on_vkCreateBuffer(android::base::BumpPool* pool, VkDevice device,
                                                 const VkBufferCreateInfo* pCreateInfo,
                                                 const VkAllocationCallbacks* pAllocator,
                                                 VkBuffer* pBuffer) {
    return mImpl->on_vkCreateBuffer(pool, device, pCreateInfo, pAllocator, pBuffer);
}

void VkDecoderGlobalState::on_vkDestroyBuffer(android::base::BumpPool* pool, VkDevice device,
                                              VkBuffer buffer,
                                              const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroyBuffer(pool, device, buffer, pAllocator);
}

VkResult VkDecoderGlobalState::on_vkBindBufferMemory(android::base::BumpPool* pool, VkDevice device,
                                                     VkBuffer buffer, VkDeviceMemory memory,
                                                     VkDeviceSize memoryOffset) {
    return mImpl->on_vkBindBufferMemory(pool, device, buffer, memory, memoryOffset);
}

VkResult VkDecoderGlobalState::on_vkBindBufferMemory2(android::base::BumpPool* pool,
                                                      VkDevice device, uint32_t bindInfoCount,
                                                      const VkBindBufferMemoryInfo* pBindInfos) {
    return mImpl->on_vkBindBufferMemory2(pool, device, bindInfoCount, pBindInfos);
}

// NOTE(review): unlike on_vkBindImageMemory2KHR (which reuses the core *2
// handler), this forwards to a distinct Impl::on_vkBindBufferMemory2KHR —
// presumably intentional; confirm with the Impl definitions.
VkResult VkDecoderGlobalState::on_vkBindBufferMemory2KHR(android::base::BumpPool* pool,
                                                         VkDevice device, uint32_t bindInfoCount,
                                                         const VkBindBufferMemoryInfo* pBindInfos) {
    return mImpl->on_vkBindBufferMemory2KHR(pool, device, bindInfoCount, pBindInfos);
}
9314 
// Image and image-view lifecycle and image memory binding, forwarded to Impl.
VkResult VkDecoderGlobalState::on_vkCreateImage(android::base::BumpPool* pool, VkDevice device,
                                                const VkImageCreateInfo* pCreateInfo,
                                                const VkAllocationCallbacks* pAllocator,
                                                VkImage* pImage) {
    return mImpl->on_vkCreateImage(pool, device, pCreateInfo, pAllocator, pImage);
}

void VkDecoderGlobalState::on_vkDestroyImage(android::base::BumpPool* pool, VkDevice device,
                                             VkImage image,
                                             const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroyImage(pool, device, image, pAllocator);
}

VkResult VkDecoderGlobalState::on_vkBindImageMemory(android::base::BumpPool* pool, VkDevice device,
                                                    VkImage image, VkDeviceMemory memory,
                                                    VkDeviceSize memoryOffset) {
    return mImpl->on_vkBindImageMemory(pool, device, image, memory, memoryOffset);
}

VkResult VkDecoderGlobalState::on_vkBindImageMemory2(android::base::BumpPool* pool, VkDevice device,
                                                     uint32_t bindInfoCount,
                                                     const VkBindImageMemoryInfo* pBindInfos) {
    return mImpl->on_vkBindImageMemory2(pool, device, bindInfoCount, pBindInfos);
}

// KHR alias: shares the core on_vkBindImageMemory2 handler.
VkResult VkDecoderGlobalState::on_vkBindImageMemory2KHR(android::base::BumpPool* pool,
                                                        VkDevice device, uint32_t bindInfoCount,
                                                        const VkBindImageMemoryInfo* pBindInfos) {
    return mImpl->on_vkBindImageMemory2(pool, device, bindInfoCount, pBindInfos);
}

VkResult VkDecoderGlobalState::on_vkCreateImageView(android::base::BumpPool* pool, VkDevice device,
                                                    const VkImageViewCreateInfo* pCreateInfo,
                                                    const VkAllocationCallbacks* pAllocator,
                                                    VkImageView* pView) {
    return mImpl->on_vkCreateImageView(pool, device, pCreateInfo, pAllocator, pView);
}

void VkDecoderGlobalState::on_vkDestroyImageView(android::base::BumpPool* pool, VkDevice device,
                                                 VkImageView imageView,
                                                 const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroyImageView(pool, device, imageView, pAllocator);
}
9358 
// Sampler and semaphore lifecycle, including external-semaphore FD import /
// export and the Google sync-id extension, forwarded to Impl.
VkResult VkDecoderGlobalState::on_vkCreateSampler(android::base::BumpPool* pool, VkDevice device,
                                                  const VkSamplerCreateInfo* pCreateInfo,
                                                  const VkAllocationCallbacks* pAllocator,
                                                  VkSampler* pSampler) {
    return mImpl->on_vkCreateSampler(pool, device, pCreateInfo, pAllocator, pSampler);
}

void VkDecoderGlobalState::on_vkDestroySampler(android::base::BumpPool* pool, VkDevice device,
                                               VkSampler sampler,
                                               const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroySampler(pool, device, sampler, pAllocator);
}

VkResult VkDecoderGlobalState::on_vkCreateSemaphore(android::base::BumpPool* pool, VkDevice device,
                                                    const VkSemaphoreCreateInfo* pCreateInfo,
                                                    const VkAllocationCallbacks* pAllocator,
                                                    VkSemaphore* pSemaphore) {
    return mImpl->on_vkCreateSemaphore(pool, device, pCreateInfo, pAllocator, pSemaphore);
}

VkResult VkDecoderGlobalState::on_vkImportSemaphoreFdKHR(
    android::base::BumpPool* pool, VkDevice device,
    const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo) {
    return mImpl->on_vkImportSemaphoreFdKHR(pool, device, pImportSemaphoreFdInfo);
}

VkResult VkDecoderGlobalState::on_vkGetSemaphoreFdKHR(android::base::BumpPool* pool,
                                                      VkDevice device,
                                                      const VkSemaphoreGetFdInfoKHR* pGetFdInfo,
                                                      int* pFd) {
    return mImpl->on_vkGetSemaphoreFdKHR(pool, device, pGetFdInfo, pFd);
}

VkResult VkDecoderGlobalState::on_vkGetSemaphoreGOOGLE(android::base::BumpPool* pool,
                                                       VkDevice device, VkSemaphore semaphore,
                                                       uint64_t syncId) {
    return mImpl->on_vkGetSemaphoreGOOGLE(pool, device, semaphore, syncId);
}

void VkDecoderGlobalState::on_vkDestroySemaphore(android::base::BumpPool* pool, VkDevice device,
                                                 VkSemaphore semaphore,
                                                 const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroySemaphore(pool, device, semaphore, pAllocator);
}
9403 
// Fence creation and reset, forwarded to Impl.
VkResult VkDecoderGlobalState::on_vkCreateFence(android::base::BumpPool* pool, VkDevice device,
                                                const VkFenceCreateInfo* pCreateInfo,
                                                const VkAllocationCallbacks* pAllocator,
                                                VkFence* pFence) {
    return mImpl->on_vkCreateFence(pool, device, pCreateInfo, pAllocator, pFence);
}

VkResult VkDecoderGlobalState::on_vkResetFences(android::base::BumpPool* pool, VkDevice device,
                                                uint32_t fenceCount, const VkFence* pFences) {
    return mImpl->on_vkResetFences(pool, device, fenceCount, pFences);
}
9415 
on_vkDestroyFence(android::base::BumpPool * pool,VkDevice device,VkFence fence,const VkAllocationCallbacks * pAllocator)9416 void VkDecoderGlobalState::on_vkDestroyFence(android::base::BumpPool* pool, VkDevice device,
9417                                              VkFence fence,
9418                                              const VkAllocationCallbacks* pAllocator) {
9419     return mImpl->on_vkDestroyFence(pool, device, fence, pAllocator);
9420 }
9421 
// Descriptor-set-layout and descriptor-pool lifecycle, forwarded to Impl.
VkResult VkDecoderGlobalState::on_vkCreateDescriptorSetLayout(
    android::base::BumpPool* pool, VkDevice device,
    const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
    VkDescriptorSetLayout* pSetLayout) {
    return mImpl->on_vkCreateDescriptorSetLayout(pool, device, pCreateInfo, pAllocator, pSetLayout);
}

void VkDecoderGlobalState::on_vkDestroyDescriptorSetLayout(
    android::base::BumpPool* pool, VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
    const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroyDescriptorSetLayout(pool, device, descriptorSetLayout, pAllocator);
}

VkResult VkDecoderGlobalState::on_vkCreateDescriptorPool(
    android::base::BumpPool* pool, VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool) {
    return mImpl->on_vkCreateDescriptorPool(pool, device, pCreateInfo, pAllocator, pDescriptorPool);
}

void VkDecoderGlobalState::on_vkDestroyDescriptorPool(android::base::BumpPool* pool,
                                                      VkDevice device,
                                                      VkDescriptorPool descriptorPool,
                                                      const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroyDescriptorPool(pool, device, descriptorPool, pAllocator);
}

VkResult VkDecoderGlobalState::on_vkResetDescriptorPool(android::base::BumpPool* pool,
                                                        VkDevice device,
                                                        VkDescriptorPool descriptorPool,
                                                        VkDescriptorPoolResetFlags flags) {
    return mImpl->on_vkResetDescriptorPool(pool, device, descriptorPool, flags);
}
9454 
// Descriptor-set allocation, freeing, and updates, forwarded to Impl.
VkResult VkDecoderGlobalState::on_vkAllocateDescriptorSets(
    android::base::BumpPool* pool, VkDevice device,
    const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets) {
    return mImpl->on_vkAllocateDescriptorSets(pool, device, pAllocateInfo, pDescriptorSets);
}

VkResult VkDecoderGlobalState::on_vkFreeDescriptorSets(android::base::BumpPool* pool,
                                                       VkDevice device,
                                                       VkDescriptorPool descriptorPool,
                                                       uint32_t descriptorSetCount,
                                                       const VkDescriptorSet* pDescriptorSets) {
    return mImpl->on_vkFreeDescriptorSets(pool, device, descriptorPool, descriptorSetCount,
                                          pDescriptorSets);
}

void VkDecoderGlobalState::on_vkUpdateDescriptorSets(android::base::BumpPool* pool, VkDevice device,
                                                     uint32_t descriptorWriteCount,
                                                     const VkWriteDescriptorSet* pDescriptorWrites,
                                                     uint32_t descriptorCopyCount,
                                                     const VkCopyDescriptorSet* pDescriptorCopies) {
    mImpl->on_vkUpdateDescriptorSets(pool, device, descriptorWriteCount, pDescriptorWrites,
                                     descriptorCopyCount, pDescriptorCopies);
}
9478 
// Dispatch shim: forwards vkCreateShaderModule to the implementation object.
// "boxed_device" follows this file's convention of naming handles that are still
// in their boxed (guest-visible) form.
VkResult VkDecoderGlobalState::on_vkCreateShaderModule(android::base::BumpPool* pool,
                                                       VkDevice boxed_device,
                                                       const VkShaderModuleCreateInfo* pCreateInfo,
                                                       const VkAllocationCallbacks* pAllocator,
                                                       VkShaderModule* pShaderModule) {
    return mImpl->on_vkCreateShaderModule(pool, boxed_device, pCreateInfo, pAllocator,
                                          pShaderModule);
}
9487 
// Dispatch shim: forwards vkDestroyShaderModule to the implementation object.
void VkDecoderGlobalState::on_vkDestroyShaderModule(android::base::BumpPool* pool,
                                                    VkDevice boxed_device,
                                                    VkShaderModule shaderModule,
                                                    const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroyShaderModule(pool, boxed_device, shaderModule, pAllocator);
}
9494 
// Dispatch shim: forwards vkCreatePipelineCache to the implementation object.
VkResult VkDecoderGlobalState::on_vkCreatePipelineCache(
    android::base::BumpPool* pool, VkDevice boxed_device,
    const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
    VkPipelineCache* pPipelineCache) {
    return mImpl->on_vkCreatePipelineCache(pool, boxed_device, pCreateInfo, pAllocator,
                                           pPipelineCache);
}
9502 
// Dispatch shim: forwards vkDestroyPipelineCache to the implementation object.
void VkDecoderGlobalState::on_vkDestroyPipelineCache(android::base::BumpPool* pool,
                                                     VkDevice boxed_device,
                                                     VkPipelineCache pipelineCache,
                                                     const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroyPipelineCache(pool, boxed_device, pipelineCache, pAllocator);
}
9509 
// Dispatch shim: forwards vkCreateGraphicsPipelines to the implementation object.
VkResult VkDecoderGlobalState::on_vkCreateGraphicsPipelines(
    android::base::BumpPool* pool, VkDevice boxed_device, VkPipelineCache pipelineCache,
    uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos,
    const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) {
    return mImpl->on_vkCreateGraphicsPipelines(pool, boxed_device, pipelineCache, createInfoCount,
                                               pCreateInfos, pAllocator, pPipelines);
}
9517 
on_vkCreateComputePipelines(android::base::BumpPool * pool,VkDevice boxed_device,VkPipelineCache pipelineCache,uint32_t createInfoCount,const VkComputePipelineCreateInfo * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkPipeline * pPipelines)9518 VkResult VkDecoderGlobalState::on_vkCreateComputePipelines(
9519     android::base::BumpPool* pool, VkDevice boxed_device, VkPipelineCache pipelineCache,
9520     uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos,
9521     const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) {
9522     return mImpl->on_vkCreateComputePipelines(pool, boxed_device, pipelineCache, createInfoCount,
9523                                                pCreateInfos, pAllocator, pPipelines);
9524 }
9525 
// Dispatch shim: forwards vkDestroyPipeline to the implementation object.
void VkDecoderGlobalState::on_vkDestroyPipeline(android::base::BumpPool* pool,
                                                VkDevice boxed_device, VkPipeline pipeline,
                                                const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroyPipeline(pool, boxed_device, pipeline, pAllocator);
}
9531 
// Dispatch shim: forwards vkCmdCopyBufferToImage to the implementation object.
// Takes the decoder context (unlike most wrappers here) — presumably for
// error reporting/logging in the impl; confirm against the Impl declaration.
void VkDecoderGlobalState::on_vkCmdCopyBufferToImage(
    android::base::BumpPool* pool, VkCommandBuffer commandBuffer, VkBuffer srcBuffer,
    VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
    const VkBufferImageCopy* pRegions, const VkDecoderContext& context) {
    mImpl->on_vkCmdCopyBufferToImage(pool, commandBuffer, srcBuffer, dstImage, dstImageLayout,
                                     regionCount, pRegions, context);
}
9539 
// Dispatch shim: forwards vkCmdCopyImage to the implementation object.
void VkDecoderGlobalState::on_vkCmdCopyImage(android::base::BumpPool* pool,
                                             VkCommandBuffer commandBuffer, VkImage srcImage,
                                             VkImageLayout srcImageLayout, VkImage dstImage,
                                             VkImageLayout dstImageLayout, uint32_t regionCount,
                                             const VkImageCopy* pRegions) {
    mImpl->on_vkCmdCopyImage(pool, commandBuffer, srcImage, srcImageLayout, dstImage,
                             dstImageLayout, regionCount, pRegions);
}
// Dispatch shim: forwards vkCmdCopyImageToBuffer to the implementation object.
void VkDecoderGlobalState::on_vkCmdCopyImageToBuffer(android::base::BumpPool* pool,
                                                     VkCommandBuffer commandBuffer,
                                                     VkImage srcImage, VkImageLayout srcImageLayout,
                                                     VkBuffer dstBuffer, uint32_t regionCount,
                                                     const VkBufferImageCopy* pRegions) {
    mImpl->on_vkCmdCopyImageToBuffer(pool, commandBuffer, srcImage, srcImageLayout, dstBuffer,
                                     regionCount, pRegions);
}
9556 
// Dispatch shim: forwards the Vulkan 1.3 copy-struct variant
// (vkCmdCopyBufferToImage2) to the implementation object.
void VkDecoderGlobalState::on_vkCmdCopyBufferToImage2(android::base::BumpPool* pool,
                                VkCommandBuffer commandBuffer,
                                const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo,
                                const VkDecoderContext& context) {
    mImpl->on_vkCmdCopyBufferToImage2(pool, commandBuffer, pCopyBufferToImageInfo, context);
}
9563 
// Dispatch shim: forwards vkCmdCopyImage2 to the implementation object.
void VkDecoderGlobalState::on_vkCmdCopyImage2(android::base::BumpPool* pool,
    VkCommandBuffer commandBuffer,
    const VkCopyImageInfo2* pCopyImageInfo) {
    mImpl->on_vkCmdCopyImage2(pool, commandBuffer, pCopyImageInfo);
}
9569 
// Dispatch shim: forwards vkCmdCopyImageToBuffer2 to the implementation object.
void VkDecoderGlobalState::on_vkCmdCopyImageToBuffer2(android::base::BumpPool* pool,
                                VkCommandBuffer commandBuffer,
                                const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo) {
    mImpl->on_vkCmdCopyImageToBuffer2(pool, commandBuffer, pCopyImageToBufferInfo);
}
9575 
// Dispatch shim: forwards the VK_KHR_copy_commands2 variant to the
// implementation object's dedicated KHR entry point.
void VkDecoderGlobalState::on_vkCmdCopyBufferToImage2KHR(android::base::BumpPool* pool,
                                VkCommandBuffer commandBuffer,
                                const VkCopyBufferToImageInfo2KHR* pCopyBufferToImageInfo,
                                const VkDecoderContext& context) {
    mImpl->on_vkCmdCopyBufferToImage2KHR(pool, commandBuffer, pCopyBufferToImageInfo, context);
}
9582 
// Dispatch shim: forwards vkCmdCopyImage2KHR to the implementation object.
void VkDecoderGlobalState::on_vkCmdCopyImage2KHR(android::base::BumpPool* pool,
    VkCommandBuffer commandBuffer,
    const VkCopyImageInfo2KHR* pCopyImageInfo) {
    mImpl->on_vkCmdCopyImage2KHR(pool, commandBuffer, pCopyImageInfo);
}
9588 
// Dispatch shim: forwards vkCmdCopyImageToBuffer2KHR to the implementation object.
void VkDecoderGlobalState::on_vkCmdCopyImageToBuffer2KHR(android::base::BumpPool* pool,
                                VkCommandBuffer commandBuffer,
                                const VkCopyImageToBufferInfo2KHR* pCopyImageToBufferInfo) {
    mImpl->on_vkCmdCopyImageToBuffer2KHR(pool, commandBuffer, pCopyImageToBufferInfo);
}
9594 
// Dispatch shim: forwards vkGetImageMemoryRequirements to the implementation object.
void VkDecoderGlobalState::on_vkGetImageMemoryRequirements(
    android::base::BumpPool* pool, VkDevice device, VkImage image,
    VkMemoryRequirements* pMemoryRequirements) {
    mImpl->on_vkGetImageMemoryRequirements(pool, device, image, pMemoryRequirements);
}
9600 
// Dispatch shim: forwards vkGetImageMemoryRequirements2 to the implementation object.
void VkDecoderGlobalState::on_vkGetImageMemoryRequirements2(
    android::base::BumpPool* pool, VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo,
    VkMemoryRequirements2* pMemoryRequirements) {
    mImpl->on_vkGetImageMemoryRequirements2(pool, device, pInfo, pMemoryRequirements);
}
9606 
// KHR alias: vkGetImageMemoryRequirements2KHR is routed to the same core
// (non-KHR) implementation entry point as vkGetImageMemoryRequirements2.
void VkDecoderGlobalState::on_vkGetImageMemoryRequirements2KHR(
    android::base::BumpPool* pool, VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo,
    VkMemoryRequirements2* pMemoryRequirements) {
    mImpl->on_vkGetImageMemoryRequirements2(pool, device, pInfo, pMemoryRequirements);
}
9612 
// Dispatch shim: forwards vkGetBufferMemoryRequirements to the implementation object.
void VkDecoderGlobalState::on_vkGetBufferMemoryRequirements(
    android::base::BumpPool* pool, VkDevice device, VkBuffer buffer,
    VkMemoryRequirements* pMemoryRequirements) {
    mImpl->on_vkGetBufferMemoryRequirements(pool, device, buffer, pMemoryRequirements);
}
9618 
// Dispatch shim: forwards vkGetBufferMemoryRequirements2 to the implementation object.
void VkDecoderGlobalState::on_vkGetBufferMemoryRequirements2(
    android::base::BumpPool* pool, VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo,
    VkMemoryRequirements2* pMemoryRequirements) {
    mImpl->on_vkGetBufferMemoryRequirements2(pool, device, pInfo, pMemoryRequirements);
}
9624 
// KHR alias: vkGetBufferMemoryRequirements2KHR is routed to the same core
// (non-KHR) implementation entry point as vkGetBufferMemoryRequirements2.
void VkDecoderGlobalState::on_vkGetBufferMemoryRequirements2KHR(
    android::base::BumpPool* pool, VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo,
    VkMemoryRequirements2* pMemoryRequirements) {
    mImpl->on_vkGetBufferMemoryRequirements2(pool, device, pInfo, pMemoryRequirements);
}
9630 
// Dispatch shim: forwards vkCmdPipelineBarrier (all three barrier arrays)
// to the implementation object.
void VkDecoderGlobalState::on_vkCmdPipelineBarrier(
    android::base::BumpPool* pool, VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
    VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
    uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers,
    uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers,
    uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) {
    mImpl->on_vkCmdPipelineBarrier(pool, commandBuffer, srcStageMask, dstStageMask, dependencyFlags,
                                   memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
                                   pBufferMemoryBarriers, imageMemoryBarrierCount,
                                   pImageMemoryBarriers);
}
9642 
// Dispatch shim: forwards vkCmdPipelineBarrier2 (synchronization2-style) to
// the implementation object.
void VkDecoderGlobalState::on_vkCmdPipelineBarrier2(android::base::BumpPool* pool,
                                                    VkCommandBuffer commandBuffer,
                                                    const VkDependencyInfo* pDependencyInfo) {
    mImpl->on_vkCmdPipelineBarrier2(pool, commandBuffer, pDependencyInfo);
}
9648 
// Dispatch shim: forwards vkAllocateMemory to the implementation object.
VkResult VkDecoderGlobalState::on_vkAllocateMemory(android::base::BumpPool* pool, VkDevice device,
                                                   const VkMemoryAllocateInfo* pAllocateInfo,
                                                   const VkAllocationCallbacks* pAllocator,
                                                   VkDeviceMemory* pMemory) {
    return mImpl->on_vkAllocateMemory(pool, device, pAllocateInfo, pAllocator, pMemory);
}
9655 
// Dispatch shim: forwards vkFreeMemory to the implementation object.
void VkDecoderGlobalState::on_vkFreeMemory(android::base::BumpPool* pool, VkDevice device,
                                           VkDeviceMemory memory,
                                           const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkFreeMemory(pool, device, memory, pAllocator);
}
9661 
// Dispatch shim: forwards vkMapMemory to the implementation object.
VkResult VkDecoderGlobalState::on_vkMapMemory(android::base::BumpPool* pool, VkDevice device,
                                              VkDeviceMemory memory, VkDeviceSize offset,
                                              VkDeviceSize size, VkMemoryMapFlags flags,
                                              void** ppData) {
    return mImpl->on_vkMapMemory(pool, device, memory, offset, size, flags, ppData);
}
9668 
// Dispatch shim: forwards vkUnmapMemory to the implementation object.
void VkDecoderGlobalState::on_vkUnmapMemory(android::base::BumpPool* pool, VkDevice device,
                                            VkDeviceMemory memory) {
    mImpl->on_vkUnmapMemory(pool, device, memory);
}
9673 
// Returns the impl's host-side mapped pointer for |memory| (non-owning).
uint8_t* VkDecoderGlobalState::getMappedHostPointer(VkDeviceMemory memory) {
    return mImpl->getMappedHostPointer(memory);
}
9677 
// Returns the size the impl records for |memory|.
VkDeviceSize VkDecoderGlobalState::getDeviceMemorySize(VkDeviceMemory memory) {
    return mImpl->getDeviceMemorySize(memory);
}
9681 
// True when the impl reports direct (host-visible) memory mapping is in use.
bool VkDecoderGlobalState::usingDirectMapping() const { return mImpl->usingDirectMapping(); }
9683 
// Returns the impl's snapshot of host feature support (by value).
VkDecoderGlobalState::HostFeatureSupport VkDecoderGlobalState::getHostFeatureSupport() const {
    return mImpl->getHostFeatureSupport();
}
9687 
9688 // VK_ANDROID_native_buffer
// VK_ANDROID_native_buffer: forwards vkGetSwapchainGrallocUsageANDROID to the
// implementation object.
VkResult VkDecoderGlobalState::on_vkGetSwapchainGrallocUsageANDROID(android::base::BumpPool* pool,
                                                                    VkDevice device,
                                                                    VkFormat format,
                                                                    VkImageUsageFlags imageUsage,
                                                                    int* grallocUsage) {
    return mImpl->on_vkGetSwapchainGrallocUsageANDROID(pool, device, format, imageUsage,
                                                       grallocUsage);
}
9697 
// VK_ANDROID_native_buffer: forwards vkGetSwapchainGrallocUsage2ANDROID
// (split consumer/producer usage) to the implementation object.
VkResult VkDecoderGlobalState::on_vkGetSwapchainGrallocUsage2ANDROID(
    android::base::BumpPool* pool, VkDevice device, VkFormat format, VkImageUsageFlags imageUsage,
    VkSwapchainImageUsageFlagsANDROID swapchainImageUsage, uint64_t* grallocConsumerUsage,
    uint64_t* grallocProducerUsage) {
    return mImpl->on_vkGetSwapchainGrallocUsage2ANDROID(pool, device, format, imageUsage,
                                                        swapchainImageUsage, grallocConsumerUsage,
                                                        grallocProducerUsage);
}
9706 
// VK_ANDROID_native_buffer: forwards vkAcquireImageANDROID to the
// implementation object. Ownership of |nativeFenceFd| is whatever the impl's
// contract is — not visible here.
VkResult VkDecoderGlobalState::on_vkAcquireImageANDROID(android::base::BumpPool* pool,
                                                        VkDevice device, VkImage image,
                                                        int nativeFenceFd, VkSemaphore semaphore,
                                                        VkFence fence) {
    return mImpl->on_vkAcquireImageANDROID(pool, device, image, nativeFenceFd, semaphore, fence);
}
9713 
// VK_ANDROID_native_buffer: forwards vkQueueSignalReleaseImageANDROID to the
// implementation object.
VkResult VkDecoderGlobalState::on_vkQueueSignalReleaseImageANDROID(
    android::base::BumpPool* pool, VkQueue queue, uint32_t waitSemaphoreCount,
    const VkSemaphore* pWaitSemaphores, VkImage image, int* pNativeFenceFd) {
    return mImpl->on_vkQueueSignalReleaseImageANDROID(pool, queue, waitSemaphoreCount,
                                                      pWaitSemaphores, image, pNativeFenceFd);
}
9720 
9721 // VK_GOOGLE_gfxstream
// VK_GOOGLE_gfxstream: forwards vkMapMemoryIntoAddressSpaceGOOGLE to the
// implementation object.
VkResult VkDecoderGlobalState::on_vkMapMemoryIntoAddressSpaceGOOGLE(android::base::BumpPool* pool,
                                                                    VkDevice device,
                                                                    VkDeviceMemory memory,
                                                                    uint64_t* pAddress) {
    return mImpl->on_vkMapMemoryIntoAddressSpaceGOOGLE(pool, device, memory, pAddress);
}
9728 
// VK_GOOGLE_gfxstream: forwards vkGetMemoryHostAddressInfoGOOGLE to the
// implementation object.
VkResult VkDecoderGlobalState::on_vkGetMemoryHostAddressInfoGOOGLE(
    android::base::BumpPool* pool, VkDevice device, VkDeviceMemory memory, uint64_t* pAddress,
    uint64_t* pSize, uint64_t* pHostmemId) {
    return mImpl->on_vkGetMemoryHostAddressInfoGOOGLE(pool, device, memory, pAddress, pSize,
                                                      pHostmemId);
}
9735 
// VK_GOOGLE_gfxstream: forwards vkGetBlobGOOGLE to the implementation object.
VkResult VkDecoderGlobalState::on_vkGetBlobGOOGLE(android::base::BumpPool* pool, VkDevice device,
                                                  VkDeviceMemory memory) {
    return mImpl->on_vkGetBlobGOOGLE(pool, device, memory);
}
9740 
// VK_GOOGLE_gfxstream: forwards vkFreeMemorySyncGOOGLE (synchronous free) to
// the implementation object.
VkResult VkDecoderGlobalState::on_vkFreeMemorySyncGOOGLE(android::base::BumpPool* pool,
                                                         VkDevice device, VkDeviceMemory memory,
                                                         const VkAllocationCallbacks* pAllocator) {
    return mImpl->on_vkFreeMemorySyncGOOGLE(pool, device, memory, pAllocator);
}
9746 
// Dispatch shim: forwards vkAllocateCommandBuffers to the implementation object.
VkResult VkDecoderGlobalState::on_vkAllocateCommandBuffers(
    android::base::BumpPool* pool, VkDevice device,
    const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers) {
    return mImpl->on_vkAllocateCommandBuffers(pool, device, pAllocateInfo, pCommandBuffers);
}
9752 
// Dispatch shim: forwards vkCreateCommandPool to the implementation object.
VkResult VkDecoderGlobalState::on_vkCreateCommandPool(android::base::BumpPool* pool,
                                                      VkDevice device,
                                                      const VkCommandPoolCreateInfo* pCreateInfo,
                                                      const VkAllocationCallbacks* pAllocator,
                                                      VkCommandPool* pCommandPool) {
    return mImpl->on_vkCreateCommandPool(pool, device, pCreateInfo, pAllocator, pCommandPool);
}
9760 
// Dispatch shim: forwards vkDestroyCommandPool to the implementation object.
void VkDecoderGlobalState::on_vkDestroyCommandPool(android::base::BumpPool* pool, VkDevice device,
                                                   VkCommandPool commandPool,
                                                   const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroyCommandPool(pool, device, commandPool, pAllocator);
}
9766 
// Dispatch shim: forwards vkResetCommandPool to the implementation object.
VkResult VkDecoderGlobalState::on_vkResetCommandPool(android::base::BumpPool* pool, VkDevice device,
                                                     VkCommandPool commandPool,
                                                     VkCommandPoolResetFlags flags) {
    return mImpl->on_vkResetCommandPool(pool, device, commandPool, flags);
}
9772 
on_vkCmdExecuteCommands(android::base::BumpPool * pool,VkCommandBuffer commandBuffer,uint32_t commandBufferCount,const VkCommandBuffer * pCommandBuffers)9773 void VkDecoderGlobalState::on_vkCmdExecuteCommands(android::base::BumpPool* pool,
9774                                                    VkCommandBuffer commandBuffer,
9775                                                    uint32_t commandBufferCount,
9776                                                    const VkCommandBuffer* pCommandBuffers) {
9777     return mImpl->on_vkCmdExecuteCommands(pool, commandBuffer, commandBufferCount, pCommandBuffers);
9778 }
9779 
// Dispatch shim: forwards vkQueueSubmit to the implementation object.
VkResult VkDecoderGlobalState::on_vkQueueSubmit(android::base::BumpPool* pool, VkQueue queue,
                                                uint32_t submitCount, const VkSubmitInfo* pSubmits,
                                                VkFence fence) {
    return mImpl->on_vkQueueSubmit(pool, queue, submitCount, pSubmits, fence);
}
9785 
// Dispatch shim for vkQueueSubmit2.
// NOTE(review): this calls mImpl->on_vkQueueSubmit (not a dedicated *Submit2
// entry) while passing VkSubmitInfo2* — presumably the impl's on_vkQueueSubmit
// is a template/overload that accepts both submit-info types; confirm against
// the Impl declaration.
VkResult VkDecoderGlobalState::on_vkQueueSubmit2(android::base::BumpPool* pool, VkQueue queue,
                                                 uint32_t submitCount,
                                                 const VkSubmitInfo2* pSubmits, VkFence fence) {
    return mImpl->on_vkQueueSubmit(pool, queue, submitCount, pSubmits, fence);
}
9791 
// Dispatch shim: forwards vkQueueWaitIdle to the implementation object.
VkResult VkDecoderGlobalState::on_vkQueueWaitIdle(android::base::BumpPool* pool, VkQueue queue) {
    return mImpl->on_vkQueueWaitIdle(pool, queue);
}
9795 
// Dispatch shim: forwards vkResetCommandBuffer to the implementation object.
VkResult VkDecoderGlobalState::on_vkResetCommandBuffer(android::base::BumpPool* pool,
                                                       VkCommandBuffer commandBuffer,
                                                       VkCommandBufferResetFlags flags) {
    return mImpl->on_vkResetCommandBuffer(pool, commandBuffer, flags);
}
9801 
on_vkFreeCommandBuffers(android::base::BumpPool * pool,VkDevice device,VkCommandPool commandPool,uint32_t commandBufferCount,const VkCommandBuffer * pCommandBuffers)9802 void VkDecoderGlobalState::on_vkFreeCommandBuffers(android::base::BumpPool* pool, VkDevice device,
9803                                                    VkCommandPool commandPool,
9804                                                    uint32_t commandBufferCount,
9805                                                    const VkCommandBuffer* pCommandBuffers) {
9806     return mImpl->on_vkFreeCommandBuffers(pool, device, commandPool, commandBufferCount,
9807                                           pCommandBuffers);
9808 }
9809 
on_vkGetPhysicalDeviceExternalSemaphoreProperties(android::base::BumpPool * pool,VkPhysicalDevice physicalDevice,const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,VkExternalSemaphoreProperties * pExternalSemaphoreProperties)9810 void VkDecoderGlobalState::on_vkGetPhysicalDeviceExternalSemaphoreProperties(
9811     android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
9812     const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
9813     VkExternalSemaphoreProperties* pExternalSemaphoreProperties) {
9814     return mImpl->on_vkGetPhysicalDeviceExternalSemaphoreProperties(
9815         pool, physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
9816 }
9817 
on_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(android::base::BumpPool * pool,VkPhysicalDevice physicalDevice,const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,VkExternalSemaphoreProperties * pExternalSemaphoreProperties)9818 void VkDecoderGlobalState::on_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(
9819     android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
9820     const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
9821     VkExternalSemaphoreProperties* pExternalSemaphoreProperties) {
9822     return mImpl->on_vkGetPhysicalDeviceExternalSemaphoreProperties(
9823         pool, physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
9824 }
9825 
9826 // Descriptor update templates
// Descriptor update templates: forwards vkCreateDescriptorUpdateTemplate to
// the implementation object.
VkResult VkDecoderGlobalState::on_vkCreateDescriptorUpdateTemplate(
    android::base::BumpPool* pool, VkDevice boxed_device,
    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
    return mImpl->on_vkCreateDescriptorUpdateTemplate(pool, boxed_device, pCreateInfo, pAllocator,
                                                      pDescriptorUpdateTemplate);
}
9835 
// Forwards vkCreateDescriptorUpdateTemplateKHR to the implementation object's
// dedicated KHR entry point (unlike some other KHR wrappers here, this one is
// not aliased to the core entry).
VkResult VkDecoderGlobalState::on_vkCreateDescriptorUpdateTemplateKHR(
    android::base::BumpPool* pool, VkDevice boxed_device,
    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
    return mImpl->on_vkCreateDescriptorUpdateTemplateKHR(pool, boxed_device, pCreateInfo,
                                                         pAllocator, pDescriptorUpdateTemplate);
}
9844 
// Dispatch shim: forwards vkDestroyDescriptorUpdateTemplate to the
// implementation object.
void VkDecoderGlobalState::on_vkDestroyDescriptorUpdateTemplate(
    android::base::BumpPool* pool, VkDevice boxed_device,
    VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroyDescriptorUpdateTemplate(pool, boxed_device, descriptorUpdateTemplate,
                                                pAllocator);
}
9851 
on_vkDestroyDescriptorUpdateTemplateKHR(android::base::BumpPool * pool,VkDevice boxed_device,VkDescriptorUpdateTemplate descriptorUpdateTemplate,const VkAllocationCallbacks * pAllocator)9852 void VkDecoderGlobalState::on_vkDestroyDescriptorUpdateTemplateKHR(
9853     android::base::BumpPool* pool, VkDevice boxed_device,
9854     VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator) {
9855     mImpl->on_vkDestroyDescriptorUpdateTemplateKHR(pool, boxed_device, descriptorUpdateTemplate,
9856                                                    pAllocator);
9857 }
9858 
on_vkUpdateDescriptorSetWithTemplateSizedGOOGLE(android::base::BumpPool * pool,VkDevice boxed_device,VkDescriptorSet descriptorSet,VkDescriptorUpdateTemplate descriptorUpdateTemplate,uint32_t imageInfoCount,uint32_t bufferInfoCount,uint32_t bufferViewCount,const uint32_t * pImageInfoEntryIndices,const uint32_t * pBufferInfoEntryIndices,const uint32_t * pBufferViewEntryIndices,const VkDescriptorImageInfo * pImageInfos,const VkDescriptorBufferInfo * pBufferInfos,const VkBufferView * pBufferViews)9859 void VkDecoderGlobalState::on_vkUpdateDescriptorSetWithTemplateSizedGOOGLE(
9860     android::base::BumpPool* pool, VkDevice boxed_device, VkDescriptorSet descriptorSet,
9861     VkDescriptorUpdateTemplate descriptorUpdateTemplate, uint32_t imageInfoCount,
9862     uint32_t bufferInfoCount, uint32_t bufferViewCount, const uint32_t* pImageInfoEntryIndices,
9863     const uint32_t* pBufferInfoEntryIndices, const uint32_t* pBufferViewEntryIndices,
9864     const VkDescriptorImageInfo* pImageInfos, const VkDescriptorBufferInfo* pBufferInfos,
9865     const VkBufferView* pBufferViews) {
9866     mImpl->on_vkUpdateDescriptorSetWithTemplateSizedGOOGLE(
9867         pool, boxed_device, descriptorSet, descriptorUpdateTemplate, imageInfoCount,
9868         bufferInfoCount, bufferViewCount, pImageInfoEntryIndices, pBufferInfoEntryIndices,
9869         pBufferViewEntryIndices, pImageInfos, pBufferInfos, pBufferViews);
9870 }
9871 
on_vkUpdateDescriptorSetWithTemplateSized2GOOGLE(android::base::BumpPool * pool,VkDevice boxed_device,VkDescriptorSet descriptorSet,VkDescriptorUpdateTemplate descriptorUpdateTemplate,uint32_t imageInfoCount,uint32_t bufferInfoCount,uint32_t bufferViewCount,uint32_t inlineUniformBlockCount,const uint32_t * pImageInfoEntryIndices,const uint32_t * pBufferInfoEntryIndices,const uint32_t * pBufferViewEntryIndices,const VkDescriptorImageInfo * pImageInfos,const VkDescriptorBufferInfo * pBufferInfos,const VkBufferView * pBufferViews,const uint8_t * pInlineUniformBlockData)9872 void VkDecoderGlobalState::on_vkUpdateDescriptorSetWithTemplateSized2GOOGLE(
9873     android::base::BumpPool* pool, VkDevice boxed_device, VkDescriptorSet descriptorSet,
9874     VkDescriptorUpdateTemplate descriptorUpdateTemplate, uint32_t imageInfoCount,
9875     uint32_t bufferInfoCount, uint32_t bufferViewCount, uint32_t inlineUniformBlockCount,
9876     const uint32_t* pImageInfoEntryIndices, const uint32_t* pBufferInfoEntryIndices,
9877     const uint32_t* pBufferViewEntryIndices, const VkDescriptorImageInfo* pImageInfos,
9878     const VkDescriptorBufferInfo* pBufferInfos, const VkBufferView* pBufferViews,
9879     const uint8_t* pInlineUniformBlockData) {
9880     mImpl->on_vkUpdateDescriptorSetWithTemplateSized2GOOGLE(
9881         pool, boxed_device, descriptorSet, descriptorUpdateTemplate, imageInfoCount,
9882         bufferInfoCount, bufferViewCount, inlineUniformBlockCount, pImageInfoEntryIndices,
9883         pBufferInfoEntryIndices, pBufferViewEntryIndices, pImageInfos, pBufferInfos, pBufferViews,
9884         pInlineUniformBlockData);
9885 }
9886 
on_vkBeginCommandBuffer(android::base::BumpPool * pool,VkCommandBuffer commandBuffer,const VkCommandBufferBeginInfo * pBeginInfo,const VkDecoderContext & context)9887 VkResult VkDecoderGlobalState::on_vkBeginCommandBuffer(android::base::BumpPool* pool,
9888                                                        VkCommandBuffer commandBuffer,
9889                                                        const VkCommandBufferBeginInfo* pBeginInfo,
9890                                                        const VkDecoderContext& context) {
9891     return mImpl->on_vkBeginCommandBuffer(pool, commandBuffer, pBeginInfo, context);
9892 }
9893 
on_vkBeginCommandBufferAsyncGOOGLE(android::base::BumpPool * pool,VkCommandBuffer commandBuffer,const VkCommandBufferBeginInfo * pBeginInfo,const VkDecoderContext & context)9894 void VkDecoderGlobalState::on_vkBeginCommandBufferAsyncGOOGLE(
9895     android::base::BumpPool* pool, VkCommandBuffer commandBuffer,
9896     const VkCommandBufferBeginInfo* pBeginInfo, const VkDecoderContext& context) {
9897     mImpl->on_vkBeginCommandBuffer(pool, commandBuffer, pBeginInfo, context);
9898 }
9899 
on_vkEndCommandBuffer(android::base::BumpPool * pool,VkCommandBuffer commandBuffer,const VkDecoderContext & context)9900 VkResult VkDecoderGlobalState::on_vkEndCommandBuffer(android::base::BumpPool* pool,
9901                                                      VkCommandBuffer commandBuffer,
9902                                                      const VkDecoderContext& context) {
9903     return mImpl->on_vkEndCommandBuffer(pool, commandBuffer, context);
9904 }
9905 
on_vkEndCommandBufferAsyncGOOGLE(android::base::BumpPool * pool,VkCommandBuffer commandBuffer,const VkDecoderContext & context)9906 void VkDecoderGlobalState::on_vkEndCommandBufferAsyncGOOGLE(android::base::BumpPool* pool,
9907                                                             VkCommandBuffer commandBuffer,
9908                                                             const VkDecoderContext& context) {
9909     mImpl->on_vkEndCommandBufferAsyncGOOGLE(pool, commandBuffer, context);
9910 }
9911 
on_vkResetCommandBufferAsyncGOOGLE(android::base::BumpPool * pool,VkCommandBuffer commandBuffer,VkCommandBufferResetFlags flags)9912 void VkDecoderGlobalState::on_vkResetCommandBufferAsyncGOOGLE(android::base::BumpPool* pool,
9913                                                               VkCommandBuffer commandBuffer,
9914                                                               VkCommandBufferResetFlags flags) {
9915     mImpl->on_vkResetCommandBufferAsyncGOOGLE(pool, commandBuffer, flags);
9916 }
9917 
on_vkCommandBufferHostSyncGOOGLE(android::base::BumpPool * pool,VkCommandBuffer commandBuffer,uint32_t needHostSync,uint32_t sequenceNumber)9918 void VkDecoderGlobalState::on_vkCommandBufferHostSyncGOOGLE(android::base::BumpPool* pool,
9919                                                             VkCommandBuffer commandBuffer,
9920                                                             uint32_t needHostSync,
9921                                                             uint32_t sequenceNumber) {
9922     mImpl->hostSyncCommandBuffer("hostSync", commandBuffer, needHostSync, sequenceNumber);
9923 }
9924 
on_vkCreateImageWithRequirementsGOOGLE(android::base::BumpPool * pool,VkDevice device,const VkImageCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkImage * pImage,VkMemoryRequirements * pMemoryRequirements)9925 VkResult VkDecoderGlobalState::on_vkCreateImageWithRequirementsGOOGLE(
9926     android::base::BumpPool* pool, VkDevice device, const VkImageCreateInfo* pCreateInfo,
9927     const VkAllocationCallbacks* pAllocator, VkImage* pImage,
9928     VkMemoryRequirements* pMemoryRequirements) {
9929     return mImpl->on_vkCreateImageWithRequirementsGOOGLE(pool, device, pCreateInfo, pAllocator,
9930                                                          pImage, pMemoryRequirements);
9931 }
9932 
on_vkCreateBufferWithRequirementsGOOGLE(android::base::BumpPool * pool,VkDevice device,const VkBufferCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkBuffer * pBuffer,VkMemoryRequirements * pMemoryRequirements)9933 VkResult VkDecoderGlobalState::on_vkCreateBufferWithRequirementsGOOGLE(
9934     android::base::BumpPool* pool, VkDevice device, const VkBufferCreateInfo* pCreateInfo,
9935     const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer,
9936     VkMemoryRequirements* pMemoryRequirements) {
9937     return mImpl->on_vkCreateBufferWithRequirementsGOOGLE(pool, device, pCreateInfo, pAllocator,
9938                                                           pBuffer, pMemoryRequirements);
9939 }
9940 
on_vkCmdBindPipeline(android::base::BumpPool * pool,VkCommandBuffer commandBuffer,VkPipelineBindPoint pipelineBindPoint,VkPipeline pipeline)9941 void VkDecoderGlobalState::on_vkCmdBindPipeline(android::base::BumpPool* pool,
9942                                                 VkCommandBuffer commandBuffer,
9943                                                 VkPipelineBindPoint pipelineBindPoint,
9944                                                 VkPipeline pipeline) {
9945     mImpl->on_vkCmdBindPipeline(pool, commandBuffer, pipelineBindPoint, pipeline);
9946 }
9947 
on_vkCmdBindDescriptorSets(android::base::BumpPool * pool,VkCommandBuffer commandBuffer,VkPipelineBindPoint pipelineBindPoint,VkPipelineLayout layout,uint32_t firstSet,uint32_t descriptorSetCount,const VkDescriptorSet * pDescriptorSets,uint32_t dynamicOffsetCount,const uint32_t * pDynamicOffsets)9948 void VkDecoderGlobalState::on_vkCmdBindDescriptorSets(
9949     android::base::BumpPool* pool, VkCommandBuffer commandBuffer,
9950     VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet,
9951     uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets,
9952     uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) {
9953     mImpl->on_vkCmdBindDescriptorSets(pool, commandBuffer, pipelineBindPoint, layout, firstSet,
9954                                       descriptorSetCount, pDescriptorSets, dynamicOffsetCount,
9955                                       pDynamicOffsets);
9956 }
9957 
on_vkCreateRenderPass(android::base::BumpPool * pool,VkDevice boxed_device,const VkRenderPassCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkRenderPass * pRenderPass)9958 VkResult VkDecoderGlobalState::on_vkCreateRenderPass(android::base::BumpPool* pool,
9959                                                      VkDevice boxed_device,
9960                                                      const VkRenderPassCreateInfo* pCreateInfo,
9961                                                      const VkAllocationCallbacks* pAllocator,
9962                                                      VkRenderPass* pRenderPass) {
9963     return mImpl->on_vkCreateRenderPass(pool, boxed_device, pCreateInfo, pAllocator, pRenderPass);
9964 }
9965 
on_vkCreateRenderPass2(android::base::BumpPool * pool,VkDevice boxed_device,const VkRenderPassCreateInfo2 * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkRenderPass * pRenderPass)9966 VkResult VkDecoderGlobalState::on_vkCreateRenderPass2(android::base::BumpPool* pool,
9967                                                       VkDevice boxed_device,
9968                                                       const VkRenderPassCreateInfo2* pCreateInfo,
9969                                                       const VkAllocationCallbacks* pAllocator,
9970                                                       VkRenderPass* pRenderPass) {
9971     return mImpl->on_vkCreateRenderPass2(pool, boxed_device, pCreateInfo, pAllocator, pRenderPass);
9972 }
9973 
on_vkCreateRenderPass2KHR(android::base::BumpPool * pool,VkDevice boxed_device,const VkRenderPassCreateInfo2KHR * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkRenderPass * pRenderPass)9974 VkResult VkDecoderGlobalState::on_vkCreateRenderPass2KHR(
9975     android::base::BumpPool* pool, VkDevice boxed_device,
9976     const VkRenderPassCreateInfo2KHR* pCreateInfo, const VkAllocationCallbacks* pAllocator,
9977     VkRenderPass* pRenderPass) {
9978     return mImpl->on_vkCreateRenderPass2(pool, boxed_device, pCreateInfo, pAllocator, pRenderPass);
9979 }
9980 
on_vkDestroyRenderPass(android::base::BumpPool * pool,VkDevice boxed_device,VkRenderPass renderPass,const VkAllocationCallbacks * pAllocator)9981 void VkDecoderGlobalState::on_vkDestroyRenderPass(android::base::BumpPool* pool,
9982                                                   VkDevice boxed_device, VkRenderPass renderPass,
9983                                                   const VkAllocationCallbacks* pAllocator) {
9984     mImpl->on_vkDestroyRenderPass(pool, boxed_device, renderPass, pAllocator);
9985 }
9986 
on_vkCmdBeginRenderPass(android::base::BumpPool * pool,VkCommandBuffer commandBuffer,const VkRenderPassBeginInfo * pRenderPassBegin,VkSubpassContents contents)9987 void VkDecoderGlobalState::on_vkCmdBeginRenderPass(android::base::BumpPool* pool,
9988                                                    VkCommandBuffer commandBuffer,
9989                                                    const VkRenderPassBeginInfo* pRenderPassBegin,
9990                                                    VkSubpassContents contents) {
9991     return mImpl->on_vkCmdBeginRenderPass(pool, commandBuffer, pRenderPassBegin, contents);
9992 }
9993 
on_vkCmdBeginRenderPass2(android::base::BumpPool * pool,VkCommandBuffer commandBuffer,const VkRenderPassBeginInfo * pRenderPassBegin,const VkSubpassBeginInfo * pSubpassBeginInfo)9994 void VkDecoderGlobalState::on_vkCmdBeginRenderPass2(android::base::BumpPool* pool,
9995                                                     VkCommandBuffer commandBuffer,
9996                                                     const VkRenderPassBeginInfo* pRenderPassBegin,
9997                                                     const VkSubpassBeginInfo* pSubpassBeginInfo) {
9998     return mImpl->on_vkCmdBeginRenderPass2(pool, commandBuffer, pRenderPassBegin,
9999                                            pSubpassBeginInfo);
10000 }
10001 
on_vkCmdBeginRenderPass2KHR(android::base::BumpPool * pool,VkCommandBuffer commandBuffer,const VkRenderPassBeginInfo * pRenderPassBegin,const VkSubpassBeginInfo * pSubpassBeginInfo)10002 void VkDecoderGlobalState::on_vkCmdBeginRenderPass2KHR(
10003     android::base::BumpPool* pool, VkCommandBuffer commandBuffer,
10004     const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfo* pSubpassBeginInfo) {
10005     return mImpl->on_vkCmdBeginRenderPass2(pool, commandBuffer, pRenderPassBegin,
10006                                            pSubpassBeginInfo);
10007 }
10008 
on_vkCreateFramebuffer(android::base::BumpPool * pool,VkDevice boxed_device,const VkFramebufferCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkFramebuffer * pFramebuffer)10009 VkResult VkDecoderGlobalState::on_vkCreateFramebuffer(android::base::BumpPool* pool,
10010                                                       VkDevice boxed_device,
10011                                                       const VkFramebufferCreateInfo* pCreateInfo,
10012                                                       const VkAllocationCallbacks* pAllocator,
10013                                                       VkFramebuffer* pFramebuffer) {
10014     return mImpl->on_vkCreateFramebuffer(pool, boxed_device, pCreateInfo, pAllocator, pFramebuffer);
10015 }
10016 
on_vkDestroyFramebuffer(android::base::BumpPool * pool,VkDevice boxed_device,VkFramebuffer framebuffer,const VkAllocationCallbacks * pAllocator)10017 void VkDecoderGlobalState::on_vkDestroyFramebuffer(android::base::BumpPool* pool,
10018                                                    VkDevice boxed_device, VkFramebuffer framebuffer,
10019                                                    const VkAllocationCallbacks* pAllocator) {
10020     mImpl->on_vkDestroyFramebuffer(pool, boxed_device, framebuffer, pAllocator);
10021 }
10022 
on_vkQueueHostSyncGOOGLE(android::base::BumpPool * pool,VkQueue queue,uint32_t needHostSync,uint32_t sequenceNumber)10023 void VkDecoderGlobalState::on_vkQueueHostSyncGOOGLE(android::base::BumpPool* pool, VkQueue queue,
10024                                                     uint32_t needHostSync,
10025                                                     uint32_t sequenceNumber) {
10026     mImpl->hostSyncQueue("hostSyncQueue", queue, needHostSync, sequenceNumber);
10027 }
10028 
on_vkCmdCopyQueryPoolResults(android::base::BumpPool * pool,VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,VkBuffer dstBuffer,VkDeviceSize dstOffset,VkDeviceSize stride,VkQueryResultFlags flags)10029 void VkDecoderGlobalState::on_vkCmdCopyQueryPoolResults(android::base::BumpPool* pool,
10030                                                         VkCommandBuffer commandBuffer,
10031                                                         VkQueryPool queryPool, uint32_t firstQuery,
10032                                                         uint32_t queryCount, VkBuffer dstBuffer,
10033                                                         VkDeviceSize dstOffset, VkDeviceSize stride,
10034                                                         VkQueryResultFlags flags) {
10035     mImpl->on_vkCmdCopyQueryPoolResults(pool, commandBuffer, queryPool, firstQuery, queryCount,
10036                                         dstBuffer, dstOffset, stride, flags);
10037 }
10038 
on_vkQueueSubmitAsyncGOOGLE(android::base::BumpPool * pool,VkQueue queue,uint32_t submitCount,const VkSubmitInfo * pSubmits,VkFence fence)10039 void VkDecoderGlobalState::on_vkQueueSubmitAsyncGOOGLE(android::base::BumpPool* pool, VkQueue queue,
10040                                                        uint32_t submitCount,
10041                                                        const VkSubmitInfo* pSubmits,
10042                                                        VkFence fence) {
10043     mImpl->on_vkQueueSubmit(pool, queue, submitCount, pSubmits, fence);
10044 }
10045 
on_vkQueueSubmitAsync2GOOGLE(android::base::BumpPool * pool,VkQueue queue,uint32_t submitCount,const VkSubmitInfo2 * pSubmits,VkFence fence)10046 void VkDecoderGlobalState::on_vkQueueSubmitAsync2GOOGLE(android::base::BumpPool* pool,
10047                                                         VkQueue queue, uint32_t submitCount,
10048                                                         const VkSubmitInfo2* pSubmits,
10049                                                         VkFence fence) {
10050     mImpl->on_vkQueueSubmit(pool, queue, submitCount, pSubmits, fence);
10051 }
10052 
on_vkQueueWaitIdleAsyncGOOGLE(android::base::BumpPool * pool,VkQueue queue)10053 void VkDecoderGlobalState::on_vkQueueWaitIdleAsyncGOOGLE(android::base::BumpPool* pool,
10054                                                          VkQueue queue) {
10055     mImpl->on_vkQueueWaitIdle(pool, queue);
10056 }
10057 
on_vkQueueBindSparseAsyncGOOGLE(android::base::BumpPool * pool,VkQueue queue,uint32_t bindInfoCount,const VkBindSparseInfo * pBindInfo,VkFence fence)10058 void VkDecoderGlobalState::on_vkQueueBindSparseAsyncGOOGLE(android::base::BumpPool* pool,
10059                                                            VkQueue queue, uint32_t bindInfoCount,
10060                                                            const VkBindSparseInfo* pBindInfo,
10061                                                            VkFence fence) {
10062     VkResult res = mImpl->on_vkQueueBindSparse(pool, queue, bindInfoCount, pBindInfo, fence);
10063     if (res != VK_SUCCESS) {
10064         // Report an error here as we don't use the result after this call
10065         ERR("vkQueueBindSparse failed with: %s [%d], bindInfoCount=%d, fence=%p",
10066             string_VkResult(res), res, bindInfoCount, fence);
10067     }
10068 }
10069 
on_vkGetLinearImageLayoutGOOGLE(android::base::BumpPool * pool,VkDevice device,VkFormat format,VkDeviceSize * pOffset,VkDeviceSize * pRowPitchAlignment)10070 void VkDecoderGlobalState::on_vkGetLinearImageLayoutGOOGLE(android::base::BumpPool* pool,
10071                                                            VkDevice device, VkFormat format,
10072                                                            VkDeviceSize* pOffset,
10073                                                            VkDeviceSize* pRowPitchAlignment) {
10074     mImpl->on_vkGetLinearImageLayoutGOOGLE(pool, device, format, pOffset, pRowPitchAlignment);
10075 }
10076 
on_vkGetLinearImageLayout2GOOGLE(android::base::BumpPool * pool,VkDevice device,const VkImageCreateInfo * pCreateInfo,VkDeviceSize * pOffset,VkDeviceSize * pRowPitchAlignment)10077 void VkDecoderGlobalState::on_vkGetLinearImageLayout2GOOGLE(android::base::BumpPool* pool,
10078                                                             VkDevice device,
10079                                                             const VkImageCreateInfo* pCreateInfo,
10080                                                             VkDeviceSize* pOffset,
10081                                                             VkDeviceSize* pRowPitchAlignment) {
10082     mImpl->on_vkGetLinearImageLayout2GOOGLE(pool, device, pCreateInfo, pOffset, pRowPitchAlignment);
10083 }
10084 
on_vkQueueFlushCommandsGOOGLE(android::base::BumpPool * pool,VkQueue queue,VkCommandBuffer commandBuffer,VkDeviceSize dataSize,const void * pData,const VkDecoderContext & context)10085 void VkDecoderGlobalState::on_vkQueueFlushCommandsGOOGLE(android::base::BumpPool* pool,
10086                                                          VkQueue queue,
10087                                                          VkCommandBuffer commandBuffer,
10088                                                          VkDeviceSize dataSize, const void* pData,
10089                                                          const VkDecoderContext& context) {
10090     mImpl->on_vkQueueFlushCommandsGOOGLE(pool, queue, commandBuffer, dataSize, pData, context);
10091 }
10092 
on_vkQueueFlushCommandsFromAuxMemoryGOOGLE(android::base::BumpPool * pool,VkQueue queue,VkCommandBuffer commandBuffer,VkDeviceMemory deviceMemory,VkDeviceSize dataOffset,VkDeviceSize dataSize,const VkDecoderContext & context)10093 void VkDecoderGlobalState::on_vkQueueFlushCommandsFromAuxMemoryGOOGLE(
10094     android::base::BumpPool* pool, VkQueue queue, VkCommandBuffer commandBuffer,
10095     VkDeviceMemory deviceMemory, VkDeviceSize dataOffset, VkDeviceSize dataSize,
10096     const VkDecoderContext& context) {
10097     mImpl->on_vkQueueFlushCommandsFromAuxMemoryGOOGLE(pool, queue, commandBuffer, deviceMemory,
10098                                                       dataOffset, dataSize, context);
10099 }
10100 
on_vkQueueCommitDescriptorSetUpdatesGOOGLE(android::base::BumpPool * pool,VkQueue queue,uint32_t descriptorPoolCount,const VkDescriptorPool * pDescriptorPools,uint32_t descriptorSetCount,const VkDescriptorSetLayout * pDescriptorSetLayouts,const uint64_t * pDescriptorSetPoolIds,const uint32_t * pDescriptorSetWhichPool,const uint32_t * pDescriptorSetPendingAllocation,const uint32_t * pDescriptorWriteStartingIndices,uint32_t pendingDescriptorWriteCount,const VkWriteDescriptorSet * pPendingDescriptorWrites)10101 void VkDecoderGlobalState::on_vkQueueCommitDescriptorSetUpdatesGOOGLE(
10102     android::base::BumpPool* pool, VkQueue queue, uint32_t descriptorPoolCount,
10103     const VkDescriptorPool* pDescriptorPools, uint32_t descriptorSetCount,
10104     const VkDescriptorSetLayout* pDescriptorSetLayouts, const uint64_t* pDescriptorSetPoolIds,
10105     const uint32_t* pDescriptorSetWhichPool, const uint32_t* pDescriptorSetPendingAllocation,
10106     const uint32_t* pDescriptorWriteStartingIndices, uint32_t pendingDescriptorWriteCount,
10107     const VkWriteDescriptorSet* pPendingDescriptorWrites) {
10108     mImpl->on_vkQueueCommitDescriptorSetUpdatesGOOGLE(
10109         pool, queue, descriptorPoolCount, pDescriptorPools, descriptorSetCount,
10110         pDescriptorSetLayouts, pDescriptorSetPoolIds, pDescriptorSetWhichPool,
10111         pDescriptorSetPendingAllocation, pDescriptorWriteStartingIndices,
10112         pendingDescriptorWriteCount, pPendingDescriptorWrites);
10113 }
10114 
on_vkCollectDescriptorPoolIdsGOOGLE(android::base::BumpPool * pool,VkDevice device,VkDescriptorPool descriptorPool,uint32_t * pPoolIdCount,uint64_t * pPoolIds)10115 void VkDecoderGlobalState::on_vkCollectDescriptorPoolIdsGOOGLE(android::base::BumpPool* pool,
10116                                                                VkDevice device,
10117                                                                VkDescriptorPool descriptorPool,
10118                                                                uint32_t* pPoolIdCount,
10119                                                                uint64_t* pPoolIds) {
10120     mImpl->on_vkCollectDescriptorPoolIdsGOOGLE(pool, device, descriptorPool, pPoolIdCount,
10121                                                pPoolIds);
10122 }
10123 
on_vkQueueBindSparse(android::base::BumpPool * pool,VkQueue queue,uint32_t bindInfoCount,const VkBindSparseInfo * pBindInfo,VkFence fence)10124 VkResult VkDecoderGlobalState::on_vkQueueBindSparse(android::base::BumpPool* pool, VkQueue queue,
10125                                                     uint32_t bindInfoCount,
10126                                                     const VkBindSparseInfo* pBindInfo,
10127                                                     VkFence fence) {
10128     return mImpl->on_vkQueueBindSparse(pool, queue, bindInfoCount, pBindInfo, fence);
10129 }
10130 
on_vkQueueSignalReleaseImageANDROIDAsyncGOOGLE(android::base::BumpPool * pool,VkQueue queue,uint32_t waitSemaphoreCount,const VkSemaphore * pWaitSemaphores,VkImage image)10131 void VkDecoderGlobalState::on_vkQueueSignalReleaseImageANDROIDAsyncGOOGLE(
10132     android::base::BumpPool* pool, VkQueue queue, uint32_t waitSemaphoreCount,
10133     const VkSemaphore* pWaitSemaphores, VkImage image) {
10134     int fenceFd;
10135     mImpl->on_vkQueueSignalReleaseImageANDROID(pool, queue, waitSemaphoreCount, pWaitSemaphores,
10136                                                image, &fenceFd);
10137 }
10138 
on_vkCreateSamplerYcbcrConversion(android::base::BumpPool * pool,VkDevice device,const VkSamplerYcbcrConversionCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSamplerYcbcrConversion * pYcbcrConversion)10139 VkResult VkDecoderGlobalState::on_vkCreateSamplerYcbcrConversion(
10140     android::base::BumpPool* pool, VkDevice device,
10141     const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
10142     VkSamplerYcbcrConversion* pYcbcrConversion) {
10143     return mImpl->on_vkCreateSamplerYcbcrConversion(pool, device, pCreateInfo, pAllocator,
10144                                                     pYcbcrConversion);
10145 }
10146 
on_vkCreateSamplerYcbcrConversionKHR(android::base::BumpPool * pool,VkDevice device,const VkSamplerYcbcrConversionCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSamplerYcbcrConversion * pYcbcrConversion)10147 VkResult VkDecoderGlobalState::on_vkCreateSamplerYcbcrConversionKHR(
10148     android::base::BumpPool* pool, VkDevice device,
10149     const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
10150     VkSamplerYcbcrConversion* pYcbcrConversion) {
10151     return mImpl->on_vkCreateSamplerYcbcrConversion(pool, device, pCreateInfo, pAllocator,
10152                                                     pYcbcrConversion);
10153 }
10154 
on_vkDestroySamplerYcbcrConversion(android::base::BumpPool * pool,VkDevice device,VkSamplerYcbcrConversion ycbcrConversion,const VkAllocationCallbacks * pAllocator)10155 void VkDecoderGlobalState::on_vkDestroySamplerYcbcrConversion(
10156     android::base::BumpPool* pool, VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
10157     const VkAllocationCallbacks* pAllocator) {
10158     mImpl->on_vkDestroySamplerYcbcrConversion(pool, device, ycbcrConversion, pAllocator);
10159 }
10160 
on_vkDestroySamplerYcbcrConversionKHR(android::base::BumpPool * pool,VkDevice device,VkSamplerYcbcrConversion ycbcrConversion,const VkAllocationCallbacks * pAllocator)10161 void VkDecoderGlobalState::on_vkDestroySamplerYcbcrConversionKHR(
10162     android::base::BumpPool* pool, VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
10163     const VkAllocationCallbacks* pAllocator) {
10164     mImpl->on_vkDestroySamplerYcbcrConversion(pool, device, ycbcrConversion, pAllocator);
10165 }
10166 
on_vkEnumeratePhysicalDeviceGroups(android::base::BumpPool * pool,VkInstance instance,uint32_t * pPhysicalDeviceGroupCount,VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties)10167 VkResult VkDecoderGlobalState::on_vkEnumeratePhysicalDeviceGroups(
10168     android::base::BumpPool* pool, VkInstance instance, uint32_t* pPhysicalDeviceGroupCount,
10169     VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties) {
10170     return mImpl->on_vkEnumeratePhysicalDeviceGroups(pool, instance, pPhysicalDeviceGroupCount,
10171                                                      pPhysicalDeviceGroupProperties);
10172 }
10173 
on_vkEnumeratePhysicalDeviceGroupsKHR(android::base::BumpPool * pool,VkInstance instance,uint32_t * pPhysicalDeviceGroupCount,VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties)10174 VkResult VkDecoderGlobalState::on_vkEnumeratePhysicalDeviceGroupsKHR(
10175     android::base::BumpPool* pool, VkInstance instance, uint32_t* pPhysicalDeviceGroupCount,
10176     VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties) {
10177     return mImpl->on_vkEnumeratePhysicalDeviceGroups(pool, instance, pPhysicalDeviceGroupCount,
10178                                                      pPhysicalDeviceGroupProperties);
10179 }
10180 
on_DeviceLost()10181 void VkDecoderGlobalState::on_DeviceLost() { mImpl->on_DeviceLost(); }
10182 
on_CheckOutOfMemory(VkResult result,uint32_t opCode,const VkDecoderContext & context,std::optional<uint64_t> allocationSize)10183 void VkDecoderGlobalState::on_CheckOutOfMemory(VkResult result, uint32_t opCode,
10184                                                const VkDecoderContext& context,
10185                                                std::optional<uint64_t> allocationSize) {
10186     mImpl->on_CheckOutOfMemory(result, opCode, context, allocationSize);
10187 }
10188 
waitForFence(VkFence boxed_fence,uint64_t timeout)10189 VkResult VkDecoderGlobalState::waitForFence(VkFence boxed_fence, uint64_t timeout) {
10190     return mImpl->waitForFence(boxed_fence, timeout);
10191 }
10192 
getFenceStatus(VkFence boxed_fence)10193 VkResult VkDecoderGlobalState::getFenceStatus(VkFence boxed_fence) {
10194     return mImpl->getFenceStatus(boxed_fence);
10195 }
10196 
registerQsriCallback(VkImage image,VkQsriTimeline::Callback callback)10197 AsyncResult VkDecoderGlobalState::registerQsriCallback(VkImage image,
10198                                                        VkQsriTimeline::Callback callback) {
10199     return mImpl->registerQsriCallback(image, std::move(callback));
10200 }
10201 
deviceMemoryTransform_tohost(VkDeviceMemory * memory,uint32_t memoryCount,VkDeviceSize * offset,uint32_t offsetCount,VkDeviceSize * size,uint32_t sizeCount,uint32_t * typeIndex,uint32_t typeIndexCount,uint32_t * typeBits,uint32_t typeBitsCount)10202 void VkDecoderGlobalState::deviceMemoryTransform_tohost(VkDeviceMemory* memory,
10203                                                         uint32_t memoryCount, VkDeviceSize* offset,
10204                                                         uint32_t offsetCount, VkDeviceSize* size,
10205                                                         uint32_t sizeCount, uint32_t* typeIndex,
10206                                                         uint32_t typeIndexCount, uint32_t* typeBits,
10207                                                         uint32_t typeBitsCount) {
10208     // Not used currently
10209     (void)memory;
10210     (void)memoryCount;
10211     (void)offset;
10212     (void)offsetCount;
10213     (void)size;
10214     (void)sizeCount;
10215     (void)typeIndex;
10216     (void)typeIndexCount;
10217     (void)typeBits;
10218     (void)typeBitsCount;
10219 }
10220 
deviceMemoryTransform_fromhost(VkDeviceMemory * memory,uint32_t memoryCount,VkDeviceSize * offset,uint32_t offsetCount,VkDeviceSize * size,uint32_t sizeCount,uint32_t * typeIndex,uint32_t typeIndexCount,uint32_t * typeBits,uint32_t typeBitsCount)10221 void VkDecoderGlobalState::deviceMemoryTransform_fromhost(
10222     VkDeviceMemory* memory, uint32_t memoryCount, VkDeviceSize* offset, uint32_t offsetCount,
10223     VkDeviceSize* size, uint32_t sizeCount, uint32_t* typeIndex, uint32_t typeIndexCount,
10224     uint32_t* typeBits, uint32_t typeBitsCount) {
10225     // Not used currently
10226     (void)memory;
10227     (void)memoryCount;
10228     (void)offset;
10229     (void)offsetCount;
10230     (void)size;
10231     (void)sizeCount;
10232     (void)typeIndex;
10233     (void)typeIndexCount;
10234     (void)typeBits;
10235     (void)typeBitsCount;
10236 }
10237 
snapshot()10238 VkDecoderSnapshot* VkDecoderGlobalState::snapshot() { return mImpl->snapshot(); }
10239 
// For every type in LIST_TRANSFORMED_TYPES, emit the public tohost/fromhost
// transform entry points, each a thin forwarder into the pimpl.
// (Comments live outside the macro: a `//` comment inside a backslash-continued
// macro body would swallow the continuation character.)
#define DEFINE_TRANSFORMED_TYPE_IMPL(type)                                                        \
    void VkDecoderGlobalState::transformImpl_##type##_tohost(const type* val, uint32_t count) {   \
        mImpl->transformImpl_##type##_tohost(val, count);                                         \
    }                                                                                             \
    void VkDecoderGlobalState::transformImpl_##type##_fromhost(const type* val, uint32_t count) { \
        mImpl->transformImpl_##type##_fromhost(val, count);                                       \
    }

LIST_TRANSFORMED_TYPES(DEFINE_TRANSFORMED_TYPE_IMPL)
10249 
// Member-function boxed-handle API for dispatchable handle types: creation
// (with an associated dispatch table), deletion, (try-)unboxing, and dispatch
// lookup, all forwarded to the pimpl.
#define DEFINE_BOXED_DISPATCHABLE_HANDLE_API_DEF(type)                                         \
    type VkDecoderGlobalState::new_boxed_##type(type underlying, VulkanDispatch* dispatch,     \
                                                bool ownDispatch) {                            \
        return mImpl->new_boxed_##type(underlying, dispatch, ownDispatch);                     \
    }                                                                                          \
    void VkDecoderGlobalState::delete_##type(type boxed) { mImpl->delete_##type(boxed); }      \
    type VkDecoderGlobalState::unbox_##type(type boxed) { return mImpl->unbox_##type(boxed); } \
    type VkDecoderGlobalState::try_unbox_##type(type boxed) {                                  \
        return mImpl->try_unbox_##type(boxed);                                                 \
    }                                                                                          \
    VulkanDispatch* VkDecoderGlobalState::dispatch_##type(type boxed) {                        \
        return mImpl->dispatch_##type(boxed);                                                  \
    }

// Reverse lookup (unboxed -> boxed) for dispatchable types; only generated for
// the "regular unbox" subset (see invocation below).
#define DEFINE_UNBOXED_TO_BOXED_DISPATCHABLE_HANDLE_API_DEF(type)                              \
    type VkDecoderGlobalState::unboxed_to_boxed_##type(type unboxed) {                         \
        return mImpl->unboxed_to_boxed_##type(unboxed);                                        \
    }

// Member-function boxed-handle API for non-dispatchable handle types; no
// dispatch table is involved.
#define DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_API_DEF(type)                                     \
    type VkDecoderGlobalState::new_boxed_non_dispatchable_##type(type underlying) {            \
        return mImpl->new_boxed_non_dispatchable_##type(underlying);                           \
    }                                                                                          \
    void VkDecoderGlobalState::delete_##type(type boxed) { mImpl->delete_##type(boxed); }      \
    type VkDecoderGlobalState::unbox_##type(type boxed) { return mImpl->unbox_##type(boxed); } \
    type VkDecoderGlobalState::try_unbox_##type(type boxed) {                                  \
        return mImpl->try_unbox_##type(boxed);                                                 \
    }

GOLDFISH_VK_LIST_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_DISPATCHABLE_HANDLE_API_DEF)
GOLDFISH_VK_LIST_NON_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_API_DEF)

// Custom unbox and non dispatchable handles should not use unboxed_to_boxed as there is no 1-1
// mapping
GOLDFISH_VK_LIST_DISPATCHABLE_REGULAR_UNBOX_HANDLE_TYPES(
    DEFINE_UNBOXED_TO_BOXED_DISPATCHABLE_HANDLE_API_DEF)
10286 
// Free-function (non-member) boxed-handle API for dispatchable handle types.
// These operate directly on the process-wide sBoxedHandleManager, so no
// VkDecoderGlobalState instance is needed:
//  - unbox_/try_unbox_: map a boxed handle to its underlying host handle
//    (unbox_ fails silently; try_unbox_ logs a warning).
//  - dispatch_: fetch the dispatch table stored alongside the boxed handle.
//  - delete_: release per-handle resources (order-maintenance info, pooled
//    read stream) before removing the entry from the manager.
//  - unboxed_to_boxed_: reverse lookup under the manager's lock.
#define DEFINE_BOXED_DISPATCHABLE_HANDLE_GLOBAL_API_DEF(type)                                     \
    type unbox_##type(type boxed) {                                                               \
        auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
        if (!elt) return VK_NULL_HANDLE;                                                          \
        return (type)elt->underlying;                                                             \
    }                                                                                             \
    type try_unbox_##type(type boxed) {                                                           \
        auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
        if (!elt) {                                                                               \
            WARN("%s: Failed to unbox %p", __func__, boxed);                                      \
            return VK_NULL_HANDLE;                                                                \
        }                                                                                         \
        return (type)elt->underlying;                                                             \
    }                                                                                             \
    VulkanDispatch* dispatch_##type(type boxed) {                                                 \
        auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
        if (!elt) {                                                                               \
            ERR("%s: Failed to unbox %p", __func__, boxed);                                       \
            return nullptr;                                                                       \
        }                                                                                         \
        return elt->dispatch;                                                                     \
    }                                                                                             \
    void delete_##type(type boxed) {                                                              \
        if (!boxed) return;                                                                       \
        auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
        if (!elt) return;                                                                         \
        releaseOrderMaintInfo(elt->ordMaintInfo);                                                 \
        if (elt->readStream) {                                                                    \
            sReadStreamRegistry.push(elt->readStream);                                            \
            elt->readStream = nullptr;                                                            \
        }                                                                                         \
        sBoxedHandleManager.remove((uint64_t)boxed);                                              \
    }                                                                                             \
    type unboxed_to_boxed_##type(type unboxed) {                                                  \
        AutoLock lock(sBoxedHandleManager.lock);                                                  \
        return (type)sBoxedHandleManager.getBoxedFromUnboxedLocked((uint64_t)(uintptr_t)unboxed); \
    }
10324 
10325 #define DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_GLOBAL_API_DEF(type)                                 \
10326     type new_boxed_non_dispatchable_##type(type underlying) {                                     \
10327         return VkDecoderGlobalState::get()->new_boxed_non_dispatchable_##type(underlying);        \
10328     }                                                                                             \
10329     void delete_##type(type boxed) {                                                              \
10330         if (!boxed) return;                                                                       \
10331         sBoxedHandleManager.remove((uint64_t)boxed);                                              \
10332     }                                                                                             \
10333     void delayed_delete_##type(type boxed, VkDevice device, std::function<void()> callback) {     \
10334         sBoxedHandleManager.removeDelayed((uint64_t)boxed, device, callback);                     \
10335     }                                                                                             \
10336     type unbox_##type(type boxed) {                                                               \
10337         if (!boxed) return boxed;                                                                 \
10338         auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
10339         if (!elt) {                                                                               \
10340             GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))                                       \
10341                 << "Unbox " << boxed << " failed, not found.";                                    \
10342             return VK_NULL_HANDLE;                                                                \
10343         }                                                                                         \
10344         return (type)elt->underlying;                                                             \
10345     }                                                                                             \
10346     type try_unbox_##type(type boxed) {                                                           \
10347         if (!boxed) return boxed;                                                                 \
10348         auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
10349         if (!elt) {                                                                               \
10350             WARN("%s: Failed to unbox %p", __func__, boxed);                                      \
10351             return VK_NULL_HANDLE;                                                                \
10352         }                                                                                         \
10353         return (type)elt->underlying;                                                             \
10354     }                                                                                             \
10355     type unboxed_to_boxed_non_dispatchable_##type(type unboxed) {                                 \
10356         if (!unboxed) {                                                                           \
10357             return nullptr;                                                                       \
10358         }                                                                                         \
10359         AutoLock lock(sBoxedHandleManager.lock);                                                  \
10360         return (type)sBoxedHandleManager.getBoxedFromUnboxedLocked((uint64_t)(uintptr_t)unboxed); \
10361     }
10362 
10363 GOLDFISH_VK_LIST_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_DISPATCHABLE_HANDLE_GLOBAL_API_DEF)
10364 GOLDFISH_VK_LIST_NON_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_GLOBAL_API_DEF)
10365 
10366 void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::setup(android::base::BumpPool* pool,
10367                                                            uint64_t** bufPtr) {
10368     mPool = pool;
10369     mPreserveBufPtr = bufPtr;
10370 }
10371 
allocPreserve(size_t count)10372 void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::allocPreserve(size_t count) {
10373     *mPreserveBufPtr = (uint64_t*)mPool->alloc(count * sizeof(uint64_t));
10374 }
10375 
// For dispatchable handle types: generates the mapHandles_* overloads that
// unbox handles in place while first recording the incoming values into the
// preserve buffer (see allocPreserve). Three directions are covered:
// handle -> handle, handle -> u64, and u64 -> handle.
// NOTE(review): the _u64 overload records the prior contents of the OUTPUT
// array |handle_u64s| into the preserve buffer, not |handles| — this mirrors
// the non-dispatchable variant below, but confirm it matches the snapshot
// format's expectations.
#define BOXED_DISPATCHABLE_HANDLE_UNWRAP_AND_DELETE_PRESERVE_BOXED_IMPL(type_name)        \
    void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_##type_name(          \
        type_name* handles, size_t count) {                                               \
        allocPreserve(count);                                                             \
        for (size_t i = 0; i < count; ++i) {                                              \
            (*mPreserveBufPtr)[i] = (uint64_t)(handles[i]);                               \
            if (handles[i]) {                                                             \
                handles[i] = VkDecoderGlobalState::get()->unbox_##type_name(handles[i]);  \
            } else {                                                                      \
                handles[i] = (type_name) nullptr;                                         \
            };                                                                            \
        }                                                                                 \
    }                                                                                     \
    void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_##type_name##_u64(    \
        const type_name* handles, uint64_t* handle_u64s, size_t count) {                  \
        allocPreserve(count);                                                             \
        for (size_t i = 0; i < count; ++i) {                                              \
            (*mPreserveBufPtr)[i] = (uint64_t)(handle_u64s[i]);                           \
            if (handles[i]) {                                                             \
                handle_u64s[i] =                                                          \
                    (uint64_t)VkDecoderGlobalState::get()->unbox_##type_name(handles[i]); \
            } else {                                                                      \
                handle_u64s[i] = 0;                                                       \
            }                                                                             \
        }                                                                                 \
    }                                                                                     \
    void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_u64_##type_name(      \
        const uint64_t* handle_u64s, type_name* handles, size_t count) {                  \
        allocPreserve(count);                                                             \
        for (size_t i = 0; i < count; ++i) {                                              \
            (*mPreserveBufPtr)[i] = (uint64_t)(handle_u64s[i]);                           \
            if (handle_u64s[i]) {                                                         \
                handles[i] = VkDecoderGlobalState::get()->unbox_##type_name(              \
                    (type_name)(uintptr_t)handle_u64s[i]);                                \
            } else {                                                                      \
                handles[i] = (type_name) nullptr;                                         \
            }                                                                             \
        }                                                                                 \
    }
10415 
10416 #define BOXED_NON_DISPATCHABLE_HANDLE_UNWRAP_AND_DELETE_PRESERVE_BOXED_IMPL(type_name)    \
10417     void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_##type_name(          \
10418         type_name* handles, size_t count) {                                               \
10419         allocPreserve(count);                                                             \
10420         for (size_t i = 0; i < count; ++i) {                                              \
10421             (*mPreserveBufPtr)[i] = (uint64_t)(handles[i]);                               \
10422             if (handles[i]) {                                                             \
10423                 auto boxed = handles[i];                                                  \
10424                 handles[i] = VkDecoderGlobalState::get()->unbox_##type_name(handles[i]);  \
10425                 delete_##type_name(boxed);                                                \
10426             } else {                                                                      \
10427                 handles[i] = (type_name) nullptr;                                         \
10428             };                                                                            \
10429         }                                                                                 \
10430     }                                                                                     \
10431     void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_##type_name##_u64(    \
10432         const type_name* handles, uint64_t* handle_u64s, size_t count) {                  \
10433         allocPreserve(count);                                                             \
10434         for (size_t i = 0; i < count; ++i) {                                              \
10435             (*mPreserveBufPtr)[i] = (uint64_t)(handle_u64s[i]);                           \
10436             if (handles[i]) {                                                             \
10437                 auto boxed = handles[i];                                                  \
10438                 handle_u64s[i] =                                                          \
10439                     (uint64_t)VkDecoderGlobalState::get()->unbox_##type_name(handles[i]); \
10440                 delete_##type_name(boxed);                                                \
10441             } else {                                                                      \
10442                 handle_u64s[i] = 0;                                                       \
10443             }                                                                             \
10444         }                                                                                 \
10445     }                                                                                     \
10446     void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_u64_##type_name(      \
10447         const uint64_t* handle_u64s, type_name* handles, size_t count) {                  \
10448         allocPreserve(count);                                                             \
10449         for (size_t i = 0; i < count; ++i) {                                              \
10450             (*mPreserveBufPtr)[i] = (uint64_t)(handle_u64s[i]);                           \
10451             if (handle_u64s[i]) {                                                         \
10452                 auto boxed = (type_name)(uintptr_t)handle_u64s[i];                        \
10453                 handles[i] = VkDecoderGlobalState::get()->unbox_##type_name(              \
10454                     (type_name)(uintptr_t)handle_u64s[i]);                                \
10455                 delete_##type_name(boxed);                                                \
10456             } else {                                                                      \
10457                 handles[i] = (type_name) nullptr;                                         \
10458             }                                                                             \
10459         }                                                                                 \
10460     }
10461 
10462 GOLDFISH_VK_LIST_DISPATCHABLE_HANDLE_TYPES(
10463     BOXED_DISPATCHABLE_HANDLE_UNWRAP_AND_DELETE_PRESERVE_BOXED_IMPL)
10464 GOLDFISH_VK_LIST_NON_DISPATCHABLE_HANDLE_TYPES(
10465     BOXED_NON_DISPATCHABLE_HANDLE_UNWRAP_AND_DELETE_PRESERVE_BOXED_IMPL)
10466 
10467 }  // namespace vk
10468 }  // namespace gfxstream
10469