//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// SyncVk.cpp:
//    Implements the class methods for SyncVk.
//

#include "libANGLE/renderer/vulkan/SyncVk.h"

#include "common/debug.h"
#include "libANGLE/Context.h"
#include "libANGLE/Display.h"
#include "libANGLE/renderer/vulkan/ContextVk.h"
#include "libANGLE/renderer/vulkan/DisplayVk.h"

#if !defined(ANGLE_PLATFORM_WINDOWS)
#    include <poll.h>
#    include <unistd.h>
#else
#    include <io.h>
#endif

namespace
{
// Wait for file descriptor to be signaled
VkResult SyncWaitFd(int fd, uint64_t timeoutNs, VkResult timeoutResult = VK_TIMEOUT)
{
#if !defined(ANGLE_PLATFORM_WINDOWS)
    struct pollfd fds;
    int ret;

    // Convert nanoseconds to milliseconds
    int timeoutMs = static_cast<int>(timeoutNs / 1000000);
    // If timeoutNs was non-zero but less than one millisecond, make it a millisecond.
    if (timeoutNs > 0 && timeoutNs < 1000000)
    {
        timeoutMs = 1;
    }

    ASSERT(fd >= 0);

    fds.fd     = fd;
    fds.events = POLLIN;

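    // Retry the poll if it was interrupted by a signal (EINTR) or failed transiently (EAGAIN).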
    do
    {
        ret = poll(&fds, 1, timeoutMs);
        if (ret > 0)
        {
            if (fds.revents & (POLLERR | POLLNVAL))
            {
                return VK_ERROR_UNKNOWN;
            }
            return VK_SUCCESS;
        }
        else if (ret == 0)
        {
            return timeoutResult;
        }
    } while (ret == -1 && (errno == EINTR || errno == EAGAIN));

    return VK_ERROR_UNKNOWN;
#else
    UNREACHABLE();
    return VK_ERROR_UNKNOWN;
#endif
}

// Map VkResult to GLenum
void MapVkResultToGlenum(VkResult vkResult, angle::Result angleResult, void *outResult)
{
    GLenum *glEnumOut = static_cast<GLenum *>(outResult);
    ASSERT(glEnumOut);

    if (angleResult != angle::Result::Continue)
    {
        *glEnumOut = GL_WAIT_FAILED;
        return;
    }

    switch (vkResult)
    {
        case VK_EVENT_SET:
            *glEnumOut = GL_ALREADY_SIGNALED;
            break;
        case VK_SUCCESS:
            *glEnumOut = GL_CONDITION_SATISFIED;
            break;
        case VK_TIMEOUT:
            *glEnumOut = GL_TIMEOUT_EXPIRED;
            break;
        default:
            *glEnumOut = GL_WAIT_FAILED;
            break;
    }
}

// Map VkResult to EGLint
void MapVkResultToEglint(VkResult result, angle::Result angleResult, void *outResult)
{
    EGLint *eglIntOut = static_cast<EGLint *>(outResult);
    ASSERT(eglIntOut);

    if (angleResult != angle::Result::Continue)
    {
        *eglIntOut = EGL_FALSE;
        return;
    }

    switch (result)
    {
        case VK_EVENT_SET:
            // Fall through.  EGL doesn't differentiate between the event already being set and it
            // being set before the timeout.
        case VK_SUCCESS:
            *eglIntOut = EGL_CONDITION_SATISFIED_KHR;
            break;
        case VK_TIMEOUT:
            *eglIntOut = EGL_TIMEOUT_EXPIRED_KHR;
            break;
        default:
            *eglIntOut = EGL_FALSE;
            break;
    }
}

}  // anonymous namespace

namespace rx
{
namespace vk
{
SyncHelper::SyncHelper() {}

SyncHelper::~SyncHelper() {}

void SyncHelper::releaseToRenderer(Renderer *renderer) {}

angle::Result SyncHelper::initialize(ContextVk *contextVk, SyncFenceScope scope)
{
    ASSERT(!mUse.valid());
    return contextVk->onSyncObjectInit(this, scope);
}

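// Sets *resultOut to VK_EVENT_SET if the sync object is already signaled, VK_TIMEOUT if the
// requested timeout is zero, or VK_INCOMPLETE if the caller still needs to wait on the GPU work.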
angle::Result SyncHelper::prepareForClientWait(Context *context,
                                               ContextVk *contextVk,
                                               bool flushCommands,
                                               uint64_t timeout,
                                               VkResult *resultOut)
{
    // If the event is already set, don't wait
    bool alreadySignaled = false;
    ANGLE_TRY(getStatus(context, contextVk, &alreadySignaled));
    if (alreadySignaled)
    {
        *resultOut = VK_EVENT_SET;
        return angle::Result::Continue;
    }

    // If the timeout is zero, there's no need to wait; return the timeout result immediately.
    if (timeout == 0)
    {
        *resultOut = VK_TIMEOUT;
        return angle::Result::Continue;
    }

    // Submit commands if requested
    if (flushCommands && contextVk)
    {
        ANGLE_TRY(contextVk->flushCommandsAndEndRenderPassIfDeferredSyncInit(
            RenderPassClosureReason::SyncObjectClientWait));
    }

    *resultOut = VK_INCOMPLETE;
    return angle::Result::Continue;
}

angle::Result SyncHelper::clientWait(Context *context,
                                     ContextVk *contextVk,
                                     bool flushCommands,
                                     uint64_t timeout,
                                     MapVkResultToApiType mappingFunction,
                                     void *resultOut)
{
    ANGLE_TRACE_EVENT0("gpu.angle", "SyncHelper::clientWait");

    VkResult status = VK_INCOMPLETE;
    ANGLE_TRY(prepareForClientWait(context, contextVk, flushCommands, timeout, &status));

    if (status != VK_INCOMPLETE)
    {
        mappingFunction(status, angle::Result::Continue, resultOut);
        return angle::Result::Continue;
    }

    Renderer *renderer = context->getRenderer();

    // If we need to perform a CPU wait, don't set the resultOut parameter passed into this
    // method; instead, set the parameter passed into the unlocked tail call.
    auto clientWaitUnlocked = [renderer, context, mappingFunction, use = mUse,
                               timeout](void *resultOut) {
        ANGLE_TRACE_EVENT0("gpu.angle", "SyncHelper::clientWait block (unlocked)");

        VkResult status = VK_INCOMPLETE;
        angle::Result angleResult =
            renderer->waitForResourceUseToFinishWithUserTimeout(context, use, timeout, &status);
        // Note: resultOut may be nullptr through the glFinishFenceNV path, which does not have a
        // return value.
        if (resultOut != nullptr)
        {
            mappingFunction(status, angleResult, resultOut);
        }
    };

    // Schedule the wait to be run at the tail of the current call.
    egl::Display::GetCurrentThreadUnlockedTailCall()->add(clientWaitUnlocked);
    return angle::Result::Continue;
}

angle::Result SyncHelper::finish(ContextVk *contextVk)
{
    GLenum result;
    return clientWait(contextVk, contextVk, true, UINT64_MAX, MapVkResultToGlenum, &result);
}

angle::Result SyncHelper::serverWait(ContextVk *contextVk)
{
    // If already signaled, no need to wait
    bool alreadySignaled = false;
    ANGLE_TRY(getStatus(contextVk, contextVk, &alreadySignaled));
    if (alreadySignaled)
    {
        return angle::Result::Continue;
    }

    // Every resource already tracks its usage and issues the appropriate barriers, so there's
    // really nothing to do here.  An execution barrier is issued to strictly satisfy what the
    // application asked for.
    vk::OutsideRenderPassCommandBuffer *commandBuffer;
    ANGLE_TRY(contextVk->getOutsideRenderPassCommandBuffer({}, &commandBuffer));
    commandBuffer->pipelineBarrier(VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
                                   VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0, 0, nullptr, 0, nullptr, 0,
                                   nullptr);
    return angle::Result::Continue;
}

angle::Result SyncHelper::getStatus(Context *context, ContextVk *contextVk, bool *signaledOut)
{
    // Submit commands if it was deferred on the context that issued the sync object
    ANGLE_TRY(submitSyncIfDeferred(contextVk, RenderPassClosureReason::SyncObjectClientWait));
    ASSERT(mUse.valid());
    Renderer *renderer = context->getRenderer();
    if (renderer->hasResourceUseFinished(mUse))
    {
        *signaledOut = true;
    }
    else
    {
        // Check completed commands once before returning; perhaps the serial is actually already
        // finished.
        // We don't call checkCompletedCommandsAndCleanup() to clean up finished commands
        // immediately when the isAsyncCommandBufferResetAndGarbageCleanupEnabled feature is
        // turned off.  In that case, vkResetCommandBuffer() is called in the cleanup step and
        // must take the CommandPoolAccess::mCmdPoolMutex lock (see
        // CommandPoolAccess::collectPrimaryCommandBuffer).  The cleanup step could then be
        // blocked by command buffer recording if another thread calls
        // CommandPoolAccess::flushRenderPassCommands(), which would violate the EGL spec
        // requirement that eglClientWaitSync() return immediately when timeout == 0.
        if (renderer->isAsyncCommandBufferResetAndGarbageCleanupEnabled())
        {
            ANGLE_TRY(renderer->checkCompletedCommandsAndCleanup(context));
        }
        else
        {
            ANGLE_TRY(renderer->checkCompletedCommands(context));
        }

        *signaledOut = renderer->hasResourceUseFinished(mUse);
    }
    return angle::Result::Continue;
}

angle::Result SyncHelper::submitSyncIfDeferred(ContextVk *contextVk, RenderPassClosureReason reason)
{
    if (contextVk == nullptr)
    {
        return angle::Result::Continue;
    }

    if (contextVk->getRenderer()->hasResourceUseSubmitted(mUse))
    {
        return angle::Result::Continue;
    }

    // The submission of a sync object may be deferred to allow further optimizations to an open
    // render pass before a submission happens for another reason.  If the sync object is being
    // waited on by the current context, the application must have used GL_SYNC_FLUSH_COMMANDS_BIT.
    // However, when waited on by other contexts, the application must have ensured the original
    // context is flushed.  Due to deferred flushes, a glFlush is not sufficient to guarantee this.
    //
    // Deferring the submission is restricted to non-EGL sync objects, so it's sufficient to ensure
    // that the contexts in the share group issue their deferred flushes.
    for (auto context : contextVk->getShareGroup()->getContexts())
    {
        ContextVk *sharedContextVk = vk::GetImpl(context.second);
        if (sharedContextVk->hasUnsubmittedUse(mUse))
        {
            ANGLE_TRY(sharedContextVk->flushCommandsAndEndRenderPassIfDeferredSyncInit(reason));
            break;
        }
    }
    // Note: mUse could still be invalid here if the sync object was inserted on a freshly created
    // context, i.e., the fence is tracking nothing and is already finished when inserted.
    ASSERT(contextVk->getRenderer()->hasResourceUseSubmitted(mUse));

    return angle::Result::Continue;
}

ExternalFence::ExternalFence()
    : mDevice(VK_NULL_HANDLE), mFenceFdStatus(VK_INCOMPLETE), mFenceFd(kInvalidFenceFd)
{}

ExternalFence::~ExternalFence()
{
    if (mDevice != VK_NULL_HANDLE)
    {
        mFence.destroy(mDevice);
    }

    if (mFenceFd != kInvalidFenceFd)
    {
        close(mFenceFd);
    }
}

VkResult ExternalFence::init(VkDevice device, const VkFenceCreateInfo &createInfo)
{
    ASSERT(device != VK_NULL_HANDLE);
    ASSERT(mFenceFdStatus == VK_INCOMPLETE && mFenceFd == kInvalidFenceFd);
    ASSERT(mDevice == VK_NULL_HANDLE);
    mDevice = device;
    return mFence.init(device, createInfo);
}

void ExternalFence::init(int fenceFd)
{
    ASSERT(fenceFd != kInvalidFenceFd);
    ASSERT(mFenceFdStatus == VK_INCOMPLETE && mFenceFd == kInvalidFenceFd);
    mFenceFdStatus = VK_SUCCESS;
    mFenceFd       = fenceFd;
}

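// A zero-timeout poll on the sync FD acts as a status query; VK_NOT_READY is returned for an
// unsignaled fence to mirror vkGetFenceStatus() semantics.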
VkResult ExternalFence::getStatus(VkDevice device) const
{
    if (mFenceFdStatus == VK_SUCCESS)
    {
        return SyncWaitFd(mFenceFd, 0, VK_NOT_READY);
    }
    return mFence.getStatus(device);
}

VkResult ExternalFence::wait(VkDevice device, uint64_t timeout) const
{
    if (mFenceFdStatus == VK_SUCCESS)
    {
        return SyncWaitFd(mFenceFd, timeout);
    }
    return mFence.wait(device, timeout);
}

void ExternalFence::exportFd(VkDevice device, const VkFenceGetFdInfoKHR &fenceGetFdInfo)
{
    ASSERT(mFenceFdStatus == VK_INCOMPLETE && mFenceFd == kInvalidFenceFd);
    mFenceFdStatus = mFence.exportFd(device, fenceGetFdInfo, &mFenceFd);
    ASSERT(mFenceFdStatus != VK_INCOMPLETE);
}

SyncHelperNativeFence::SyncHelperNativeFence()
{
    mExternalFence = std::make_shared<ExternalFence>();
}

SyncHelperNativeFence::~SyncHelperNativeFence() {}

void SyncHelperNativeFence::releaseToRenderer(Renderer *renderer)
{
    mExternalFence.reset();
}

angle::Result SyncHelperNativeFence::initializeWithFd(ContextVk *contextVk, int inFd)
{
    ASSERT(inFd >= kInvalidFenceFd);

    // If the application provided a valid FD, import it into the fence.
    if (inFd > kInvalidFenceFd)
    {
        // File descriptor ownership: EGL_ANDROID_native_fence_sync
        // Whenever a file descriptor is passed into or returned from an
        // EGL call in this extension, ownership of that file descriptor is
        // transferred. The recipient of the file descriptor must close it when it is
        // no longer needed, and the provider of the file descriptor must dup it
        // before providing it if they require continued use of the native fence.
        mExternalFence->init(inFd);
        return angle::Result::Continue;
    }

    Renderer *renderer = contextVk->getRenderer();
    VkDevice device    = renderer->getDevice();

    VkExportFenceCreateInfo exportCreateInfo = {};
    exportCreateInfo.sType                   = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO;
    exportCreateInfo.pNext                   = nullptr;
    exportCreateInfo.handleTypes             = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR;

    // Create fenceInfo base.
    VkFenceCreateInfo fenceCreateInfo = {};
    fenceCreateInfo.sType             = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
    fenceCreateInfo.flags             = 0;
    fenceCreateInfo.pNext             = &exportCreateInfo;

    // Initialize/create a VkFence handle
    ANGLE_VK_TRY(contextVk, mExternalFence->init(device, fenceCreateInfo));

    // No valid FD was provided by the application, so create one backed by this fence.
    /*
      Spec: "When a fence sync object is created or when an EGL native fence sync
      object is created with the EGL_SYNC_NATIVE_FENCE_FD_ANDROID attribute set to
      EGL_NO_NATIVE_FENCE_FD_ANDROID, eglCreateSyncKHR also inserts a fence command
      into the command stream of the bound client API's current context and associates it
      with the newly created sync object.
    */
    // Flush the current pending set of commands, providing the fence...
    ANGLE_TRY(contextVk->flushAndSubmitCommands(nullptr, &mExternalFence,
                                                RenderPassClosureReason::SyncObjectWithFdInit));
    QueueSerial submitSerial = contextVk->getLastSubmittedQueueSerial();

    // exportFd is exporting VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR type handle which
    // obeys copy semantics. This means that the fence must already be signaled or the work to
    // signal it is in the graphics pipeline at the time we export the fd. Thus we need to
    // call waitForQueueSerialToBeSubmittedToDevice() here.
    ANGLE_TRY(renderer->waitForQueueSerialToBeSubmittedToDevice(contextVk, submitSerial));

    ANGLE_VK_TRY(contextVk, mExternalFence->getFenceFdStatus());

    return angle::Result::Continue;
}

angle::Result SyncHelperNativeFence::prepareForClientWait(Context *context,
                                                          ContextVk *contextVk,
                                                          bool flushCommands,
                                                          uint64_t timeout,
                                                          VkResult *resultOut)
{
    // If already signaled, don't wait
    bool alreadySignaled = false;
    ANGLE_TRY(getStatus(context, contextVk, &alreadySignaled));
    if (alreadySignaled)
    {
        *resultOut = VK_SUCCESS;
        return angle::Result::Continue;
    }

    // If the timeout is zero, there's no need to wait; return the timeout result immediately.
    if (timeout == 0)
    {
        *resultOut = VK_TIMEOUT;
        return angle::Result::Continue;
    }

    if (flushCommands && contextVk)
    {
        ANGLE_TRY(contextVk->flushAndSubmitCommands(nullptr, nullptr,
                                                    RenderPassClosureReason::SyncObjectClientWait));
    }

    *resultOut = VK_INCOMPLETE;
    return angle::Result::Continue;
}

angle::Result SyncHelperNativeFence::clientWait(Context *context,
                                                ContextVk *contextVk,
                                                bool flushCommands,
                                                uint64_t timeout,
                                                MapVkResultToApiType mappingFunction,
                                                void *resultOut)
{
    ANGLE_TRACE_EVENT0("gpu.angle", "SyncHelperNativeFence::clientWait");

    VkResult status = VK_INCOMPLETE;
    ANGLE_TRY(prepareForClientWait(context, contextVk, flushCommands, timeout, &status));

    if (status != VK_INCOMPLETE)
    {
        mappingFunction(status, angle::Result::Continue, resultOut);
        return angle::Result::Continue;
    }

    Renderer *renderer = context->getRenderer();

    auto clientWaitUnlocked = [device = renderer->getDevice(), fence = mExternalFence,
                               mappingFunction, timeout](void *resultOut) {
        ANGLE_TRACE_EVENT0("gpu.angle", "SyncHelperNativeFence::clientWait block (unlocked)");
        ASSERT(resultOut);

        VkResult status = fence->wait(device, timeout);
        mappingFunction(status, angle::Result::Continue, resultOut);
    };

    egl::Display::GetCurrentThreadUnlockedTailCall()->add(clientWaitUnlocked);
    return angle::Result::Continue;
}

angle::Result SyncHelperNativeFence::serverWait(ContextVk *contextVk)
{
    Renderer *renderer = contextVk->getRenderer();

    // If already signaled, no need to wait
    bool alreadySignaled = false;
    ANGLE_TRY(getStatus(contextVk, contextVk, &alreadySignaled));
    if (alreadySignaled)
    {
        return angle::Result::Continue;
    }

    VkDevice device = renderer->getDevice();
    DeviceScoped<Semaphore> waitSemaphore(device);
    // Wait semaphore for next vkQueueSubmit().
    // Create a Semaphore with imported fenceFd.
    ANGLE_VK_TRY(contextVk, waitSemaphore.get().init(device));

    VkImportSemaphoreFdInfoKHR importFdInfo = {};
    importFdInfo.sType                      = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR;
    importFdInfo.semaphore                  = waitSemaphore.get().getHandle();
    importFdInfo.flags                      = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR;
    importFdInfo.handleType                 = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR;
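    // Importing a SYNC_FD payload transfers ownership of the file descriptor to the Vulkan
    // implementation, so dup() the FD to keep the original usable for later status/wait queries.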
    importFdInfo.fd                         = dup(mExternalFence->getFenceFd());
    ANGLE_VK_TRY(contextVk, waitSemaphore.get().importFd(device, importFdInfo));

    // Add semaphore to next submit job.
    contextVk->addWaitSemaphore(waitSemaphore.get().getHandle(),
                                VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
    contextVk->addGarbage(&waitSemaphore.get());  // This releases the handle.
    return angle::Result::Continue;
}

angle::Result SyncHelperNativeFence::getStatus(Context *context,
                                               ContextVk *contextVk,
                                               bool *signaledOut)
{
    VkResult result = mExternalFence->getStatus(context->getDevice());
    if (result != VK_NOT_READY)
    {
        ANGLE_VK_TRY(context, result);
    }
    *signaledOut = (result == VK_SUCCESS);
    return angle::Result::Continue;
}

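// Per EGL_ANDROID_native_fence_sync (quoted in initializeWithFd above), ownership of the returned
// FD is transferred to the caller, so a dup of the internally owned FD is handed out.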
angle::Result SyncHelperNativeFence::dupNativeFenceFD(Context *context, int *fdOut) const
{
    if (mExternalFence->getFenceFd() == kInvalidFenceFd)
    {
        return angle::Result::Stop;
    }

    *fdOut = dup(mExternalFence->getFenceFd());

    return angle::Result::Continue;
}

}  // namespace vk

SyncVk::SyncVk() : SyncImpl() {}

SyncVk::~SyncVk() {}

void SyncVk::onDestroy(const gl::Context *context)
{
    mSyncHelper.releaseToRenderer(vk::GetImpl(context)->getRenderer());
}

angle::Result SyncVk::set(const gl::Context *context, GLenum condition, GLbitfield flags)
{
    ASSERT(condition == GL_SYNC_GPU_COMMANDS_COMPLETE);
    ASSERT(flags == 0);

    return mSyncHelper.initialize(vk::GetImpl(context), SyncFenceScope::CurrentContextToShareGroup);
}

angle::Result SyncVk::clientWait(const gl::Context *context,
                                 GLbitfield flags,
                                 GLuint64 timeout,
                                 GLenum *outResult)
{
    ContextVk *contextVk = vk::GetImpl(context);

    ASSERT((flags & ~GL_SYNC_FLUSH_COMMANDS_BIT) == 0);

    bool flush = (flags & GL_SYNC_FLUSH_COMMANDS_BIT) != 0;

    return mSyncHelper.clientWait(contextVk, contextVk, flush, static_cast<uint64_t>(timeout),
                                  MapVkResultToGlenum, outResult);
}

angle::Result SyncVk::serverWait(const gl::Context *context, GLbitfield flags, GLuint64 timeout)
{
    ASSERT(flags == 0);
    ASSERT(timeout == GL_TIMEOUT_IGNORED);

    ContextVk *contextVk = vk::GetImpl(context);
    return mSyncHelper.serverWait(contextVk);
}

angle::Result SyncVk::getStatus(const gl::Context *context, GLint *outResult)
{
    ContextVk *contextVk = vk::GetImpl(context);
    bool signaled        = false;
    ANGLE_TRY(mSyncHelper.getStatus(contextVk, contextVk, &signaled));

    *outResult = signaled ? GL_SIGNALED : GL_UNSIGNALED;
    return angle::Result::Continue;
}

EGLSyncVk::EGLSyncVk() : EGLSyncImpl(), mSyncHelper(nullptr) {}

EGLSyncVk::~EGLSyncVk() {}

void EGLSyncVk::onDestroy(const egl::Display *display)
{
    mSyncHelper->releaseToRenderer(vk::GetImpl(display)->getRenderer());
}

egl::Error EGLSyncVk::initialize(const egl::Display *display,
                                 const gl::Context *context,
                                 EGLenum type,
                                 const egl::AttributeMap &attribs)
{
    ASSERT(context != nullptr);

    switch (type)
    {
        case EGL_SYNC_FENCE_KHR:
        case EGL_SYNC_GLOBAL_FENCE_ANGLE:
        {
            vk::SyncHelper *syncHelper = new vk::SyncHelper();
            mSyncHelper.reset(syncHelper);
            const SyncFenceScope scope = type == EGL_SYNC_GLOBAL_FENCE_ANGLE
                                             ? SyncFenceScope::AllContextsToAllContexts
                                             : SyncFenceScope::CurrentContextToAllContexts;
            if (syncHelper->initialize(vk::GetImpl(context), scope) == angle::Result::Stop)
            {
                return egl::Error(EGL_BAD_ALLOC, "eglCreateSyncKHR failed to create sync object");
            }
            return egl::NoError();
        }
        case EGL_SYNC_NATIVE_FENCE_ANDROID:
        {
            vk::SyncHelperNativeFence *syncHelper = new vk::SyncHelperNativeFence();
            mSyncHelper.reset(syncHelper);
            EGLint nativeFenceFd =
                attribs.getAsInt(EGL_SYNC_NATIVE_FENCE_FD_ANDROID, EGL_NO_NATIVE_FENCE_FD_ANDROID);
            return angle::ToEGL(syncHelper->initializeWithFd(vk::GetImpl(context), nativeFenceFd),
                                EGL_BAD_ALLOC);
        }
        default:
            UNREACHABLE();
            return egl::Error(EGL_BAD_ALLOC);
    }
}

egl::Error EGLSyncVk::clientWait(const egl::Display *display,
                                 const gl::Context *context,
                                 EGLint flags,
                                 EGLTime timeout,
                                 EGLint *outResult)
{
    ASSERT((flags & ~EGL_SYNC_FLUSH_COMMANDS_BIT_KHR) == 0);

    bool flush = (flags & EGL_SYNC_FLUSH_COMMANDS_BIT_KHR) != 0;

    ContextVk *contextVk = context != nullptr && flush ? vk::GetImpl(context) : nullptr;
    if (mSyncHelper->clientWait(vk::GetImpl(display), contextVk, flush,
                                static_cast<uint64_t>(timeout), MapVkResultToEglint,
                                outResult) == angle::Result::Stop)
    {
        return egl::Error(EGL_BAD_ALLOC);
    }

    return egl::NoError();
}

egl::Error EGLSyncVk::serverWait(const egl::Display *display,
                                 const gl::Context *context,
                                 EGLint flags)
{
    // Server wait requires a valid bound context.
    ASSERT(context);

    // No flags are currently implemented.
    ASSERT(flags == 0);

    ContextVk *contextVk = vk::GetImpl(context);
    return angle::ToEGL(mSyncHelper->serverWait(contextVk), EGL_BAD_ALLOC);
}

egl::Error EGLSyncVk::getStatus(const egl::Display *display, EGLint *outStatus)
{
    bool signaled = false;
    if (mSyncHelper->getStatus(vk::GetImpl(display), nullptr, &signaled) == angle::Result::Stop)
    {
        return egl::Error(EGL_BAD_ALLOC);
    }

    *outStatus = signaled ? EGL_SIGNALED_KHR : EGL_UNSIGNALED_KHR;
    return egl::NoError();
}

egl::Error EGLSyncVk::dupNativeFenceFD(const egl::Display *display, EGLint *fdOut) const
{
    return angle::ToEGL(mSyncHelper->dupNativeFenceFD(vk::GetImpl(display), fdOut),
                        EGL_BAD_PARAMETER);
}

}  // namespace rx