xref: /aosp_15_r20/frameworks/av/services/camera/virtualcamera/VirtualCameraRenderThread.cc (revision ec779b8e0859a360c3d303172224686826e6e0e1)
1 /*
2  * Copyright (C) 2023 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 // #define LOG_NDEBUG 0
18 #define LOG_TAG "VirtualCameraRenderThread"
19 #include "VirtualCameraRenderThread.h"
20 
21 #include <android_companion_virtualdevice_flags.h>
22 
23 #include <chrono>
24 #include <cstdint>
25 #include <cstring>
26 #include <future>
27 #include <memory>
28 #include <mutex>
29 #include <thread>
30 #include <utility>
31 #include <vector>
32 
33 #include "Exif.h"
34 #include "GLES/gl.h"
35 #include "VirtualCameraCaptureResult.h"
36 #include "VirtualCameraDevice.h"
37 #include "VirtualCameraSessionContext.h"
38 #include "aidl/android/hardware/camera/common/Status.h"
39 #include "aidl/android/hardware/camera/device/BufferStatus.h"
40 #include "aidl/android/hardware/camera/device/CameraBlob.h"
41 #include "aidl/android/hardware/camera/device/CameraBlobId.h"
42 #include "aidl/android/hardware/camera/device/CameraMetadata.h"
43 #include "aidl/android/hardware/camera/device/CaptureResult.h"
44 #include "aidl/android/hardware/camera/device/ErrorCode.h"
45 #include "aidl/android/hardware/camera/device/ICameraDeviceCallback.h"
46 #include "aidl/android/hardware/camera/device/NotifyMsg.h"
47 #include "aidl/android/hardware/camera/device/ShutterMsg.h"
48 #include "aidl/android/hardware/camera/device/StreamBuffer.h"
49 #include "android-base/thread_annotations.h"
50 #include "android/binder_auto_utils.h"
51 #include "android/hardware_buffer.h"
52 #include "system/camera_metadata.h"
53 #include "ui/GraphicBuffer.h"
54 #include "ui/Rect.h"
55 #include "util/EglFramebuffer.h"
56 #include "util/JpegUtil.h"
57 #include "util/Util.h"
58 #include "utils/Errors.h"
59 
60 namespace android {
61 namespace companion {
62 namespace virtualcamera {
63 
64 using ::aidl::android::hardware::camera::common::Status;
65 using ::aidl::android::hardware::camera::device::BufferStatus;
66 using ::aidl::android::hardware::camera::device::CameraBlob;
67 using ::aidl::android::hardware::camera::device::CameraBlobId;
68 using ::aidl::android::hardware::camera::device::CameraMetadata;
69 using ::aidl::android::hardware::camera::device::CaptureResult;
70 using ::aidl::android::hardware::camera::device::ErrorCode;
71 using ::aidl::android::hardware::camera::device::ErrorMsg;
72 using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
73 using ::aidl::android::hardware::camera::device::NotifyMsg;
74 using ::aidl::android::hardware::camera::device::ShutterMsg;
75 using ::aidl::android::hardware::camera::device::Stream;
76 using ::aidl::android::hardware::camera::device::StreamBuffer;
77 using ::aidl::android::hardware::graphics::common::PixelFormat;
78 using ::android::base::ScopedLockAssertion;
79 
80 using ::android::hardware::camera::common::helper::ExifUtils;
81 
82 namespace {
83 
84 // helper type for the visitor
85 template <class... Ts>
86 struct overloaded : Ts... {
87   using Ts::operator()...;
88 };
89 // explicit deduction guide (not needed as of C++20)
90 template <class... Ts>
91 overloaded(Ts...) -> overloaded<Ts...>;
92 
93 using namespace std::chrono_literals;
94 
95 namespace flags = ::android::companion::virtualdevice::flags;
96 
97 static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;
98 
99 static constexpr size_t kJpegThumbnailBufferSize = 32 * 1024;  // 32 KiB
100 
101 static constexpr UpdateTextureTask kUpdateTextureTask;
102 
103 // The number of nanosecond to wait for the first frame to be drawn on the input surface
104 static constexpr std::chrono::nanoseconds kMaxWaitFirstFrame = 3s;
105 
createShutterNotifyMsg(int frameNumber,std::chrono::nanoseconds timestamp)106 NotifyMsg createShutterNotifyMsg(int frameNumber,
107                                  std::chrono::nanoseconds timestamp) {
108   NotifyMsg msg;
109   msg.set<NotifyMsg::Tag::shutter>(ShutterMsg{
110       .frameNumber = frameNumber,
111       .timestamp = timestamp.count(),
112   });
113   return msg;
114 }
115 
116 // Create a NotifyMsg for an error case. The default error is ERROR_BUFFER.
createErrorNotifyMsg(int frameNumber,int streamId,ErrorCode errorCode=ErrorCode::ERROR_BUFFER)117 NotifyMsg createErrorNotifyMsg(int frameNumber, int streamId,
118                                ErrorCode errorCode = ErrorCode::ERROR_BUFFER) {
119   NotifyMsg msg;
120   msg.set<NotifyMsg::Tag::error>(ErrorMsg{.frameNumber = frameNumber,
121                                           .errorStreamId = streamId,
122                                           .errorCode = errorCode});
123   return msg;
124 }
125 
createRequestErrorNotifyMsg(int frameNumber)126 NotifyMsg createRequestErrorNotifyMsg(int frameNumber) {
127   NotifyMsg msg;
128   msg.set<NotifyMsg::Tag::error>(
129       ErrorMsg{.frameNumber = frameNumber,
130                // errorStreamId needs to be set to -1 for ERROR_REQUEST
131                // (not tied to specific stream).
132                .errorStreamId = -1,
133                .errorCode = ErrorCode::ERROR_REQUEST});
134   return msg;
135 }
136 
allocateTemporaryFramebuffer(EGLDisplay eglDisplay,const uint width,const int height)137 std::shared_ptr<EglFrameBuffer> allocateTemporaryFramebuffer(
138     EGLDisplay eglDisplay, const uint width, const int height) {
139   const AHardwareBuffer_Desc desc{
140       .width = static_cast<uint32_t>(width),
141       .height = static_cast<uint32_t>(height),
142       .layers = 1,
143       .format = AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420,
144       .usage = AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER |
145                AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN,
146       .rfu0 = 0,
147       .rfu1 = 0};
148 
149   AHardwareBuffer* hwBufferPtr;
150   int status = AHardwareBuffer_allocate(&desc, &hwBufferPtr);
151   if (status != NO_ERROR) {
152     ALOGE(
153         "%s: Failed to allocate hardware buffer for temporary framebuffer: %d",
154         __func__, status);
155     return nullptr;
156   }
157 
158   return std::make_shared<EglFrameBuffer>(
159       eglDisplay,
160       std::shared_ptr<AHardwareBuffer>(hwBufferPtr, AHardwareBuffer_release));
161 }
162 
isYuvFormat(const PixelFormat pixelFormat)163 bool isYuvFormat(const PixelFormat pixelFormat) {
164   switch (static_cast<android_pixel_format_t>(pixelFormat)) {
165     case HAL_PIXEL_FORMAT_YCBCR_422_I:
166     case HAL_PIXEL_FORMAT_YCBCR_422_SP:
167     case HAL_PIXEL_FORMAT_Y16:
168     case HAL_PIXEL_FORMAT_YV12:
169     case HAL_PIXEL_FORMAT_YCBCR_420_888:
170       return true;
171     default:
172       return false;
173   }
174 }
175 
createExif(Resolution imageSize,const CameraMetadata resultMetadata,const std::vector<uint8_t> & compressedThumbnail={})176 std::vector<uint8_t> createExif(
177     Resolution imageSize, const CameraMetadata resultMetadata,
178     const std::vector<uint8_t>& compressedThumbnail = {}) {
179   std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
180   exifUtils->initialize();
181 
182   // Make a copy of the metadata in order to converting it the HAL metadata
183   // format (as opposed to the AIDL class) and use the setFromMetadata method
184   // from ExifUtil
185   camera_metadata_t* rawSettings =
186       clone_camera_metadata((camera_metadata_t*)resultMetadata.metadata.data());
187   if (rawSettings != nullptr) {
188     android::hardware::camera::common::helper::CameraMetadata halMetadata(
189         rawSettings);
190     exifUtils->setFromMetadata(halMetadata, imageSize.width, imageSize.height);
191   }
192   exifUtils->setMake(VirtualCameraDevice::kDefaultMakeAndModel);
193   exifUtils->setModel(VirtualCameraDevice::kDefaultMakeAndModel);
194   exifUtils->setFlash(0);
195 
196   std::vector<uint8_t> app1Data;
197 
198   size_t thumbnailDataSize = compressedThumbnail.size();
199   const void* thumbnailData =
200       thumbnailDataSize > 0
201           ? reinterpret_cast<const void*>(compressedThumbnail.data())
202           : nullptr;
203 
204   if (!exifUtils->generateApp1(thumbnailData, thumbnailDataSize)) {
205     ALOGE("%s: Failed to generate APP1 segment for EXIF metadata", __func__);
206     return app1Data;
207   }
208 
209   const uint8_t* data = exifUtils->getApp1Buffer();
210   const size_t size = exifUtils->getApp1Length();
211 
212   app1Data.insert(app1Data.end(), data, data + size);
213   return app1Data;
214 }
215 
getMaxFrameDuration(const RequestSettings & requestSettings)216 std::chrono::nanoseconds getMaxFrameDuration(
217     const RequestSettings& requestSettings) {
218   if (requestSettings.fpsRange.has_value()) {
219     return std::chrono::nanoseconds(static_cast<uint64_t>(
220         1e9 / std::max(1, requestSettings.fpsRange->minFps)));
221   }
222   return std::chrono::nanoseconds(
223       static_cast<uint64_t>(1e9 / VirtualCameraDevice::kMinFps));
224 }
225 
226 }  // namespace
227 
CaptureRequestBuffer(int streamId,int bufferId,sp<Fence> fence)228 CaptureRequestBuffer::CaptureRequestBuffer(int streamId, int bufferId,
229                                            sp<Fence> fence)
230     : mStreamId(streamId), mBufferId(bufferId), mFence(fence) {
231 }
232 
getStreamId() const233 int CaptureRequestBuffer::getStreamId() const {
234   return mStreamId;
235 }
236 
getBufferId() const237 int CaptureRequestBuffer::getBufferId() const {
238   return mBufferId;
239 }
240 
getFence() const241 sp<Fence> CaptureRequestBuffer::getFence() const {
242   return mFence;
243 }
244 
VirtualCameraRenderThread(VirtualCameraSessionContext & sessionContext,const Resolution inputSurfaceSize,const Resolution reportedSensorSize,std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback)245 VirtualCameraRenderThread::VirtualCameraRenderThread(
246     VirtualCameraSessionContext& sessionContext,
247     const Resolution inputSurfaceSize, const Resolution reportedSensorSize,
248     std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback)
249     : mCameraDeviceCallback(cameraDeviceCallback),
250       mInputSurfaceSize(inputSurfaceSize),
251       mReportedSensorSize(reportedSensorSize),
252       mSessionContext(sessionContext),
253       mInputSurfaceFuture(mInputSurfacePromise.get_future()) {
254 }
255 
~VirtualCameraRenderThread()256 VirtualCameraRenderThread::~VirtualCameraRenderThread() {
257   stop();
258   if (mThread.joinable()) {
259     mThread.join();
260   }
261 }
262 
ProcessCaptureRequestTask(int frameNumber,const std::vector<CaptureRequestBuffer> & requestBuffers,const RequestSettings & requestSettings)263 ProcessCaptureRequestTask::ProcessCaptureRequestTask(
264     int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers,
265     const RequestSettings& requestSettings)
266     : mFrameNumber(frameNumber),
267       mBuffers(requestBuffers),
268       mRequestSettings(requestSettings) {
269 }
270 
getFrameNumber() const271 int ProcessCaptureRequestTask::getFrameNumber() const {
272   return mFrameNumber;
273 }
274 
getBuffers() const275 const std::vector<CaptureRequestBuffer>& ProcessCaptureRequestTask::getBuffers()
276     const {
277   return mBuffers;
278 }
279 
getRequestSettings() const280 const RequestSettings& ProcessCaptureRequestTask::getRequestSettings() const {
281   return mRequestSettings;
282 }
283 
requestTextureUpdate()284 void VirtualCameraRenderThread::requestTextureUpdate() {
285   std::lock_guard<std::mutex> lock(mLock);
286   // If queue is not empty, we don't need to set the mTextureUpdateRequested
287   // flag, since the texture will be updated during ProcessCaptureRequestTask
288   // processing anyway.
289   if (mQueue.empty()) {
290     mTextureUpdateRequested = true;
291     mCondVar.notify_one();
292   }
293 }
294 
enqueueTask(std::unique_ptr<ProcessCaptureRequestTask> task)295 void VirtualCameraRenderThread::enqueueTask(
296     std::unique_ptr<ProcessCaptureRequestTask> task) {
297   std::lock_guard<std::mutex> lock(mLock);
298   // When enqueving process capture request task, clear the
299   // mTextureUpdateRequested flag. If this flag is set, the texture was not yet
300   // updated and it will be updated when processing ProcessCaptureRequestTask
301   // anyway.
302   mTextureUpdateRequested = false;
303   mQueue.emplace_back(std::move(task));
304   mCondVar.notify_one();
305 }
306 
flush()307 void VirtualCameraRenderThread::flush() {
308   std::lock_guard<std::mutex> lock(mLock);
309   while (!mQueue.empty()) {
310     std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
311     mQueue.pop_front();
312     flushCaptureRequest(*task);
313   }
314 }
315 
start()316 void VirtualCameraRenderThread::start() {
317   mThread = std::thread(&VirtualCameraRenderThread::threadLoop, this);
318 }
319 
stop()320 void VirtualCameraRenderThread::stop() {
321   {
322     std::lock_guard<std::mutex> lock(mLock);
323     mPendingExit = true;
324     mCondVar.notify_one();
325   }
326 }
327 
getInputSurface()328 sp<Surface> VirtualCameraRenderThread::getInputSurface() {
329   return mInputSurfaceFuture.get();
330 }
331 
dequeueTask()332 RenderThreadTask VirtualCameraRenderThread::dequeueTask() {
333   std::unique_lock<std::mutex> lock(mLock);
334   // Clang's thread safety analysis doesn't perform alias analysis,
335   // so it doesn't support moveable std::unique_lock.
336   //
337   // Lock assertion below is basically explicit declaration that
338   // the lock is held in this scope, which is true, since it's only
339   // released during waiting inside mCondVar.wait calls.
340   ScopedLockAssertion lockAssertion(mLock);
341 
342   mCondVar.wait(lock, [this]() REQUIRES(mLock) {
343     return mPendingExit || mTextureUpdateRequested || !mQueue.empty();
344   });
345   if (mPendingExit) {
346     // Render thread task with null task signals render thread to terminate.
347     return RenderThreadTask(nullptr);
348   }
349   if (mTextureUpdateRequested) {
350     // If mTextureUpdateRequested, it's guaranteed the queue is empty, return
351     // kUpdateTextureTask to signal we want render thread to update the texture
352     // (consume buffer from the queue).
353     mTextureUpdateRequested = false;
354     return RenderThreadTask(kUpdateTextureTask);
355   }
356   RenderThreadTask task(std::move(mQueue.front()));
357   mQueue.pop_front();
358   return task;
359 }
360 
threadLoop()361 void VirtualCameraRenderThread::threadLoop() {
362   ALOGV("Render thread starting");
363 
364   mEglDisplayContext = std::make_unique<EglDisplayContext>();
365   mEglTextureYuvProgram =
366       std::make_unique<EglTextureProgram>(EglTextureProgram::TextureFormat::YUV);
367   mEglTextureRgbProgram = std::make_unique<EglTextureProgram>(
368       EglTextureProgram::TextureFormat::RGBA);
369   mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(
370       mInputSurfaceSize.width, mInputSurfaceSize.height);
371   mEglSurfaceTexture->setFrameAvailableListener(
372       [this]() { requestTextureUpdate(); });
373 
374   mInputSurfacePromise.set_value(mEglSurfaceTexture->getSurface());
375 
376   while (RenderThreadTask task = dequeueTask()) {
377     std::visit(
378         overloaded{[this](const std::unique_ptr<ProcessCaptureRequestTask>& t) {
379                      processTask(*t);
380                    },
381                    [this](const UpdateTextureTask&) {
382                      ALOGV("Idle update of the texture");
383                      mEglSurfaceTexture->updateTexture();
384                    }},
385         task);
386   }
387 
388   // Destroy EGL utilities still on the render thread.
389   mEglSurfaceTexture.reset();
390   mEglTextureRgbProgram.reset();
391   mEglTextureYuvProgram.reset();
392   mEglDisplayContext.reset();
393 
394   ALOGV("Render thread exiting");
395 }
396 
processTask(const ProcessCaptureRequestTask & request)397 void VirtualCameraRenderThread::processTask(
398     const ProcessCaptureRequestTask& request) {
399   std::chrono::nanoseconds timestamp =
400       std::chrono::duration_cast<std::chrono::nanoseconds>(
401           std::chrono::steady_clock::now().time_since_epoch());
402   const std::chrono::nanoseconds lastAcquisitionTimestamp(
403       mLastAcquisitionTimestampNanoseconds.exchange(timestamp.count(),
404                                                     std::memory_order_relaxed));
405 
406   if (request.getRequestSettings().fpsRange) {
407     int maxFps = std::max(1, request.getRequestSettings().fpsRange->maxFps);
408     timestamp = throttleRendering(maxFps, lastAcquisitionTimestamp, timestamp);
409   }
410 
411   // Calculate the maximal amount of time we can afford to wait for next frame.
412   const bool isFirstFrameDrawn = mEglSurfaceTexture->isFirstFrameDrawn();
413   ALOGV("First Frame Drawn: %s", isFirstFrameDrawn ? "Yes" : "No");
414 
415   const std::chrono::nanoseconds maxFrameDuration =
416       isFirstFrameDrawn ? getMaxFrameDuration(request.getRequestSettings())
417                         : kMaxWaitFirstFrame;
418   const std::chrono::nanoseconds elapsedDuration =
419       isFirstFrameDrawn ? timestamp - lastAcquisitionTimestamp : 0ns;
420 
421   if (elapsedDuration < maxFrameDuration) {
422     // We can afford to wait for next frame.
423     // Note that if there's already new frame in the input Surface, the call
424     // below returns immediatelly.
425     bool gotNewFrame = mEglSurfaceTexture->waitForNextFrame(maxFrameDuration -
426                                                             elapsedDuration);
427     timestamp = std::chrono::duration_cast<std::chrono::nanoseconds>(
428         std::chrono::steady_clock::now().time_since_epoch());
429     if (!gotNewFrame) {
430       if (!mEglSurfaceTexture->isFirstFrameDrawn()) {
431         // We don't have any input ever drawn. This is considered as an error
432         // case. Notify the framework of the failure and return early.
433         ALOGW("Timed out waiting for first frame to be drawn.");
434         std::unique_ptr<CaptureResult> captureResult = createCaptureResult(
435             request.getFrameNumber(), /* metadata = */ nullptr);
436         notifyTimeout(request, *captureResult);
437         submitCaptureResult(std::move(captureResult));
438         return;
439       }
440 
441       ALOGV(
442           "%s: No new frame received on input surface after waiting for "
443           "%" PRIu64 "ns, repeating last frame.",
444           __func__,
445           static_cast<uint64_t>((timestamp - lastAcquisitionTimestamp).count()));
446     }
447     mLastAcquisitionTimestampNanoseconds.store(timestamp.count(),
448                                                std::memory_order_relaxed);
449   }
450   // Acquire new (most recent) image from the Surface.
451   mEglSurfaceTexture->updateTexture();
452   std::chrono::nanoseconds captureTimestamp = timestamp;
453 
454   if (flags::camera_timestamp_from_surface()) {
455     std::chrono::nanoseconds surfaceTimestamp =
456         getSurfaceTimestamp(elapsedDuration);
457     if (surfaceTimestamp.count() > 0) {
458       captureTimestamp = surfaceTimestamp;
459     }
460     ALOGV("%s captureTimestamp:%lld timestamp:%lld", __func__,
461           captureTimestamp.count(), timestamp.count());
462   }
463 
464   std::unique_ptr<CaptureResult> captureResult = createCaptureResult(
465       request.getFrameNumber(),
466       createCaptureResultMetadata(
467           captureTimestamp, request.getRequestSettings(), mReportedSensorSize));
468   renderOutputBuffers(request, *captureResult);
469 
470   auto status = notifyShutter(request, *captureResult, captureTimestamp);
471   if (!status.isOk()) {
472     ALOGE("%s: notify call failed: %s", __func__,
473           status.getDescription().c_str());
474     return;
475   }
476 
477   submitCaptureResult(std::move(captureResult));
478 }
479 
throttleRendering(int maxFps,std::chrono::nanoseconds lastAcquisitionTimestamp,std::chrono::nanoseconds timestamp)480 std::chrono::nanoseconds VirtualCameraRenderThread::throttleRendering(
481     int maxFps, std::chrono::nanoseconds lastAcquisitionTimestamp,
482     std::chrono::nanoseconds timestamp) {
483   const std::chrono::nanoseconds minFrameDuration(
484       static_cast<uint64_t>(1e9 / maxFps));
485   const std::chrono::nanoseconds frameDuration =
486       timestamp - lastAcquisitionTimestamp;
487   if (frameDuration < minFrameDuration) {
488     // We're too fast for the configured maxFps, let's wait a bit.
489     const std::chrono::nanoseconds sleepTime = minFrameDuration - frameDuration;
490     ALOGV("Current frame duration would  be %" PRIu64
491           " ns corresponding to, "
492           "sleeping for %" PRIu64
493           " ns before updating texture to match maxFps %d",
494           static_cast<uint64_t>(frameDuration.count()),
495           static_cast<uint64_t>(sleepTime.count()), maxFps);
496 
497     std::this_thread::sleep_for(sleepTime);
498     timestamp = std::chrono::duration_cast<std::chrono::nanoseconds>(
499         std::chrono::steady_clock::now().time_since_epoch());
500     mLastAcquisitionTimestampNanoseconds.store(timestamp.count(),
501                                                std::memory_order_relaxed);
502   }
503   return timestamp;
504 }
505 
getSurfaceTimestamp(std::chrono::nanoseconds timeSinceLastFrame)506 std::chrono::nanoseconds VirtualCameraRenderThread::getSurfaceTimestamp(
507     std::chrono::nanoseconds timeSinceLastFrame) {
508   std::chrono::nanoseconds surfaceTimestamp = mEglSurfaceTexture->getTimestamp();
509   uint64_t lastSurfaceTimestamp = mLastSurfaceTimestampNanoseconds.load();
510   if (lastSurfaceTimestamp > 0 &&
511       surfaceTimestamp.count() <= lastSurfaceTimestamp) {
512     // The timestamps were provided by the producer but we are
513     // repeating the last frame, so we increase the previous timestamp by
514     // the elapsed time sinced its capture, otherwise the camera framework
515     // will discard the frame.
516     surfaceTimestamp = std::chrono::nanoseconds(lastSurfaceTimestamp +
517                                                 timeSinceLastFrame.count());
518     ALOGI(
519         "Surface's timestamp is stall. Artificially increasing the surface "
520         "timestamp by %lld",
521         timeSinceLastFrame.count());
522   }
523   mLastSurfaceTimestampNanoseconds.store(surfaceTimestamp.count(),
524                                          std::memory_order_relaxed);
525   return surfaceTimestamp;
526 }
527 
createCaptureResult(int frameNumber,std::unique_ptr<CameraMetadata> metadata)528 std::unique_ptr<CaptureResult> VirtualCameraRenderThread::createCaptureResult(
529     int frameNumber, std::unique_ptr<CameraMetadata> metadata) {
530   std::unique_ptr<CaptureResult> captureResult =
531       std::make_unique<CaptureResult>();
532   captureResult->fmqResultSize = 0;
533   captureResult->frameNumber = frameNumber;
534   // Partial result needs to be set to 1 when metadata are present.
535   captureResult->partialResult = 1;
536   captureResult->inputBuffer.streamId = -1;
537   captureResult->physicalCameraMetadata.resize(0);
538   captureResult->result = metadata != nullptr ? *metadata : CameraMetadata();
539   return captureResult;
540 }
541 
renderOutputBuffers(const ProcessCaptureRequestTask & request,CaptureResult & captureResult)542 void VirtualCameraRenderThread::renderOutputBuffers(
543     const ProcessCaptureRequestTask& request, CaptureResult& captureResult) {
544   const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
545   captureResult.outputBuffers.resize(buffers.size());
546 
547   for (int i = 0; i < buffers.size(); ++i) {
548     const CaptureRequestBuffer& reqBuffer = buffers[i];
549     StreamBuffer& resBuffer = captureResult.outputBuffers[i];
550     resBuffer.streamId = reqBuffer.getStreamId();
551     resBuffer.bufferId = reqBuffer.getBufferId();
552     resBuffer.status = BufferStatus::OK;
553 
554     const std::optional<Stream> streamConfig =
555         mSessionContext.getStreamConfig(reqBuffer.getStreamId());
556 
557     if (!streamConfig.has_value()) {
558       resBuffer.status = BufferStatus::ERROR;
559       continue;
560     }
561 
562     auto status = streamConfig->format == PixelFormat::BLOB
563                       ? renderIntoBlobStreamBuffer(
564                             reqBuffer.getStreamId(), reqBuffer.getBufferId(),
565                             captureResult.result, request.getRequestSettings(),
566                             reqBuffer.getFence())
567                       : renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
568                                                     reqBuffer.getBufferId(),
569                                                     reqBuffer.getFence());
570     if (!status.isOk()) {
571       resBuffer.status = BufferStatus::ERROR;
572     }
573   }
574 }
575 
notifyTimeout(const ProcessCaptureRequestTask & request,CaptureResult & captureResult)576 ::ndk::ScopedAStatus VirtualCameraRenderThread::notifyTimeout(
577     const ProcessCaptureRequestTask& request, CaptureResult& captureResult) {
578   const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
579   captureResult.outputBuffers.resize(buffers.size());
580 
581   std::vector<NotifyMsg> notifyMsgs;
582 
583   for (int i = 0; i < buffers.size(); ++i) {
584     const CaptureRequestBuffer& reqBuffer = buffers[i];
585     StreamBuffer& resBuffer = captureResult.outputBuffers[i];
586     resBuffer.streamId = reqBuffer.getStreamId();
587     resBuffer.bufferId = reqBuffer.getBufferId();
588     resBuffer.status = BufferStatus::ERROR;
589     notifyMsgs.push_back(createErrorNotifyMsg(
590         request.getFrameNumber(), resBuffer.streamId, ErrorCode::ERROR_REQUEST));
591   }
592   return mCameraDeviceCallback->notify(notifyMsgs);
593 }
594 
notifyShutter(const ProcessCaptureRequestTask & request,const CaptureResult & captureResult,std::chrono::nanoseconds captureTimestamp)595 ::ndk::ScopedAStatus VirtualCameraRenderThread::notifyShutter(
596     const ProcessCaptureRequestTask& request, const CaptureResult& captureResult,
597     std::chrono::nanoseconds captureTimestamp) {
598   std::vector<NotifyMsg> notifyMsgs{
599       createShutterNotifyMsg(request.getFrameNumber(), captureTimestamp)};
600   for (const StreamBuffer& resBuffer : captureResult.outputBuffers) {
601     if (resBuffer.status != BufferStatus::OK) {
602       notifyMsgs.push_back(
603           createErrorNotifyMsg(request.getFrameNumber(), resBuffer.streamId));
604     }
605   }
606 
607   return mCameraDeviceCallback->notify(notifyMsgs);
608 }
609 
submitCaptureResult(std::unique_ptr<CaptureResult> captureResult)610 ::ndk::ScopedAStatus VirtualCameraRenderThread::submitCaptureResult(
611     std::unique_ptr<CaptureResult> captureResult) {
612   std::vector<::aidl::android::hardware::camera::device::CaptureResult>
613       captureResults;
614   captureResults.push_back(std::move(*captureResult));
615 
616   ::ndk::ScopedAStatus status =
617       mCameraDeviceCallback->processCaptureResult(captureResults);
618   if (!status.isOk()) {
619     ALOGE("%s: processCaptureResult call failed: %s", __func__,
620           status.getDescription().c_str());
621     return status;
622   }
623 
624   ALOGV("%s: Successfully called processCaptureResult", __func__);
625   return status;
626 }
627 
flushCaptureRequest(const ProcessCaptureRequestTask & request)628 void VirtualCameraRenderThread::flushCaptureRequest(
629     const ProcessCaptureRequestTask& request) {
630   CaptureResult captureResult;
631   captureResult.fmqResultSize = 0;
632   captureResult.frameNumber = request.getFrameNumber();
633   captureResult.inputBuffer.streamId = -1;
634 
635   const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
636   captureResult.outputBuffers.resize(buffers.size());
637 
638   for (int i = 0; i < buffers.size(); ++i) {
639     const CaptureRequestBuffer& reqBuffer = buffers[i];
640     StreamBuffer& resBuffer = captureResult.outputBuffers[i];
641     resBuffer.streamId = reqBuffer.getStreamId();
642     resBuffer.bufferId = reqBuffer.getBufferId();
643     resBuffer.status = BufferStatus::ERROR;
644     sp<Fence> fence = reqBuffer.getFence();
645     if (fence != nullptr && fence->isValid()) {
646       resBuffer.releaseFence.fds.emplace_back(fence->dup());
647     }
648   }
649 
650   auto status = mCameraDeviceCallback->notify(
651       {createRequestErrorNotifyMsg(request.getFrameNumber())});
652   if (!status.isOk()) {
653     ALOGE("%s: notify call failed: %s", __func__,
654           status.getDescription().c_str());
655     return;
656   }
657 
658   std::vector<::aidl::android::hardware::camera::device::CaptureResult>
659       captureResults(1);
660   captureResults[0] = std::move(captureResult);
661 
662   status = mCameraDeviceCallback->processCaptureResult(captureResults);
663   if (!status.isOk()) {
664     ALOGE("%s: processCaptureResult call failed: %s", __func__,
665           status.getDescription().c_str());
666   }
667 }
668 
createThumbnail(const Resolution resolution,const int quality)669 std::vector<uint8_t> VirtualCameraRenderThread::createThumbnail(
670     const Resolution resolution, const int quality) {
671   if (resolution.width == 0 || resolution.height == 0) {
672     ALOGV("%s: Skipping thumbnail creation, zero size requested", __func__);
673     return {};
674   }
675 
676   ALOGV("%s: Creating thumbnail with size %d x %d, quality %d", __func__,
677         resolution.width, resolution.height, quality);
678   Resolution bufferSize = roundTo2DctSize(resolution);
679   std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
680       mEglDisplayContext->getEglDisplay(), bufferSize.width, bufferSize.height);
681   if (framebuffer == nullptr) {
682     ALOGE(
683         "Failed to allocate temporary framebuffer for JPEG thumbnail "
684         "compression");
685     return {};
686   }
687 
688   // TODO(b/324383963) Add support for letterboxing if the thumbnail sizese
689   // doesn't correspond
690   //  to input texture aspect ratio.
691   if (!renderIntoEglFramebuffer(*framebuffer, /*fence=*/nullptr,
692                                 Rect(resolution.width, resolution.height))
693            .isOk()) {
694     ALOGE(
695         "Failed to render input texture into temporary framebuffer for JPEG "
696         "thumbnail");
697     return {};
698   }
699 
700   std::vector<uint8_t> compressedThumbnail;
701   compressedThumbnail.resize(kJpegThumbnailBufferSize);
702   ALOGE("%s: Compressing thumbnail %d x %d", __func__, resolution.width,
703         resolution.height);
704   std::optional<size_t> compressedSize =
705       compressJpeg(resolution.width, resolution.height, quality,
706                    framebuffer->getHardwareBuffer(), {},
707                    compressedThumbnail.size(), compressedThumbnail.data());
708   if (!compressedSize.has_value()) {
709     ALOGE("%s: Failed to compress jpeg thumbnail", __func__);
710     return {};
711   }
712   compressedThumbnail.resize(compressedSize.value());
713   return compressedThumbnail;
714 }
715 
renderIntoBlobStreamBuffer(const int streamId,const int bufferId,const CameraMetadata & resultMetadata,const RequestSettings & requestSettings,sp<Fence> fence)716 ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoBlobStreamBuffer(
717     const int streamId, const int bufferId, const CameraMetadata& resultMetadata,
718     const RequestSettings& requestSettings, sp<Fence> fence) {
719   std::shared_ptr<AHardwareBuffer> hwBuffer =
720       mSessionContext.fetchHardwareBuffer(streamId, bufferId);
721   if (hwBuffer == nullptr) {
722     ALOGE("%s: Failed to fetch hardware buffer %d for streamId %d", __func__,
723           bufferId, streamId);
724     return cameraStatus(Status::INTERNAL_ERROR);
725   }
726 
727   std::optional<Stream> stream = mSessionContext.getStreamConfig(streamId);
728   if (!stream.has_value()) {
729     ALOGE("%s, failed to fetch information about stream %d", __func__, streamId);
730     return cameraStatus(Status::INTERNAL_ERROR);
731   }
732 
733   ALOGV("%s: Rendering JPEG with size %d x %d, quality %d", __func__,
734         stream->width, stream->height, requestSettings.jpegQuality);
735 
736   // Let's create YUV framebuffer and render the surface into this.
737   // This will take care about rescaling as well as potential format conversion.
738   // The buffer dimensions need to be rounded to nearest multiple of JPEG DCT
739   // size, however we pass the viewport corresponding to size of the stream so
740   // the image will be only rendered to the area corresponding to the stream
741   // size.
742   Resolution bufferSize =
743       roundTo2DctSize(Resolution(stream->width, stream->height));
744   std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
745       mEglDisplayContext->getEglDisplay(), bufferSize.width, bufferSize.height);
746   if (framebuffer == nullptr) {
747     ALOGE("Failed to allocate temporary framebuffer for JPEG compression");
748     return cameraStatus(Status::INTERNAL_ERROR);
749   }
750 
751   // Render into temporary framebuffer.
752   ndk::ScopedAStatus status = renderIntoEglFramebuffer(
753       *framebuffer, /*fence=*/nullptr, Rect(stream->width, stream->height));
754   if (!status.isOk()) {
755     ALOGE("Failed to render input texture into temporary framebuffer");
756     return status;
757   }
758 
759   PlanesLockGuard planesLock(hwBuffer, AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
760                              fence);
761   if (planesLock.getStatus() != OK) {
762     ALOGE("Failed to lock hwBuffer planes");
763     return cameraStatus(Status::INTERNAL_ERROR);
764   }
765 
766   std::vector<uint8_t> app1ExifData =
767       createExif(Resolution(stream->width, stream->height), resultMetadata,
768                  createThumbnail(requestSettings.thumbnailResolution,
769                                  requestSettings.thumbnailJpegQuality));
770 
771   unsigned long outBufferSize = stream->bufferSize - sizeof(CameraBlob);
772   void* outBuffer = (*planesLock).planes[0].data;
773   std::optional<size_t> compressedSize = compressJpeg(
774       stream->width, stream->height, requestSettings.jpegQuality,
775       framebuffer->getHardwareBuffer(), app1ExifData, outBufferSize, outBuffer);
776 
777   if (!compressedSize.has_value()) {
778     ALOGE("%s: Failed to compress JPEG image", __func__);
779     return cameraStatus(Status::INTERNAL_ERROR);
780   }
781 
782   // Add the transport header at the end of the JPEG output buffer.
783   //
784   // jpegBlobId must start at byte[buffer_size - sizeof(CameraBlob)],
785   // where the buffer_size is the size of gralloc buffer.
786   //
787   // See
788   // hardware/interfaces/camera/device/aidl/android/hardware/camera/device/CameraBlobId.aidl
789   // for the full explanation of the following code.
790   CameraBlob cameraBlob{
791       .blobId = CameraBlobId::JPEG,
792       .blobSizeBytes = static_cast<int32_t>(compressedSize.value())};
793 
794   // Copy the cameraBlob to the end of the JPEG buffer.
795   uint8_t* jpegStreamEndAddress =
796       reinterpret_cast<uint8_t*>((*planesLock).planes[0].data) +
797       (stream->bufferSize - sizeof(cameraBlob));
798   memcpy(jpegStreamEndAddress, &cameraBlob, sizeof(cameraBlob));
799 
800   ALOGV("%s: Successfully compressed JPEG image, resulting size %zu B",
801         __func__, compressedSize.value());
802 
803   return ndk::ScopedAStatus::ok();
804 }
805 
renderIntoImageStreamBuffer(int streamId,int bufferId,sp<Fence> fence)806 ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoImageStreamBuffer(
807     int streamId, int bufferId, sp<Fence> fence) {
808   ALOGV("%s", __func__);
809 
810   const std::chrono::nanoseconds before =
811       std::chrono::duration_cast<std::chrono::nanoseconds>(
812           std::chrono::steady_clock::now().time_since_epoch());
813 
814   // Render test pattern using EGL.
815   std::shared_ptr<EglFrameBuffer> framebuffer =
816       mSessionContext.fetchOrCreateEglFramebuffer(
817           mEglDisplayContext->getEglDisplay(), streamId, bufferId);
818   if (framebuffer == nullptr) {
819     ALOGE(
820         "%s: Failed to get EGL framebuffer corresponding to buffer id "
821         "%d for streamId %d",
822         __func__, bufferId, streamId);
823     return cameraStatus(Status::ILLEGAL_ARGUMENT);
824   }
825 
826   ndk::ScopedAStatus status = renderIntoEglFramebuffer(*framebuffer, fence);
827 
828   const std::chrono::nanoseconds after =
829       std::chrono::duration_cast<std::chrono::nanoseconds>(
830           std::chrono::steady_clock::now().time_since_epoch());
831 
832   ALOGV("Rendering to buffer %d, stream %d took %lld ns", bufferId, streamId,
833         after.count() - before.count());
834 
835   return ndk::ScopedAStatus::ok();
836 }
837 
renderIntoEglFramebuffer(EglFrameBuffer & framebuffer,sp<Fence> fence,std::optional<Rect> viewport)838 ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoEglFramebuffer(
839     EglFrameBuffer& framebuffer, sp<Fence> fence, std::optional<Rect> viewport) {
840   ALOGV("%s", __func__);
841   // Wait for fence to clear.
842   if (fence != nullptr && fence->isValid()) {
843     status_t ret = fence->wait(kAcquireFenceTimeout.count());
844     if (ret != 0) {
845       ALOGE("Timeout while waiting for the acquire fence for buffer");
846       return cameraStatus(Status::INTERNAL_ERROR);
847     }
848   }
849 
850   mEglDisplayContext->makeCurrent();
851   framebuffer.beforeDraw();
852 
853   Rect viewportRect =
854       viewport.value_or(Rect(framebuffer.getWidth(), framebuffer.getHeight()));
855   glViewport(viewportRect.left, viewportRect.top, viewportRect.getWidth(),
856              viewportRect.getHeight());
857 
858   sp<GraphicBuffer> textureBuffer = mEglSurfaceTexture->getCurrentBuffer();
859   if (textureBuffer == nullptr) {
860     // If there's no current buffer, nothing was written to the surface and
861     // texture is not initialized yet. Let's render the framebuffer black
862     // instead of rendering the texture.
863     glClearColor(0.0f, 0.5f, 0.5f, 0.0f);
864     glClear(GL_COLOR_BUFFER_BIT);
865   } else {
866     const bool renderSuccess =
867         isYuvFormat(static_cast<PixelFormat>(textureBuffer->getPixelFormat()))
868             ? mEglTextureYuvProgram->draw(
869                   mEglSurfaceTexture->getTextureId(),
870                   mEglSurfaceTexture->getTransformMatrix())
871             : mEglTextureRgbProgram->draw(
872                   mEglSurfaceTexture->getTextureId(),
873                   mEglSurfaceTexture->getTransformMatrix());
874     if (!renderSuccess) {
875       ALOGE("%s: Failed to render texture", __func__);
876       return cameraStatus(Status::INTERNAL_ERROR);
877     }
878   }
879   framebuffer.afterDraw();
880 
881   return ndk::ScopedAStatus::ok();
882 }
883 
884 }  // namespace virtualcamera
885 }  // namespace companion
886 }  // namespace android
887