xref: /aosp_15_r20/hardware/interfaces/automotive/evs/aidl/impl/default/src/EvsVideoEmulatedCamera.cpp (revision 4d7e907c777eeecc4c5bd7cf640a754fac206ff7)
1 /*
2  * Copyright (C) 2023 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #include "EvsVideoEmulatedCamera.h"
18 
19 #include <aidl/android/hardware/automotive/evs/EvsResult.h>
20 
21 #include <aidlcommonsupport/NativeHandle.h>
22 #include <android-base/logging.h>
23 #include <android-base/strings.h>
24 #include <media/stagefright/MediaCodecConstants.h>
25 #include <ui/GraphicBufferAllocator.h>
26 #include <utils/SystemClock.h>
27 
28 #include <fcntl.h>
29 #include <libyuv.h>
30 #include <sys/types.h>
31 #include <unistd.h>
32 
33 #include <chrono>
34 #include <cstddef>
35 #include <cstdint>
36 #include <tuple>
37 #include <utility>
38 
39 // Uncomment below line to dump decoded frames.
40 // #define DUMP_FRAMES (1)
41 
42 namespace aidl::android::hardware::automotive::evs::implementation {
43 
44 namespace {
45 
46 struct FormatDeleter {
47     void operator()(AMediaFormat* format) const { AMediaFormat_delete(format); }
48 };
49 
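    // The fillRGBAFrom* helpers below share one signature (Y/U/V plane pointers and strides plus
    // the destination buffer) so that initializeMediaCodec() can pick one at runtime and store it
    // in mFillBuffer. The semi-planar variants (NV12/NV21) simply ignore the third plane arguments.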
50 int fillRGBAFromNv12(const uint8_t* src_y, int src_stride_y, const uint8_t* src_uv,
51                      int src_stride_uv, const uint8_t*, int, uint8_t* dst_abgr, int dst_stride_abgr,
52                      int width, int height) {
53     return libyuv::NV12ToABGR(src_y, src_stride_y, src_uv, src_stride_uv, dst_abgr, dst_stride_abgr,
54                               width, height);
55 }
56 
57 int fillRGBAFromNv21(const uint8_t* src_y, int src_stride_y, const uint8_t* src_vu,
58                      int src_stride_vu, const uint8_t*, int, uint8_t* dst_abgr, int dst_stride_abgr,
59                      int width, int height) {
60     return libyuv::NV21ToABGR(src_y, src_stride_y, src_vu, src_stride_vu, dst_abgr, dst_stride_abgr,
61                               width, height);
62 }
63 
64 int fillRGBAFromYv12(const uint8_t* src_y, int src_stride_y, const uint8_t* src_u, int src_stride_u,
65                      const uint8_t* src_v, int src_stride_v, uint8_t* dst_abgr, int dst_stride_abgr,
66                      int width, int height) {
67     return libyuv::I420ToABGR(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
68                               dst_abgr, dst_stride_abgr, width, height);
69 }
70 
71 int fillRGBAFromI420(const uint8_t* src_y, int src_stride_y, const uint8_t* src_u, int src_stride_u,
72                      const uint8_t* src_v, int src_stride_v, uint8_t* dst_abgr, int dst_stride_abgr,
73                      int width, int height) {
74     return libyuv::I420ToABGR(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
75                               dst_abgr, dst_stride_abgr, width, height);
76 }
77 
78 }  // namespace
79 
80 EvsVideoEmulatedCamera::EvsVideoEmulatedCamera(Sigil, const char* deviceName,
81                                                std::unique_ptr<ConfigManager::CameraInfo>& camInfo)
82     : mVideoFileName(deviceName), mCameraInfo(camInfo) {
83     mDescription.id = mVideoFileName;
84 
85     /* set camera metadata */
86     if (camInfo) {
87         uint8_t* ptr = reinterpret_cast<uint8_t*>(camInfo->characteristics);
88         const size_t len = get_camera_metadata_size(camInfo->characteristics);
89         mDescription.metadata.insert(mDescription.metadata.end(), ptr, ptr + len);
90     }
91 
92     initializeParameters();
93 }
94 
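    // initialize() opens the backing video file and points an AMediaExtractor at its full
    // contents before handing off to initializeMediaCodec() to set up the decoder.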
95 bool EvsVideoEmulatedCamera::initialize() {
96     // Open file.
97     mVideoFd = open(mVideoFileName.c_str(), O_RDONLY);
98     if (mVideoFd < 0) {
99         PLOG(ERROR) << __func__ << ": Failed to open video file \"" << mVideoFileName << "\".";
100         return false;
101     }
102 
103     // Initialize Media Extractor.
104     {
105         mVideoExtractor.reset(AMediaExtractor_new());
106         off64_t filesize = lseek64(mVideoFd, 0, SEEK_END);
107         lseek(mVideoFd, 0, SEEK_SET);
108         const media_status_t status =
109                 AMediaExtractor_setDataSourceFd(mVideoExtractor.get(), mVideoFd, 0, filesize);
110         if (status != AMEDIA_OK) {
111             LOG(ERROR) << __func__
112                        << ": Received error when initializing media extractor. Error code: "
113                        << status << ".";
114             return false;
115         }
116     }
117 
118     return initializeMediaCodec();
119 }
120 
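    // Select the first "video/" track, create a decoder for its MIME type, request flexible
    // YUV 4:2:0 output, and cache the frame geometry together with the YUV-to-RGBA converter that
    // matches the configured source pixel format.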
121 bool EvsVideoEmulatedCamera::initializeMediaCodec() {
122     // Initialize Media Codec and file format.
123     std::unique_ptr<AMediaFormat, FormatDeleter> format;
124     const char* mime;
125     bool selected = false;
126     int numTracks = AMediaExtractor_getTrackCount(mVideoExtractor.get());
127     for (int i = 0; i < numTracks; i++) {
128         format.reset(AMediaExtractor_getTrackFormat(mVideoExtractor.get(), i));
129         if (!AMediaFormat_getString(format.get(), AMEDIAFORMAT_KEY_MIME, &mime)) {
130             LOG(ERROR) << __func__ << ": Error in fetching format string";
131             continue;
132         }
133         if (!::android::base::StartsWith(mime, "video/")) {
134             continue;
135         }
136         const media_status_t status = AMediaExtractor_selectTrack(mVideoExtractor.get(), i);
137         if (status != AMEDIA_OK) {
138             LOG(ERROR) << __func__
139                        << ": Media extractor returned error to select track. Error Code: " << status
140                        << ".";
141             return false;
142         }
143         selected = true;
144         break;
145     }
146     if (!selected) {
147         LOG(ERROR) << __func__ << ": No video track in video file \"" << mVideoFileName << "\".";
148         return false;
149     }
150 
151     mVideoCodec.reset(AMediaCodec_createDecoderByType(mime));
152     if (!mVideoCodec) {
153         LOG(ERROR) << __func__ << ": Unable to create decoder.";
154         return false;
155     }
156 
157     mDescription.vendorFlags = 0xFFFFFFFF;  // Arbitrary test value
158     mUsage = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_CAMERA_WRITE |
159              GRALLOC_USAGE_SW_READ_RARELY | GRALLOC_USAGE_SW_WRITE_RARELY;
160     mFormat = HAL_PIXEL_FORMAT_RGBA_8888;
161     AMediaFormat_setInt32(format.get(), AMEDIAFORMAT_KEY_COLOR_FORMAT, COLOR_FormatYUV420Flexible);
162     {
163         const media_status_t status =
164                 AMediaCodec_configure(mVideoCodec.get(), format.get(), nullptr, nullptr, 0);
165         if (status != AMEDIA_OK) {
166             LOG(ERROR) << __func__
167                        << ": Received error in configuring mCodec. Error code: " << status << ".";
168             return false;
169         }
170     }
171     format.reset(AMediaCodec_getOutputFormat(mVideoCodec.get()));
172     AMediaFormat_getInt32(format.get(), AMEDIAFORMAT_KEY_WIDTH, &mWidth);
173     AMediaFormat_getInt32(format.get(), AMEDIAFORMAT_KEY_HEIGHT, &mHeight);
174 
175     switch (mCameraInfo->format) {
176         default:
177         case ConfigManager::CameraInfo::PixelFormat::NV12:
178             mFillBuffer = fillRGBAFromNv12;
179             mUvStride = mWidth;
180             mDstStride = mWidth * 4;
181             break;
182         case ConfigManager::CameraInfo::PixelFormat::NV21:
183             mFillBuffer = fillRGBAFromNv21;
184             mUvStride = mWidth;
185             mDstStride = mWidth * 4;
186             break;
187         case ConfigManager::CameraInfo::PixelFormat::YV12:
188             mFillBuffer = fillRGBAFromYv12;
189             mUvStride = mWidth / 2;
190             mDstStride = mWidth * 4;
191             break;
192         case ConfigManager::CameraInfo::PixelFormat::I420:
193             mFillBuffer = fillRGBAFromI420;
194             mUvStride = mWidth / 2;
195             mDstStride = mWidth * 4;
196             break;
197     }
198     return true;
199 }
200 
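    // Body of the capture thread: keep decoding and delivering frames until the stream leaves the
    // RUNNING state.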
201 void EvsVideoEmulatedCamera::generateFrames() {
202     while (true) {
203         {
204             std::lock_guard lock(mMutex);
205             if (mStreamState != StreamState::RUNNING) {
206                 return;
207             }
208         }
209         renderOneFrame();
210     }
211 }
212 
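    // Feed one extractor sample into the decoder input buffer identified by `index`. A negative
    // sample time (no more samples) is treated as end-of-stream, so an empty buffer is queued with
    // the EOS flag instead.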
213 void EvsVideoEmulatedCamera::onCodecInputAvailable(const int32_t index) {
214     const size_t sampleSize = AMediaExtractor_getSampleSize(mVideoExtractor.get());
215     const int64_t presentationTime = AMediaExtractor_getSampleTime(mVideoExtractor.get());
216     size_t bufferSize = 0;
217     uint8_t* const codecInputBuffer =
218             AMediaCodec_getInputBuffer(mVideoCodec.get(), index, &bufferSize);
219     if (sampleSize > bufferSize) {
220         LOG(ERROR) << __func__ << ": Buffer is not large enough.";
221     }
222     if (presentationTime < 0) {
223         AMediaCodec_queueInputBuffer(mVideoCodec.get(), index, /* offset = */ 0,
224                                      /* size = */ 0, presentationTime,
225                                      AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM);
226         LOG(INFO) << __func__ << ": Reached the end of the stream.";
227         return;
228     }
229     const size_t readSize =
230             AMediaExtractor_readSampleData(mVideoExtractor.get(), codecInputBuffer, sampleSize);
231     const media_status_t status = AMediaCodec_queueInputBuffer(
232             mVideoCodec.get(), index, /*offset = */ 0, readSize, presentationTime, /* flags = */ 0);
233     if (status != AMEDIA_OK) {
234         LOG(ERROR) << __func__
235                    << ": Received error in queueing input buffer. Error code: " << status;
236     }
237 }
238 
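    // Convert one decoded frame to RGBA in a gralloc buffer and deliver it to the registered
    // stream. The frame is dropped if the stream is no longer RUNNING or no render buffer is
    // available.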
239 void EvsVideoEmulatedCamera::onCodecOutputAvailable(const int32_t index,
240                                                     const AMediaCodecBufferInfo& info) {
241     using std::chrono::duration_cast;
242     using std::chrono::microseconds;
243     using std::chrono::nanoseconds;
244     using AidlPixelFormat = ::aidl::android::hardware::graphics::common::PixelFormat;
245     using ::aidl::android::hardware::graphics::common::BufferUsage;
246 
247     size_t decodedOutSize = 0;
248     uint8_t* const codecOutputBuffer =
249             AMediaCodec_getOutputBuffer(mVideoCodec.get(), index, &decodedOutSize) + info.offset;
250 
251     int color_format = 0;
252     const std::unique_ptr<AMediaFormat, FormatDeleter> outFormat(AMediaCodec_getOutputFormat(mVideoCodec.get()));
253     if (!AMediaFormat_getInt32(outFormat.get(), AMEDIAFORMAT_KEY_COLOR_FORMAT, &color_format)) {
254         LOG(ERROR) << "Failed to get the color format.";
255         return;
256     }
257 
258     int stride = 0;
259     if (!AMediaFormat_getInt32(outFormat.get(), AMEDIAFORMAT_KEY_STRIDE, &stride)) {
260         LOG(WARNING) << "Cannot find stride in format. Defaulting to the frame width.";
261         stride = mWidth;
262     }
263 
264     int slice_height = 0;
265     if (!AMediaFormat_getInt32(outFormat.get(), AMEDIAFORMAT_KEY_SLICE_HEIGHT, &slice_height)) {
266         LOG(WARNING) << "Cannot find slice-height in format. Defaulting to the frame height.";
267         slice_height = mHeight;
268     }
269 
270     LOG(DEBUG) << "COLOR FORMAT: " << color_format << " stride: " << stride
271                << " height: " << slice_height;
272 
273     std::size_t renderBufferId = static_cast<std::size_t>(-1);
274     buffer_handle_t renderBufferHandle = nullptr;
275     {
276         std::lock_guard lock(mMutex);
277         if (mStreamState != StreamState::RUNNING) {
278             return;
279         }
280         std::tie(renderBufferId, renderBufferHandle) = useBuffer_unsafe();
281     }
282     if (!renderBufferHandle) {
283         LOG(DEBUG) << __func__ << ": Camera failed to get an available render buffer.";
284         return;
285     }
286     std::vector<BufferDesc> renderBufferDescs;
287     renderBufferDescs.push_back({
288             .buffer =
289                     {
290                             .description =
291                                     {
292                                             .width = static_cast<int32_t>(mWidth),
293                                             .height = static_cast<int32_t>(mHeight),
294                                             .layers = 1,
295                                             .format = static_cast<AidlPixelFormat>(mFormat),
296                                             .usage = static_cast<BufferUsage>(mUsage),
297                                             .stride = static_cast<int32_t>(mStride),
298                                     },
299                             .handle = ::android::dupToAidl(renderBufferHandle),
300                     },
301             .bufferId = static_cast<int32_t>(renderBufferId),
302             .deviceId = mDescription.id,
303             .timestamp = duration_cast<microseconds>(nanoseconds(::android::elapsedRealtimeNano()))
304                                  .count(),
305     });
306 
307     // Lock our output buffer for writing
308     uint8_t* pixels = nullptr;
309     auto& mapper = ::android::GraphicBufferMapper::get();
310     mapper.lock(renderBufferHandle, GRALLOC_USAGE_SW_WRITE_OFTEN | GRALLOC_USAGE_SW_READ_NEVER,
311                 ::android::Rect(mWidth, mHeight), (void**)&pixels);
312 
313     // If we failed to lock the pixel buffer, we cannot render this frame; return the buffer.
314     if (!pixels) {
315         LOG(ERROR) << __func__ << ": Camera failed to gain access to image buffer for writing";
            doneWithFrame(renderBufferDescs);
316         return;
317     }
318 
319     // Decoded output is in YUV4:2:0.
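        // Note: the plane pointers below assume a tightly packed buffer (stride == mWidth and
        // slice height == mHeight); the stride/slice-height values reported by the codec above are
        // only logged, not applied.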
320     std::size_t ySize = mHeight * mWidth;
321     std::size_t uvSize = ySize / 4;
322 
323     uint8_t* u_head = codecOutputBuffer + ySize;
324     uint8_t* v_head = u_head + uvSize;
325 
326 #if DUMP_FRAMES
327     // TODO: We may want to keep this "dump" option.
328     static int dumpCount = 0;
329     const bool dumpData = ++dumpCount < 10;
330     if (dumpData) {
331         std::string path = "/data/vendor/dump/";
332         path += "dump_" + std::to_string(dumpCount) + ".bin";
333 
334         ::android::base::unique_fd fd(
335                 open(path.data(), O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR | S_IRGRP));
336         if (fd < 0) {
337             LOG(ERROR) << "Failed to open " << path;
338         } else {
339             auto len = write(fd.get(), codecOutputBuffer, info.size);
340             LOG(ERROR) << "Write " << len << " to " << path;
341         }
342     }
343 #endif
344     if (auto result = mFillBuffer(codecOutputBuffer, mWidth, u_head, mUvStride, v_head, mUvStride,
345                                   pixels, mDstStride, mWidth, mHeight);
346         result != 0) {
347         LOG(ERROR) << "Failed to convert the YUV frame to RGBA";
348     }
349 #if DUMP_FRAMES
350     else if (dumpData) {
351         std::string path = "/data/vendor/dump/";
352         path += "dump_" + std::to_string(dumpCount) + "_rgba.bin";
353 
354         ::android::base::unique_fd fd(
355                 open(path.data(), O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR | S_IRGRP));
356         if (fd < 0) {
357             LOG(ERROR) << "Failed to open " << path;
358         } else {
359             auto len = write(fd.get(), pixels, mStride * mHeight * 4);
360             LOG(ERROR) << "Write " << len << " to " << path;
361         }
362     }
363 #endif
364 
365     // Release our output buffer
366     mapper.unlock(renderBufferHandle);
367 
368     // Issue the (asynchronous) callback to the client -- can't be holding the lock
369     if (mStream && mStream->deliverFrame(renderBufferDescs).isOk()) {
370         LOG(DEBUG) << __func__ << ": Delivered " << renderBufferHandle
371                    << ", id = " << renderBufferId;
372     } else {
373         // This can happen if the client dies and is likely unrecoverable.
374         // To avoid consuming resources generating failing calls, we stop sending
375         // frames.  Note, however, that the stream remains in the RUNNING state
376         // until cleaned up on the main thread.
377         LOG(ERROR) << __func__ << ": Frame delivery call failed in the transport layer.";
378         doneWithFrame(renderBufferDescs);
379     }
380 }
381 
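    // One iteration of the decode loop: queue every input buffer the codec will accept
    // (non-blocking), then drain a single output buffer with a 1 ms timeout. When the
    // end-of-stream flag comes back, rewind the extractor and flush the codec so the video loops.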
382 void EvsVideoEmulatedCamera::renderOneFrame() {
383     using std::chrono::duration_cast;
384     using std::chrono::microseconds;
385     using namespace std::chrono_literals;
386 
387     // push to codec input
388     while (true) {
389         int codecInputBufferIdx =
390                 AMediaCodec_dequeueInputBuffer(mVideoCodec.get(), /* timeoutUs = */ 0);
391         if (codecInputBufferIdx < 0) {
392             if (codecInputBufferIdx != AMEDIACODEC_INFO_TRY_AGAIN_LATER) {
393                 LOG(ERROR) << __func__
394                            << ": Received error in AMediaCodec_dequeueInputBuffer. Error code: "
395                            << codecInputBufferIdx;
396             }
397             break;
398         }
399         onCodecInputAvailable(codecInputBufferIdx);
400         AMediaExtractor_advance(mVideoExtractor.get());
401     }
402 
403     // pop from codec output
404 
405     AMediaCodecBufferInfo info;
406     int codecOutputBufferIdx = AMediaCodec_dequeueOutputBuffer(
407             mVideoCodec.get(), &info, /* timeoutUs = */ duration_cast<microseconds>(1ms).count());
408     if (codecOutputBufferIdx < 0) {
409         if (codecOutputBufferIdx != AMEDIACODEC_INFO_TRY_AGAIN_LATER) {
410             LOG(ERROR) << __func__
411                        << ": Received error in AMediaCodec_dequeueOutputBuffer. Error code: "
412                        << codecOutputBufferIdx;
413         }
414         return;
415     }
416     onCodecOutputAvailable(codecOutputBufferIdx, info);
417     const auto release_status = AMediaCodec_releaseOutputBuffer(
418             mVideoCodec.get(), codecOutputBufferIdx, /* render = */ false);
419     if (release_status != AMEDIA_OK) {
420         LOG(ERROR) << __func__
421                    << ": Received error in releasing output buffer. Error code: " << release_status;
422     }
423 
424     if ((info.flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM) != 0) {
425         LOG(INFO) << "Start video playback from the beginning.";
426         AMediaExtractor_seekTo(mVideoExtractor.get(), /* seekPosUs= */ 0,
427                                AMEDIAEXTRACTOR_SEEK_CLOSEST_SYNC);
428         AMediaCodec_flush(mVideoCodec.get());
429     }
430 }
431 
432 void EvsVideoEmulatedCamera::initializeParameters() {
433     mParams.emplace(
434             CameraParam::BRIGHTNESS,
435             new CameraParameterDesc(/* min= */ 0, /* max= */ 255, /* step= */ 1, /* value= */ 255));
436     mParams.emplace(
437             CameraParam::CONTRAST,
438             new CameraParameterDesc(/* min= */ 0, /* max= */ 255, /* step= */ 1, /* value= */ 255));
439     mParams.emplace(
440             CameraParam::SHARPNESS,
441             new CameraParameterDesc(/* min= */ 0, /* max= */ 255, /* step= */ 1, /* value= */ 255));
442 }
443 
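    // Gralloc reports the allocated stride in pixels per line and may round it up from the
    // requested width; the first allocation establishes mStride and later allocations are expected
    // to match it.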
444 ::android::status_t EvsVideoEmulatedCamera::allocateOneFrame(buffer_handle_t* handle) {
445     static auto& alloc = ::android::GraphicBufferAllocator::get();
446     unsigned pixelsPerLine = 0;
447     const auto result = alloc.allocate(mWidth, mHeight, HAL_PIXEL_FORMAT_RGBA_8888, 1, mUsage,
448                                        handle, &pixelsPerLine, 0, "EvsVideoEmulatedCamera");
449     if (mStride == 0) {
450         // Gralloc defines stride in terms of pixels per line
451         mStride = pixelsPerLine;
452     } else if (mStride != pixelsPerLine) {
453         LOG(ERROR) << "We did not expect to get buffers with different strides!";
454     }
455     return result;
456 }
457 
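    // Start the decoder; if AMediaCodec_start() fails, rebuild the codec, rewind the extractor,
    // and retry once before giving up. On success a dedicated capture thread runs generateFrames().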
458 bool EvsVideoEmulatedCamera::startVideoStreamImpl_locked(
459         const std::shared_ptr<evs::IEvsCameraStream>& receiver, ndk::ScopedAStatus& /* status */,
460         std::unique_lock<std::mutex>& /* lck */) {
461     mStream = receiver;
462 
463     if (auto status = AMediaCodec_start(mVideoCodec.get()); status != AMEDIA_OK) {
464         LOG(INFO) << __func__ << ": Received error in starting decoder. "
465                   << "Trying again after resetting this emulated device.";
466 
467         if (!initializeMediaCodec()) {
468             LOG(ERROR) << __func__ << ": Failed to re-configure the media codec.";
469             return false;
470         }
471 
472         AMediaExtractor_seekTo(mVideoExtractor.get(), /* seekPosUs= */ 0,
473                                AMEDIAEXTRACTOR_SEEK_CLOSEST_SYNC);
474         AMediaCodec_flush(mVideoCodec.get());
475 
476         if (auto status = AMediaCodec_start(mVideoCodec.get()); status != AMEDIA_OK) {
477             LOG(ERROR) << __func__ << ": Received error again in starting decoder. "
478                        << "Error code: " << status;
479             return false;
480         }
481     }
482     mCaptureThread = std::thread([this]() { generateFrames(); });
483 
484     return true;
485 }
486 
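    // Stop the decoder, then drop the lock while joining the capture thread: generateFrames()
    // takes mMutex on every iteration, so joining while still holding it could deadlock.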
487 bool EvsVideoEmulatedCamera::stopVideoStreamImpl_locked(ndk::ScopedAStatus& /* status */,
488                                                         std::unique_lock<std::mutex>& lck) {
489     const media_status_t status = AMediaCodec_stop(mVideoCodec.get());
490     lck.unlock();
491     if (mCaptureThread.joinable()) {
492         mCaptureThread.join();
493     }
494     lck.lock();
495     return status == AMEDIA_OK;
496 }
497 
498 bool EvsVideoEmulatedCamera::postVideoStreamStop_locked(ndk::ScopedAStatus& status,
499                                                         std::unique_lock<std::mutex>& lck) {
500     if (!Base::postVideoStreamStop_locked(status, lck)) {
501         return false;
502     }
503 
504     EvsEventDesc event = {
505             .aType = EvsEventType::STREAM_STOPPED,
506     };
507     if (auto result = mStream->notify(event); !result.isOk()) {
508         LOG(WARNING) << "Failed to notify the end of the stream.";
509     }
510 
511     mStream = nullptr;
512     return true;
513 }
514 
515 ndk::ScopedAStatus EvsVideoEmulatedCamera::forcePrimaryClient(
516         const std::shared_ptr<evs::IEvsDisplay>& /* display */) {
517     /* Because EVS HW module reference implementation expects a single client at
518      * a time, this returns a success code always.
519      */
520     return ndk::ScopedAStatus::ok();
521 }
522 
523 ndk::ScopedAStatus EvsVideoEmulatedCamera::getCameraInfo(evs::CameraDesc* _aidl_return) {
524     *_aidl_return = mDescription;
525     return ndk::ScopedAStatus::ok();
526 }
527 
528 ndk::ScopedAStatus EvsVideoEmulatedCamera::getExtendedInfo(int32_t opaqueIdentifier,
529                                                            std::vector<uint8_t>* value) {
530     const auto it = mExtInfo.find(opaqueIdentifier);
531     if (it == mExtInfo.end()) {
532         return ndk::ScopedAStatus::fromServiceSpecificError(
533                 static_cast<int>(EvsResult::INVALID_ARG));
534     } else {
535         *value = it->second;
536     }
537     return ndk::ScopedAStatus::ok();
538 }
539 
540 ndk::ScopedAStatus EvsVideoEmulatedCamera::getIntParameter(evs::CameraParam id,
541                                                            std::vector<int32_t>* value) {
542     const auto it = mParams.find(id);
543     if (it == mParams.end()) {
544         return ndk::ScopedAStatus::fromServiceSpecificError(
545                 static_cast<int>(EvsResult::NOT_SUPPORTED));
546     }
547     value->push_back(it->second->value);
548     return ndk::ScopedAStatus::ok();
549 }
550 
551 ndk::ScopedAStatus EvsVideoEmulatedCamera::getIntParameterRange(evs::CameraParam id,
552                                                                 evs::ParameterRange* _aidl_return) {
553     const auto it = mParams.find(id);
554     if (it == mParams.end()) {
555         return ndk::ScopedAStatus::fromServiceSpecificError(
556                 static_cast<int>(EvsResult::NOT_SUPPORTED));
557     }
558     _aidl_return->min = it->second->range.min;
559     _aidl_return->max = it->second->range.max;
560     _aidl_return->step = it->second->range.step;
561     return ndk::ScopedAStatus::ok();
562 }
563 
564 ndk::ScopedAStatus EvsVideoEmulatedCamera::getParameterList(
565         std::vector<evs::CameraParam>* _aidl_return) {
566     if (mCameraInfo) {
567         _aidl_return->resize(mCameraInfo->controls.size());
568         std::size_t idx = 0;
569         for (const auto& [name, range] : mCameraInfo->controls) {
570             (*_aidl_return)[idx++] = name;
571         }
572     }
573     return ndk::ScopedAStatus::ok();
574 }
575 
576 ndk::ScopedAStatus EvsVideoEmulatedCamera::getPhysicalCameraInfo(const std::string& /* deviceId */,
577                                                                  evs::CameraDesc* _aidl_return) {
578     return getCameraInfo(_aidl_return);
579 }
580 
581 ndk::ScopedAStatus EvsVideoEmulatedCamera::setExtendedInfo(
582         int32_t opaqueIdentifier, const std::vector<uint8_t>& opaqueValue) {
583     mExtInfo.insert_or_assign(opaqueIdentifier, opaqueValue);
584     return ndk::ScopedAStatus::ok();
585 }
586 
587 ndk::ScopedAStatus EvsVideoEmulatedCamera::setIntParameter(evs::CameraParam id, int32_t value,
588                                                            std::vector<int32_t>* effectiveValue) {
589     const auto it = mParams.find(id);
590     if (it == mParams.end()) {
591         return ndk::ScopedAStatus::fromServiceSpecificError(
592                 static_cast<int>(EvsResult::NOT_SUPPORTED));
593     }
594     // Round the requested value down to the closest multiple of the step.
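        // For example, with a hypothetical step of 4, a requested value of 10 would become 8
        // before the range check (all parameters in initializeParameters() currently use a step
        // of 1, so the rounding is a no-op there).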
595     int32_t candidate = value / it->second->range.step * it->second->range.step;
596     if (candidate < it->second->range.min || candidate > it->second->range.max) {
597         return ndk::ScopedAStatus::fromServiceSpecificError(
598                 static_cast<int>(EvsResult::INVALID_ARG));
599     }
600     it->second->value = candidate;
601     effectiveValue->push_back(candidate);
602     return ndk::ScopedAStatus::ok();
603 }
604 
605 ndk::ScopedAStatus EvsVideoEmulatedCamera::setPrimaryClient() {
606     /* Because EVS HW module reference implementation expects a single client at
607      * a time, this returns a success code always.
608      */
609     return ndk::ScopedAStatus::ok();
610 }
611 
612 ndk::ScopedAStatus EvsVideoEmulatedCamera::unsetPrimaryClient() {
613     /* Because EVS HW module reference implementation expects a single client at
614      * a time, there is no chance that this is called by the secondary client and
615      * therefore returns a success code always.
616      */
617     return ndk::ScopedAStatus::ok();
618 }
619 
620 std::shared_ptr<EvsVideoEmulatedCamera> EvsVideoEmulatedCamera::Create(const char* deviceName) {
621     std::unique_ptr<ConfigManager::CameraInfo> nullCamInfo = nullptr;
622     return Create(deviceName, nullCamInfo);
623 }
624 
625 std::shared_ptr<EvsVideoEmulatedCamera> EvsVideoEmulatedCamera::Create(
626         const char* deviceName, std::unique_ptr<ConfigManager::CameraInfo>& camInfo,
627         const evs::Stream* /* streamCfg */) {
628     std::shared_ptr<EvsVideoEmulatedCamera> c =
629             ndk::SharedRefBase::make<EvsVideoEmulatedCamera>(Sigil{}, deviceName, camInfo);
630     if (!c) {
631         LOG(ERROR) << "Failed to instantiate EvsVideoEmulatedCamera.";
632         return nullptr;
633     }
634     if (!c->initialize()) {
635         LOG(ERROR) << "Failed to initialize EvsVideoEmulatedCamera.";
636         return nullptr;
637     }
638     return c;
639 }
640 
641 void EvsVideoEmulatedCamera::shutdown() {
642     mVideoCodec.reset();
643     mVideoExtractor.reset();
644     close(mVideoFd);
645     mVideoFd = -1;
646     Base::shutdown();
647 }
648 
649 }  // namespace aidl::android::hardware::automotive::evs::implementation
650