xref: /aosp_15_r20/hardware/interfaces/automotive/evs/aidl/vts/VtsHalEvsTargetTest.cpp (revision 4d7e907c777eeecc4c5bd7cf640a754fac206ff7)
1 /*
2  * Copyright (C) 2022 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #include "FrameHandler.h"
18 #include "FrameHandlerUltrasonics.h"
19 
20 #include <aidl/Gtest.h>
21 #include <aidl/Vintf.h>
22 #include <aidl/android/hardware/automotive/evs/BnEvsEnumeratorStatusCallback.h>
23 #include <aidl/android/hardware/automotive/evs/BufferDesc.h>
24 #include <aidl/android/hardware/automotive/evs/CameraDesc.h>
25 #include <aidl/android/hardware/automotive/evs/CameraParam.h>
26 #include <aidl/android/hardware/automotive/evs/DeviceStatus.h>
27 #include <aidl/android/hardware/automotive/evs/DisplayDesc.h>
28 #include <aidl/android/hardware/automotive/evs/DisplayState.h>
29 #include <aidl/android/hardware/automotive/evs/EvsEventDesc.h>
30 #include <aidl/android/hardware/automotive/evs/EvsEventType.h>
31 #include <aidl/android/hardware/automotive/evs/EvsResult.h>
32 #include <aidl/android/hardware/automotive/evs/IEvsCamera.h>
33 #include <aidl/android/hardware/automotive/evs/IEvsDisplay.h>
34 #include <aidl/android/hardware/automotive/evs/IEvsEnumerator.h>
35 #include <aidl/android/hardware/automotive/evs/IEvsEnumeratorStatusCallback.h>
36 #include <aidl/android/hardware/automotive/evs/IEvsUltrasonicsArray.h>
37 #include <aidl/android/hardware/automotive/evs/ParameterRange.h>
38 #include <aidl/android/hardware/automotive/evs/Stream.h>
39 #include <aidl/android/hardware/automotive/evs/UltrasonicsArrayDesc.h>
40 #include <aidl/android/hardware/common/NativeHandle.h>
41 #include <aidl/android/hardware/graphics/common/HardwareBufferDescription.h>
42 #include <aidl/android/hardware/graphics/common/PixelFormat.h>
43 #include <aidlcommonsupport/NativeHandle.h>
44 #include <android-base/logging.h>
45 #include <android/binder_ibinder.h>
46 #include <android/binder_manager.h>
47 #include <android/binder_process.h>
48 #include <android/binder_status.h>
49 #include <system/camera_metadata.h>
50 #include <ui/GraphicBuffer.h>
51 #include <ui/GraphicBufferAllocator.h>
52 #include <utils/Timers.h>
53 
54 #include <chrono>
55 #include <deque>
56 #include <thread>
57 #include <unordered_set>
58 
59 namespace {
60 
61 // These values are called out in the EVS design doc (as of Mar 8, 2017)
62 constexpr int kMaxStreamStartMilliseconds = 500;
63 constexpr int kMinimumFramesPerSecond = 10;
64 constexpr int kSecondsToMilliseconds = 1000;
65 constexpr int kMillisecondsToMicroseconds = 1000;
66 constexpr float kNanoToMilliseconds = 0.000001f;
67 constexpr float kNanoToSeconds = 0.000000001f;
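// Illustrative conversion (example numbers only): a 250'000'000 ns interval is
// 250'000'000 * kNanoToMilliseconds = 250 ms, or 0.25 s via kNanoToSeconds.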
68 
69 /*
70  * Please note that this is different from what is defined in
71  * libhardware/modules/camera/3_4/metadata/types.h; this has one additional
72  * field to store a framerate.
73  */
74 typedef struct {
75     int32_t id;
76     int32_t width;
77     int32_t height;
78     int32_t format;
79     int32_t direction;
80     int32_t framerate;
81 } RawStreamConfig;
82 constexpr size_t kStreamCfgSz = sizeof(RawStreamConfig) / sizeof(int32_t);
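
// A minimal compile-time sketch, assuming RawStreamConfig stays tightly packed
// (six int32_t fields, no padding) so that the raw metadata array can be walked
// in strides of kStreamCfgSz entries by getFirstStreamConfiguration() below.
static_assert(kStreamCfgSz == 6, "RawStreamConfig is expected to span six int32_t values");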
83 
84 using ::aidl::android::hardware::automotive::evs::BnEvsEnumeratorStatusCallback;
85 using ::aidl::android::hardware::automotive::evs::BufferDesc;
86 using ::aidl::android::hardware::automotive::evs::CameraDesc;
87 using ::aidl::android::hardware::automotive::evs::CameraParam;
88 using ::aidl::android::hardware::automotive::evs::DeviceStatus;
89 using ::aidl::android::hardware::automotive::evs::DisplayDesc;
90 using ::aidl::android::hardware::automotive::evs::DisplayState;
91 using ::aidl::android::hardware::automotive::evs::EvsEventDesc;
92 using ::aidl::android::hardware::automotive::evs::EvsEventType;
93 using ::aidl::android::hardware::automotive::evs::EvsResult;
94 using ::aidl::android::hardware::automotive::evs::IEvsCamera;
95 using ::aidl::android::hardware::automotive::evs::IEvsDisplay;
96 using ::aidl::android::hardware::automotive::evs::IEvsEnumerator;
97 using ::aidl::android::hardware::automotive::evs::IEvsEnumeratorStatusCallback;
98 using ::aidl::android::hardware::automotive::evs::IEvsUltrasonicsArray;
99 using ::aidl::android::hardware::automotive::evs::ParameterRange;
100 using ::aidl::android::hardware::automotive::evs::Stream;
101 using ::aidl::android::hardware::automotive::evs::UltrasonicsArrayDesc;
102 using ::aidl::android::hardware::graphics::common::BufferUsage;
103 using ::aidl::android::hardware::graphics::common::HardwareBufferDescription;
104 using ::aidl::android::hardware::graphics::common::PixelFormat;
105 using std::chrono_literals::operator""s;
106 
107 }  // namespace
108 
109 // The main test class for EVS
110 class EvsAidlTest : public ::testing::TestWithParam<std::string> {
111   public:
112     virtual void SetUp() override {
113         // Make sure we can connect to the enumerator
114         std::string service_name = GetParam();
115         AIBinder* binder = AServiceManager_waitForService(service_name.data());
116         ASSERT_NE(binder, nullptr);
117         mEnumerator = IEvsEnumerator::fromBinder(::ndk::SpAIBinder(binder));
118         LOG(INFO) << "Test target service: " << service_name;
119 
120         ASSERT_TRUE(mEnumerator->isHardware(&mIsHwModule).isOk());
121     }
122 
123     virtual void TearDown() override {
124         // Attempt to close any active camera
125         for (auto&& cam : mActiveCameras) {
126             if (cam != nullptr) {
127                 mEnumerator->closeCamera(cam);
128             }
129         }
130         mActiveCameras.clear();
131     }
132 
133   protected:
134     void loadCameraList() {
135         // SetUp() must run first!
136         ASSERT_NE(mEnumerator, nullptr);
137 
138         // Get the camera list
139         ASSERT_TRUE(mEnumerator->getCameraList(&mCameraInfo).isOk())
140                 << "Failed to get a list of available cameras";
141         LOG(INFO) << "We have " << mCameraInfo.size() << " cameras.";
142     }
143 
144     void loadUltrasonicsArrayList() {
145         // SetUp() must run first!
146         ASSERT_NE(mEnumerator, nullptr);
147 
148         // Get the ultrasonics array list
149         auto result = mEnumerator->getUltrasonicsArrayList(&mUltrasonicsArraysInfo);
150         ASSERT_TRUE(result.isOk() ||
151                 // TODO(b/149874793): Remove below conditions when
152                 // getUltrasonicsArrayList() is implemented.
153                 (!result.isOk() && result.getServiceSpecificError() ==
154                         static_cast<int32_t>(EvsResult::NOT_IMPLEMENTED)))
155                 << "Failed to get a list of available ultrasonics arrays";
156         LOG(INFO) << "We have " << mUltrasonicsArraysInfo.size() << " ultrasonics arrays.";
157     }
158 
159     bool isLogicalCamera(const camera_metadata_t* metadata) {
160         if (metadata == nullptr) {
161             // A logical camera device must have valid camera metadata.
162             return false;
163         }
164 
165         // Look for the LOGICAL_MULTI_CAMERA capability in the metadata.
166         camera_metadata_ro_entry_t entry;
167         int rc = find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
168                                                &entry);
169         if (rc != 0) {
170             // No capabilities are found.
171             return false;
172         }
173 
174         for (size_t i = 0; i < entry.count; ++i) {
175             uint8_t cap = entry.data.u8[i];
176             if (cap == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA) {
177                 return true;
178             }
179         }
180 
181         return false;
182     }
183 
184     std::unordered_set<std::string> getPhysicalCameraIds(const std::string& id, bool& flag) {
185         std::unordered_set<std::string> physicalCameras;
186         const auto it = std::find_if(mCameraInfo.begin(), mCameraInfo.end(),
187                                      [&id](const CameraDesc& desc) { return id == desc.id; });
188         if (it == mCameraInfo.end()) {
189             // An unknown camera was requested.  Return an empty list.
190             return physicalCameras;
191         }
192 
193         const camera_metadata_t* metadata = reinterpret_cast<camera_metadata_t*>(&it->metadata[0]);
194         flag = isLogicalCamera(metadata);
195         if (!flag) {
196             // EVS assumes that a device without valid metadata is a physical
197             // device.
198             LOG(INFO) << id << " is not a logical camera device.";
199             physicalCameras.insert(id);
200             return physicalCameras;
201         }
202 
203         // Look for physical camera identifiers
204         camera_metadata_ro_entry entry;
205         int rc = find_camera_metadata_ro_entry(metadata, ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS,
206                                                &entry);
207         if (rc != 0) {
208             LOG(ERROR) << "No physical camera ID is found for a logical camera device";
209         }
210 
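        // The physical IDs arrive as a single NUL-separated byte sequence; as an
        // illustrative (hypothetical) example, the bytes "dev0\0dev1\0" yield the
        // set {"dev0", "dev1"} after the loop below.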
211         const uint8_t* ids = entry.data.u8;
212         size_t start = 0;
213         for (size_t i = 0; i < entry.count; ++i) {
214             if (ids[i] == '\0') {
215                 if (start != i) {
216                     std::string id(reinterpret_cast<const char*>(ids + start));
217                     physicalCameras.insert(id);
218                 }
219                 start = i + 1;
220             }
221         }
222 
223         LOG(INFO) << id << " consists of " << physicalCameras.size() << " physical camera devices";
224         return physicalCameras;
225     }
226 
227     Stream getFirstStreamConfiguration(camera_metadata_t* metadata) {
228         Stream targetCfg = {};
229         camera_metadata_entry_t streamCfgs;
230         if (!find_camera_metadata_entry(metadata, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
231                                         &streamCfgs)) {
232             // Stream configurations are found in metadata
233             RawStreamConfig* ptr = reinterpret_cast<RawStreamConfig*>(streamCfgs.data.i32);
234             for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
235                 if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
236                     targetCfg.width = ptr->width;
237                     targetCfg.height = ptr->height;
238                     targetCfg.format = static_cast<PixelFormat>(ptr->format);
239                     break;
240                 }
241                 ++ptr;
242             }
243         }
244 
245         return targetCfg;
246     }
247 
248     class DeviceStatusCallback : public BnEvsEnumeratorStatusCallback {
249         ndk::ScopedAStatus deviceStatusChanged(const std::vector<DeviceStatus>&) override {
250             // This empty implementation always returns ok().
251             return ndk::ScopedAStatus::ok();
252         }
253     };
254 
255     // Every test needs access to the service
256     std::shared_ptr<IEvsEnumerator> mEnumerator;
257     // Empty unless/until loadCameraList() is called
258     std::vector<CameraDesc> mCameraInfo;
259     // Boolean that tells whether the module under test is a HW module
260     // implementation or not
261     bool mIsHwModule;
262     // A list of active camera handles that need to be cleaned up
263     std::deque<std::shared_ptr<IEvsCamera>> mActiveCameras;
264     // Empty unless/until loadUltrasonicsArrayList() is called
265     std::vector<UltrasonicsArrayDesc> mUltrasonicsArraysInfo;
266     // A list of active ultrasonics array handles that are to be cleaned up
267     std::deque<std::weak_ptr<IEvsUltrasonicsArray>> mActiveUltrasonicsArrays;
268 };
269 
270 // Test cases, their implementations, and corresponding requirements are
271 // documented at go/aae-evs-public-api-test.
272 
273 /*
274  * CameraOpenClean:
275  * Opens each camera reported by the enumerator and then explicitly closes it via a
276  * call to closeCamera.  Then repeats the test to ensure all cameras can be reopened.
277  */
278 TEST_P(EvsAidlTest, CameraOpenClean) {
279     LOG(INFO) << "Starting CameraOpenClean test";
280 
281     // Get the camera list
282     loadCameraList();
283 
284     // Open and close each camera twice
285     for (auto&& cam : mCameraInfo) {
286         bool isLogicalCam = false;
287         auto devices = getPhysicalCameraIds(cam.id, isLogicalCam);
288         if (mIsHwModule && isLogicalCam) {
289             LOG(INFO) << "Skip a logical device, " << cam.id << " for HW target.";
290             continue;
291         }
292 
293         // Read a target resolution from the metadata
294         Stream targetCfg = getFirstStreamConfiguration(
295                 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
296         ASSERT_GT(targetCfg.width, 0);
297         ASSERT_GT(targetCfg.height, 0);
298 
299         for (int pass = 0; pass < 2; pass++) {
300             std::shared_ptr<IEvsCamera> pCam;
301             ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
302             ASSERT_NE(pCam, nullptr);
303 
304             CameraDesc cameraInfo;
305             for (auto&& devName : devices) {
306                 ASSERT_TRUE(pCam->getPhysicalCameraInfo(devName, &cameraInfo).isOk());
307                 EXPECT_EQ(devName, cameraInfo.id);
308             }
309 
310             // Store a camera handle for a clean-up
311             mActiveCameras.push_back(pCam);
312 
313             // Verify that this camera self-identifies correctly
314             ASSERT_TRUE(pCam->getCameraInfo(&cameraInfo).isOk());
315             EXPECT_EQ(cam.id, cameraInfo.id);
316 
317             // Verify methods for extended info
318             const auto id = 0xFFFFFFFF;  // meaningless id
319             std::vector<uint8_t> values;
320             bool isSupported = false;
321             auto status = pCam->setExtendedInfo(id, values);
322             if (isLogicalCam) {
323                 EXPECT_TRUE(!status.isOk() && status.getServiceSpecificError() ==
324                                                       static_cast<int>(EvsResult::NOT_SUPPORTED));
325             } else {
326                 if (status.isOk()) {
327                     // 0xFFFFFFFF is valid for EVS HAL implementation under
328                     // test.
329                     isSupported = true;
330                 } else {
331                     EXPECT_TRUE(status.getServiceSpecificError() ==
332                                 static_cast<int>(EvsResult::INVALID_ARG));
333                 }
334             }
335 
336             status = pCam->getExtendedInfo(id, &values);
337             if (isLogicalCam) {
338                 EXPECT_TRUE(!status.isOk() && status.getServiceSpecificError() ==
339                                                       static_cast<int>(EvsResult::NOT_SUPPORTED));
340             } else {
341                 if (isSupported) {
342                     EXPECT_TRUE(status.isOk());
343                 } else {
344                     EXPECT_TRUE(!status.isOk() && status.getServiceSpecificError() ==
345                                                     static_cast<int>(EvsResult::INVALID_ARG));
346                 }
347             }
348 
349             // Explicitly close the camera so resources are released right away
350             ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
351             mActiveCameras.clear();
352         }
353     }
354 }
355 
356 /*
357  * CameraOpenAggressive:
358  * Opens each camera reported by the enumerator twice in a row without an intervening closeCamera
359  * call.  This ensures that the intended "aggressive open" behavior works.  This is necessary for
360  * the system to be tolerant of shutdown/restart race conditions.
361  */
362 TEST_P(EvsAidlTest, CameraOpenAggressive) {
363     LOG(INFO) << "Starting CameraOpenAggressive test";
364 
365     // Get the camera list
366     loadCameraList();
367 
368     // Open each camera twice in a row without closing it in between
369     for (auto&& cam : mCameraInfo) {
370         bool isLogicalCam = false;
371         getPhysicalCameraIds(cam.id, isLogicalCam);
372         if (mIsHwModule && isLogicalCam) {
373             LOG(INFO) << "Skip a logical device, " << cam.id << " for HW target.";
374             continue;
375         }
376 
377         // Read a target resolution from the metadata
378         Stream targetCfg = getFirstStreamConfiguration(
379                 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
380         ASSERT_GT(targetCfg.width, 0);
381         ASSERT_GT(targetCfg.height, 0);
382 
383         mActiveCameras.clear();
384         std::shared_ptr<IEvsCamera> pCam;
385         ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
386         EXPECT_NE(pCam, nullptr);
387 
388         // Store a camera handle for a clean-up
389         mActiveCameras.push_back(pCam);
390 
391         // Verify that this camera self-identifies correctly
392         CameraDesc cameraInfo;
393         ASSERT_TRUE(pCam->getCameraInfo(&cameraInfo).isOk());
394         EXPECT_EQ(cam.id, cameraInfo.id);
395 
396         std::shared_ptr<IEvsCamera> pCam2;
397         ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam2).isOk());
398         EXPECT_NE(pCam2, nullptr);
399         EXPECT_NE(pCam, pCam2);
400 
401         // Store a camera handle for a clean-up
402         mActiveCameras.push_back(pCam2);
403 
404         auto status = pCam->setMaxFramesInFlight(2);
405         if (mIsHwModule) {
406             // Verify that the old camera rejects calls via HW module.
407             EXPECT_TRUE(!status.isOk() && status.getServiceSpecificError() ==
408                                                   static_cast<int>(EvsResult::OWNERSHIP_LOST));
409         } else {
410             // default implementation supports multiple clients.
411             EXPECT_TRUE(status.isOk());
412         }
413 
414         // Close the superseded camera
415         ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
416         mActiveCameras.pop_front();
417 
418         // Verify that the second camera instance self-identifies correctly
419         ASSERT_TRUE(pCam2->getCameraInfo(&cameraInfo).isOk());
420         EXPECT_EQ(cam.id, cameraInfo.id);
421 
422         // Close the second camera instance
423         ASSERT_TRUE(mEnumerator->closeCamera(pCam2).isOk());
424         mActiveCameras.pop_front();
425     }
426 
427     // Sleep here to ensure the destructor cleanup has time to run so we don't break follow-on tests
428     sleep(1);  // I hate that this is an arbitrary time to wait.  :(  b/36122635
429 }
430 
431 /*
432  * CameraStreamPerformance:
433  * Measure and qualify the stream start up time and streaming frame rate of each reported camera
434  */
435 TEST_P(EvsAidlTest, CameraStreamPerformance) {
436     LOG(INFO) << "Starting CameraStreamPerformance test";
437 
438     // Get the camera list
439     loadCameraList();
440 
441     // Test each reported camera
442     for (auto&& cam : mCameraInfo) {
443         bool isLogicalCam = false;
444         auto devices = getPhysicalCameraIds(cam.id, isLogicalCam);
445         if (mIsHwModule && isLogicalCam) {
446             LOG(INFO) << "Skip a logical device " << cam.id;
447             continue;
448         }
449 
450         // Read a target resolution from the metadata
451         Stream targetCfg = getFirstStreamConfiguration(
452                 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
453         ASSERT_GT(targetCfg.width, 0);
454         ASSERT_GT(targetCfg.height, 0);
455 
456         std::shared_ptr<IEvsCamera> pCam;
457         ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
458         EXPECT_NE(pCam, nullptr);
459 
460         // Store a camera handle for a clean-up
461         mActiveCameras.push_back(pCam);
462 
463         // Set up a frame receiver object which will fire up its own thread
464         std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
465                 pCam, cam, nullptr, FrameHandler::eAutoReturn);
466         EXPECT_NE(frameHandler, nullptr);
467 
468         // Start the camera's video stream
469         nsecs_t start = systemTime(SYSTEM_TIME_MONOTONIC);
470         ASSERT_TRUE(frameHandler->startStream());
471 
472         // Ensure the first frame arrived within the expected time
473         frameHandler->waitForFrameCount(1);
474         nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
475         nsecs_t timeToFirstFrame = systemTime(SYSTEM_TIME_MONOTONIC) - start;
476 
477         // Extra delays are expected when we attempt to start a video stream on
478         // the logical camera device.  The delay is expected to be at most the
479         // number of physical camera devices multiplied by
480         // kMaxStreamStartMilliseconds.
481         EXPECT_LE(nanoseconds_to_milliseconds(timeToFirstFrame),
482                   kMaxStreamStartMilliseconds * devices.size());
483         printf("%s: Measured time to first frame %0.2f ms\n", cam.id.data(),
484                timeToFirstFrame * kNanoToMilliseconds);
485         LOG(INFO) << cam.id << ": Measured time to first frame " << std::scientific
486                   << timeToFirstFrame * kNanoToMilliseconds << " ms.";
487 
488         // Check aspect ratio
489         unsigned width = 0, height = 0;
490         frameHandler->getFrameDimension(&width, &height);
491         EXPECT_GE(width, height);
492 
493         // Wait a bit, then ensure we get at least the required minimum number of frames
494         sleep(5);
495         nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
496 
497         // Even when the camera pointer goes out of scope, the FrameHandler object will
498         // keep the stream alive unless we tell it to shutdown.
499         // Also note that the FrameHandler and the Camera have a mutual circular reference, so
500         // we have to break that cycle in order for either of them to get cleaned up.
501         frameHandler->shutdown();
502 
503         unsigned framesReceived = 0;
504         frameHandler->getFramesCounters(&framesReceived, nullptr);
505         framesReceived = framesReceived - 1;  // Back out the first frame we already waited for
506         nsecs_t runTime = end - firstFrame;
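        // Illustrative numbers only (not measured values): 51 remaining frames over
        // a runTime of 5.1e9 ns give 51 / (5.1e9 * kNanoToSeconds) = 10 fps, which
        // just meets kMinimumFramesPerSecond.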
507         float framesPerSecond = framesReceived / (runTime * kNanoToSeconds);
508         printf("Measured camera rate %3.2f fps\n", framesPerSecond);
509         LOG(INFO) << "Measured camera rate " << std::scientific << framesPerSecond << " fps.";
510         EXPECT_GE(framesPerSecond, kMinimumFramesPerSecond);
511 
512         // Explicitly release the camera
513         ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
514         mActiveCameras.clear();
515     }
516 }
517 
518 /*
519  * CameraStreamBuffering:
520  * Ensure the camera implementation behaves properly when the client holds onto buffers for more
521  * than one frame time.  The camera must cleanly skip frames until the client is ready again.
522  */
523 TEST_P(EvsAidlTest, CameraStreamBuffering) {
524     LOG(INFO) << "Starting CameraStreamBuffering test";
525 
526     // Arbitrary constant (should be > 1 and not too big)
527     static const unsigned int kBuffersToHold = 6;
528 
529     // Get the camera list
530     loadCameraList();
531 
532     // Test each reported camera
533     for (auto&& cam : mCameraInfo) {
534         bool isLogicalCam = false;
535         getPhysicalCameraIds(cam.id, isLogicalCam);
536         if (mIsHwModule && isLogicalCam) {
537             LOG(INFO) << "Skip a logical device " << cam.id << " for HW target.";
538             continue;
539         }
540 
541         // Read a target resolution from the metadata
542         Stream targetCfg = getFirstStreamConfiguration(
543                 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
544         ASSERT_GT(targetCfg.width, 0);
545         ASSERT_GT(targetCfg.height, 0);
546 
547         std::shared_ptr<IEvsCamera> pCam;
548         ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
549         EXPECT_NE(pCam, nullptr);
550 
551         // Store a camera handle for a clean-up
552         mActiveCameras.push_back(pCam);
553 
554         // Ask for a very large number of buffers in flight to ensure it errors correctly
555         auto badResult = pCam->setMaxFramesInFlight(std::numeric_limits<int32_t>::max());
556         EXPECT_TRUE(!badResult.isOk() && badResult.getServiceSpecificError() ==
557                                                  static_cast<int>(EvsResult::BUFFER_NOT_AVAILABLE));
558 
559         // Now ask for exactly kBuffersToHold buffers in flight as we'll test behavior in that case
560         ASSERT_TRUE(pCam->setMaxFramesInFlight(kBuffersToHold).isOk());
561 
562         // Set up a frame receiver object which will fire up its own thread.
563         std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
564                 pCam, cam, nullptr, FrameHandler::eNoAutoReturn);
565         EXPECT_NE(frameHandler, nullptr);
566 
567         // Start the camera's video stream
568         ASSERT_TRUE(frameHandler->startStream());
569 
570         // Check that the video stream stalls once we've gotten exactly the number of buffers
571         // we requested since we told the frameHandler not to return them.
572         sleep(1);  // 1 second should be enough for at least kBuffersToHold frames to be delivered worst case
573         unsigned framesReceived = 0;
574         frameHandler->getFramesCounters(&framesReceived, nullptr);
575         ASSERT_EQ(kBuffersToHold, framesReceived) << "Stream didn't stall at expected buffer limit";
576 
577         // Give back one buffer
578         ASSERT_TRUE(frameHandler->returnHeldBuffer());
579 
580         // Once we return a buffer, it shouldn't take more than 1/10 second to get a new one
581         // filled since we require 10fps minimum -- but give a 10% allowance just in case.
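        // In numbers: 1/10 second = 100 ms, plus the 10% allowance = 110 ms, i.e.
        // 110 * kMillisecondsToMicroseconds = 110,000 microseconds for the usleep() below.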
582         usleep(110 * kMillisecondsToMicroseconds);
583         frameHandler->getFramesCounters(&framesReceived, nullptr);
584         EXPECT_EQ(kBuffersToHold + 1, framesReceived) << "Stream should've resumed";
585 
586         // Even when the camera pointer goes out of scope, the FrameHandler object will
587         // keep the stream alive unless we tell it to shutdown.
588         // Also note that the FrameHandler and the Camera have a mutual circular reference, so
589         // we have to break that cycle in order for either of them to get cleaned up.
590         frameHandler->shutdown();
591 
592         // Explicitly release the camera
593         ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
594         mActiveCameras.clear();
595     }
596 }
597 
598 /*
599  * CameraToDisplayRoundTrip:
600  * End to end test of data flowing from the camera to the display.  Each delivered frame of camera
601  * imagery is simply copied to the display buffer and presented on screen.  This is the one test
602  * which a human could observe to see the operation of the system on the physical display.
603  */
604 TEST_P(EvsAidlTest, CameraToDisplayRoundTrip) {
605     LOG(INFO) << "Starting CameraToDisplayRoundTrip test";
606 
607     // Get the camera list
608     loadCameraList();
609 
610     // Request available display IDs
611     uint8_t targetDisplayId = 0;
612     std::vector<uint8_t> displayIds;
613     ASSERT_TRUE(mEnumerator->getDisplayIdList(&displayIds).isOk());
614     EXPECT_GT(displayIds.size(), 0);
615     targetDisplayId = displayIds[0];
616 
617     // Test each reported camera
618     for (auto&& cam : mCameraInfo) {
619         // Request exclusive access to the first EVS display
620         std::shared_ptr<IEvsDisplay> pDisplay;
621         ASSERT_TRUE(mEnumerator->openDisplay(targetDisplayId, &pDisplay).isOk());
622         EXPECT_NE(pDisplay, nullptr);
623         LOG(INFO) << "Display " << static_cast<int>(targetDisplayId) << " is in use.";
624 
625         // Get the display descriptor
626         DisplayDesc displayDesc;
627         ASSERT_TRUE(pDisplay->getDisplayInfo(&displayDesc).isOk());
628         LOG(INFO) << "    Resolution: " << displayDesc.width << "x" << displayDesc.height;
629         ASSERT_GT(displayDesc.width, 0);
630         ASSERT_GT(displayDesc.height, 0);
631 
632         bool isLogicalCam = false;
633         getPhysicalCameraIds(cam.id, isLogicalCam);
634         if (mIsHwModule && isLogicalCam) {
635             LOG(INFO) << "Skip a logical device " << cam.id << " for HW target.";
636             ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
637             continue;
638         }
639 
640         // Read a target resolution from the metadata
641         Stream targetCfg = getFirstStreamConfiguration(
642                 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
643         ASSERT_GT(targetCfg.width, 0);
644         ASSERT_GT(targetCfg.height, 0);
645 
646         std::shared_ptr<IEvsCamera> pCam;
647         ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
648         EXPECT_NE(pCam, nullptr);
649 
650         // Store a camera handle for a clean-up
651         mActiveCameras.push_back(pCam);
652 
653         // Set up a frame receiver object which will fire up its own thread.
654         std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
655                 pCam, cam, pDisplay, FrameHandler::eAutoReturn);
656         EXPECT_NE(frameHandler, nullptr);
657 
658         // Activate the display
659         ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME).isOk());
660 
661         // Start the camera's video stream
662         ASSERT_TRUE(frameHandler->startStream());
663 
664         // Wait a while to let the data flow
665         static const int kSecondsToWait = 5;
666         const int streamTimeMs =
667                 kSecondsToWait * kSecondsToMilliseconds - kMaxStreamStartMilliseconds;
668         const unsigned minimumFramesExpected =
669                 streamTimeMs * kMinimumFramesPerSecond / kSecondsToMilliseconds;
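        // With the constants above: streamTimeMs = 5 * 1000 - 500 = 4500 ms, so
        // minimumFramesExpected = 4500 * 10 / 1000 = 45 frames.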
670         sleep(kSecondsToWait);
671         unsigned framesReceived = 0;
672         unsigned framesDisplayed = 0;
673         frameHandler->getFramesCounters(&framesReceived, &framesDisplayed);
674         EXPECT_EQ(framesReceived, framesDisplayed);
675         EXPECT_GE(framesDisplayed, minimumFramesExpected);
676 
677         // Turn off the display (yes, before the stream stops -- it should be handled)
678         ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::NOT_VISIBLE).isOk());
679 
680         // Shut down the streamer
681         frameHandler->shutdown();
682 
683         // Explicitly release the camera
684         ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
685         mActiveCameras.clear();
686 
687         // Explicitly release the display
688         ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
689     }
690 }
691 
692 /*
693  * MultiCameraStream:
694  * Verify that each client can start and stop video streams on the same
695  * underlying camera.
696  */
697 TEST_P(EvsAidlTest, MultiCameraStream) {
698     LOG(INFO) << "Starting MultiCameraStream test";
699 
700     if (mIsHwModule) {
701         // This test is not for HW module implementation.
702         return;
703     }
704 
705     // Get the camera list
706     loadCameraList();
707 
708     // Test each reported camera
709     for (auto&& cam : mCameraInfo) {
710         // Read a target resolution from the metadata
711         Stream targetCfg = getFirstStreamConfiguration(
712                 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
713         ASSERT_GT(targetCfg.width, 0);
714         ASSERT_GT(targetCfg.height, 0);
715 
716         // Create two camera clients.
717         std::shared_ptr<IEvsCamera> pCam0;
718         ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam0).isOk());
719         EXPECT_NE(pCam0, nullptr);
720 
721         // Store a camera handle for a clean-up
722         mActiveCameras.push_back(pCam0);
723 
724         std::shared_ptr<IEvsCamera> pCam1;
725         ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());
726         EXPECT_NE(pCam1, nullptr);
727 
728         // Store a camera handle for a clean-up
729         mActiveCameras.push_back(pCam1);
730 
731         // Set up per-client frame receiver objects, each of which will fire up its own thread
732         std::shared_ptr<FrameHandler> frameHandler0 = ndk::SharedRefBase::make<FrameHandler>(
733                 pCam0, cam, nullptr, FrameHandler::eAutoReturn);
734         std::shared_ptr<FrameHandler> frameHandler1 = ndk::SharedRefBase::make<FrameHandler>(
735                 pCam1, cam, nullptr, FrameHandler::eAutoReturn);
736         EXPECT_NE(frameHandler0, nullptr);
737         EXPECT_NE(frameHandler1, nullptr);
738 
739         // Start the camera's video stream via client 0
740         ASSERT_TRUE(frameHandler0->startStream());
741         ASSERT_TRUE(frameHandler1->startStream());
742 
743         // Ensure the stream starts
744         frameHandler0->waitForFrameCount(1);
745         frameHandler1->waitForFrameCount(1);
746 
747         nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
748 
749         // Wait a bit, then ensure both clients get at least the required minimum number of frames
750         sleep(5);
751         nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
752         unsigned framesReceived0 = 0, framesReceived1 = 0;
753         frameHandler0->getFramesCounters(&framesReceived0, nullptr);
754         frameHandler1->getFramesCounters(&framesReceived1, nullptr);
755         framesReceived0 = framesReceived0 - 1;  // Back out the first frame we already waited for
756         framesReceived1 = framesReceived1 - 1;  // Back out the first frame we already waited for
757         nsecs_t runTime = end - firstFrame;
758         float framesPerSecond0 = framesReceived0 / (runTime * kNanoToSeconds);
759         float framesPerSecond1 = framesReceived1 / (runTime * kNanoToSeconds);
760         LOG(INFO) << "Measured camera rate " << std::scientific << framesPerSecond0 << " fps and "
761                   << framesPerSecond1 << " fps";
762         EXPECT_GE(framesPerSecond0, kMinimumFramesPerSecond);
763         EXPECT_GE(framesPerSecond1, kMinimumFramesPerSecond);
764 
765         // Shutdown one client
766         frameHandler0->shutdown();
767 
768         // Read frame counters again
769         frameHandler0->getFramesCounters(&framesReceived0, nullptr);
770         frameHandler1->getFramesCounters(&framesReceived1, nullptr);
771 
772         // Wait a bit again
773         sleep(5);
774         unsigned framesReceivedAfterStop0 = 0, framesReceivedAfterStop1 = 0;
775         frameHandler0->getFramesCounters(&framesReceivedAfterStop0, nullptr);
776         frameHandler1->getFramesCounters(&framesReceivedAfterStop1, nullptr);
777         EXPECT_EQ(framesReceived0, framesReceivedAfterStop0);
778         EXPECT_LT(framesReceived1, framesReceivedAfterStop1);
779 
780         // Shutdown another
781         frameHandler1->shutdown();
782 
783         // Explicitly release the camera
784         ASSERT_TRUE(mEnumerator->closeCamera(pCam0).isOk());
785         ASSERT_TRUE(mEnumerator->closeCamera(pCam1).isOk());
786         mActiveCameras.clear();
787 
788         // TODO(b/145459970, b/145457727): the sleep() below is added to ensure the
789         // destruction of active camera objects; this may be related to these two
790         // issues.
791         sleep(1);
792     }
793 }
794 
795 /*
796  * CameraParameter:
797  * Verify that a client can adjust a camera parameter.
798  */
799 TEST_P(EvsAidlTest, CameraParameter) {
800     LOG(INFO) << "Starting CameraParameter test";
801 
802     // Get the camera list
803     loadCameraList();
804 
805     // Test each reported camera
806     for (auto&& cam : mCameraInfo) {
807         bool isLogicalCam = false;
808         getPhysicalCameraIds(cam.id, isLogicalCam);
809         if (isLogicalCam) {
810             // TODO(b/145465724): Support camera parameter programming on
811             // logical devices.
812             LOG(INFO) << "Skip a logical device " << cam.id;
813             continue;
814         }
815 
816         // Read a target resolution from the metadata
817         Stream targetCfg = getFirstStreamConfiguration(
818                 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
819         ASSERT_GT(targetCfg.width, 0);
820         ASSERT_GT(targetCfg.height, 0);
821 
822         // Create a camera client
823         std::shared_ptr<IEvsCamera> pCam;
824         ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
825         EXPECT_NE(pCam, nullptr);
826 
827         // Store a camera
828         mActiveCameras.push_back(pCam);
829 
830         // Get the parameter list
831         std::vector<CameraParam> cmds;
832         ASSERT_TRUE(pCam->getParameterList(&cmds).isOk());
833         if (cmds.size() < 1) {
834             continue;
835         }
836 
837         // Set up a frame receiver object which will fire up its own thread
838         std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
839                 pCam, cam, nullptr, FrameHandler::eAutoReturn);
840         EXPECT_NE(frameHandler, nullptr);
841 
842         // Start the camera's video stream
843         ASSERT_TRUE(frameHandler->startStream());
844 
845         // Ensure the stream starts
846         frameHandler->waitForFrameCount(1);
847 
848         // Set the current client as the primary client
849         ASSERT_TRUE(pCam->setPrimaryClient().isOk());
850         for (auto& cmd : cmds) {
851             // Get a valid parameter value range
852             ParameterRange range;
853             ASSERT_TRUE(pCam->getIntParameterRange(cmd, &range).isOk());
854 
855             std::vector<int32_t> values;
856             if (cmd == CameraParam::ABSOLUTE_FOCUS) {
857                 // Try to turn off auto-focus
858                 ASSERT_TRUE(pCam->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
859                 for (auto&& v : values) {
860                     EXPECT_EQ(v, 0);
861                 }
862             }
863 
864             // Try to program a parameter with a random value in [range.min, range.max]
865             int32_t val0 = range.min + (std::rand() % (range.max - range.min));
866 
867             // Rounding down
868             val0 = val0 - (val0 % range.step);
869             values.clear();
870             ASSERT_TRUE(pCam->setIntParameter(cmd, val0, &values).isOk());
871 
872             values.clear();
873             ASSERT_TRUE(pCam->getIntParameter(cmd, &values).isOk());
874             for (auto&& v : values) {
875                 EXPECT_EQ(val0, v) << "Values are not matched.";
876             }
877         }
878         ASSERT_TRUE(pCam->unsetPrimaryClient().isOk());
879 
880         // Shutdown
881         frameHandler->shutdown();
882 
883         // Explicitly release the camera
884         ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
885         mActiveCameras.clear();
886     }
887 }
888 
889 /*
890  * CameraPrimaryClientRelease
891  * Verify that non-primary client gets notified when the primary client either
892  * terminates or releases a role.
893  */
894 TEST_P(EvsAidlTest, CameraPrimaryClientRelease) {
895     LOG(INFO) << "Starting CameraPrimaryClientRelease test";
896 
897     if (mIsHwModule) {
898         // This test is not for HW module implementation.
899         return;
900     }
901 
902     // Get the camera list
903     loadCameraList();
904 
905     // Test each reported camera
906     for (auto&& cam : mCameraInfo) {
907         bool isLogicalCam = false;
908         getPhysicalCameraIds(cam.id, isLogicalCam);
909         if (isLogicalCam) {
910             // TODO(b/145465724): Support camera parameter programming on
911             // logical devices.
912             LOG(INFO) << "Skip a logical device " << cam.id;
913             continue;
914         }
915 
916         // Read a target resolution from the metadata
917         Stream targetCfg = getFirstStreamConfiguration(
918                 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
919         ASSERT_GT(targetCfg.width, 0);
920         ASSERT_GT(targetCfg.height, 0);
921 
922         // Create two camera clients.
923         std::shared_ptr<IEvsCamera> pPrimaryCam;
924         ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pPrimaryCam).isOk());
925         EXPECT_NE(pPrimaryCam, nullptr);
926 
927         // Store a camera handle for a clean-up
928         mActiveCameras.push_back(pPrimaryCam);
929 
930         std::shared_ptr<IEvsCamera> pSecondaryCam;
931         ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pSecondaryCam).isOk());
932         EXPECT_NE(pSecondaryCam, nullptr);
933 
934         // Store a camera handle for a clean-up
935         mActiveCameras.push_back(pSecondaryCam);
936 
937         // Set up per-client frame receiver objects, each of which will fire up its own thread
938         std::shared_ptr<FrameHandler> frameHandlerPrimary = ndk::SharedRefBase::make<FrameHandler>(
939                 pPrimaryCam, cam, nullptr, FrameHandler::eAutoReturn);
940         std::shared_ptr<FrameHandler> frameHandlerSecondary =
941                 ndk::SharedRefBase::make<FrameHandler>(pSecondaryCam, cam, nullptr,
942                                                        FrameHandler::eAutoReturn);
943         EXPECT_NE(frameHandlerPrimary, nullptr);
944         EXPECT_NE(frameHandlerSecondary, nullptr);
945 
946         // Set one client as the primary client
947         ASSERT_TRUE(pPrimaryCam->setPrimaryClient().isOk());
948 
949         // Try to set another client as the primary client.
950         ASSERT_FALSE(pSecondaryCam->setPrimaryClient().isOk());
951 
952         // Start the camera's video stream via the primary client.
953         ASSERT_TRUE(frameHandlerPrimary->startStream());
954 
955         // Ensure the stream starts
956         frameHandlerPrimary->waitForFrameCount(1);
957 
958         // Start the camera's video stream via another client
959         ASSERT_TRUE(frameHandlerSecondary->startStream());
960 
961         // Ensure the stream starts
962         frameHandlerSecondary->waitForFrameCount(1);
963 
964         // Non-primary client expects to receive a primary client role released
965         // notification.
966         EvsEventDesc aTargetEvent = {};
967         EvsEventDesc aNotification = {};
968 
969         bool listening = false;
970         std::mutex eventLock;
971         std::condition_variable eventCond;
972         std::thread listener =
973                 std::thread([&aNotification, &frameHandlerSecondary, &listening, &eventCond]() {
974                     // Notify that a listening thread is running.
975                     listening = true;
976                     eventCond.notify_all();
977 
978                     EvsEventDesc aTargetEvent;
979                     aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
980                     if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification, true)) {
981                         LOG(WARNING) << "A timer is expired before a target event is fired.";
982                     }
983                 });
984 
985         // Wait until a listening thread starts.
986         std::unique_lock<std::mutex> lock(eventLock);
987         auto timer = std::chrono::system_clock::now();
988         while (!listening) {
989             timer += 1s;
990             eventCond.wait_until(lock, timer);
991         }
992         lock.unlock();
993 
994         // Release a primary client role.
995         ASSERT_TRUE(pPrimaryCam->unsetPrimaryClient().isOk());
996 
997         // Join a listening thread.
998         if (listener.joinable()) {
999             listener.join();
1000         }
1001 
1002         // Verify change notifications.
1003         ASSERT_EQ(EvsEventType::MASTER_RELEASED, static_cast<EvsEventType>(aNotification.aType));
1004 
1005         // Non-primary becomes a primary client.
1006         ASSERT_TRUE(pSecondaryCam->setPrimaryClient().isOk());
1007 
1008         // Previous primary client fails to become a primary client.
1009         ASSERT_FALSE(pPrimaryCam->setPrimaryClient().isOk());
1010 
1011         listening = false;
1012         listener = std::thread([&aNotification, &frameHandlerPrimary, &listening, &eventCond]() {
1013             // Notify that a listening thread is running.
1014             listening = true;
1015             eventCond.notify_all();
1016 
1017             EvsEventDesc aTargetEvent;
1018             aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
1019             if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification, true)) {
1020                 LOG(WARNING) << "A timer is expired before a target event is fired.";
1021             }
1022         });
1023 
1024         // Wait until a listening thread starts.
1025         timer = std::chrono::system_clock::now();
1026         lock.lock();
1027         while (!listening) {
1028             eventCond.wait_until(lock, timer + 1s);
1029         }
1030         lock.unlock();
1031 
1032         // Close the current primary client.
1033         frameHandlerSecondary->shutdown();
1034 
1035         // Join a listening thread.
1036         if (listener.joinable()) {
1037             listener.join();
1038         }
1039 
1040         // Verify change notifications.
1041         ASSERT_EQ(EvsEventType::MASTER_RELEASED, static_cast<EvsEventType>(aNotification.aType));
1042 
1043         // Close the remaining stream.
1044         frameHandlerPrimary->shutdown();
1045 
1046         // Explicitly release the camera
1047         ASSERT_TRUE(mEnumerator->closeCamera(pPrimaryCam).isOk());
1048         ASSERT_TRUE(mEnumerator->closeCamera(pSecondaryCam).isOk());
1049         mActiveCameras.clear();
1050     }
1051 }
1052 
1053 /*
1054  * MultiCameraParameter:
1055  * Verify that primary and non-primary clients behave as expected when they try to adjust
1056  * camera parameters.
1057  */
1058 TEST_P(EvsAidlTest, MultiCameraParameter) {
1059     LOG(INFO) << "Starting MultiCameraParameter test";
1060 
1061     if (mIsHwModule) {
1062         // This test is not for HW module implementation.
1063         return;
1064     }
1065 
1066     // Get the camera list
1067     loadCameraList();
1068 
1069     // Test each reported camera
1070     for (auto&& cam : mCameraInfo) {
1071         bool isLogicalCam = false;
1072         getPhysicalCameraIds(cam.id, isLogicalCam);
1073         if (isLogicalCam) {
1074             // TODO(b/145465724): Support camera parameter programming on
1075             // logical devices.
1076             LOG(INFO) << "Skip a logical device " << cam.id;
1077             continue;
1078         }
1079 
1080         // Read a target resolution from the metadata
1081         Stream targetCfg = getFirstStreamConfiguration(
1082                 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
1083         ASSERT_GT(targetCfg.width, 0);
1084         ASSERT_GT(targetCfg.height, 0);
1085 
1086         // Create two camera clients.
1087         std::shared_ptr<IEvsCamera> pPrimaryCam;
1088         ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pPrimaryCam).isOk());
1089         EXPECT_NE(pPrimaryCam, nullptr);
1090 
1091         // Store a camera handle for a clean-up
1092         mActiveCameras.push_back(pPrimaryCam);
1093 
1094         std::shared_ptr<IEvsCamera> pSecondaryCam;
1095         ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pSecondaryCam).isOk());
1096         EXPECT_NE(pSecondaryCam, nullptr);
1097 
1098         // Store a camera handle for a clean-up
1099         mActiveCameras.push_back(pSecondaryCam);
1100 
1101         // Get the parameter list
1102         std::vector<CameraParam> camPrimaryCmds, camSecondaryCmds;
1103         ASSERT_TRUE(pPrimaryCam->getParameterList(&camPrimaryCmds).isOk());
1104         ASSERT_TRUE(pSecondaryCam->getParameterList(&camSecondaryCmds).isOk());
1105         if (camPrimaryCmds.size() < 1 || camSecondaryCmds.size() < 1) {
1106             // Skip a camera device if it does not support any parameter.
1107             continue;
1108         }
1109 
1110         // Set up per-client frame receiver objects, each of which will fire up its own thread
1111         std::shared_ptr<FrameHandler> frameHandlerPrimary = ndk::SharedRefBase::make<FrameHandler>(
1112                 pPrimaryCam, cam, nullptr, FrameHandler::eAutoReturn);
1113         std::shared_ptr<FrameHandler> frameHandlerSecondary =
1114                 ndk::SharedRefBase::make<FrameHandler>(pSecondaryCam, cam, nullptr,
1115                                                        FrameHandler::eAutoReturn);
1116         EXPECT_NE(frameHandlerPrimary, nullptr);
1117         EXPECT_NE(frameHandlerSecondary, nullptr);
1118 
1119         // Set one client as the primary client.
1120         ASSERT_TRUE(pPrimaryCam->setPrimaryClient().isOk());
1121 
1122         // Try to set another client as the primary client.
1123         ASSERT_FALSE(pSecondaryCam->setPrimaryClient().isOk());
1124 
1125         // Start the camera's video stream via the primary client.
1126         ASSERT_TRUE(frameHandlerPrimary->startStream());
1127 
1128         // Ensure the stream starts
1129         frameHandlerPrimary->waitForFrameCount(1);
1130 
1131         // Start the camera's video stream via another client
1132         ASSERT_TRUE(frameHandlerSecondary->startStream());
1133 
1134         // Ensure the stream starts
1135         frameHandlerSecondary->waitForFrameCount(1);
1136 
1137         int32_t val0 = 0;
1138         std::vector<int32_t> values;
1139         EvsEventDesc aNotification0 = {};
1140         EvsEventDesc aNotification1 = {};
1141         for (auto& cmd : camPrimaryCmds) {
1142             // Get a valid parameter value range
1143             ParameterRange range;
1144             ASSERT_TRUE(pPrimaryCam->getIntParameterRange(cmd, &range).isOk());
1145             if (cmd == CameraParam::ABSOLUTE_FOCUS) {
1146                 // Try to turn off auto-focus
1147                 values.clear();
1148                 ASSERT_TRUE(
1149                         pPrimaryCam->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
1150                 for (auto&& v : values) {
1151                     EXPECT_EQ(v, 0);
1152                 }
1153             }
1154 
1155             // Calculate a parameter value to program.
1156             val0 = range.min + (std::rand() % (range.max - range.min));
1157             val0 = val0 - (val0 % range.step);
1158 
1159             // Prepare and start event listeners.
1160             bool listening0 = false;
1161             bool listening1 = false;
1162             std::condition_variable eventCond;
1163             std::thread listener0 = std::thread([cmd, val0, &aNotification0, &frameHandlerPrimary,
1164                                                  &listening0, &listening1, &eventCond]() {
1165                 listening0 = true;
1166                 if (listening1) {
1167                     eventCond.notify_all();
1168                 }
1169 
1170                 EvsEventDesc aTargetEvent;
1171                 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1172                 aTargetEvent.payload.push_back(static_cast<int32_t>(cmd));
1173                 aTargetEvent.payload.push_back(val0);
1174                 if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification0)) {
1175                     LOG(WARNING) << "A timer is expired before a target event is fired.";
1176                 }
1177             });
1178             std::thread listener1 = std::thread([cmd, val0, &aNotification1, &frameHandlerSecondary,
1179                                                  &listening0, &listening1, &eventCond]() {
1180                 listening1 = true;
1181                 if (listening0) {
1182                     eventCond.notify_all();
1183                 }
1184 
1185                 EvsEventDesc aTargetEvent;
1186                 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1187                 aTargetEvent.payload.push_back(static_cast<int32_t>(cmd));
1188                 aTargetEvent.payload.push_back(val0);
1189                 if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification1)) {
1190                     LOG(WARNING) << "A timer is expired before a target event is fired.";
1191                 }
1192             });
1193 
1194             // Wait until a listening thread starts.
1195             std::mutex eventLock;
1196             std::unique_lock<std::mutex> lock(eventLock);
1197             auto timer = std::chrono::system_clock::now();
1198             while (!listening0 || !listening1) {
1199                 eventCond.wait_until(lock, timer + 1s);
1200             }
1201             lock.unlock();
1202 
1203             // Try to program a parameter
1204             values.clear();
1205             ASSERT_TRUE(pPrimaryCam->setIntParameter(cmd, val0, &values).isOk());
1206             for (auto&& v : values) {
1207                 EXPECT_EQ(val0, v) << "Values are not matched.";
1208             }
1209 
1210             // Join a listening thread.
1211             if (listener0.joinable()) {
1212                 listener0.join();
1213             }
1214             if (listener1.joinable()) {
1215                 listener1.join();
1216             }
1217 
1218             // Verify a change notification
1219             ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
1220                       static_cast<EvsEventType>(aNotification0.aType));
1221             ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
1222                       static_cast<EvsEventType>(aNotification1.aType));
1223             ASSERT_GE(aNotification0.payload.size(), 2);
1224             ASSERT_GE(aNotification1.payload.size(), 2);
1225             ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification0.payload[0]));
1226             ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification1.payload[0]));
1227             for (auto&& v : values) {
1228                 ASSERT_EQ(v, aNotification0.payload[1]);
1229                 ASSERT_EQ(v, aNotification1.payload[1]);
1230             }
1231 
1232             // Clients expect to receive a parameter change notification
1233             // whenever the primary client adjusts it.
1234             values.clear();
1235             ASSERT_TRUE(pPrimaryCam->getIntParameter(cmd, &values).isOk());
1236             for (auto&& v : values) {
1237                 EXPECT_EQ(val0, v) << "Values are not matched.";
1238             }
1239         }
1240 
1241         // Try to adjust a parameter via non-primary client
1242         values.clear();
1243         ASSERT_FALSE(pSecondaryCam->setIntParameter(camSecondaryCmds[0], val0, &values).isOk());
1244 
1245         // Non-primary client attempts to be a primary client
1246         ASSERT_FALSE(pSecondaryCam->setPrimaryClient().isOk());
1247 
1248         // Primary client retires from a primary client role
1249         bool listening = false;
1250         std::condition_variable eventCond;
1251         std::thread listener =
1252                 std::thread([&aNotification0, &frameHandlerSecondary, &listening, &eventCond]() {
1253                     listening = true;
1254                     eventCond.notify_all();
1255 
1256                     EvsEventDesc aTargetEvent;
1257                     aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
1258                     if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification0, true)) {
1259                         LOG(WARNING) << "Timer expired before the target event was fired.";
1260                     }
1261                 });
1262 
1263         std::mutex eventLock;
1264         auto timer = std::chrono::system_clock::now();
1265         std::unique_lock<std::mutex> lock(eventLock);
1266         while (!listening) {
1267             eventCond.wait_until(lock, timer + 1s);
1268         }
1269         lock.unlock();
1270 
1271         ASSERT_TRUE(pPrimaryCam->unsetPrimaryClient().isOk());
1272 
1273         if (listener.joinable()) {
1274             listener.join();
1275         }
1276         ASSERT_EQ(EvsEventType::MASTER_RELEASED, static_cast<EvsEventType>(aNotification0.aType));
1277 
1278         // Try to adjust a parameter after being retired
1279         values.clear();
1280         ASSERT_FALSE(pPrimaryCam->setIntParameter(camPrimaryCmds[0], val0, &values).isOk());
1281 
1282         // Non-primary client becomes a primary client
1283         ASSERT_TRUE(pSecondaryCam->setPrimaryClient().isOk());
1284 
1285         // Try to adjust a parameter via new primary client
1286         for (auto& cmd : camSecondaryCmds) {
1287             // Get a valid parameter value range
1288             ParameterRange range;
1289             ASSERT_TRUE(pSecondaryCam->getIntParameterRange(cmd, &range).isOk());
1290 
1291             values.clear();
1292             if (cmd == CameraParam::ABSOLUTE_FOCUS) {
1293                 // Try to turn off auto-focus
1294                 values.clear();
1295                 ASSERT_TRUE(
1296                         pSecondaryCam->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
1297                 for (auto&& v : values) {
1298                     EXPECT_EQ(v, 0);
1299                 }
1300             }
1301 
1302             // Calculate a parameter value to program, rounded down to the parameter's step.
1303             val0 = range.min + (std::rand() % (range.max - range.min));
1304             val0 = val0 - (val0 % range.step);
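            // For example, with a hypothetical range of {min = 0, max = 255, step = 16},
            // a draw of 100 would be rounded down to 96 before being programmed.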
1305 
1306             // Prepare and start event listeners.
1307             bool listening0 = false;
1308             bool listening1 = false;
1309             std::condition_variable eventCond;
1310             std::thread listener0 = std::thread([&]() {
1311                 listening0 = true;
1312                 if (listening1) {
1313                     eventCond.notify_all();
1314                 }
1315 
1316                 EvsEventDesc aTargetEvent;
1317                 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1318                 aTargetEvent.payload.push_back(static_cast<int32_t>(cmd));
1319                 aTargetEvent.payload.push_back(val0);
1320                 if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification0)) {
1321                     LOG(WARNING) << "Timer expired before the target event was fired.";
1322                 }
1323             });
1324             std::thread listener1 = std::thread([&]() {
1325                 listening1 = true;
1326                 if (listening0) {
1327                     eventCond.notify_all();
1328                 }
1329 
1330                 EvsEventDesc aTargetEvent;
1331                 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1332                 aTargetEvent.payload.push_back(static_cast<int32_t>(cmd));
1333                 aTargetEvent.payload.push_back(val0);
1334                 if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification1)) {
1335                     LOG(WARNING) << "Timer expired before the target event was fired.";
1336                 }
1337             });
1338 
1339             // Wait until both listening threads start.
1340             std::mutex eventLock;
1341             std::unique_lock<std::mutex> lock(eventLock);
1342             auto timer = std::chrono::system_clock::now();
1343             while (!listening0 || !listening1) {
1344                 eventCond.wait_until(lock, timer + 1s);
1345             }
1346             lock.unlock();
1347 
1348             // Try to program a parameter
1349             values.clear();
1350             ASSERT_TRUE(pSecondaryCam->setIntParameter(cmd, val0, &values).isOk());
1351 
1352             // Clients expect to receive a parameter change notification
1353             // whenever the primary client adjusts it.
1354             values.clear();
1355             ASSERT_TRUE(pSecondaryCam->getIntParameter(cmd, &values).isOk());
1356             for (auto&& v : values) {
1357                 EXPECT_EQ(val0, v) << "Values are not matched.";
1358             }
1359 
1360             // Join a listening thread.
1361             if (listener0.joinable()) {
1362                 listener0.join();
1363             }
1364             if (listener1.joinable()) {
1365                 listener1.join();
1366             }
1367 
1368             // Verify a change notification
1369             ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
1370                       static_cast<EvsEventType>(aNotification0.aType));
1371             ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
1372                       static_cast<EvsEventType>(aNotification1.aType));
1373             ASSERT_GE(aNotification0.payload.size(), 2);
1374             ASSERT_GE(aNotification1.payload.size(), 2);
1375             ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification0.payload[0]));
1376             ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification1.payload[0]));
1377             for (auto&& v : values) {
1378                 ASSERT_EQ(v, aNotification0.payload[1]);
1379                 ASSERT_EQ(v, aNotification1.payload[1]);
1380             }
1381         }
1382 
1383         // New primary client retires from the role
1384         ASSERT_TRUE(pSecondaryCam->unsetPrimaryClient().isOk());
1385 
1386         // Shutdown
1387         frameHandlerPrimary->shutdown();
1388         frameHandlerSecondary->shutdown();
1389 
1390         // Explicitly release the camera
1391         ASSERT_TRUE(mEnumerator->closeCamera(pPrimaryCam).isOk());
1392         ASSERT_TRUE(mEnumerator->closeCamera(pSecondaryCam).isOk());
1393         mActiveCameras.clear();
1394     }
1395 }
1396 
1397 /*
1398  * HighPriorityCameraClient:
1399  * The EVS client that owns the display is prioritized and therefore can take over
1400  * the primary client role from other EVS clients that do not own the display.
1401  */
1402 TEST_P(EvsAidlTest, HighPriorityCameraClient) {
1403     LOG(INFO) << "Starting HighPriorityCameraClient test";
1404 
1405     if (mIsHwModule) {
1406         // This test is not for HW module implementation.
1407         return;
1408     }
1409 
1410     // Get the camera list
1411     loadCameraList();
1412 
1413     // Test each reported camera
1414     for (auto&& cam : mCameraInfo) {
1415         bool isLogicalCam = false;
1416         if (getPhysicalCameraIds(cam.id, isLogicalCam); isLogicalCam) {
1417             LOG(INFO) << "Skip a logical device, " << cam.id;
1418             continue;
1419         }
1420 
1421         // Request available display IDs
1422         uint8_t targetDisplayId = 0;
1423         std::vector<uint8_t> displayIds;
1424         ASSERT_TRUE(mEnumerator->getDisplayIdList(&displayIds).isOk());
1425         EXPECT_GT(displayIds.size(), 0);
1426         targetDisplayId = displayIds[0];
1427 
1428         // Request exclusive access to the EVS display
1429         std::shared_ptr<IEvsDisplay> pDisplay;
1430         ASSERT_TRUE(mEnumerator->openDisplay(targetDisplayId, &pDisplay).isOk());
1431         EXPECT_NE(pDisplay, nullptr);
1432 
1433         // Read a target resolution from the metadata
1434         Stream targetCfg = getFirstStreamConfiguration(
1435                 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
1436         ASSERT_GT(targetCfg.width, 0);
1437         ASSERT_GT(targetCfg.height, 0);
1438 
1439         // Create two clients
1440         std::shared_ptr<IEvsCamera> pCam0;
1441         ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam0).isOk());
1442         EXPECT_NE(pCam0, nullptr);
1443 
1444         // Store a camera handle for a clean-up
1445         mActiveCameras.push_back(pCam0);
1446 
1447         std::shared_ptr<IEvsCamera> pCam1;
1448         ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());
1449         EXPECT_NE(pCam1, nullptr);
1450 
1451         // Store a camera handle for a clean-up
1452         mActiveCameras.push_back(pCam1);
1453 
1454         // Get the parameter list; this test will use the first command in both
1455         // lists.
1456         std::vector<CameraParam> cam0Cmds, cam1Cmds;
1457         ASSERT_TRUE(pCam0->getParameterList(&cam0Cmds).isOk());
1458         ASSERT_TRUE(pCam1->getParameterList(&cam1Cmds).isOk());
1459         if (cam0Cmds.size() < 1 || cam1Cmds.size() < 1) {
1460             // Cannot execute this test.
1461             ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
1462             continue;
1463         }
1464 
1465         // Set up a frame receiver object which will fire up its own thread.
1466         std::shared_ptr<FrameHandler> frameHandler0 = ndk::SharedRefBase::make<FrameHandler>(
1467                 pCam0, cam, nullptr, FrameHandler::eAutoReturn);
1468         std::shared_ptr<FrameHandler> frameHandler1 = ndk::SharedRefBase::make<FrameHandler>(
1469                 pCam1, cam, nullptr, FrameHandler::eAutoReturn);
1470         EXPECT_NE(frameHandler0, nullptr);
1471         EXPECT_NE(frameHandler1, nullptr);
1472 
1473         // Activate the display
1474         ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME).isOk());
1475 
1476         // Start the camera's video stream
1477         ASSERT_TRUE(frameHandler0->startStream());
1478         ASSERT_TRUE(frameHandler1->startStream());
1479 
1480         // Ensure the stream starts
1481         frameHandler0->waitForFrameCount(1);
1482         frameHandler1->waitForFrameCount(1);
1483 
1484         // Client 1 becomes a primary client and programs a parameter.
1485 
1486         // Get a valid parameter value range
1487         ParameterRange range;
1488         ASSERT_TRUE(pCam1->getIntParameterRange(cam1Cmds[0], &range).isOk());
1489 
1490         // Client1 becomes a primary client
1491         ASSERT_TRUE(pCam1->setPrimaryClient().isOk());
1492 
1493         std::vector<int32_t> values;
1494         EvsEventDesc aTargetEvent = {};
1495         EvsEventDesc aNotification = {};
1496         bool listening = false;
1497         std::mutex eventLock;
1498         std::condition_variable eventCond;
1499         if (cam1Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
1500             std::thread listener =
1501                     std::thread([&frameHandler0, &aNotification, &listening, &eventCond] {
1502                         listening = true;
1503                         eventCond.notify_all();
1504 
1505                         EvsEventDesc aTargetEvent;
1506                         aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1507                         aTargetEvent.payload.push_back(
1508                                 static_cast<int32_t>(CameraParam::AUTO_FOCUS));
1509                         aTargetEvent.payload.push_back(0);
1510                         if (!frameHandler0->waitForEvent(aTargetEvent, aNotification)) {
1511                             LOG(WARNING) << "Timer expired before the target event was fired.";
1512                         }
1513                     });
1514 
1515             // Wait until the listener starts.
1516             std::unique_lock<std::mutex> lock(eventLock);
1517             auto timer = std::chrono::system_clock::now();
1518             while (!listening) {
1519                 eventCond.wait_until(lock, timer + 1s);
1520             }
1521             lock.unlock();
1522 
1523             // Try to turn off auto-focus
1524             ASSERT_TRUE(pCam1->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
1525             for (auto&& v : values) {
1526                 EXPECT_EQ(v, 0);
1527             }
1528 
1529             // Join a listener
1530             if (listener.joinable()) {
1531                 listener.join();
1532             }
1533 
1534             // Make sure AUTO_FOCUS is off.
1535             ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
1536                       EvsEventType::PARAMETER_CHANGED);
1537         }
1538 
1539         // Try to program a parameter with a random value in [range.min, range.max],
1540         // rounded down to the parameter's step size.
1541         int32_t val0 = range.min + (std::rand() % (range.max - range.min));
1542         val0 = val0 - (val0 % range.step);
1543 
1544         std::thread listener = std::thread(
1545                 [&frameHandler1, &aNotification, &listening, &eventCond, &cam1Cmds, val0] {
1546                     listening = true;
1547                     eventCond.notify_all();
1548 
1549                     EvsEventDesc aTargetEvent;
1550                     aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1551                     aTargetEvent.payload.push_back(static_cast<int32_t>(cam1Cmds[0]));
1552                     aTargetEvent.payload.push_back(val0);
1553                     if (!frameHandler1->waitForEvent(aTargetEvent, aNotification)) {
1554                         LOG(WARNING) << "Timer expired before the target event was fired.";
1555                     }
1556                 });
1557 
1558         // Wait until the listener starts.
1559         listening = false;
1560         std::unique_lock<std::mutex> lock(eventLock);
1561         auto timer = std::chrono::system_clock::now();
1562         while (!listening) {
1563             eventCond.wait_until(lock, timer + 1s);
1564         }
1565         lock.unlock();
1566 
1567         values.clear();
1568         ASSERT_TRUE(pCam1->setIntParameter(cam1Cmds[0], val0, &values).isOk());
1569         for (auto&& v : values) {
1570             EXPECT_EQ(val0, v);
1571         }
1572 
1573         // Join a listener
1574         if (listener.joinable()) {
1575             listener.join();
1576         }
1577 
1578         // Verify a change notification
1579         ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType), EvsEventType::PARAMETER_CHANGED);
1580         ASSERT_GE(aNotification.payload.size(), 2);
1581         ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]), cam1Cmds[0]);
1582         for (auto&& v : values) {
1583             ASSERT_EQ(v, aNotification.payload[1]);
1584         }
1585 
1586         listener = std::thread([&frameHandler1, &aNotification, &listening, &eventCond] {
1587             listening = true;
1588             eventCond.notify_all();
1589 
1590             EvsEventDesc aTargetEvent;
1591             aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
1592             if (!frameHandler1->waitForEvent(aTargetEvent, aNotification, true)) {
1593                 LOG(WARNING) << "Timer expired before the target event was fired.";
1594             }
1595         });
1596 
1597         // Wait until the listener starts.
1598         listening = false;
1599         lock.lock();
1600         timer = std::chrono::system_clock::now();
1601         while (!listening) {
1602             eventCond.wait_until(lock, timer + 1s);
1603         }
1604         lock.unlock();
1605 
1606         // Client 0 steals a primary client role
1607         ASSERT_TRUE(pCam0->forcePrimaryClient(pDisplay).isOk());
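        // Handing in the display proves that this client owns it, which is what allows it
        // to take the primary role away from the current primary client, pCam1.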
1608 
1609         // Join a listener
1610         if (listener.joinable()) {
1611             listener.join();
1612         }
1613 
1614         ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType), EvsEventType::MASTER_RELEASED);
1615 
1616         // Client 0 programs a parameter
1617         val0 = range.min + (std::rand() % (range.max - range.min));
1618 
1619         // Rounding down
1620         val0 = val0 - (val0 % range.step);
1621 
1622         if (cam0Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
1623             std::thread listener =
1624                     std::thread([&frameHandler1, &aNotification, &listening, &eventCond] {
1625                         listening = true;
1626                         eventCond.notify_all();
1627 
1628                         EvsEventDesc aTargetEvent;
1629                         aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1630                         aTargetEvent.payload.push_back(
1631                                 static_cast<int32_t>(CameraParam::AUTO_FOCUS));
1632                         aTargetEvent.payload.push_back(0);
1633                         if (!frameHandler1->waitForEvent(aTargetEvent, aNotification)) {
1634                             LOG(WARNING) << "Timer expired before the target event was fired.";
1635                         }
1636                     });
1637 
1638             // Wait until the listener starts.
1639             std::unique_lock<std::mutex> lock(eventLock);
1640             auto timer = std::chrono::system_clock::now();
1641             while (!listening) {
1642                 eventCond.wait_until(lock, timer + 1s);
1643             }
1644             lock.unlock();
1645 
1646             // Try to turn off auto-focus
1647             values.clear();
1648             ASSERT_TRUE(pCam0->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
1649             for (auto&& v : values) {
1650                 EXPECT_EQ(v, 0);
1651             }
1652 
1653             // Join a listener
1654             if (listener.joinable()) {
1655                 listener.join();
1656             }
1657 
1658             // Make sure AUTO_FOCUS is off.
1659             ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
1660                       EvsEventType::PARAMETER_CHANGED);
1661         }
1662 
1663         listener = std::thread(
1664                 [&frameHandler0, &aNotification, &listening, &eventCond, &cam0Cmds, val0] {
1665                     listening = true;
1666                     eventCond.notify_all();
1667 
1668                     EvsEventDesc aTargetEvent;
1669                     aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1670                     aTargetEvent.payload.push_back(static_cast<int32_t>(cam0Cmds[0]));
1671                     aTargetEvent.payload.push_back(val0);
1672                     if (!frameHandler0->waitForEvent(aTargetEvent, aNotification)) {
1673                         LOG(WARNING) << "Timer expired before the target event was fired.";
1674                     }
1675                 });
1676 
1677         // Wait until the listener starts.
1678         listening = false;
1679         timer = std::chrono::system_clock::now();
1680         lock.lock();
1681         while (!listening) {
1682             eventCond.wait_until(lock, timer + 1s);
1683         }
1684         lock.unlock();
1685 
1686         values.clear();
1687         ASSERT_TRUE(pCam0->setIntParameter(cam0Cmds[0], val0, &values).isOk());
1688 
1689         // Join a listener
1690         if (listener.joinable()) {
1691             listener.join();
1692         }
1693         // Verify a change notification
1694         ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType), EvsEventType::PARAMETER_CHANGED);
1695         ASSERT_GE(aNotification.payload.size(), 2);
1696         ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]), cam0Cmds[0]);
1697         for (auto&& v : values) {
1698             ASSERT_EQ(v, aNotification.payload[1]);
1699         }
1700 
1701         // Turn off the display (yes, before the stream stops -- it should be handled)
1702         ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::NOT_VISIBLE).isOk());
1703 
1704         // Shut down the streamer
1705         frameHandler0->shutdown();
1706         frameHandler1->shutdown();
1707 
1708         // Explicitly release the camera
1709         ASSERT_TRUE(mEnumerator->closeCamera(pCam0).isOk());
1710         ASSERT_TRUE(mEnumerator->closeCamera(pCam1).isOk());
1711         mActiveCameras.clear();
1712 
1713         // Explicitly release the display
1714         ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
1715     }
1716 }
1717 
1718 /*
1719  * CameraUseStreamConfigToDisplay:
1720  * End-to-end test of data flowing from the camera to the display.  Similar to
1721  * the CameraToDisplayRoundTrip test case, but this case retrieves available stream
1722  * configurations from EVS and uses one of them to start a video stream.
1723  */
1724 TEST_P(EvsAidlTest, CameraUseStreamConfigToDisplay) {
1725     LOG(INFO) << "Starting CameraUseStreamConfigToDisplay test";
1726 
1727     // Get the camera list
1728     loadCameraList();
1729 
1730     // Request available display IDs
1731     uint8_t targetDisplayId = 0;
1732     std::vector<uint8_t> displayIds;
1733     ASSERT_TRUE(mEnumerator->getDisplayIdList(&displayIds).isOk());
1734     EXPECT_GT(displayIds.size(), 0);
1735     targetDisplayId = displayIds[0];
1736 
1737     // Test each reported camera
1738     for (auto&& cam : mCameraInfo) {
1739         // Request exclusive access to the EVS display
1740         std::shared_ptr<IEvsDisplay> pDisplay;
1741         ASSERT_TRUE(mEnumerator->openDisplay(targetDisplayId, &pDisplay).isOk());
1742         EXPECT_NE(pDisplay, nullptr);
1743 
1744         // Choose a configuration whose frame rate is at least minReqFps.
1745         Stream targetCfg = {};
1746         const int32_t minReqFps = 15;
1747         int32_t maxArea = 0;
1748         camera_metadata_entry_t streamCfgs;
1749         bool foundCfg = false;
1750         if (!find_camera_metadata_entry(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()),
1751                                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
1752                                         &streamCfgs)) {
1753             // Stream configurations are found in metadata
1754             RawStreamConfig* ptr = reinterpret_cast<RawStreamConfig*>(streamCfgs.data.i32);
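            // Each stream configuration spans kStreamCfgSz consecutive int32_t values, so
            // incrementing the RawStreamConfig pointer by one advances to the next entry.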
1755             for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
1756                 if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
1757                     if (ptr->width * ptr->height > maxArea && ptr->framerate >= minReqFps) {
1758                         targetCfg.width = ptr->width;
1759                         targetCfg.height = ptr->height;
1760                         targetCfg.format = static_cast<PixelFormat>(ptr->format);
1761 
1762                         maxArea = ptr->width * ptr->height;
1763                         foundCfg = true;
1764                     }
1765                 }
1766                 ++ptr;
1767             }
1768         }
1769 
1770         if (!foundCfg) {
1771             // Current EVS camera does not provide stream configurations in the
1772             // metadata.
1773             continue;
1774         }
1775 
1776         std::shared_ptr<IEvsCamera> pCam;
1777         ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
1778         EXPECT_NE(pCam, nullptr);
1779 
1780         // Store a camera handle for a clean-up
1781         mActiveCameras.push_back(pCam);
1782 
1783         // Set up a frame receiver object which will fire up its own thread.
1784         std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
1785                 pCam, cam, pDisplay, FrameHandler::eAutoReturn);
1786         EXPECT_NE(frameHandler, nullptr);
1787 
1788         // Activate the display
1789         ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME).isOk());
1790 
1791         // Start the camera's video stream
1792         ASSERT_TRUE(frameHandler->startStream());
1793 
1794         // Wait a while to let the data flow
1795         static const int kSecondsToWait = 5;
1796         const int streamTimeMs =
1797                 kSecondsToWait * kSecondsToMilliseconds - kMaxStreamStartMilliseconds;
1798         const unsigned minimumFramesExpected =
1799                 streamTimeMs * kMinimumFramesPerSecond / kSecondsToMilliseconds;
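        // With the constants above (5 s of streaming, 500 ms of allowed start-up time and a
        // 10 fps minimum), streamTimeMs is 4500 and minimumFramesExpected works out to 45.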
1800         sleep(kSecondsToWait);
1801         unsigned framesReceived = 0;
1802         unsigned framesDisplayed = 0;
1803         frameHandler->getFramesCounters(&framesReceived, &framesDisplayed);
1804         EXPECT_EQ(framesReceived, framesDisplayed);
1805         EXPECT_GE(framesDisplayed, minimumFramesExpected);
1806 
1807         // Turn off the display (yes, before the stream stops -- it should be handled)
1808         ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::NOT_VISIBLE).isOk());
1809 
1810         // Shut down the streamer
1811         frameHandler->shutdown();
1812 
1813         // Explicitly release the camera
1814         ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
1815         mActiveCameras.clear();
1816 
1817         // Explicitly release the display
1818         ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
1819     }
1820 }
1821 
1822 /*
1823  * MultiCameraStreamUseConfig:
1824  * Verify that each client can start and stop video streams on the same
1825  * underlying camera with the same configuration.
1826  */
1827 TEST_P(EvsAidlTest, MultiCameraStreamUseConfig) {
1828     LOG(INFO) << "Starting MultiCameraStreamUseConfig test";
1829 
1830     if (mIsHwModule) {
1831         // This test is not for HW module implementation.
1832         return;
1833     }
1834 
1835     // Get the camera list
1836     loadCameraList();
1837 
1838     // Test each reported camera
1839     for (auto&& cam : mCameraInfo) {
1840         // Choose a configuration whose frame rate is at least minReqFps.
1841         Stream targetCfg = {};
1842         const int32_t minReqFps = 15;
1843         int32_t maxArea = 0;
1844         camera_metadata_entry_t streamCfgs;
1845         bool foundCfg = false;
1846         if (!find_camera_metadata_entry(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()),
1847                                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
1848                                         &streamCfgs)) {
1849             // Stream configurations are found in metadata
1850             RawStreamConfig* ptr = reinterpret_cast<RawStreamConfig*>(streamCfgs.data.i32);
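            // As above, walk the packed stream configurations (kStreamCfgSz int32_t values
            // each) and keep the largest output resolution that still meets minReqFps.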
1851             for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
1852                 if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
1853                     if (ptr->width * ptr->height > maxArea && ptr->framerate >= minReqFps) {
1854                         targetCfg.width = ptr->width;
1855                         targetCfg.height = ptr->height;
1856                         targetCfg.format = static_cast<PixelFormat>(ptr->format);
1857 
1858                         maxArea = ptr->width * ptr->height;
1859                         foundCfg = true;
1860                     }
1861                 }
1862                 ++ptr;
1863             }
1864         }
1865 
1866         if (!foundCfg) {
1867             LOG(INFO) << "Device " << cam.id
1868                       << " does not provide a list of supported stream configurations, skipped";
1869             continue;
1870         }
1871 
1872         // Create the first camera client with a selected stream configuration.
1873         std::shared_ptr<IEvsCamera> pCam0;
1874         ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam0).isOk());
1875         EXPECT_NE(pCam0, nullptr);
1876 
1877         // Store a camera handle for a clean-up
1878         mActiveCameras.push_back(pCam0);
1879 
1880         // Try to create the second camera client with a different stream
1881         // configuration.
1882         int32_t id = targetCfg.id;
1883         targetCfg.id += 1;  // EVS manager sees only the stream id.
1884         std::shared_ptr<IEvsCamera> pCam1;
1885         ASSERT_FALSE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());
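        // This open is expected to fail: the device is already streaming with the original
        // configuration, and a request that differs only in its stream id is treated as a
        // different configuration by the EVS manager.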
1886 
1887         // Try again with the same stream configuration.
1888         targetCfg.id = id;
1889         ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());
1890         EXPECT_NE(pCam1, nullptr);
1891 
1892         // Set up per-client frame receiver objects, each of which will fire up its own thread
1893         std::shared_ptr<FrameHandler> frameHandler0 = ndk::SharedRefBase::make<FrameHandler>(
1894                 pCam0, cam, nullptr, FrameHandler::eAutoReturn);
1895         std::shared_ptr<FrameHandler> frameHandler1 = ndk::SharedRefBase::make<FrameHandler>(
1896                 pCam1, cam, nullptr, FrameHandler::eAutoReturn);
1897         EXPECT_NE(frameHandler0, nullptr);
1898         EXPECT_NE(frameHandler1, nullptr);
1899 
1900         // Start the camera's video stream via each client
1901         ASSERT_TRUE(frameHandler0->startStream());
1902         ASSERT_TRUE(frameHandler1->startStream());
1903 
1904         // Ensure the stream starts
1905         frameHandler0->waitForFrameCount(1);
1906         frameHandler1->waitForFrameCount(1);
1907 
1908         nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
1909 
1910         // Wait a bit, then ensure both clients get at least the required minimum number of frames
1911         sleep(5);
1912         nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
1913         unsigned framesReceived0 = 0, framesReceived1 = 0;
1914         frameHandler0->getFramesCounters(&framesReceived0, nullptr);
1915         frameHandler1->getFramesCounters(&framesReceived1, nullptr);
1916         framesReceived0 = framesReceived0 - 1;  // Back out the first frame we already waited for
1917         framesReceived1 = framesReceived1 - 1;  // Back out the first frame we already waited for
1918         nsecs_t runTime = end - firstFrame;
1919         float framesPerSecond0 = framesReceived0 / (runTime * kNanoToSeconds);
1920         float framesPerSecond1 = framesReceived1 / (runTime * kNanoToSeconds);
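        // runTime is measured in nanoseconds; multiplying by kNanoToSeconds converts it to
        // seconds, so the ratios above are frames per second.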
1921         LOG(INFO) << "Measured camera rate " << std::scientific << framesPerSecond0 << " fps and "
1922                   << framesPerSecond1 << " fps";
1923         EXPECT_GE(framesPerSecond0, kMinimumFramesPerSecond);
1924         EXPECT_GE(framesPerSecond1, kMinimumFramesPerSecond);
1925 
1926         // Shutdown one client
1927         frameHandler0->shutdown();
1928 
1929         // Read frame counters again
1930         frameHandler0->getFramesCounters(&framesReceived0, nullptr);
1931         frameHandler1->getFramesCounters(&framesReceived1, nullptr);
1932 
1933         // Wait a bit again
1934         sleep(5);
1935         unsigned framesReceivedAfterStop0 = 0, framesReceivedAfterStop1 = 0;
1936         frameHandler0->getFramesCounters(&framesReceivedAfterStop0, nullptr);
1937         frameHandler1->getFramesCounters(&framesReceivedAfterStop1, nullptr);
1938         EXPECT_EQ(framesReceived0, framesReceivedAfterStop0);
1939         EXPECT_LT(framesReceived1, framesReceivedAfterStop1);
1940 
1941         // Shutdown another
1942         frameHandler1->shutdown();
1943 
1944         // Explicitly release the camera
1945         ASSERT_TRUE(mEnumerator->closeCamera(pCam0).isOk());
1946         ASSERT_TRUE(mEnumerator->closeCamera(pCam1).isOk());
1947         mActiveCameras.clear();
1948     }
1949 }
1950 
1951 /*
1952  * LogicalCameraMetadata:
1953  * Examines each logical camera reported by the enumerator and validates its metadata by
1954  * checking its capability and locating the supporting physical camera device
1955  * identifiers.
1956  */
1957 TEST_P(EvsAidlTest, LogicalCameraMetadata) {
1958     LOG(INFO) << "Starting LogicalCameraMetadata test";
1959 
1960     // Get the camera list
1961     loadCameraList();
1962 
1963     // Verify that each logical camera lists at least one physical camera device
1964     for (auto&& cam : mCameraInfo) {
1965         bool isLogicalCam = false;
1966         auto devices = getPhysicalCameraIds(cam.id, isLogicalCam);
1967         if (isLogicalCam) {
1968             ASSERT_GE(devices.size(), 1) << "Logical camera device must have at least one physical "
1969                                             "camera device ID in its metadata.";
1970         }
1971     }
1972 }
1973 
1974 /*
1975  * CameraStreamExternalBuffering:
1976  * This is the same as CameraStreamBuffering except that frame buffers are allocated by
1977  * the test client and then imported by the EVS framework.
1978  */
1979 TEST_P(EvsAidlTest, CameraStreamExternalBuffering) {
1980     LOG(INFO) << "Starting CameraStreamExternalBuffering test";
1981 
1982     // Arbitrary constant (should be > 1 and not too big)
1983     static const unsigned int kBuffersToHold = 3;
1984 
1985     // Get the camera list
1986     loadCameraList();
1987 
1988     // Acquire the graphics buffer allocator
1989     android::GraphicBufferAllocator& alloc(android::GraphicBufferAllocator::get());
1990     const auto usage =
1991             GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_SW_READ_RARELY | GRALLOC_USAGE_SW_WRITE_OFTEN;
1992 
1993     // Test each reported camera
1994     for (auto&& cam : mCameraInfo) {
1995         bool isLogicalCam = false;
1996         getPhysicalCameraIds(cam.id, isLogicalCam);
1997         if (isLogicalCam) {
1998             LOG(INFO) << "Skip a logical device, " << cam.id;
1999             continue;
2000         }
2001 
2002         // Read a target resolution from the metadata
2003         Stream targetCfg = getFirstStreamConfiguration(
2004                 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
2005         ASSERT_GT(targetCfg.width, 0);
2006         ASSERT_GT(targetCfg.height, 0);
2007 
2008         // Allocate buffers to use
2009         std::vector<BufferDesc> buffers;
2010         buffers.resize(kBuffersToHold);
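        // Allocate each buffer with gralloc and wrap it in a BufferDesc whose
        // HardwareBufferDescription mirrors the allocation parameters; these descriptors
        // are what importExternalBuffers() consumes below.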
2011         for (auto i = 0; i < kBuffersToHold; ++i) {
2012             unsigned pixelsPerLine;
2013             buffer_handle_t memHandle = nullptr;
2014             android::status_t result =
2015                     alloc.allocate(targetCfg.width, targetCfg.height,
2016                                    static_cast<android::PixelFormat>(targetCfg.format),
2017                                    /* layerCount = */ 1, usage, &memHandle, &pixelsPerLine,
2018                                    /* graphicBufferId = */ 0,
2019                                    /* requestorName = */ "CameraStreamExternalBufferingTest");
2020             if (result != android::NO_ERROR) {
2021                 LOG(ERROR) << __FUNCTION__ << " failed to allocate memory.";
2022                 // Release previously allocated buffers
2023                 for (auto j = 0; j < i; j++) {
2024                     alloc.free(::android::dupFromAidl(buffers[j].buffer.handle));
2025                 }
2026                 return;
2027             } else {
2028                 BufferDesc buf;
2029                 HardwareBufferDescription* pDesc =
2030                         reinterpret_cast<HardwareBufferDescription*>(&buf.buffer.description);
2031                 pDesc->width = targetCfg.width;
2032                 pDesc->height = targetCfg.height;
2033                 pDesc->layers = 1;
2034                 pDesc->format = targetCfg.format;
2035                 pDesc->usage = static_cast<BufferUsage>(usage);
2036                 pDesc->stride = pixelsPerLine;
2037                 buf.buffer.handle = ::android::dupToAidl(memHandle);
2038                 buf.bufferId = i;  // Unique number to identify this buffer
2039                 buffers[i] = std::move(buf);
2040             }
2041         }
2042 
2043         std::shared_ptr<IEvsCamera> pCam;
2044         ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
2045         EXPECT_NE(pCam, nullptr);
2046 
2047         // Store a camera handle for a clean-up
2048         mActiveCameras.push_back(pCam);
2049 
2050         // Request to import buffers
2051         int delta = 0;
2052         auto status = pCam->importExternalBuffers(buffers, &delta);
2053         ASSERT_TRUE(status.isOk());
2054         EXPECT_GE(delta, kBuffersToHold);
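        // delta reports how much the camera's buffer pool changed as a result of the import;
        // it is expected to grow by at least the number of buffers we provided.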
2055 
2056         // Set up a frame receiver object which will fire up its own thread.
2057         std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
2058                 pCam, cam, nullptr, FrameHandler::eNoAutoReturn);
2059         EXPECT_NE(frameHandler, nullptr);
2060 
2061         // Start the camera's video stream
2062         ASSERT_TRUE(frameHandler->startStream());
2063 
2064         // Check that the video stream stalls once we've gotten exactly the number of buffers
2065         // we requested since we told the frameHandler not to return them.
2066         sleep(1);  // 1 second should be enough for at least 5 frames to be delivered worst case
2067         unsigned framesReceived = 0;
2068         frameHandler->getFramesCounters(&framesReceived, nullptr);
2069         ASSERT_LE(kBuffersToHold, framesReceived) << "Stream didn't stall at expected buffer limit";
2070 
2071         // Give back one buffer
2072         EXPECT_TRUE(frameHandler->returnHeldBuffer());
2073 
2074         // Once we return a buffer, it shouldn't take more than 1/10 second to get a new one
2075         // filled since we require 10fps minimum -- but give a 10% allowance just in case.
2076         unsigned framesReceivedAfter = 0;
2077         usleep(110 * kMillisecondsToMicroseconds);
2078         frameHandler->getFramesCounters(&framesReceivedAfter, nullptr);
2079         EXPECT_EQ(framesReceived + 1, framesReceivedAfter) << "Stream should've resumed";
2080 
2081         // Even when the camera pointer goes out of scope, the FrameHandler object will
2082         // keep the stream alive unless we tell it to shut down.
2083         // Also note that the FrameHandler and the Camera have a mutual circular reference, so
2084         // we have to break that cycle in order for either of them to get cleaned up.
2085         frameHandler->shutdown();
2086 
2087         // Explicitly release the camera
2088         ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
2089         mActiveCameras.clear();
2090         // Release buffers
2091         for (auto& b : buffers) {
2092             alloc.free(::android::dupFromAidl(b.buffer.handle));
2093         }
2094         buffers.resize(0);
2095     }
2096 }
2097 
2098 TEST_P(EvsAidlTest, DeviceStatusCallbackRegistration) {
2099     std::shared_ptr<IEvsEnumeratorStatusCallback> cb =
2100             ndk::SharedRefBase::make<DeviceStatusCallback>();
2101     ndk::ScopedAStatus status = mEnumerator->registerStatusCallback(cb);
2102     if (mIsHwModule) {
2103         ASSERT_TRUE(status.isOk());
2104     } else {
2105         // A callback registration may fail if a HIDL EVS HAL implementation is
2106         // running.
2107         ASSERT_TRUE(status.isOk() ||
2108                     status.getServiceSpecificError() == static_cast<int>(EvsResult::NOT_SUPPORTED));
2109     }
2110 }
2111 
2112 /*
2113  * UltrasonicsArrayOpenClean:
2114  * Opens each ultrasonics array reported by the enumerator and then explicitly closes it via a
2115  * call to closeUltrasonicsArray. Then repeats the test to ensure all ultrasonics arrays
2116  * can be reopened.
2117  */
2118 TEST_P(EvsAidlTest, UltrasonicsArrayOpenClean) {
2119     LOG(INFO) << "Starting UltrasonicsArrayOpenClean test";
2120 
2121     // Get the ultrasonics array list
2122     loadUltrasonicsArrayList();
2123 
2124     // Open and close each ultrasonics array twice
2125     for (auto&& ultraInfo : mUltrasonicsArraysInfo) {
2126         for (int pass = 0; pass < 2; pass++) {
2127             std::shared_ptr<IEvsUltrasonicsArray> pUltrasonicsArray;
2128             ASSERT_TRUE(
2129                     mEnumerator
2130                             ->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId, &pUltrasonicsArray)
2131                             .isOk());
2132             EXPECT_NE(pUltrasonicsArray, nullptr);
2133 
2134             // Verify that this ultrasonics array self-identifies correctly
2135             UltrasonicsArrayDesc desc;
2136             ASSERT_TRUE(pUltrasonicsArray->getUltrasonicArrayInfo(&desc).isOk());
2137             EXPECT_EQ(ultraInfo.ultrasonicsArrayId, desc.ultrasonicsArrayId);
2138             LOG(DEBUG) << "Found ultrasonics array " << ultraInfo.ultrasonicsArrayId;
2139 
2140             // Explicitly close the ultrasonics array so resources are released right away
2141             ASSERT_TRUE(mEnumerator->closeUltrasonicsArray(pUltrasonicsArray).isOk());
2142         }
2143     }
2144 }
2145 
2146 // Starts a stream and verifies that all received data is valid.
2147 TEST_P(EvsAidlTest, UltrasonicsVerifyStreamData) {
2148     LOG(INFO) << "Starting UltrasonicsVerifyStreamData";
2149 
2150     // Get the ultrasonics array list
2151     loadUltrasonicsArrayList();
2152 
2153     // For each ultrasonics array.
2154     for (auto&& ultraInfo : mUltrasonicsArraysInfo) {
2155         LOG(DEBUG) << "Testing ultrasonics array: " << ultraInfo.ultrasonicsArrayId;
2156 
2157         std::shared_ptr<IEvsUltrasonicsArray> pUltrasonicsArray;
2158         ASSERT_TRUE(
2159                 mEnumerator->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId, &pUltrasonicsArray)
2160                         .isOk());
2161         EXPECT_NE(pUltrasonicsArray, nullptr);
2162 
2163         std::shared_ptr<FrameHandlerUltrasonics> frameHandler =
2164                 ndk::SharedRefBase::make<FrameHandlerUltrasonics>(pUltrasonicsArray);
2165         EXPECT_NE(frameHandler, nullptr);
2166 
2167         // Start stream.
2168         ASSERT_TRUE(pUltrasonicsArray->startStream(frameHandler).isOk());
2169 
2170         // Wait 5 seconds to receive frames.
2171         sleep(5);
2172 
2173         // Stop stream.
2174         ASSERT_TRUE(pUltrasonicsArray->stopStream().isOk());
2175 
2176         EXPECT_GT(frameHandler->getReceiveFramesCount(), 0);
2177         EXPECT_TRUE(frameHandler->areAllFramesValid());
2178 
2179         // Explicitly close the ultrasonics array so resources are released right away
2180         ASSERT_TRUE(mEnumerator->closeUltrasonicsArray(pUltrasonicsArray).isOk());
2181     }
2182 }
2183 
2184 // Sets the number of frames in flight before and after the stream starts and verifies success.
2185 TEST_P(EvsAidlTest, UltrasonicsSetFramesInFlight) {
2186     LOG(INFO) << "Starting UltrasonicsSetFramesInFlight";
2187 
2188     // Get the ultrasonics array list
2189     loadUltrasonicsArrayList();
2190 
2191     // For each ultrasonics array.
2192     for (auto&& ultraInfo : mUltrasonicsArraysInfo) {
2193         LOG(DEBUG) << "Testing ultrasonics array: " << ultraInfo.ultrasonicsArrayId;
2194 
2195         std::shared_ptr<IEvsUltrasonicsArray> pUltrasonicsArray;
2196         ASSERT_TRUE(
2197                 mEnumerator->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId, &pUltrasonicsArray)
2198                         .isOk());
2199         EXPECT_NE(pUltrasonicsArray, nullptr);
2200 
2201         ASSERT_TRUE(pUltrasonicsArray->setMaxFramesInFlight(10).isOk());
2202 
2203         std::shared_ptr<FrameHandlerUltrasonics> frameHandler =
2204                 ndk::SharedRefBase::make<FrameHandlerUltrasonics>(pUltrasonicsArray);
2205         EXPECT_NE(frameHandler, nullptr);
2206 
2207         // Start stream.
2208         ASSERT_TRUE(pUltrasonicsArray->startStream(frameHandler).isOk());
2209         ASSERT_TRUE(pUltrasonicsArray->setMaxFramesInFlight(5).isOk());
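        // Lowering the frames-in-flight limit while the stream is running must also succeed.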
2210 
2211         // Stop stream.
2212         ASSERT_TRUE(pUltrasonicsArray->stopStream().isOk());
2213 
2214         // Explicitly close the ultrasonics array so resources are released right away
2215         ASSERT_TRUE(mEnumerator->closeUltrasonicsArray(pUltrasonicsArray).isOk());
2216     }
2217 }
2218 
2219 /*
2220  * DisplayOpen:
2221  * Test both clean shutdown and "aggressive open" device-stealing behavior.
2222  */
2223 TEST_P(EvsAidlTest, DisplayOpen) {
2224     LOG(INFO) << "Starting DisplayOpen test";
2225 
2226     // Request available display IDs.
2227     std::vector<uint8_t> displayIds;
2228     ASSERT_TRUE(mEnumerator->getDisplayIdList(&displayIds).isOk());
2229     EXPECT_GT(displayIds.size(), 0);
2230 
2231     for (const auto displayId : displayIds) {
2232         std::shared_ptr<IEvsDisplay> pDisplay;
2233 
2234         // Request exclusive access to each EVS display, then let it go.
2235         ASSERT_TRUE(mEnumerator->openDisplay(displayId, &pDisplay).isOk());
2236         ASSERT_NE(pDisplay, nullptr);
2237 
2238         {
2239             // Ask the display what its name is.
2240             DisplayDesc desc;
2241             ASSERT_TRUE(pDisplay->getDisplayInfo(&desc).isOk());
2242             LOG(DEBUG) << "Found display " << desc.id;
2243         }
2244 
2245         ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
2246 
2247         // Ensure we can reopen the display after it has been closed.
2248         ASSERT_TRUE(mEnumerator->openDisplay(displayId, &pDisplay).isOk());
2249         ASSERT_NE(pDisplay, nullptr);
2250 
2251         // Open the display while it's already open -- ownership should be transferred.
2252         std::shared_ptr<IEvsDisplay> pDisplay2;
2253         ASSERT_TRUE(mEnumerator->openDisplay(displayId, &pDisplay2).isOk());
2254         ASSERT_NE(pDisplay2, nullptr);
2255 
2256         {
2257             // Ensure the old display properly reports its assassination.
2258             DisplayState badState;
2259             EXPECT_TRUE(pDisplay->getDisplayState(&badState).isOk());
2260             EXPECT_EQ(badState, DisplayState::DEAD);
2261         }
2262 
2263         // Close only the newest display instance -- the other should already be a zombie.
2264         ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay2).isOk());
2265 
2266         // Finally, validate that we can open the display after the provoked failure above.
2267         ASSERT_TRUE(mEnumerator->openDisplay(displayId, &pDisplay).isOk());
2268         ASSERT_NE(pDisplay, nullptr);
2269         ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
2270     }
2271 }
2272 
2273 /*
2274  * DisplayStates:
2275  * Validate that display states transition as expected and can be queried from either the display
2276  * object itself or the owning enumerator.
2277  */
2278 TEST_P(EvsAidlTest, DisplayStates) {
2279     using std::literals::chrono_literals::operator""ms;
2280 
2281     LOG(INFO) << "Starting DisplayStates test";
2282 
2283     // Request available display IDs.
2284     std::vector<uint8_t> displayIds;
2285     ASSERT_TRUE(mEnumerator->getDisplayIdList(&displayIds).isOk());
2286     EXPECT_GT(displayIds.size(), 0);
2287 
2288     for (const auto displayId : displayIds) {
2289         // Ensure the display starts in the expected state.
2290         {
2291             DisplayState state;
2292             EXPECT_FALSE(mEnumerator->getDisplayState(&state).isOk());
2293         }
2294         for (const auto displayIdToQuery : displayIds) {
2295             DisplayState state;
2296             EXPECT_FALSE(mEnumerator->getDisplayStateById(displayIdToQuery, &state).isOk());
2297         }
2298 
2299         // Scope to limit the lifetime of the pDisplay pointer, and thus the IEvsDisplay object.
2300         {
2301             // Request exclusive access to the EVS display.
2302             std::shared_ptr<IEvsDisplay> pDisplay;
2303             ASSERT_TRUE(mEnumerator->openDisplay(displayId, &pDisplay).isOk());
2304             ASSERT_NE(pDisplay, nullptr);
2305             {
2306                 DisplayState state;
2307                 EXPECT_TRUE(mEnumerator->getDisplayState(&state).isOk());
2308                 EXPECT_EQ(state, DisplayState::NOT_VISIBLE);
2309             }
2310             for (const auto displayIdToQuery : displayIds) {
2311                 DisplayState state;
2312                 bool get_state_ok =
2313                         mEnumerator->getDisplayStateById(displayIdToQuery, &state).isOk();
2314                 if (displayIdToQuery != displayId) {
2315                     EXPECT_FALSE(get_state_ok);
2316                 } else if (get_state_ok) {
2317                     EXPECT_EQ(state, DisplayState::NOT_VISIBLE);
2318                 }
2319             }
2320 
2321             // Activate the display.
2322             EXPECT_TRUE(pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME).isOk());
2323             {
2324                 DisplayState state;
2325                 EXPECT_TRUE(mEnumerator->getDisplayState(&state).isOk());
2326                 EXPECT_EQ(state, DisplayState::VISIBLE_ON_NEXT_FRAME);
2327             }
2328             {
2329                 DisplayState state;
2330                 EXPECT_TRUE(pDisplay->getDisplayState(&state).isOk());
2331                 EXPECT_EQ(state, DisplayState::VISIBLE_ON_NEXT_FRAME);
2332             }
2333             for (const auto displayIdToQuery : displayIds) {
2334                 DisplayState state;
2335                 bool get_state_ok =
2336                         mEnumerator->getDisplayStateById(displayIdToQuery, &state).isOk();
2337                 if (displayIdToQuery != displayId) {
2338                     EXPECT_FALSE(get_state_ok);
2339                 } else if (get_state_ok) {
2340                     EXPECT_EQ(state, DisplayState::VISIBLE_ON_NEXT_FRAME);
2341                 }
2342             }
2343 
2344             // Get the output buffer we'd use to display the imagery.
2345             BufferDesc tgtBuffer;
2346             ASSERT_TRUE(pDisplay->getTargetBuffer(&tgtBuffer).isOk());
2347 
2348             // Send the target buffer back for display (we didn't actually fill anything).
2349             EXPECT_TRUE(pDisplay->returnTargetBufferForDisplay(tgtBuffer).isOk());
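            // Returning the target buffer, even without drawing into it, completes a frame
            // and is what advances the display from VISIBLE_ON_NEXT_FRAME to VISIBLE, as
            // verified below.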
2350 
2351             // Sleep for a tenth of a second to ensure the driver has time to get the image
2352             // displayed.
2353             std::this_thread::sleep_for(100ms);
2354             {
2355                 DisplayState state;
2356                 EXPECT_TRUE(mEnumerator->getDisplayState(&state).isOk());
2357                 EXPECT_EQ(state, DisplayState::VISIBLE);
2358             }
2359             {
2360                 DisplayState state;
2361                 EXPECT_TRUE(pDisplay->getDisplayState(&state).isOk());
2362                 EXPECT_EQ(state, DisplayState::VISIBLE);
2363             }
2364             for (const auto displayIdToQuery : displayIds) {
2365                 DisplayState state;
2366                 bool get_state_ok =
2367                         mEnumerator->getDisplayStateById(displayIdToQuery, &state).isOk();
2368                 if (displayIdToQuery != displayId) {
2369                     EXPECT_FALSE(get_state_ok);
2370                 } else if (get_state_ok) {
2371                     EXPECT_EQ(state, DisplayState::VISIBLE);
2372                 }
2373             }
2374 
2375             // Turn off the display.
2376             EXPECT_TRUE(pDisplay->setDisplayState(DisplayState::NOT_VISIBLE).isOk());
2377             std::this_thread::sleep_for(100ms);
2378             {
2379                 DisplayState state;
2380                 EXPECT_TRUE(mEnumerator->getDisplayState(&state).isOk());
2381                 EXPECT_EQ(state, DisplayState::NOT_VISIBLE);
2382             }
2383             {
2384                 DisplayState state;
2385                 EXPECT_TRUE(pDisplay->getDisplayState(&state).isOk());
2386                 EXPECT_EQ(state, DisplayState::NOT_VISIBLE);
2387             }
2388             for (const auto displayIdToQuery : displayIds) {
2389                 DisplayState state;
2390                 bool get_state_ok =
2391                         mEnumerator->getDisplayStateById(displayIdToQuery, &state).isOk();
2392                 if (displayIdToQuery != displayId) {
2393                     EXPECT_FALSE(get_state_ok);
2394                 } else if (get_state_ok) {
2395                     EXPECT_EQ(state, DisplayState::NOT_VISIBLE);
2396                 }
2397             }
2398 
2399             // Close the display.
2400             mEnumerator->closeDisplay(pDisplay);
2401         }
2402 
2403         // Now that the display pointer has gone out of scope, causing the IEvsDisplay interface
2404         // object to be destroyed, we should be back to the "not open" state.
2405         // NOTE:  If we want this to pass without the sleep above, we'd have to add the
2406         //        (now recommended) closeDisplay() call instead of relying on the smart pointer
2407         //        going out of scope.  I've not done that because I want to verify that the deletion
2408         //        of the object does actually clean up (eventually).
2409         {
2410             DisplayState state;
2411             EXPECT_FALSE(mEnumerator->getDisplayState(&state).isOk());
2412         }
2413         for (const auto displayIdToQuery : displayIds) {
2414             DisplayState state;
2415             EXPECT_FALSE(mEnumerator->getDisplayStateById(displayIdToQuery, &state).isOk());
2416         }
2417     }
2418 }
2419 
2420 GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(EvsAidlTest);
2421 INSTANTIATE_TEST_SUITE_P(
2422         PerInstance, EvsAidlTest,
2423         testing::ValuesIn(android::getAidlHalInstanceNames(IEvsEnumerator::descriptor)),
2424         android::PrintInstanceNameToString);
2425 
2426 int main(int argc, char** argv) {
2427     ::testing::InitGoogleTest(&argc, argv);
2428     ABinderProcess_setThreadPoolMaxThreadCount(1);
2429     ABinderProcess_startThreadPool();
2430     return RUN_ALL_TESTS();
2431 }
2432