blob: 09e1088ee4ef5c70f775113c7646f85b85bb9d0f [file] [log] [blame]
Changyeon Jo80189012021-10-10 16:34:21 -07001/*
2 * Copyright (C) 2022 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "FrameHandler.h"
18#include "FrameHandlerUltrasonics.h"
19
20#include <aidl/Gtest.h>
21#include <aidl/Vintf.h>
22#include <aidl/android/hardware/automotive/evs/BufferDesc.h>
23#include <aidl/android/hardware/automotive/evs/CameraDesc.h>
24#include <aidl/android/hardware/automotive/evs/CameraParam.h>
25#include <aidl/android/hardware/automotive/evs/DisplayDesc.h>
26#include <aidl/android/hardware/automotive/evs/DisplayState.h>
27#include <aidl/android/hardware/automotive/evs/EvsEventDesc.h>
28#include <aidl/android/hardware/automotive/evs/EvsEventType.h>
29#include <aidl/android/hardware/automotive/evs/EvsResult.h>
30#include <aidl/android/hardware/automotive/evs/IEvsCamera.h>
31#include <aidl/android/hardware/automotive/evs/IEvsDisplay.h>
32#include <aidl/android/hardware/automotive/evs/IEvsEnumerator.h>
33#include <aidl/android/hardware/automotive/evs/IEvsUltrasonicsArray.h>
34#include <aidl/android/hardware/automotive/evs/ParameterRange.h>
35#include <aidl/android/hardware/automotive/evs/Stream.h>
36#include <aidl/android/hardware/automotive/evs/UltrasonicsArrayDesc.h>
37#include <aidl/android/hardware/common/NativeHandle.h>
38#include <aidl/android/hardware/graphics/common/HardwareBufferDescription.h>
39#include <aidl/android/hardware/graphics/common/PixelFormat.h>
40#include <aidlcommonsupport/NativeHandle.h>
41#include <android-base/logging.h>
42#include <android/binder_ibinder.h>
43#include <android/binder_manager.h>
44#include <android/binder_process.h>
45#include <android/binder_status.h>
46#include <system/camera_metadata.h>
47#include <ui/GraphicBuffer.h>
48#include <ui/GraphicBufferAllocator.h>
49#include <utils/Timers.h>
50
51#include <deque>
52#include <thread>
53#include <unordered_set>
54
namespace {

// Timing and rate limits called out in the EVS design doc (as of Mar 8, 2017).
constexpr int kMaxStreamStartMilliseconds = 500;
constexpr int kMinimumFramesPerSecond = 10;
constexpr int kSecondsToMilliseconds = 1000;
constexpr int kMillisecondsToMicroseconds = 1000;
constexpr float kNanoToMilliseconds = 0.000001f;
constexpr float kNanoToSeconds = 0.000000001f;

/*
 * Please note that this is different from what is defined in
 * libhardware/modules/camera/3_4/metadata/types.h; this has one additional
 * field to store a framerate.
 */
struct RawStreamConfig {
    int32_t id;
    int32_t width;
    int32_t height;
    int32_t format;
    int32_t direction;
    int32_t framerate;
};

// Number of int32_t words occupied by one raw stream configuration record.
constexpr size_t kStreamCfgSz = sizeof(RawStreamConfig) / sizeof(int32_t);

}  // namespace
81
82using ::aidl::android::hardware::automotive::evs::BufferDesc;
83using ::aidl::android::hardware::automotive::evs::CameraDesc;
84using ::aidl::android::hardware::automotive::evs::CameraParam;
85using ::aidl::android::hardware::automotive::evs::DisplayDesc;
86using ::aidl::android::hardware::automotive::evs::DisplayState;
87using ::aidl::android::hardware::automotive::evs::EvsEventDesc;
88using ::aidl::android::hardware::automotive::evs::EvsEventType;
89using ::aidl::android::hardware::automotive::evs::EvsResult;
90using ::aidl::android::hardware::automotive::evs::IEvsCamera;
91using ::aidl::android::hardware::automotive::evs::IEvsDisplay;
92using ::aidl::android::hardware::automotive::evs::IEvsEnumerator;
93using ::aidl::android::hardware::automotive::evs::IEvsUltrasonicsArray;
94using ::aidl::android::hardware::automotive::evs::ParameterRange;
95using ::aidl::android::hardware::automotive::evs::Stream;
96using ::aidl::android::hardware::automotive::evs::UltrasonicsArrayDesc;
97using ::aidl::android::hardware::graphics::common::BufferUsage;
98using ::aidl::android::hardware::graphics::common::HardwareBufferDescription;
99using ::aidl::android::hardware::graphics::common::PixelFormat;
100using std::chrono_literals::operator""s;
101
102// The main test class for EVS
103class EvsAidlTest : public ::testing::TestWithParam<std::string> {
104 public:
105 virtual void SetUp() override {
106 // Make sure we can connect to the enumerator
107 std::string service_name = GetParam();
108 AIBinder* binder = AServiceManager_waitForService(service_name.data());
109 ASSERT_NE(binder, nullptr);
110 mEnumerator = IEvsEnumerator::fromBinder(::ndk::SpAIBinder(binder));
111 LOG(INFO) << "Test target service: " << service_name;
112
113 ASSERT_TRUE(mEnumerator->isHardware(&mIsHwModule).isOk());
114 }
115
116 virtual void TearDown() override {
117 // Attempt to close any active camera
118 for (auto&& cam : mActiveCameras) {
119 if (cam != nullptr) {
120 mEnumerator->closeCamera(cam);
121 }
122 }
123 mActiveCameras.clear();
124 }
125
126 protected:
127 void loadCameraList() {
128 // SetUp() must run first!
129 ASSERT_NE(mEnumerator, nullptr);
130
131 // Get the camera list
132 ASSERT_TRUE(mEnumerator->getCameraList(&mCameraInfo).isOk())
133 << "Failed to get a list of available cameras";
134 LOG(INFO) << "We have " << mCameraInfo.size() << " cameras.";
135 }
136
137 void loadUltrasonicsArrayList() {
138 // SetUp() must run first!
139 ASSERT_NE(mEnumerator, nullptr);
140
141 // Get the ultrasonics array list
Changyeon Jo9f6f5922022-04-12 19:29:10 -0700142 auto result = mEnumerator->getUltrasonicsArrayList(&mUltrasonicsArraysInfo);
143 ASSERT_TRUE(result.isOk() ||
144 // TODO(b/149874793): Remove below conditions when
145 // getUltrasonicsArrayList() is implemented.
146 (!result.isOk() && result.getServiceSpecificError() ==
147 static_cast<int32_t>(EvsResult::NOT_IMPLEMENTED)))
Changyeon Jo80189012021-10-10 16:34:21 -0700148 << "Failed to get a list of available ultrasonics arrays";
149 LOG(INFO) << "We have " << mCameraInfo.size() << " ultrasonics arrays.";
150 }
151
152 bool isLogicalCamera(const camera_metadata_t* metadata) {
153 if (metadata == nullptr) {
154 // A logical camera device must have a valid camera metadata.
155 return false;
156 }
157
158 // Looking for LOGICAL_MULTI_CAMERA capability from metadata.
159 camera_metadata_ro_entry_t entry;
160 int rc = find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
161 &entry);
162 if (rc != 0) {
163 // No capabilities are found.
164 return false;
165 }
166
167 for (size_t i = 0; i < entry.count; ++i) {
168 uint8_t cap = entry.data.u8[i];
169 if (cap == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA) {
170 return true;
171 }
172 }
173
174 return false;
175 }
176
177 std::unordered_set<std::string> getPhysicalCameraIds(const std::string& id, bool& flag) {
178 std::unordered_set<std::string> physicalCameras;
179 const auto it = std::find_if(mCameraInfo.begin(), mCameraInfo.end(),
180 [&id](const CameraDesc& desc) { return id == desc.id; });
181 if (it == mCameraInfo.end()) {
182 // Unknown camera is requested. Return an empty list.
183 return physicalCameras;
184 }
185
186 const camera_metadata_t* metadata = reinterpret_cast<camera_metadata_t*>(&it->metadata[0]);
187 flag = isLogicalCamera(metadata);
188 if (!flag) {
189 // EVS assumes that the device w/o a valid metadata is a physical
190 // device.
191 LOG(INFO) << id << " is not a logical camera device.";
192 physicalCameras.insert(id);
193 return physicalCameras;
194 }
195
196 // Look for physical camera identifiers
197 camera_metadata_ro_entry entry;
198 int rc = find_camera_metadata_ro_entry(metadata, ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS,
199 &entry);
200 if (rc != 0) {
201 LOG(ERROR) << "No physical camera ID is found for a logical camera device";
202 }
203
204 const uint8_t* ids = entry.data.u8;
205 size_t start = 0;
206 for (size_t i = 0; i < entry.count; ++i) {
207 if (ids[i] == '\0') {
208 if (start != i) {
209 std::string id(reinterpret_cast<const char*>(ids + start));
210 physicalCameras.insert(id);
211 }
212 start = i + 1;
213 }
214 }
215
216 LOG(INFO) << id << " consists of " << physicalCameras.size() << " physical camera devices";
217 return physicalCameras;
218 }
219
220 Stream getFirstStreamConfiguration(camera_metadata_t* metadata) {
221 Stream targetCfg = {};
222 camera_metadata_entry_t streamCfgs;
223 if (!find_camera_metadata_entry(metadata, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
224 &streamCfgs)) {
225 // Stream configurations are found in metadata
226 RawStreamConfig* ptr = reinterpret_cast<RawStreamConfig*>(streamCfgs.data.i32);
227 for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
228 if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
229 ptr->format == HAL_PIXEL_FORMAT_RGBA_8888) {
230 targetCfg.width = ptr->width;
231 targetCfg.height = ptr->height;
232 targetCfg.format = static_cast<PixelFormat>(ptr->format);
233 break;
234 }
235 ++ptr;
236 }
237 }
238
239 return targetCfg;
240 }
241
242 // Every test needs access to the service
243 std::shared_ptr<IEvsEnumerator> mEnumerator;
244 // Empty unless/util loadCameraList() is called
245 std::vector<CameraDesc> mCameraInfo;
246 // boolean to tell current module under testing is HW module implementation
247 // or not
248 bool mIsHwModule;
249 // A list of active camera handles that are need to be cleaned up
250 std::deque<std::shared_ptr<IEvsCamera>> mActiveCameras;
251 // Empty unless/util loadUltrasonicsArrayList() is called
252 std::vector<UltrasonicsArrayDesc> mUltrasonicsArraysInfo;
253 // A list of active ultrasonics array handles that are to be cleaned up
254 std::deque<std::weak_ptr<IEvsUltrasonicsArray>> mActiveUltrasonicsArrays;
255};
256
257// Test cases, their implementations, and corresponding requirements are
258// documented at go/aae-evs-public-api-test.
259
260/*
261 * CameraOpenClean:
262 * Opens each camera reported by the enumerator and then explicitly closes it via a
263 * call to closeCamera. Then repeats the test to ensure all cameras can be reopened.
264 */
265TEST_P(EvsAidlTest, CameraOpenClean) {
266 LOG(INFO) << "Starting CameraOpenClean test";
267
268 // Get the camera list
269 loadCameraList();
270
271 // Open and close each camera twice
272 for (auto&& cam : mCameraInfo) {
273 bool isLogicalCam = false;
274 auto devices = getPhysicalCameraIds(cam.id, isLogicalCam);
275 if (mIsHwModule && isLogicalCam) {
276 LOG(INFO) << "Skip a logical device, " << cam.id << " for HW target.";
277 continue;
278 }
279
280 // Read a target resolution from the metadata
281 Stream targetCfg = getFirstStreamConfiguration(
282 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
283 ASSERT_GT(targetCfg.width, 0);
284 ASSERT_GT(targetCfg.height, 0);
285
286 for (int pass = 0; pass < 2; pass++) {
287 std::shared_ptr<IEvsCamera> pCam;
288 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
289 ASSERT_NE(pCam, nullptr);
290
291 CameraDesc cameraInfo;
292 for (auto&& devName : devices) {
293 ASSERT_TRUE(pCam->getPhysicalCameraInfo(devName, &cameraInfo).isOk());
294 EXPECT_EQ(devName, cameraInfo.id);
295 }
296
297 // Store a camera handle for a clean-up
298 mActiveCameras.push_back(pCam);
299
300 // Verify that this camera self-identifies correctly
301 ASSERT_TRUE(pCam->getCameraInfo(&cameraInfo).isOk());
302 EXPECT_EQ(cam.id, cameraInfo.id);
303
304 // Verify methods for extended info
305 const auto id = 0xFFFFFFFF; // meaningless id
306 std::vector<uint8_t> values;
307 auto status = pCam->setExtendedInfo(id, values);
308 if (isLogicalCam) {
309 EXPECT_TRUE(!status.isOk() && status.getServiceSpecificError() ==
310 static_cast<int>(EvsResult::NOT_SUPPORTED));
311 } else {
312 EXPECT_TRUE(status.isOk());
313 }
314
315 status = pCam->getExtendedInfo(id, &values);
316 if (isLogicalCam) {
317 EXPECT_TRUE(!status.isOk() && status.getServiceSpecificError() ==
318 static_cast<int>(EvsResult::NOT_SUPPORTED));
319 } else {
320 EXPECT_TRUE(status.isOk());
321 }
322
323 // Explicitly close the camera so resources are released right away
324 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
325 mActiveCameras.clear();
326 }
327 }
328}
329
330/*
331 * CameraOpenAggressive:
332 * Opens each camera reported by the enumerator twice in a row without an intervening closeCamera
333 * call. This ensures that the intended "aggressive open" behavior works. This is necessary for
334 * the system to be tolerant of shutdown/restart race conditions.
335 */
336TEST_P(EvsAidlTest, CameraOpenAggressive) {
337 LOG(INFO) << "Starting CameraOpenAggressive test";
338
339 // Get the camera list
340 loadCameraList();
341
342 // Open and close each camera twice
343 for (auto&& cam : mCameraInfo) {
344 bool isLogicalCam = false;
345 getPhysicalCameraIds(cam.id, isLogicalCam);
346 if (mIsHwModule && isLogicalCam) {
347 LOG(INFO) << "Skip a logical device, " << cam.id << " for HW target.";
348 continue;
349 }
350
351 // Read a target resolution from the metadata
352 Stream targetCfg = getFirstStreamConfiguration(
353 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
354 ASSERT_GT(targetCfg.width, 0);
355 ASSERT_GT(targetCfg.height, 0);
356
357 mActiveCameras.clear();
358 std::shared_ptr<IEvsCamera> pCam;
359 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
360 EXPECT_NE(pCam, nullptr);
361
362 // Store a camera handle for a clean-up
363 mActiveCameras.push_back(pCam);
364
365 // Verify that this camera self-identifies correctly
366 CameraDesc cameraInfo;
367 ASSERT_TRUE(pCam->getCameraInfo(&cameraInfo).isOk());
368 EXPECT_EQ(cam.id, cameraInfo.id);
369
370 std::shared_ptr<IEvsCamera> pCam2;
371 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam2).isOk());
372 EXPECT_NE(pCam2, nullptr);
373 EXPECT_NE(pCam, pCam2);
374
375 // Store a camera handle for a clean-up
376 mActiveCameras.push_back(pCam2);
377
378 auto status = pCam->setMaxFramesInFlight(2);
379 if (mIsHwModule) {
380 // Verify that the old camera rejects calls via HW module.
381 EXPECT_TRUE(!status.isOk() && status.getServiceSpecificError() ==
382 static_cast<int>(EvsResult::OWNERSHIP_LOST));
383 } else {
384 // default implementation supports multiple clients.
385 EXPECT_TRUE(status.isOk());
386 }
387
388 // Close the superseded camera
389 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
390 mActiveCameras.pop_front();
391
392 // Verify that the second camera instance self-identifies correctly
393 ASSERT_TRUE(pCam2->getCameraInfo(&cameraInfo).isOk());
394 EXPECT_EQ(cam.id, cameraInfo.id);
395
396 // Close the second camera instance
397 ASSERT_TRUE(mEnumerator->closeCamera(pCam2).isOk());
398 mActiveCameras.pop_front();
399 }
400
401 // Sleep here to ensure the destructor cleanup has time to run so we don't break follow on tests
402 sleep(1); // I hate that this is an arbitrary time to wait. :( b/36122635
403}
404
405/*
406 * CameraStreamPerformance:
407 * Measure and qualify the stream start up time and streaming frame rate of each reported camera
408 */
409TEST_P(EvsAidlTest, CameraStreamPerformance) {
410 LOG(INFO) << "Starting CameraStreamPerformance test";
411
412 // Get the camera list
413 loadCameraList();
414
415 // Test each reported camera
416 for (auto&& cam : mCameraInfo) {
417 bool isLogicalCam = false;
418 auto devices = getPhysicalCameraIds(cam.id, isLogicalCam);
419 if (mIsHwModule && isLogicalCam) {
420 LOG(INFO) << "Skip a logical device " << cam.id;
421 continue;
422 }
423
424 // Read a target resolution from the metadata
425 Stream targetCfg = getFirstStreamConfiguration(
426 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
427 ASSERT_GT(targetCfg.width, 0);
428 ASSERT_GT(targetCfg.height, 0);
429
430 std::shared_ptr<IEvsCamera> pCam;
431 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
432 EXPECT_NE(pCam, nullptr);
433
434 // Store a camera handle for a clean-up
435 mActiveCameras.push_back(pCam);
436
437 // Set up a frame receiver object which will fire up its own thread
Frederick Mayle7056b242022-03-29 02:38:12 +0000438 std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
439 pCam, cam, nullptr, FrameHandler::eAutoReturn);
Changyeon Jo80189012021-10-10 16:34:21 -0700440 EXPECT_NE(frameHandler, nullptr);
441
442 // Start the camera's video stream
443 nsecs_t start = systemTime(SYSTEM_TIME_MONOTONIC);
444 ASSERT_TRUE(frameHandler->startStream());
445
446 // Ensure the first frame arrived within the expected time
447 frameHandler->waitForFrameCount(1);
448 nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
449 nsecs_t timeToFirstFrame = systemTime(SYSTEM_TIME_MONOTONIC) - start;
450
451 // Extra delays are expected when we attempt to start a video stream on
452 // the logical camera device. The amount of delay is expected the
453 // number of physical camera devices multiplied by
454 // kMaxStreamStartMilliseconds at most.
455 EXPECT_LE(nanoseconds_to_milliseconds(timeToFirstFrame),
456 kMaxStreamStartMilliseconds * devices.size());
457 printf("%s: Measured time to first frame %0.2f ms\n", cam.id.data(),
458 timeToFirstFrame * kNanoToMilliseconds);
459 LOG(INFO) << cam.id << ": Measured time to first frame " << std::scientific
460 << timeToFirstFrame * kNanoToMilliseconds << " ms.";
461
462 // Check aspect ratio
463 unsigned width = 0, height = 0;
464 frameHandler->getFrameDimension(&width, &height);
465 EXPECT_GE(width, height);
466
467 // Wait a bit, then ensure we get at least the required minimum number of frames
468 sleep(5);
469 nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
470
471 // Even when the camera pointer goes out of scope, the FrameHandler object will
472 // keep the stream alive unless we tell it to shutdown.
473 // Also note that the FrameHandle and the Camera have a mutual circular reference, so
474 // we have to break that cycle in order for either of them to get cleaned up.
475 frameHandler->shutdown();
476
477 unsigned framesReceived = 0;
478 frameHandler->getFramesCounters(&framesReceived, nullptr);
479 framesReceived = framesReceived - 1; // Back out the first frame we already waited for
480 nsecs_t runTime = end - firstFrame;
481 float framesPerSecond = framesReceived / (runTime * kNanoToSeconds);
482 printf("Measured camera rate %3.2f fps\n", framesPerSecond);
483 LOG(INFO) << "Measured camera rate " << std::scientific << framesPerSecond << " fps.";
484 EXPECT_GE(framesPerSecond, kMinimumFramesPerSecond);
485
486 // Explicitly release the camera
487 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
488 mActiveCameras.clear();
489 }
490}
491
492/*
493 * CameraStreamBuffering:
494 * Ensure the camera implementation behaves properly when the client holds onto buffers for more
495 * than one frame time. The camera must cleanly skip frames until the client is ready again.
496 */
497TEST_P(EvsAidlTest, CameraStreamBuffering) {
498 LOG(INFO) << "Starting CameraStreamBuffering test";
499
500 // Arbitrary constant (should be > 1 and not too big)
501 static const unsigned int kBuffersToHold = 6;
502
503 // Get the camera list
504 loadCameraList();
505
506 // Test each reported camera
507 for (auto&& cam : mCameraInfo) {
508 bool isLogicalCam = false;
509 getPhysicalCameraIds(cam.id, isLogicalCam);
510 if (mIsHwModule && isLogicalCam) {
511 LOG(INFO) << "Skip a logical device " << cam.id << " for HW target.";
512 continue;
513 }
514
515 // Read a target resolution from the metadata
516 Stream targetCfg = getFirstStreamConfiguration(
517 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
518 ASSERT_GT(targetCfg.width, 0);
519 ASSERT_GT(targetCfg.height, 0);
520
521 std::shared_ptr<IEvsCamera> pCam;
522 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
523 EXPECT_NE(pCam, nullptr);
524
525 // Store a camera handle for a clean-up
526 mActiveCameras.push_back(pCam);
527
528 // Ask for a very large number of buffers in flight to ensure it errors correctly
Changyeon Jo0d814ce2022-04-23 05:26:16 -0700529 auto badResult = pCam->setMaxFramesInFlight(std::numeric_limits<int32_t>::max());
Changyeon Jo80189012021-10-10 16:34:21 -0700530 EXPECT_TRUE(!badResult.isOk() && badResult.getServiceSpecificError() ==
Changyeon Jo9f6f5922022-04-12 19:29:10 -0700531 static_cast<int>(EvsResult::INVALID_ARG));
Changyeon Jo80189012021-10-10 16:34:21 -0700532
533 // Now ask for exactly two buffers in flight as we'll test behavior in that case
534 ASSERT_TRUE(pCam->setMaxFramesInFlight(kBuffersToHold).isOk());
535
536 // Set up a frame receiver object which will fire up its own thread.
Frederick Mayle7056b242022-03-29 02:38:12 +0000537 std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
538 pCam, cam, nullptr, FrameHandler::eNoAutoReturn);
Changyeon Jo80189012021-10-10 16:34:21 -0700539 EXPECT_NE(frameHandler, nullptr);
540
541 // Start the camera's video stream
542 ASSERT_TRUE(frameHandler->startStream());
543
544 // Check that the video stream stalls once we've gotten exactly the number of buffers
545 // we requested since we told the frameHandler not to return them.
546 sleep(1); // 1 second should be enough for at least 5 frames to be delivered worst case
547 unsigned framesReceived = 0;
548 frameHandler->getFramesCounters(&framesReceived, nullptr);
549 ASSERT_EQ(kBuffersToHold, framesReceived) << "Stream didn't stall at expected buffer limit";
550
551 // Give back one buffer
552 ASSERT_TRUE(frameHandler->returnHeldBuffer());
553
554 // Once we return a buffer, it shouldn't take more than 1/10 second to get a new one
555 // filled since we require 10fps minimum -- but give a 10% allowance just in case.
556 usleep(110 * kMillisecondsToMicroseconds);
557 frameHandler->getFramesCounters(&framesReceived, nullptr);
558 EXPECT_EQ(kBuffersToHold + 1, framesReceived) << "Stream should've resumed";
559
560 // Even when the camera pointer goes out of scope, the FrameHandler object will
561 // keep the stream alive unless we tell it to shutdown.
562 // Also note that the FrameHandle and the Camera have a mutual circular reference, so
563 // we have to break that cycle in order for either of them to get cleaned up.
564 frameHandler->shutdown();
565
566 // Explicitly release the camera
567 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
568 mActiveCameras.clear();
569 }
570}
571
572/*
573 * CameraToDisplayRoundTrip:
574 * End to end test of data flowing from the camera to the display. Each delivered frame of camera
575 * imagery is simply copied to the display buffer and presented on screen. This is the one test
576 * which a human could observe to see the operation of the system on the physical display.
577 */
578TEST_P(EvsAidlTest, CameraToDisplayRoundTrip) {
579 LOG(INFO) << "Starting CameraToDisplayRoundTrip test";
580
581 // Get the camera list
582 loadCameraList();
583
584 // Request available display IDs
585 uint8_t targetDisplayId = 0;
586 std::vector<uint8_t> displayIds;
587 ASSERT_TRUE(mEnumerator->getDisplayIdList(&displayIds).isOk());
588 EXPECT_GT(displayIds.size(), 0);
589 targetDisplayId = displayIds[0];
590
591 // Request exclusive access to the first EVS display
592 std::shared_ptr<IEvsDisplay> pDisplay;
593 ASSERT_TRUE(mEnumerator->openDisplay(targetDisplayId, &pDisplay).isOk());
594 EXPECT_NE(pDisplay, nullptr);
Changyeon Jo9f6f5922022-04-12 19:29:10 -0700595 LOG(INFO) << "Display " << static_cast<int>(targetDisplayId) << " is in use.";
Changyeon Jo80189012021-10-10 16:34:21 -0700596
597 // Get the display descriptor
598 DisplayDesc displayDesc;
599 ASSERT_TRUE(pDisplay->getDisplayInfo(&displayDesc).isOk());
600 LOG(INFO) << " Resolution: " << displayDesc.width << "x" << displayDesc.height;
601 ASSERT_GT(displayDesc.width, 0);
602 ASSERT_GT(displayDesc.height, 0);
603
604 // Test each reported camera
605 for (auto&& cam : mCameraInfo) {
606 bool isLogicalCam = false;
607 getPhysicalCameraIds(cam.id, isLogicalCam);
608 if (mIsHwModule && isLogicalCam) {
609 LOG(INFO) << "Skip a logical device " << cam.id << " for HW target.";
610 continue;
611 }
612
613 // Read a target resolution from the metadata
614 Stream targetCfg = getFirstStreamConfiguration(
615 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
616 ASSERT_GT(targetCfg.width, 0);
617 ASSERT_GT(targetCfg.height, 0);
618
619 std::shared_ptr<IEvsCamera> pCam;
620 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
621 EXPECT_NE(pCam, nullptr);
622
623 // Store a camera handle for a clean-up
624 mActiveCameras.push_back(pCam);
625
626 // Set up a frame receiver object which will fire up its own thread.
Frederick Mayle7056b242022-03-29 02:38:12 +0000627 std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
628 pCam, cam, pDisplay, FrameHandler::eAutoReturn);
Changyeon Jo80189012021-10-10 16:34:21 -0700629 EXPECT_NE(frameHandler, nullptr);
630
631 // Activate the display
632 ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME).isOk());
633
634 // Start the camera's video stream
635 ASSERT_TRUE(frameHandler->startStream());
636
637 // Wait a while to let the data flow
638 static const int kSecondsToWait = 5;
639 const int streamTimeMs =
640 kSecondsToWait * kSecondsToMilliseconds - kMaxStreamStartMilliseconds;
641 const unsigned minimumFramesExpected =
642 streamTimeMs * kMinimumFramesPerSecond / kSecondsToMilliseconds;
643 sleep(kSecondsToWait);
644 unsigned framesReceived = 0;
645 unsigned framesDisplayed = 0;
646 frameHandler->getFramesCounters(&framesReceived, &framesDisplayed);
647 EXPECT_EQ(framesReceived, framesDisplayed);
648 EXPECT_GE(framesDisplayed, minimumFramesExpected);
649
650 // Turn off the display (yes, before the stream stops -- it should be handled)
651 ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::NOT_VISIBLE).isOk());
652
653 // Shut down the streamer
654 frameHandler->shutdown();
655
656 // Explicitly release the camera
657 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
658 mActiveCameras.clear();
659 }
660
661 // Explicitly release the display
662 ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
663}
664
665/*
666 * MultiCameraStream:
667 * Verify that each client can start and stop video streams on the same
668 * underlying camera.
669 */
670TEST_P(EvsAidlTest, MultiCameraStream) {
671 LOG(INFO) << "Starting MultiCameraStream test";
672
673 if (mIsHwModule) {
674 // This test is not for HW module implementation.
675 return;
676 }
677
678 // Get the camera list
679 loadCameraList();
680
681 // Test each reported camera
682 for (auto&& cam : mCameraInfo) {
683 // Read a target resolution from the metadata
684 Stream targetCfg = getFirstStreamConfiguration(
685 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
686 ASSERT_GT(targetCfg.width, 0);
687 ASSERT_GT(targetCfg.height, 0);
688
689 // Create two camera clients.
690 std::shared_ptr<IEvsCamera> pCam0;
691 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam0).isOk());
692 EXPECT_NE(pCam0, nullptr);
693
694 // Store a camera handle for a clean-up
695 mActiveCameras.push_back(pCam0);
696
697 std::shared_ptr<IEvsCamera> pCam1;
698 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());
699 EXPECT_NE(pCam1, nullptr);
700
701 // Store a camera handle for a clean-up
702 mActiveCameras.push_back(pCam1);
703
704 // Set up per-client frame receiver objects which will fire up its own thread
Frederick Mayle7056b242022-03-29 02:38:12 +0000705 std::shared_ptr<FrameHandler> frameHandler0 = ndk::SharedRefBase::make<FrameHandler>(
706 pCam0, cam, nullptr, FrameHandler::eAutoReturn);
707 std::shared_ptr<FrameHandler> frameHandler1 = ndk::SharedRefBase::make<FrameHandler>(
708 pCam1, cam, nullptr, FrameHandler::eAutoReturn);
Changyeon Jo80189012021-10-10 16:34:21 -0700709 EXPECT_NE(frameHandler0, nullptr);
710 EXPECT_NE(frameHandler1, nullptr);
711
712 // Start the camera's video stream via client 0
713 ASSERT_TRUE(frameHandler0->startStream());
714 ASSERT_TRUE(frameHandler1->startStream());
715
716 // Ensure the stream starts
717 frameHandler0->waitForFrameCount(1);
718 frameHandler1->waitForFrameCount(1);
719
720 nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
721
722 // Wait a bit, then ensure both clients get at least the required minimum number of frames
723 sleep(5);
724 nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
725 unsigned framesReceived0 = 0, framesReceived1 = 0;
726 frameHandler0->getFramesCounters(&framesReceived0, nullptr);
727 frameHandler1->getFramesCounters(&framesReceived1, nullptr);
728 framesReceived0 = framesReceived0 - 1; // Back out the first frame we already waited for
729 framesReceived1 = framesReceived1 - 1; // Back out the first frame we already waited for
730 nsecs_t runTime = end - firstFrame;
731 float framesPerSecond0 = framesReceived0 / (runTime * kNanoToSeconds);
732 float framesPerSecond1 = framesReceived1 / (runTime * kNanoToSeconds);
733 LOG(INFO) << "Measured camera rate " << std::scientific << framesPerSecond0 << " fps and "
734 << framesPerSecond1 << " fps";
735 EXPECT_GE(framesPerSecond0, kMinimumFramesPerSecond);
736 EXPECT_GE(framesPerSecond1, kMinimumFramesPerSecond);
737
738 // Shutdown one client
739 frameHandler0->shutdown();
740
741 // Read frame counters again
742 frameHandler0->getFramesCounters(&framesReceived0, nullptr);
743 frameHandler1->getFramesCounters(&framesReceived1, nullptr);
744
745 // Wait a bit again
746 sleep(5);
747 unsigned framesReceivedAfterStop0 = 0, framesReceivedAfterStop1 = 0;
748 frameHandler0->getFramesCounters(&framesReceivedAfterStop0, nullptr);
749 frameHandler1->getFramesCounters(&framesReceivedAfterStop1, nullptr);
750 EXPECT_EQ(framesReceived0, framesReceivedAfterStop0);
751 EXPECT_LT(framesReceived1, framesReceivedAfterStop1);
752
753 // Shutdown another
754 frameHandler1->shutdown();
755
756 // Explicitly release the camera
757 ASSERT_TRUE(mEnumerator->closeCamera(pCam0).isOk());
758 ASSERT_TRUE(mEnumerator->closeCamera(pCam1).isOk());
759 mActiveCameras.clear();
760
761 // TODO(b/145459970, b/145457727): below sleep() is added to ensure the
762 // destruction of active camera objects; this may be related with two
763 // issues.
764 sleep(1);
765 }
766}
767
768/*
769 * CameraParameter:
770 * Verify that a client can adjust a camera parameter.
771 */
772TEST_P(EvsAidlTest, CameraParameter) {
773 LOG(INFO) << "Starting CameraParameter test";
774
775 // Get the camera list
776 loadCameraList();
777
778 // Test each reported camera
779 for (auto&& cam : mCameraInfo) {
780 bool isLogicalCam = false;
781 getPhysicalCameraIds(cam.id, isLogicalCam);
782 if (isLogicalCam) {
783 // TODO(b/145465724): Support camera parameter programming on
784 // logical devices.
785 LOG(INFO) << "Skip a logical device " << cam.id;
786 continue;
787 }
788
789 // Read a target resolution from the metadata
790 Stream targetCfg = getFirstStreamConfiguration(
791 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
792 ASSERT_GT(targetCfg.width, 0);
793 ASSERT_GT(targetCfg.height, 0);
794
795 // Create a camera client
796 std::shared_ptr<IEvsCamera> pCam;
797 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
798 EXPECT_NE(pCam, nullptr);
799
800 // Store a camera
801 mActiveCameras.push_back(pCam);
802
803 // Get the parameter list
804 std::vector<CameraParam> cmds;
805 ASSERT_TRUE(pCam->getParameterList(&cmds).isOk());
806 if (cmds.size() < 1) {
807 continue;
808 }
809
810 // Set up per-client frame receiver objects which will fire up its own thread
Frederick Mayle7056b242022-03-29 02:38:12 +0000811 std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
812 pCam, cam, nullptr, FrameHandler::eAutoReturn);
Changyeon Jo80189012021-10-10 16:34:21 -0700813 EXPECT_NE(frameHandler, nullptr);
814
815 // Start the camera's video stream
816 ASSERT_TRUE(frameHandler->startStream());
817
818 // Ensure the stream starts
819 frameHandler->waitForFrameCount(1);
820
821 // Set current client is the primary client
822 ASSERT_TRUE(pCam->setPrimaryClient().isOk());
823 for (auto& cmd : cmds) {
824 // Get a valid parameter value range
825 ParameterRange range;
826 ASSERT_TRUE(pCam->getIntParameterRange(cmd, &range).isOk());
827
828 std::vector<int32_t> values;
829 if (cmd == CameraParam::ABSOLUTE_FOCUS) {
830 // Try to turn off auto-focus
831 ASSERT_TRUE(pCam->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
832 for (auto&& v : values) {
833 EXPECT_EQ(v, 0);
834 }
835 }
836
837 // Try to program a parameter with a random value [minVal, maxVal]
838 int32_t val0 = range.min + (std::rand() % (range.max - range.min));
839
840 // Rounding down
841 val0 = val0 - (val0 % range.step);
842 values.clear();
843 ASSERT_TRUE(pCam->setIntParameter(cmd, val0, &values).isOk());
844
845 values.clear();
846 ASSERT_TRUE(pCam->getIntParameter(cmd, &values).isOk());
847 for (auto&& v : values) {
848 EXPECT_EQ(val0, v) << "Values are not matched.";
849 }
850 }
851 ASSERT_TRUE(pCam->unsetPrimaryClient().isOk());
852
853 // Shutdown
854 frameHandler->shutdown();
855
856 // Explicitly release the camera
857 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
858 mActiveCameras.clear();
859 }
860}
861
862/*
863 * CameraPrimaryClientRelease
864 * Verify that non-primary client gets notified when the primary client either
865 * terminates or releases a role.
866 */
TEST_P(EvsAidlTest, CameraPrimaryClientRelease) {
    LOG(INFO) << "Starting CameraPrimaryClientRelease test";

    if (mIsHwModule) {
        // This test is not for HW module implementation.
        return;
    }

    // Get the camera list
    loadCameraList();

    // Test each reported camera
    for (auto&& cam : mCameraInfo) {
        bool isLogicalCam = false;
        getPhysicalCameraIds(cam.id, isLogicalCam);
        if (isLogicalCam) {
            // TODO(b/145465724): Support camera parameter programming on
            // logical devices.
            LOG(INFO) << "Skip a logical device " << cam.id;
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        // Create two camera clients on the same physical device so one can
        // observe role-change events caused by the other.
        std::shared_ptr<IEvsCamera> pPrimaryCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pPrimaryCam).isOk());
        EXPECT_NE(pPrimaryCam, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pPrimaryCam);

        std::shared_ptr<IEvsCamera> pSecondaryCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pSecondaryCam).isOk());
        EXPECT_NE(pSecondaryCam, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pSecondaryCam);

        // Set up per-client frame receiver objects which will fire up its own thread
        std::shared_ptr<FrameHandler> frameHandlerPrimary = ndk::SharedRefBase::make<FrameHandler>(
                pPrimaryCam, cam, nullptr, FrameHandler::eAutoReturn);
        std::shared_ptr<FrameHandler> frameHandlerSecondary =
                ndk::SharedRefBase::make<FrameHandler>(pSecondaryCam, cam, nullptr,
                                                       FrameHandler::eAutoReturn);
        EXPECT_NE(frameHandlerPrimary, nullptr);
        EXPECT_NE(frameHandlerSecondary, nullptr);

        // Set one client as the primary client
        ASSERT_TRUE(pPrimaryCam->setPrimaryClient().isOk());

        // Try to set another client as the primary client; this must fail
        // while the role is already taken.
        ASSERT_FALSE(pSecondaryCam->setPrimaryClient().isOk());

        // Start the camera's video stream via the primary client.
        ASSERT_TRUE(frameHandlerPrimary->startStream());

        // Ensure the stream starts
        frameHandlerPrimary->waitForFrameCount(1);

        // Start the camera's video stream via another client
        ASSERT_TRUE(frameHandlerSecondary->startStream());

        // Ensure the stream starts
        frameHandlerSecondary->waitForFrameCount(1);

        // Non-primary client expects to receive a primary client role released
        // notification.
        // NOTE(review): this outer aTargetEvent is never read — each listener
        // lambda below declares its own shadowing copy. Candidate for removal.
        EvsEventDesc aTargetEvent = {};
        EvsEventDesc aNotification = {};

        // NOTE(review): `listening` is written by the listener thread and read
        // by this thread without holding eventLock; the timed wait below masks
        // a missed notification but this is formally a data race — consider an
        // atomic or locking around the flag.
        bool listening = false;
        std::mutex eventLock;
        std::condition_variable eventCond;
        std::thread listener =
                std::thread([&aNotification, &frameHandlerSecondary, &listening, &eventCond]() {
                    // Notify that a listening thread is running.
                    listening = true;
                    eventCond.notify_all();

                    // Block until the secondary client observes MASTER_RELEASED
                    // (the third argument makes waitForEvent log on lock-step).
                    EvsEventDesc aTargetEvent;
                    aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
                    if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification, true)) {
                        LOG(WARNING) << "A timer is expired before a target event is fired.";
                    }
                });

        // Wait until a listening thread starts.
        std::unique_lock<std::mutex> lock(eventLock);
        auto timer = std::chrono::system_clock::now();
        while (!listening) {
            // Re-arm the deadline each pass so a missed notify degrades into
            // a 1-second poll instead of a hang.
            timer += 1s;
            eventCond.wait_until(lock, timer);
        }
        lock.unlock();

        // Release a primary client role.
        ASSERT_TRUE(pPrimaryCam->unsetPrimaryClient().isOk());

        // Join a listening thread.
        if (listener.joinable()) {
            listener.join();
        }

        // Verify change notifications.
        ASSERT_EQ(EvsEventType::MASTER_RELEASED, static_cast<EvsEventType>(aNotification.aType));

        // Non-primary becomes a primary client.
        ASSERT_TRUE(pSecondaryCam->setPrimaryClient().isOk());

        // Previous primary client fails to become a primary client.
        ASSERT_FALSE(pPrimaryCam->setPrimaryClient().isOk());

        // Second round: the old primary (now non-primary) should be notified
        // when the new primary goes away via stream shutdown.
        listening = false;
        listener = std::thread([&aNotification, &frameHandlerPrimary, &listening, &eventCond]() {
            // Notify that a listening thread is running.
            listening = true;
            eventCond.notify_all();

            EvsEventDesc aTargetEvent;
            aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
            if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification, true)) {
                LOG(WARNING) << "A timer is expired before a target event is fired.";
            }
        });

        // Wait until a listening thread starts.
        // NOTE(review): unlike the loop above, `timer + 1s` is a fixed
        // deadline; once it passes, wait_until returns immediately and this
        // degenerates into a busy spin until `listening` flips.
        timer = std::chrono::system_clock::now();
        lock.lock();
        while (!listening) {
            eventCond.wait_until(lock, timer + 1s);
        }
        lock.unlock();

        // Closing current primary client; implicitly relinquishes the role.
        frameHandlerSecondary->shutdown();

        // Join a listening thread.
        if (listener.joinable()) {
            listener.join();
        }

        // Verify change notifications.
        ASSERT_EQ(EvsEventType::MASTER_RELEASED, static_cast<EvsEventType>(aNotification.aType));

        // Closing streams.
        frameHandlerPrimary->shutdown();

        // Explicitly release the camera
        ASSERT_TRUE(mEnumerator->closeCamera(pPrimaryCam).isOk());
        ASSERT_TRUE(mEnumerator->closeCamera(pSecondaryCam).isOk());
        mActiveCameras.clear();
    }
}
1025
1026/*
1027 * MultiCameraParameter:
1028 * Verify that primary and non-primary clients behave as expected when they try to adjust
1029 * camera parameters.
1030 */
TEST_P(EvsAidlTest, MultiCameraParameter) {
    LOG(INFO) << "Starting MultiCameraParameter test";

    if (mIsHwModule) {
        // This test is not for HW module implementation.
        return;
    }

    // Get the camera list
    loadCameraList();

    // Test each reported camera
    for (auto&& cam : mCameraInfo) {
        bool isLogicalCam = false;
        getPhysicalCameraIds(cam.id, isLogicalCam);
        if (isLogicalCam) {
            // TODO(b/145465724): Support camera parameter programming on
            // logical devices.
            LOG(INFO) << "Skip a logical device " << cam.id;
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        // Create two camera clients.
        std::shared_ptr<IEvsCamera> pPrimaryCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pPrimaryCam).isOk());
        EXPECT_NE(pPrimaryCam, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pPrimaryCam);

        std::shared_ptr<IEvsCamera> pSecondaryCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pSecondaryCam).isOk());
        EXPECT_NE(pSecondaryCam, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pSecondaryCam);

        // Get the parameter list
        std::vector<CameraParam> camPrimaryCmds, camSecondaryCmds;
        ASSERT_TRUE(pPrimaryCam->getParameterList(&camPrimaryCmds).isOk());
        ASSERT_TRUE(pSecondaryCam->getParameterList(&camSecondaryCmds).isOk());
        if (camPrimaryCmds.size() < 1 || camSecondaryCmds.size() < 1) {
            // Skip a camera device if it does not support any parameter.
            continue;
        }

        // Set up per-client frame receiver objects which will fire up its own thread
        std::shared_ptr<FrameHandler> frameHandlerPrimary = ndk::SharedRefBase::make<FrameHandler>(
                pPrimaryCam, cam, nullptr, FrameHandler::eAutoReturn);
        std::shared_ptr<FrameHandler> frameHandlerSecondary =
                ndk::SharedRefBase::make<FrameHandler>(pSecondaryCam, cam, nullptr,
                                                       FrameHandler::eAutoReturn);
        EXPECT_NE(frameHandlerPrimary, nullptr);
        EXPECT_NE(frameHandlerSecondary, nullptr);

        // Set one client as the primary client.
        ASSERT_TRUE(pPrimaryCam->setPrimaryClient().isOk());

        // Try to set another client as the primary client; must fail while
        // the role is held.
        ASSERT_FALSE(pSecondaryCam->setPrimaryClient().isOk());

        // Start the camera's video stream via the primary client.
        ASSERT_TRUE(frameHandlerPrimary->startStream());

        // Ensure the stream starts
        frameHandlerPrimary->waitForFrameCount(1);

        // Start the camera's video stream via another client
        ASSERT_TRUE(frameHandlerSecondary->startStream());

        // Ensure the stream starts
        frameHandlerSecondary->waitForFrameCount(1);

        // Phase 1: the primary client programs every supported parameter and
        // BOTH clients must observe a PARAMETER_CHANGED notification.
        int32_t val0 = 0;
        std::vector<int32_t> values;
        EvsEventDesc aNotification0 = {};
        EvsEventDesc aNotification1 = {};
        for (auto& cmd : camPrimaryCmds) {
            // Get a valid parameter value range
            ParameterRange range;
            ASSERT_TRUE(pPrimaryCam->getIntParameterRange(cmd, &range).isOk());
            if (cmd == CameraParam::ABSOLUTE_FOCUS) {
                // Manual focus requires auto-focus off first.
                values.clear();
                ASSERT_TRUE(
                        pPrimaryCam->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
                for (auto&& v : values) {
                    EXPECT_EQ(v, 0);
                }
            }

            // Calculate a parameter value to program.
            // NOTE(review): `% (range.max - range.min)` is UB if a device
            // reports max == min (likewise `% range.step` with step == 0) —
            // confirm ranges are always non-degenerate or add guards.
            val0 = range.min + (std::rand() % (range.max - range.min));
            val0 = val0 - (val0 % range.step);

            // Prepare and start event listeners. Each thread flags itself
            // started and notifies only once the other is also up; both then
            // block in waitForEvent() for the expected notification.
            bool listening0 = false;
            bool listening1 = false;
            std::condition_variable eventCond;
            std::thread listener0 = std::thread([cmd, val0, &aNotification0, &frameHandlerPrimary,
                                                 &listening0, &listening1, &eventCond]() {
                listening0 = true;
                if (listening1) {
                    eventCond.notify_all();
                }

                EvsEventDesc aTargetEvent;
                aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
                aTargetEvent.payload.push_back(static_cast<int32_t>(cmd));
                aTargetEvent.payload.push_back(val0);
                if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification0)) {
                    LOG(WARNING) << "A timer is expired before a target event is fired.";
                }
            });
            std::thread listener1 = std::thread([cmd, val0, &aNotification1, &frameHandlerSecondary,
                                                 &listening0, &listening1, &eventCond]() {
                listening1 = true;
                if (listening0) {
                    eventCond.notify_all();
                }

                EvsEventDesc aTargetEvent;
                aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
                aTargetEvent.payload.push_back(static_cast<int32_t>(cmd));
                aTargetEvent.payload.push_back(val0);
                if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification1)) {
                    LOG(WARNING) << "A timer is expired before a target event is fired.";
                }
            });

            // Wait until both listening threads start. The fixed `timer + 1s`
            // deadline turns a missed notify into a spin-poll rather than a
            // hang.
            std::mutex eventLock;
            std::unique_lock<std::mutex> lock(eventLock);
            auto timer = std::chrono::system_clock::now();
            while (!listening0 || !listening1) {
                eventCond.wait_until(lock, timer + 1s);
            }
            lock.unlock();

            // Try to program a parameter
            values.clear();
            ASSERT_TRUE(pPrimaryCam->setIntParameter(cmd, val0, &values).isOk());
            for (auto&& v : values) {
                EXPECT_EQ(val0, v) << "Values are not matched.";
            }

            // Join a listening thread.
            if (listener0.joinable()) {
                listener0.join();
            }
            if (listener1.joinable()) {
                listener1.join();
            }

            // Verify a change notification: both clients saw the same command
            // id and effective value in the event payload.
            ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
                      static_cast<EvsEventType>(aNotification0.aType));
            ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
                      static_cast<EvsEventType>(aNotification1.aType));
            ASSERT_GE(aNotification0.payload.size(), 2);
            ASSERT_GE(aNotification1.payload.size(), 2);
            ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification0.payload[0]));
            ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification1.payload[0]));
            for (auto&& v : values) {
                ASSERT_EQ(v, aNotification0.payload[1]);
                ASSERT_EQ(v, aNotification1.payload[1]);
            }

            // Clients expect to receive a parameter change notification
            // whenever the primary client adjusts it.
            values.clear();
            ASSERT_TRUE(pPrimaryCam->getIntParameter(cmd, &values).isOk());
            for (auto&& v : values) {
                EXPECT_EQ(val0, v) << "Values are not matched.";
            }
        }

        // Phase 2: verify the non-primary client cannot program parameters or
        // seize the role, then hand the role over.
        values.clear();
        ASSERT_FALSE(pSecondaryCam->setIntParameter(camSecondaryCmds[0], val0, &values).isOk());

        // Non-primary client attempts to be a primary client
        ASSERT_FALSE(pSecondaryCam->setPrimaryClient().isOk());

        // Primary client retires from a primary client role
        bool listening = false;
        std::condition_variable eventCond;
        std::thread listener =
                std::thread([&aNotification0, &frameHandlerSecondary, &listening, &eventCond]() {
                    listening = true;
                    eventCond.notify_all();

                    EvsEventDesc aTargetEvent;
                    aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
                    if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification0, true)) {
                        LOG(WARNING) << "A timer is expired before a target event is fired.";
                    }
                });

        std::mutex eventLock;
        auto timer = std::chrono::system_clock::now();
        std::unique_lock<std::mutex> lock(eventLock);
        while (!listening) {
            eventCond.wait_until(lock, timer + 1s);
        }
        lock.unlock();

        ASSERT_TRUE(pPrimaryCam->unsetPrimaryClient().isOk());

        if (listener.joinable()) {
            listener.join();
        }
        ASSERT_EQ(EvsEventType::MASTER_RELEASED, static_cast<EvsEventType>(aNotification0.aType));

        // Try to adjust a parameter after being retired
        values.clear();
        ASSERT_FALSE(pPrimaryCam->setIntParameter(camPrimaryCmds[0], val0, &values).isOk());

        // Non-primary client becomes a primary client
        ASSERT_TRUE(pSecondaryCam->setPrimaryClient().isOk());

        // Phase 3: the new primary client repeats the parameter programming;
        // both clients again must observe the change events.
        for (auto& cmd : camSecondaryCmds) {
            // Get a valid parameter value range
            ParameterRange range;
            ASSERT_TRUE(pSecondaryCam->getIntParameterRange(cmd, &range).isOk());

            values.clear();
            if (cmd == CameraParam::ABSOLUTE_FOCUS) {
                // Try to turn off auto-focus
                values.clear();
                ASSERT_TRUE(
                        pSecondaryCam->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
                for (auto&& v : values) {
                    EXPECT_EQ(v, 0);
                }
            }

            // Calculate a parameter value to program. This is being rounded down.
            val0 = range.min + (std::rand() % (range.max - range.min));
            val0 = val0 - (val0 % range.step);

            // Prepare and start event listeners.
            bool listening0 = false;
            bool listening1 = false;
            std::condition_variable eventCond;
            std::thread listener0 = std::thread([&]() {
                listening0 = true;
                if (listening1) {
                    eventCond.notify_all();
                }

                EvsEventDesc aTargetEvent;
                aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
                aTargetEvent.payload.push_back(static_cast<int32_t>(cmd));
                aTargetEvent.payload.push_back(val0);
                if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification0)) {
                    LOG(WARNING) << "A timer is expired before a target event is fired.";
                }
            });
            std::thread listener1 = std::thread([&]() {
                listening1 = true;
                if (listening0) {
                    eventCond.notify_all();
                }

                EvsEventDesc aTargetEvent;
                aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
                aTargetEvent.payload.push_back(static_cast<int32_t>(cmd));
                aTargetEvent.payload.push_back(val0);
                if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification1)) {
                    LOG(WARNING) << "A timer is expired before a target event is fired.";
                }
            });

            // Wait until both listening threads start.
            std::mutex eventLock;
            std::unique_lock<std::mutex> lock(eventLock);
            auto timer = std::chrono::system_clock::now();
            while (!listening0 || !listening1) {
                eventCond.wait_until(lock, timer + 1s);
            }
            lock.unlock();

            // Try to program a parameter
            values.clear();
            ASSERT_TRUE(pSecondaryCam->setIntParameter(cmd, val0, &values).isOk());

            // Clients expect to receive a parameter change notification
            // whenever the primary client adjusts it.
            values.clear();
            ASSERT_TRUE(pSecondaryCam->getIntParameter(cmd, &values).isOk());
            for (auto&& v : values) {
                EXPECT_EQ(val0, v) << "Values are not matched.";
            }

            // Join a listening thread.
            if (listener0.joinable()) {
                listener0.join();
            }
            if (listener1.joinable()) {
                listener1.join();
            }

            // Verify a change notification
            ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
                      static_cast<EvsEventType>(aNotification0.aType));
            ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
                      static_cast<EvsEventType>(aNotification1.aType));
            ASSERT_GE(aNotification0.payload.size(), 2);
            ASSERT_GE(aNotification1.payload.size(), 2);
            ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification0.payload[0]));
            ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification1.payload[0]));
            for (auto&& v : values) {
                ASSERT_EQ(v, aNotification0.payload[1]);
                ASSERT_EQ(v, aNotification1.payload[1]);
            }
        }

        // New primary client retires from the role
        ASSERT_TRUE(pSecondaryCam->unsetPrimaryClient().isOk());

        // Shutdown
        frameHandlerPrimary->shutdown();
        frameHandlerSecondary->shutdown();

        // Explicitly release the camera
        ASSERT_TRUE(mEnumerator->closeCamera(pPrimaryCam).isOk());
        ASSERT_TRUE(mEnumerator->closeCamera(pSecondaryCam).isOk());
        mActiveCameras.clear();
    }
}
1369
1370/*
1371 * HighPriorityCameraClient:
 * EVS client, which owns the display, is prioritized and therefore can take over
1373 * a primary client role from other EVS clients without the display.
1374 */
TEST_P(EvsAidlTest, HighPriorityCameraClient) {
    LOG(INFO) << "Starting HighPriorityCameraClient test";

    if (mIsHwModule) {
        // This test is not for HW module implementation.
        return;
    }

    // Get the camera list
    loadCameraList();

    // Request available display IDs
    uint8_t targetDisplayId = 0;
    std::vector<uint8_t> displayIds;
    ASSERT_TRUE(mEnumerator->getDisplayIdList(&displayIds).isOk());
    EXPECT_GT(displayIds.size(), 0);
    targetDisplayId = displayIds[0];

    // Request exclusive access to the EVS display; owning the display is what
    // grants client 0 its elevated priority later in the test.
    std::shared_ptr<IEvsDisplay> pDisplay;
    ASSERT_TRUE(mEnumerator->openDisplay(targetDisplayId, &pDisplay).isOk());
    EXPECT_NE(pDisplay, nullptr);

    // Test each reported camera
    for (auto&& cam : mCameraInfo) {
        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        // Create two clients
        std::shared_ptr<IEvsCamera> pCam0;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam0).isOk());
        EXPECT_NE(pCam0, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pCam0);

        std::shared_ptr<IEvsCamera> pCam1;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());
        EXPECT_NE(pCam1, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pCam1);

        // Get the parameter list; this test will use the first command in both
        // lists.
        std::vector<CameraParam> cam0Cmds, cam1Cmds;
        ASSERT_TRUE(pCam0->getParameterList(&cam0Cmds).isOk());
        ASSERT_TRUE(pCam1->getParameterList(&cam1Cmds).isOk());
        if (cam0Cmds.size() < 1 || cam1Cmds.size() < 1) {
            // Cannot execute this test.
            // NOTE(review): this returns (skipping remaining cameras) instead
            // of `continue` like sibling tests — confirm that is intentional.
            return;
        }

        // Set up a frame receiver object which will fire up its own thread.
        std::shared_ptr<FrameHandler> frameHandler0 = ndk::SharedRefBase::make<FrameHandler>(
                pCam0, cam, nullptr, FrameHandler::eAutoReturn);
        std::shared_ptr<FrameHandler> frameHandler1 = ndk::SharedRefBase::make<FrameHandler>(
                pCam1, cam, nullptr, FrameHandler::eAutoReturn);
        EXPECT_NE(frameHandler0, nullptr);
        EXPECT_NE(frameHandler1, nullptr);

        // Activate the display
        ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME).isOk());

        // Start the camera's video stream
        ASSERT_TRUE(frameHandler0->startStream());
        ASSERT_TRUE(frameHandler1->startStream());

        // Ensure the stream starts
        frameHandler0->waitForFrameCount(1);
        frameHandler1->waitForFrameCount(1);

        // Client 1 becomes a primary client and programs a parameter.

        // Get a valid parameter value range
        ParameterRange range;
        ASSERT_TRUE(pCam1->getIntParameterRange(cam1Cmds[0], &range).isOk());

        // Client1 becomes a primary client
        ASSERT_TRUE(pCam1->setPrimaryClient().isOk());

        // NOTE(review): this outer aTargetEvent is never read — every listener
        // lambda declares its own shadowing copy.
        std::vector<int32_t> values;
        EvsEventDesc aTargetEvent = {};
        EvsEventDesc aNotification = {};
        bool listening = false;
        std::mutex eventLock;
        std::condition_variable eventCond;
        if (cam1Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
            // Manual focus requires auto-focus off; watch for the
            // corresponding PARAMETER_CHANGED event on the other client.
            std::thread listener =
                    std::thread([&frameHandler0, &aNotification, &listening, &eventCond] {
                        listening = true;
                        eventCond.notify_all();

                        EvsEventDesc aTargetEvent;
                        aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
                        aTargetEvent.payload.push_back(
                                static_cast<int32_t>(CameraParam::AUTO_FOCUS));
                        aTargetEvent.payload.push_back(0);
                        if (!frameHandler0->waitForEvent(aTargetEvent, aNotification)) {
                            LOG(WARNING) << "A timer is expired before a target event is fired.";
                        }
                    });

            // Wait until a listener starts.
            std::unique_lock<std::mutex> lock(eventLock);
            auto timer = std::chrono::system_clock::now();
            while (!listening) {
                eventCond.wait_until(lock, timer + 1s);
            }
            lock.unlock();

            // Try to turn off auto-focus
            ASSERT_TRUE(pCam1->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
            for (auto&& v : values) {
                EXPECT_EQ(v, 0);
            }

            // Join a listener
            if (listener.joinable()) {
                listener.join();
            }

            // Make sure AUTO_FOCUS is off.
            ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
                      EvsEventType::PARAMETER_CHANGED);
        }

        // Try to program a parameter with a random value [minVal, maxVal] after
        // rounding it down.
        // NOTE(review): UB if range.max == range.min or range.step == 0 —
        // confirm device ranges are always non-degenerate or add guards.
        int32_t val0 = range.min + (std::rand() % (range.max - range.min));
        val0 = val0 - (val0 % range.step);

        std::thread listener = std::thread(
                [&frameHandler1, &aNotification, &listening, &eventCond, &cam1Cmds, val0] {
                    listening = true;
                    eventCond.notify_all();

                    EvsEventDesc aTargetEvent;
                    aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
                    aTargetEvent.payload.push_back(static_cast<int32_t>(cam1Cmds[0]));
                    aTargetEvent.payload.push_back(val0);
                    if (!frameHandler1->waitForEvent(aTargetEvent, aNotification)) {
                        LOG(WARNING) << "A timer is expired before a target event is fired.";
                    }
                });

        // Wait until a listener starts.
        listening = false;
        std::unique_lock<std::mutex> lock(eventLock);
        auto timer = std::chrono::system_clock::now();
        while (!listening) {
            eventCond.wait_until(lock, timer + 1s);
        }
        lock.unlock();

        values.clear();
        ASSERT_TRUE(pCam1->setIntParameter(cam1Cmds[0], val0, &values).isOk());
        for (auto&& v : values) {
            EXPECT_EQ(val0, v);
        }

        // Join a listener
        if (listener.joinable()) {
            listener.join();
        }

        // Verify a change notification
        ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType), EvsEventType::PARAMETER_CHANGED);
        ASSERT_GE(aNotification.payload.size(), 2);
        ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]), cam1Cmds[0]);
        for (auto&& v : values) {
            ASSERT_EQ(v, aNotification.payload[1]);
        }

        // Client 1 should be notified of losing the primary role when the
        // display-owning client forcibly takes it below.
        listener = std::thread([&frameHandler1, &aNotification, &listening, &eventCond] {
            listening = true;
            eventCond.notify_all();

            EvsEventDesc aTargetEvent;
            aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
            if (!frameHandler1->waitForEvent(aTargetEvent, aNotification, true)) {
                LOG(WARNING) << "A timer is expired before a target event is fired.";
            }
        });

        // Wait until a listener starts.
        listening = false;
        lock.lock();
        timer = std::chrono::system_clock::now();
        while (!listening) {
            eventCond.wait_until(lock, timer + 1s);
        }
        lock.unlock();

        // Client 0 steals a primary client role; allowed because it holds the
        // display handle.
        ASSERT_TRUE(pCam0->forcePrimaryClient(pDisplay).isOk());

        // Join a listener
        if (listener.joinable()) {
            listener.join();
        }

        ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType), EvsEventType::MASTER_RELEASED);

        // Client 0 programs a parameter
        val0 = range.min + (std::rand() % (range.max - range.min));

        // Rounding down
        val0 = val0 - (val0 % range.step);

        if (cam0Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
            std::thread listener =
                    std::thread([&frameHandler1, &aNotification, &listening, &eventCond] {
                        listening = true;
                        eventCond.notify_all();

                        EvsEventDesc aTargetEvent;
                        aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
                        aTargetEvent.payload.push_back(
                                static_cast<int32_t>(CameraParam::AUTO_FOCUS));
                        aTargetEvent.payload.push_back(0);
                        if (!frameHandler1->waitForEvent(aTargetEvent, aNotification)) {
                            LOG(WARNING) << "A timer is expired before a target event is fired.";
                        }
                    });

            // Wait until a listener starts.
            // NOTE(review): `listening` is not reset to false before this
            // wait (unlike the other wait loops), so this may fall through
            // immediately — verify the handshake still holds.
            std::unique_lock<std::mutex> lock(eventLock);
            auto timer = std::chrono::system_clock::now();
            while (!listening) {
                eventCond.wait_until(lock, timer + 1s);
            }
            lock.unlock();

            // Try to turn off auto-focus
            values.clear();
            ASSERT_TRUE(pCam0->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
            for (auto&& v : values) {
                EXPECT_EQ(v, 0);
            }

            // Join a listener
            if (listener.joinable()) {
                listener.join();
            }

            // Make sure AUTO_FOCUS is off.
            ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
                      EvsEventType::PARAMETER_CHANGED);
        }

        listener = std::thread(
                [&frameHandler0, &aNotification, &listening, &eventCond, &cam0Cmds, val0] {
                    listening = true;
                    eventCond.notify_all();

                    EvsEventDesc aTargetEvent;
                    aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
                    aTargetEvent.payload.push_back(static_cast<int32_t>(cam0Cmds[0]));
                    aTargetEvent.payload.push_back(val0);
                    if (!frameHandler0->waitForEvent(aTargetEvent, aNotification)) {
                        LOG(WARNING) << "A timer is expired before a target event is fired.";
                    }
                });

        // Wait until a listener starts.
        listening = false;
        timer = std::chrono::system_clock::now();
        lock.lock();
        while (!listening) {
            eventCond.wait_until(lock, timer + 1s);
        }
        lock.unlock();

        values.clear();
        ASSERT_TRUE(pCam0->setIntParameter(cam0Cmds[0], val0, &values).isOk());

        // Join a listener
        if (listener.joinable()) {
            listener.join();
        }
        // Verify a change notification
        ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType), EvsEventType::PARAMETER_CHANGED);
        ASSERT_GE(aNotification.payload.size(), 2);
        ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]), cam0Cmds[0]);
        for (auto&& v : values) {
            ASSERT_EQ(v, aNotification.payload[1]);
        }

        // Turn off the display (yes, before the stream stops -- it should be handled)
        ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::NOT_VISIBLE).isOk());

        // Shut down the streamer
        frameHandler0->shutdown();
        frameHandler1->shutdown();

        // Explicitly release the camera
        ASSERT_TRUE(mEnumerator->closeCamera(pCam0).isOk());
        ASSERT_TRUE(mEnumerator->closeCamera(pCam1).isOk());
        mActiveCameras.clear();
    }

    // Explicitly release the display
    ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
}
1683
1684/*
1685 * CameraUseStreamConfigToDisplay:
1686 * End to end test of data flowing from the camera to the display. Similar to
1687 * CameraToDisplayRoundTrip test case but this case retrieves available stream
1688 * configurations from EVS and uses one of them to start a video stream.
1689 */
1690TEST_P(EvsAidlTest, CameraUseStreamConfigToDisplay) {
1691 LOG(INFO) << "Starting CameraUseStreamConfigToDisplay test";
1692
1693 // Get the camera list
1694 loadCameraList();
1695
1696 // Request available display IDs
1697 uint8_t targetDisplayId = 0;
1698 std::vector<uint8_t> displayIds;
1699 ASSERT_TRUE(mEnumerator->getDisplayIdList(&displayIds).isOk());
1700 EXPECT_GT(displayIds.size(), 0);
1701 targetDisplayId = displayIds[0];
1702
1703 // Request exclusive access to the EVS display
1704 std::shared_ptr<IEvsDisplay> pDisplay;
1705 ASSERT_TRUE(mEnumerator->openDisplay(targetDisplayId, &pDisplay).isOk());
1706 EXPECT_NE(pDisplay, nullptr);
1707
1708 // Test each reported camera
1709 for (auto&& cam : mCameraInfo) {
1710 // choose a configuration that has a frame rate faster than minReqFps.
1711 Stream targetCfg = {};
1712 const int32_t minReqFps = 15;
1713 int32_t maxArea = 0;
1714 camera_metadata_entry_t streamCfgs;
1715 bool foundCfg = false;
1716 if (!find_camera_metadata_entry(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()),
1717 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
1718 &streamCfgs)) {
1719 // Stream configurations are found in metadata
1720 RawStreamConfig* ptr = reinterpret_cast<RawStreamConfig*>(streamCfgs.data.i32);
1721 for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
1722 if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
1723 ptr->format == HAL_PIXEL_FORMAT_RGBA_8888) {
1724 if (ptr->width * ptr->height > maxArea && ptr->framerate >= minReqFps) {
1725 targetCfg.width = ptr->width;
1726 targetCfg.height = ptr->height;
1727
1728 maxArea = ptr->width * ptr->height;
1729 foundCfg = true;
1730 }
1731 }
1732 ++ptr;
1733 }
1734 }
1735 targetCfg.format = static_cast<PixelFormat>(HAL_PIXEL_FORMAT_RGBA_8888);
1736
1737 if (!foundCfg) {
1738 // Current EVS camera does not provide stream configurations in the
1739 // metadata.
1740 continue;
1741 }
1742
1743 std::shared_ptr<IEvsCamera> pCam;
1744 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
1745 EXPECT_NE(pCam, nullptr);
1746
1747 // Store a camera handle for a clean-up
1748 mActiveCameras.push_back(pCam);
1749
1750 // Set up a frame receiver object which will fire up its own thread.
Frederick Mayle7056b242022-03-29 02:38:12 +00001751 std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
1752 pCam, cam, pDisplay, FrameHandler::eAutoReturn);
Changyeon Jo80189012021-10-10 16:34:21 -07001753 EXPECT_NE(frameHandler, nullptr);
1754
1755 // Activate the display
1756 ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME).isOk());
1757
1758 // Start the camera's video stream
1759 ASSERT_TRUE(frameHandler->startStream());
1760
1761 // Wait a while to let the data flow
1762 static const int kSecondsToWait = 5;
1763 const int streamTimeMs =
1764 kSecondsToWait * kSecondsToMilliseconds - kMaxStreamStartMilliseconds;
1765 const unsigned minimumFramesExpected =
1766 streamTimeMs * kMinimumFramesPerSecond / kSecondsToMilliseconds;
1767 sleep(kSecondsToWait);
1768 unsigned framesReceived = 0;
1769 unsigned framesDisplayed = 0;
1770 frameHandler->getFramesCounters(&framesReceived, &framesDisplayed);
1771 EXPECT_EQ(framesReceived, framesDisplayed);
1772 EXPECT_GE(framesDisplayed, minimumFramesExpected);
1773
1774 // Turn off the display (yes, before the stream stops -- it should be handled)
1775 ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::NOT_VISIBLE).isOk());
1776
1777 // Shut down the streamer
1778 frameHandler->shutdown();
1779
1780 // Explicitly release the camera
1781 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
1782 mActiveCameras.clear();
1783 }
1784
1785 // Explicitly release the display
1786 ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
1787}
1788
1789/*
1790 * MultiCameraStreamUseConfig:
1791 * Verify that each client can start and stop video streams on the same
1792 * underlying camera with same configuration.
1793 */
1794TEST_P(EvsAidlTest, MultiCameraStreamUseConfig) {
1795 LOG(INFO) << "Starting MultiCameraStream test";
1796
1797 if (mIsHwModule) {
1798 // This test is not for HW module implementation.
1799 return;
1800 }
1801
1802 // Get the camera list
1803 loadCameraList();
1804
1805 // Test each reported camera
1806 for (auto&& cam : mCameraInfo) {
1807 // choose a configuration that has a frame rate faster than minReqFps.
1808 Stream targetCfg = {};
1809 const int32_t minReqFps = 15;
1810 int32_t maxArea = 0;
1811 camera_metadata_entry_t streamCfgs;
1812 bool foundCfg = false;
1813 if (!find_camera_metadata_entry(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()),
1814 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
1815 &streamCfgs)) {
1816 // Stream configurations are found in metadata
1817 RawStreamConfig* ptr = reinterpret_cast<RawStreamConfig*>(streamCfgs.data.i32);
1818 for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
1819 if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
1820 ptr->format == HAL_PIXEL_FORMAT_RGBA_8888) {
1821 if (ptr->width * ptr->height > maxArea && ptr->framerate >= minReqFps) {
1822 targetCfg.width = ptr->width;
1823 targetCfg.height = ptr->height;
1824
1825 maxArea = ptr->width * ptr->height;
1826 foundCfg = true;
1827 }
1828 }
1829 ++ptr;
1830 }
1831 }
1832 targetCfg.format = static_cast<PixelFormat>(HAL_PIXEL_FORMAT_RGBA_8888);
1833
1834 if (!foundCfg) {
1835 LOG(INFO) << "Device " << cam.id
1836 << " does not provide a list of supported stream configurations, skipped";
1837 continue;
1838 }
1839
1840 // Create the first camera client with a selected stream configuration.
1841 std::shared_ptr<IEvsCamera> pCam0;
1842 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam0).isOk());
1843 EXPECT_NE(pCam0, nullptr);
1844
1845 // Store a camera handle for a clean-up
1846 mActiveCameras.push_back(pCam0);
1847
1848 // Try to create the second camera client with different stream
1849 // configuration.
1850 int32_t id = targetCfg.id;
1851 targetCfg.id += 1; // EVS manager sees only the stream id.
1852 std::shared_ptr<IEvsCamera> pCam1;
1853 ASSERT_FALSE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());
1854
1855 // Try again with same stream configuration.
1856 targetCfg.id = id;
1857 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());
1858 EXPECT_NE(pCam1, nullptr);
1859
1860 // Set up per-client frame receiver objects which will fire up its own thread
Frederick Mayle7056b242022-03-29 02:38:12 +00001861 std::shared_ptr<FrameHandler> frameHandler0 = ndk::SharedRefBase::make<FrameHandler>(
1862 pCam0, cam, nullptr, FrameHandler::eAutoReturn);
1863 std::shared_ptr<FrameHandler> frameHandler1 = ndk::SharedRefBase::make<FrameHandler>(
1864 pCam1, cam, nullptr, FrameHandler::eAutoReturn);
Changyeon Jo80189012021-10-10 16:34:21 -07001865 EXPECT_NE(frameHandler0, nullptr);
1866 EXPECT_NE(frameHandler1, nullptr);
1867
1868 // Start the camera's video stream via client 0
1869 ASSERT_TRUE(frameHandler0->startStream());
1870 ASSERT_TRUE(frameHandler1->startStream());
1871
1872 // Ensure the stream starts
1873 frameHandler0->waitForFrameCount(1);
1874 frameHandler1->waitForFrameCount(1);
1875
1876 nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
1877
1878 // Wait a bit, then ensure both clients get at least the required minimum number of frames
1879 sleep(5);
1880 nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
1881 unsigned framesReceived0 = 0, framesReceived1 = 0;
1882 frameHandler0->getFramesCounters(&framesReceived0, nullptr);
1883 frameHandler1->getFramesCounters(&framesReceived1, nullptr);
1884 framesReceived0 = framesReceived0 - 1; // Back out the first frame we already waited for
1885 framesReceived1 = framesReceived1 - 1; // Back out the first frame we already waited for
1886 nsecs_t runTime = end - firstFrame;
1887 float framesPerSecond0 = framesReceived0 / (runTime * kNanoToSeconds);
1888 float framesPerSecond1 = framesReceived1 / (runTime * kNanoToSeconds);
1889 LOG(INFO) << "Measured camera rate " << std::scientific << framesPerSecond0 << " fps and "
1890 << framesPerSecond1 << " fps";
1891 EXPECT_GE(framesPerSecond0, kMinimumFramesPerSecond);
1892 EXPECT_GE(framesPerSecond1, kMinimumFramesPerSecond);
1893
1894 // Shutdown one client
1895 frameHandler0->shutdown();
1896
1897 // Read frame counters again
1898 frameHandler0->getFramesCounters(&framesReceived0, nullptr);
1899 frameHandler1->getFramesCounters(&framesReceived1, nullptr);
1900
1901 // Wait a bit again
1902 sleep(5);
1903 unsigned framesReceivedAfterStop0 = 0, framesReceivedAfterStop1 = 0;
1904 frameHandler0->getFramesCounters(&framesReceivedAfterStop0, nullptr);
1905 frameHandler1->getFramesCounters(&framesReceivedAfterStop1, nullptr);
1906 EXPECT_EQ(framesReceived0, framesReceivedAfterStop0);
1907 EXPECT_LT(framesReceived1, framesReceivedAfterStop1);
1908
1909 // Shutdown another
1910 frameHandler1->shutdown();
1911
1912 // Explicitly release the camera
1913 ASSERT_TRUE(mEnumerator->closeCamera(pCam0).isOk());
1914 ASSERT_TRUE(mEnumerator->closeCamera(pCam1).isOk());
1915 mActiveCameras.clear();
1916 }
1917}
1918
1919/*
1920 * LogicalCameraMetadata:
1921 * Opens logical camera reported by the enumerator and validate its metadata by
1922 * checking its capability and locating supporting physical camera device
1923 * identifiers.
1924 */
1925TEST_P(EvsAidlTest, LogicalCameraMetadata) {
1926 LOG(INFO) << "Starting LogicalCameraMetadata test";
1927
1928 // Get the camera list
1929 loadCameraList();
1930
1931 // Open and close each camera twice
1932 for (auto&& cam : mCameraInfo) {
1933 bool isLogicalCam = false;
1934 auto devices = getPhysicalCameraIds(cam.id, isLogicalCam);
1935 if (isLogicalCam) {
1936 ASSERT_GE(devices.size(), 1) << "Logical camera device must have at least one physical "
1937 "camera device ID in its metadata.";
1938 }
1939 }
1940}
1941
1942/*
1943 * CameraStreamExternalBuffering:
1944 * This is same with CameraStreamBuffering except frame buffers are allocated by
1945 * the test client and then imported by EVS framework.
1946 */
1947TEST_P(EvsAidlTest, CameraStreamExternalBuffering) {
1948 LOG(INFO) << "Starting CameraStreamExternalBuffering test";
1949
1950 // Arbitrary constant (should be > 1 and not too big)
1951 static const unsigned int kBuffersToHold = 3;
1952
1953 // Get the camera list
1954 loadCameraList();
1955
1956 // Acquire the graphics buffer allocator
1957 android::GraphicBufferAllocator& alloc(android::GraphicBufferAllocator::get());
1958 const auto usage =
1959 GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_SW_READ_RARELY | GRALLOC_USAGE_SW_WRITE_OFTEN;
1960
1961 // Test each reported camera
1962 for (auto&& cam : mCameraInfo) {
1963 // Read a target resolution from the metadata
1964 Stream targetCfg = getFirstStreamConfiguration(
1965 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
1966 ASSERT_GT(targetCfg.width, 0);
1967 ASSERT_GT(targetCfg.height, 0);
1968
1969 // Allocate buffers to use
1970 std::vector<BufferDesc> buffers;
1971 buffers.resize(kBuffersToHold);
1972 for (auto i = 0; i < kBuffersToHold; ++i) {
1973 unsigned pixelsPerLine;
1974 buffer_handle_t memHandle = nullptr;
1975 android::status_t result =
1976 alloc.allocate(targetCfg.width, targetCfg.height,
1977 static_cast<android::PixelFormat>(targetCfg.format),
1978 /* layerCount = */ 1, usage, &memHandle, &pixelsPerLine,
1979 /* graphicBufferId = */ 0,
1980 /* requestorName = */ "CameraStreamExternalBufferingTest");
1981 if (result != android::NO_ERROR) {
1982 LOG(ERROR) << __FUNCTION__ << " failed to allocate memory.";
1983 // Release previous allocated buffers
1984 for (auto j = 0; j < i; j++) {
1985 alloc.free(::android::dupFromAidl(buffers[i].buffer.handle));
1986 }
1987 return;
1988 } else {
1989 BufferDesc buf;
1990 HardwareBufferDescription* pDesc =
1991 reinterpret_cast<HardwareBufferDescription*>(&buf.buffer.description);
1992 pDesc->width = targetCfg.width;
1993 pDesc->height = targetCfg.height;
1994 pDesc->layers = 1;
1995 pDesc->format = targetCfg.format;
1996 pDesc->usage = static_cast<BufferUsage>(usage);
1997 pDesc->stride = pixelsPerLine;
1998 buf.buffer.handle = ::android::dupToAidl(memHandle);
1999 buf.bufferId = i; // Unique number to identify this buffer
2000 buffers[i] = std::move(buf);
2001 }
2002 }
2003
2004 bool isLogicalCam = false;
2005 getPhysicalCameraIds(cam.id, isLogicalCam);
2006
2007 std::shared_ptr<IEvsCamera> pCam;
2008 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
2009 EXPECT_NE(pCam, nullptr);
2010
2011 // Store a camera handle for a clean-up
2012 mActiveCameras.push_back(pCam);
2013
2014 // Request to import buffers
2015 int delta = 0;
2016 auto status = pCam->importExternalBuffers(buffers, &delta);
2017 if (isLogicalCam) {
2018 ASSERT_FALSE(status.isOk());
2019 continue;
2020 }
2021
2022 ASSERT_TRUE(status.isOk());
2023 EXPECT_GE(delta, kBuffersToHold);
2024
2025 // Set up a frame receiver object which will fire up its own thread.
Frederick Mayle7056b242022-03-29 02:38:12 +00002026 std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
2027 pCam, cam, nullptr, FrameHandler::eNoAutoReturn);
Changyeon Jo80189012021-10-10 16:34:21 -07002028 EXPECT_NE(frameHandler, nullptr);
2029
2030 // Start the camera's video stream
2031 ASSERT_TRUE(frameHandler->startStream());
2032
2033 // Check that the video stream stalls once we've gotten exactly the number of buffers
2034 // we requested since we told the frameHandler not to return them.
2035 sleep(1); // 1 second should be enough for at least 5 frames to be delivered worst case
2036 unsigned framesReceived = 0;
2037 frameHandler->getFramesCounters(&framesReceived, nullptr);
2038 ASSERT_LE(kBuffersToHold, framesReceived) << "Stream didn't stall at expected buffer limit";
2039
2040 // Give back one buffer
2041 EXPECT_TRUE(frameHandler->returnHeldBuffer());
2042
2043 // Once we return a buffer, it shouldn't take more than 1/10 second to get a new one
2044 // filled since we require 10fps minimum -- but give a 10% allowance just in case.
2045 unsigned framesReceivedAfter = 0;
2046 usleep(110 * kMillisecondsToMicroseconds);
2047 frameHandler->getFramesCounters(&framesReceivedAfter, nullptr);
2048 EXPECT_EQ(framesReceived + 1, framesReceivedAfter) << "Stream should've resumed";
2049
2050 // Even when the camera pointer goes out of scope, the FrameHandler object will
2051 // keep the stream alive unless we tell it to shutdown.
2052 // Also note that the FrameHandle and the Camera have a mutual circular reference, so
2053 // we have to break that cycle in order for either of them to get cleaned up.
2054 frameHandler->shutdown();
2055
2056 // Explicitly release the camera
2057 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
2058 mActiveCameras.clear();
2059 // Release buffers
2060 for (auto& b : buffers) {
2061 alloc.free(::android::dupFromAidl(b.buffer.handle));
2062 }
2063 buffers.resize(0);
2064 }
2065}
2066
2067/*
2068 * UltrasonicsArrayOpenClean:
2069 * Opens each ultrasonics arrays reported by the enumerator and then explicitly closes it via a
2070 * call to closeUltrasonicsArray. Then repeats the test to ensure all ultrasonics arrays
2071 * can be reopened.
2072 */
2073TEST_P(EvsAidlTest, UltrasonicsArrayOpenClean) {
2074 LOG(INFO) << "Starting UltrasonicsArrayOpenClean test";
2075
2076 // Get the ultrasonics array list
2077 loadUltrasonicsArrayList();
2078
2079 // Open and close each ultrasonics array twice
2080 for (auto&& ultraInfo : mUltrasonicsArraysInfo) {
2081 for (int pass = 0; pass < 2; pass++) {
2082 std::shared_ptr<IEvsUltrasonicsArray> pUltrasonicsArray;
2083 ASSERT_TRUE(
2084 mEnumerator
2085 ->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId, &pUltrasonicsArray)
2086 .isOk());
2087 EXPECT_NE(pUltrasonicsArray, nullptr);
2088
2089 // Verify that this ultrasonics array self-identifies correctly
2090 UltrasonicsArrayDesc desc;
2091 ASSERT_TRUE(pUltrasonicsArray->getUltrasonicArrayInfo(&desc).isOk());
2092 EXPECT_EQ(ultraInfo.ultrasonicsArrayId, desc.ultrasonicsArrayId);
2093 LOG(DEBUG) << "Found ultrasonics array " << ultraInfo.ultrasonicsArrayId;
2094
2095 // Explicitly close the ultrasonics array so resources are released right away
2096 ASSERT_TRUE(mEnumerator->closeUltrasonicsArray(pUltrasonicsArray).isOk());
2097 }
2098 }
2099}
2100
2101// Starts a stream and verifies all data received is valid.
2102TEST_P(EvsAidlTest, UltrasonicsVerifyStreamData) {
2103 LOG(INFO) << "Starting UltrasonicsVerifyStreamData";
2104
2105 // Get the ultrasonics array list
2106 loadUltrasonicsArrayList();
2107
2108 // For each ultrasonics array.
2109 for (auto&& ultraInfo : mUltrasonicsArraysInfo) {
2110 LOG(DEBUG) << "Testing ultrasonics array: " << ultraInfo.ultrasonicsArrayId;
2111
2112 std::shared_ptr<IEvsUltrasonicsArray> pUltrasonicsArray;
2113 ASSERT_TRUE(
2114 mEnumerator->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId, &pUltrasonicsArray)
2115 .isOk());
2116 EXPECT_NE(pUltrasonicsArray, nullptr);
2117
2118 std::shared_ptr<FrameHandlerUltrasonics> frameHandler =
Frederick Mayle7056b242022-03-29 02:38:12 +00002119 ndk::SharedRefBase::make<FrameHandlerUltrasonics>(pUltrasonicsArray);
Changyeon Jo80189012021-10-10 16:34:21 -07002120 EXPECT_NE(frameHandler, nullptr);
2121
2122 // Start stream.
2123 ASSERT_TRUE(pUltrasonicsArray->startStream(frameHandler).isOk());
2124
2125 // Wait 5 seconds to receive frames.
2126 sleep(5);
2127
2128 // Stop stream.
2129 ASSERT_TRUE(pUltrasonicsArray->stopStream().isOk());
2130
2131 EXPECT_GT(frameHandler->getReceiveFramesCount(), 0);
2132 EXPECT_TRUE(frameHandler->areAllFramesValid());
2133
2134 // Explicitly close the ultrasonics array so resources are released right away
2135 ASSERT_TRUE(mEnumerator->closeUltrasonicsArray(pUltrasonicsArray).isOk());
2136 }
2137}
2138
2139// Sets frames in flight before and after start of stream and verfies success.
2140TEST_P(EvsAidlTest, UltrasonicsSetFramesInFlight) {
2141 LOG(INFO) << "Starting UltrasonicsSetFramesInFlight";
2142
2143 // Get the ultrasonics array list
2144 loadUltrasonicsArrayList();
2145
2146 // For each ultrasonics array.
2147 for (auto&& ultraInfo : mUltrasonicsArraysInfo) {
2148 LOG(DEBUG) << "Testing ultrasonics array: " << ultraInfo.ultrasonicsArrayId;
2149
2150 std::shared_ptr<IEvsUltrasonicsArray> pUltrasonicsArray;
2151 ASSERT_TRUE(
2152 mEnumerator->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId, &pUltrasonicsArray)
2153 .isOk());
2154 EXPECT_NE(pUltrasonicsArray, nullptr);
2155
2156 ASSERT_TRUE(pUltrasonicsArray->setMaxFramesInFlight(10).isOk());
2157
2158 std::shared_ptr<FrameHandlerUltrasonics> frameHandler =
Frederick Mayle7056b242022-03-29 02:38:12 +00002159 ndk::SharedRefBase::make<FrameHandlerUltrasonics>(pUltrasonicsArray);
Changyeon Jo80189012021-10-10 16:34:21 -07002160 EXPECT_NE(frameHandler, nullptr);
2161
2162 // Start stream.
2163 ASSERT_TRUE(pUltrasonicsArray->startStream(frameHandler).isOk());
2164 ASSERT_TRUE(pUltrasonicsArray->setMaxFramesInFlight(5).isOk());
2165
2166 // Stop stream.
2167 ASSERT_TRUE(pUltrasonicsArray->stopStream().isOk());
2168
2169 // Explicitly close the ultrasonics array so resources are released right away
2170 ASSERT_TRUE(mEnumerator->closeUltrasonicsArray(pUltrasonicsArray).isOk());
2171 }
2172}
2173
// Some builds may register no EVS instances; allow the suite to exist without
// any instantiated parameters instead of failing the binary.
GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(EvsAidlTest);
// Instantiate the suite once per registered AIDL IEvsEnumerator instance,
// using the instance name as the test-name suffix.
INSTANTIATE_TEST_SUITE_P(
        PerInstance, EvsAidlTest,
        testing::ValuesIn(android::getAidlHalInstanceNames(IEvsEnumerator::descriptor)),
        android::PrintInstanceNameToString);
2179
// Test entry point: initialize gtest (consumes its command-line flags), then
// configure and start the binder thread pool before running the suites so
// asynchronous HAL callbacks can be delivered during the tests.
int main(int argc, char** argv) {
    ::testing::InitGoogleTest(&argc, argv);
    // One binder thread is configured here; presumably sufficient for the
    // callback traffic these tests generate -- TODO confirm if flakes appear.
    ABinderProcess_setThreadPoolMaxThreadCount(1);
    ABinderProcess_startThreadPool();
    return RUN_ALL_TESTS();
}