/*
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "FrameHandler.h"
#include "FrameHandlerUltrasonics.h"

#include <aidl/Gtest.h>
#include <aidl/Vintf.h>
#include <aidl/android/hardware/automotive/evs/BnEvsEnumeratorStatusCallback.h>
#include <aidl/android/hardware/automotive/evs/BufferDesc.h>
#include <aidl/android/hardware/automotive/evs/CameraDesc.h>
#include <aidl/android/hardware/automotive/evs/CameraParam.h>
#include <aidl/android/hardware/automotive/evs/DeviceStatus.h>
#include <aidl/android/hardware/automotive/evs/DisplayDesc.h>
#include <aidl/android/hardware/automotive/evs/DisplayState.h>
#include <aidl/android/hardware/automotive/evs/EvsEventDesc.h>
#include <aidl/android/hardware/automotive/evs/EvsEventType.h>
#include <aidl/android/hardware/automotive/evs/EvsResult.h>
#include <aidl/android/hardware/automotive/evs/IEvsCamera.h>
#include <aidl/android/hardware/automotive/evs/IEvsDisplay.h>
#include <aidl/android/hardware/automotive/evs/IEvsEnumerator.h>
#include <aidl/android/hardware/automotive/evs/IEvsEnumeratorStatusCallback.h>
#include <aidl/android/hardware/automotive/evs/IEvsUltrasonicsArray.h>
#include <aidl/android/hardware/automotive/evs/ParameterRange.h>
#include <aidl/android/hardware/automotive/evs/Stream.h>
#include <aidl/android/hardware/automotive/evs/UltrasonicsArrayDesc.h>
#include <aidl/android/hardware/common/NativeHandle.h>
#include <aidl/android/hardware/graphics/common/HardwareBufferDescription.h>
#include <aidl/android/hardware/graphics/common/PixelFormat.h>
#include <aidlcommonsupport/NativeHandle.h>
#include <android-base/logging.h>
#include <android/binder_ibinder.h>
#include <android/binder_manager.h>
#include <android/binder_process.h>
#include <android/binder_status.h>
#include <system/camera_metadata.h>
#include <ui/GraphicBuffer.h>
#include <ui/GraphicBufferAllocator.h>
#include <utils/Timers.h>

#include <chrono>
#include <deque>
#include <thread>
#include <unordered_set>

namespace {

// These values are called out in the EVS design doc (as of Mar 8, 2017)
constexpr int kMaxStreamStartMilliseconds = 500;
constexpr int kMinimumFramesPerSecond = 10;
constexpr int kSecondsToMilliseconds = 1000;
constexpr int kMillisecondsToMicroseconds = 1000;
constexpr float kNanoToMilliseconds = 0.000001f;
constexpr float kNanoToSeconds = 0.000000001f;

/*
 * Please note that this is different from what is defined in
 * libhardware/modules/camera/3_4/metadata/types.h; this has one additional
 * field to store a framerate.
 */
typedef struct {
    int32_t id;
    int32_t width;
    int32_t height;
    int32_t format;
    int32_t direction;
    int32_t framerate;
} RawStreamConfig;
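// Number of 32-bit words occupied by a single RawStreamConfig entry; used to step through the
// raw ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS payload one configuration at a time.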
constexpr size_t kStreamCfgSz = sizeof(RawStreamConfig) / sizeof(int32_t);

using ::aidl::android::hardware::automotive::evs::BnEvsEnumeratorStatusCallback;
using ::aidl::android::hardware::automotive::evs::BufferDesc;
using ::aidl::android::hardware::automotive::evs::CameraDesc;
using ::aidl::android::hardware::automotive::evs::CameraParam;
using ::aidl::android::hardware::automotive::evs::DeviceStatus;
using ::aidl::android::hardware::automotive::evs::DisplayDesc;
using ::aidl::android::hardware::automotive::evs::DisplayState;
using ::aidl::android::hardware::automotive::evs::EvsEventDesc;
using ::aidl::android::hardware::automotive::evs::EvsEventType;
using ::aidl::android::hardware::automotive::evs::EvsResult;
using ::aidl::android::hardware::automotive::evs::IEvsCamera;
using ::aidl::android::hardware::automotive::evs::IEvsDisplay;
using ::aidl::android::hardware::automotive::evs::IEvsEnumerator;
using ::aidl::android::hardware::automotive::evs::IEvsEnumeratorStatusCallback;
using ::aidl::android::hardware::automotive::evs::IEvsUltrasonicsArray;
using ::aidl::android::hardware::automotive::evs::ParameterRange;
using ::aidl::android::hardware::automotive::evs::Stream;
using ::aidl::android::hardware::automotive::evs::UltrasonicsArrayDesc;
using ::aidl::android::hardware::graphics::common::BufferUsage;
using ::aidl::android::hardware::graphics::common::HardwareBufferDescription;
using ::aidl::android::hardware::graphics::common::PixelFormat;
using std::chrono_literals::operator""s;

}  // namespace

// The main test class for EVS
class EvsAidlTest : public ::testing::TestWithParam<std::string> {
  public:
    virtual void SetUp() override {
        // Make sure we can connect to the enumerator
        std::string service_name = GetParam();
        AIBinder* binder = AServiceManager_waitForService(service_name.data());
        ASSERT_NE(binder, nullptr);
        mEnumerator = IEvsEnumerator::fromBinder(::ndk::SpAIBinder(binder));
        LOG(INFO) << "Test target service: " << service_name;

        ASSERT_TRUE(mEnumerator->isHardware(&mIsHwModule).isOk());
    }

    virtual void TearDown() override {
        // Attempt to close any active camera
        for (auto&& cam : mActiveCameras) {
            if (cam != nullptr) {
                mEnumerator->closeCamera(cam);
            }
        }
        mActiveCameras.clear();
    }

  protected:
    void loadCameraList() {
        // SetUp() must run first!
        ASSERT_NE(mEnumerator, nullptr);

        // Get the camera list
        ASSERT_TRUE(mEnumerator->getCameraList(&mCameraInfo).isOk())
                << "Failed to get a list of available cameras";
        LOG(INFO) << "We have " << mCameraInfo.size() << " cameras.";
    }

    void loadUltrasonicsArrayList() {
        // SetUp() must run first!
        ASSERT_NE(mEnumerator, nullptr);

        // Get the ultrasonics array list
        auto result = mEnumerator->getUltrasonicsArrayList(&mUltrasonicsArraysInfo);
        ASSERT_TRUE(result.isOk() ||
                    // TODO(b/149874793): Remove below conditions when
                    // getUltrasonicsArrayList() is implemented.
                    (!result.isOk() && result.getServiceSpecificError() ==
                                               static_cast<int32_t>(EvsResult::NOT_IMPLEMENTED)))
                << "Failed to get a list of available ultrasonics arrays";
        LOG(INFO) << "We have " << mUltrasonicsArraysInfo.size() << " ultrasonics arrays.";
    }

    bool isLogicalCamera(const camera_metadata_t* metadata) {
        if (metadata == nullptr) {
            // A logical camera device must have valid camera metadata.
            return false;
        }

        // Look for the LOGICAL_MULTI_CAMERA capability in the metadata.
        camera_metadata_ro_entry_t entry;
        int rc = find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
                                               &entry);
        if (rc != 0) {
            // No capabilities are found.
            return false;
        }

        for (size_t i = 0; i < entry.count; ++i) {
            uint8_t cap = entry.data.u8[i];
            if (cap == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA) {
                return true;
            }
        }

        return false;
    }

    std::unordered_set<std::string> getPhysicalCameraIds(const std::string& id, bool& flag) {
        std::unordered_set<std::string> physicalCameras;
        const auto it = std::find_if(mCameraInfo.begin(), mCameraInfo.end(),
                                     [&id](const CameraDesc& desc) { return id == desc.id; });
        if (it == mCameraInfo.end()) {
            // An unknown camera is requested. Return an empty list.
            return physicalCameras;
        }

        const camera_metadata_t* metadata = reinterpret_cast<camera_metadata_t*>(&it->metadata[0]);
        flag = isLogicalCamera(metadata);
        if (!flag) {
            // EVS assumes that a device without valid metadata is a physical
            // device.
            LOG(INFO) << id << " is not a logical camera device.";
            physicalCameras.insert(id);
            return physicalCameras;
        }

        // Look for physical camera identifiers
        camera_metadata_ro_entry entry;
        int rc = find_camera_metadata_ro_entry(metadata, ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS,
                                               &entry);
        if (rc != 0) {
            LOG(ERROR) << "No physical camera ID is found for a logical camera device";
        }

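        // Physical camera IDs are packed into the entry as consecutive NUL-terminated strings;
        // split on '\0' to recover the individual identifiers.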
        const uint8_t* ids = entry.data.u8;
        size_t start = 0;
        for (size_t i = 0; i < entry.count; ++i) {
            if (ids[i] == '\0') {
                if (start != i) {
                    std::string id(reinterpret_cast<const char*>(ids + start));
                    physicalCameras.insert(id);
                }
                start = i + 1;
            }
        }

        LOG(INFO) << id << " consists of " << physicalCameras.size() << " physical camera devices";
        return physicalCameras;
    }

    Stream getFirstStreamConfiguration(camera_metadata_t* metadata) {
        Stream targetCfg = {};
        camera_metadata_entry_t streamCfgs;
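        // find_camera_metadata_entry() returns 0 on success, so the negated call below
        // means the stream configuration entry was found.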
        if (!find_camera_metadata_entry(metadata, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
                                        &streamCfgs)) {
            // Stream configurations are found in metadata
            RawStreamConfig* ptr = reinterpret_cast<RawStreamConfig*>(streamCfgs.data.i32);
            for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
                if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
                    targetCfg.width = ptr->width;
                    targetCfg.height = ptr->height;
                    targetCfg.format = static_cast<PixelFormat>(ptr->format);
                    break;
                }
                ++ptr;
            }
        }

        return targetCfg;
    }

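    // A no-op status callback implementation for tests that only need to register a callback
    // with the enumerator.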
    class DeviceStatusCallback : public BnEvsEnumeratorStatusCallback {
        ndk::ScopedAStatus deviceStatusChanged(const std::vector<DeviceStatus>&) override {
            // This empty implementation always returns ok().
            return ndk::ScopedAStatus::ok();
        }
    };

    // Every test needs access to the service
    std::shared_ptr<IEvsEnumerator> mEnumerator;
    // Empty unless/until loadCameraList() is called
    std::vector<CameraDesc> mCameraInfo;
    // Boolean that tells whether the module under test is a HW module
    // implementation or not
    bool mIsHwModule;
    // A list of active camera handles that need to be cleaned up
    std::deque<std::shared_ptr<IEvsCamera>> mActiveCameras;
    // Empty unless/until loadUltrasonicsArrayList() is called
    std::vector<UltrasonicsArrayDesc> mUltrasonicsArraysInfo;
    // A list of active ultrasonics array handles that are to be cleaned up
    std::deque<std::weak_ptr<IEvsUltrasonicsArray>> mActiveUltrasonicsArrays;
};

// Test cases, their implementations, and corresponding requirements are
// documented at go/aae-evs-public-api-test.

/*
 * CameraOpenClean:
 * Opens each camera reported by the enumerator and then explicitly closes it via a
 * call to closeCamera. Then repeats the test to ensure all cameras can be reopened.
 */
TEST_P(EvsAidlTest, CameraOpenClean) {
    LOG(INFO) << "Starting CameraOpenClean test";

    // Get the camera list
    loadCameraList();

    // Open and close each camera twice
    for (auto&& cam : mCameraInfo) {
        bool isLogicalCam = false;
        auto devices = getPhysicalCameraIds(cam.id, isLogicalCam);
        if (mIsHwModule && isLogicalCam) {
            LOG(INFO) << "Skip a logical device, " << cam.id << " for HW target.";
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        for (int pass = 0; pass < 2; pass++) {
            std::shared_ptr<IEvsCamera> pCam;
            ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
            ASSERT_NE(pCam, nullptr);

            CameraDesc cameraInfo;
            for (auto&& devName : devices) {
                ASSERT_TRUE(pCam->getPhysicalCameraInfo(devName, &cameraInfo).isOk());
                EXPECT_EQ(devName, cameraInfo.id);
            }

            // Store a camera handle for a clean-up
            mActiveCameras.push_back(pCam);

            // Verify that this camera self-identifies correctly
            ASSERT_TRUE(pCam->getCameraInfo(&cameraInfo).isOk());
            EXPECT_EQ(cam.id, cameraInfo.id);

            // Verify methods for extended info
            const auto id = 0xFFFFFFFF;  // meaningless id
            std::vector<uint8_t> values;
            bool isSupported = false;
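            // A logical camera is expected to reject extended-info calls with NOT_SUPPORTED,
            // while a physical camera may either accept this ID or report INVALID_ARG.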
            auto status = pCam->setExtendedInfo(id, values);
            if (isLogicalCam) {
                EXPECT_TRUE(!status.isOk() && status.getServiceSpecificError() ==
                                                      static_cast<int>(EvsResult::NOT_SUPPORTED));
            } else {
                if (status.isOk()) {
                    // 0xFFFFFFFF is valid for the EVS HAL implementation under
                    // test.
                    isSupported = true;
                } else {
                    EXPECT_TRUE(status.getServiceSpecificError() ==
                                static_cast<int>(EvsResult::INVALID_ARG));
                }
            }

            status = pCam->getExtendedInfo(id, &values);
            if (isLogicalCam) {
                EXPECT_TRUE(!status.isOk() && status.getServiceSpecificError() ==
                                                      static_cast<int>(EvsResult::NOT_SUPPORTED));
            } else {
                if (isSupported) {
                    EXPECT_TRUE(status.isOk());
                } else {
                    EXPECT_TRUE(!status.isOk() && status.getServiceSpecificError() ==
                                                          static_cast<int>(EvsResult::INVALID_ARG));
                }
            }

            // Explicitly close the camera so resources are released right away
            ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
            mActiveCameras.clear();
        }
    }
}

/*
 * CameraOpenAggressive:
 * Opens each camera reported by the enumerator twice in a row without an intervening closeCamera
 * call. This ensures that the intended "aggressive open" behavior works. This is necessary for
 * the system to be tolerant of shutdown/restart race conditions.
 */
TEST_P(EvsAidlTest, CameraOpenAggressive) {
    LOG(INFO) << "Starting CameraOpenAggressive test";

    // Get the camera list
    loadCameraList();

    // Open and close each camera twice
    for (auto&& cam : mCameraInfo) {
        bool isLogicalCam = false;
        getPhysicalCameraIds(cam.id, isLogicalCam);
        if (mIsHwModule && isLogicalCam) {
            LOG(INFO) << "Skip a logical device, " << cam.id << " for HW target.";
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        mActiveCameras.clear();
        std::shared_ptr<IEvsCamera> pCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
        EXPECT_NE(pCam, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pCam);

        // Verify that this camera self-identifies correctly
        CameraDesc cameraInfo;
        ASSERT_TRUE(pCam->getCameraInfo(&cameraInfo).isOk());
        EXPECT_EQ(cam.id, cameraInfo.id);

        std::shared_ptr<IEvsCamera> pCam2;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam2).isOk());
        EXPECT_NE(pCam2, nullptr);
        EXPECT_NE(pCam, pCam2);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pCam2);

        auto status = pCam->setMaxFramesInFlight(2);
        if (mIsHwModule) {
            // Verify that the old camera rejects calls via the HW module.
            EXPECT_TRUE(!status.isOk() && status.getServiceSpecificError() ==
                                                  static_cast<int>(EvsResult::OWNERSHIP_LOST));
        } else {
            // The default implementation supports multiple clients.
            EXPECT_TRUE(status.isOk());
        }

        // Close the superseded camera
        ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
        mActiveCameras.pop_front();

        // Verify that the second camera instance self-identifies correctly
        ASSERT_TRUE(pCam2->getCameraInfo(&cameraInfo).isOk());
        EXPECT_EQ(cam.id, cameraInfo.id);

        // Close the second camera instance
        ASSERT_TRUE(mEnumerator->closeCamera(pCam2).isOk());
        mActiveCameras.pop_front();
    }

    // Sleep here to ensure the destructor cleanup has time to run so we don't break follow-on
    // tests
    sleep(1);  // I hate that this is an arbitrary time to wait. :( b/36122635
}

/*
 * CameraStreamPerformance:
 * Measure and qualify the stream start-up time and streaming frame rate of each reported camera
 */
TEST_P(EvsAidlTest, CameraStreamPerformance) {
    LOG(INFO) << "Starting CameraStreamPerformance test";

    // Get the camera list
    loadCameraList();

    // Test each reported camera
    for (auto&& cam : mCameraInfo) {
        bool isLogicalCam = false;
        auto devices = getPhysicalCameraIds(cam.id, isLogicalCam);
        if (mIsHwModule && isLogicalCam) {
            LOG(INFO) << "Skip a logical device " << cam.id;
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        std::shared_ptr<IEvsCamera> pCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
        EXPECT_NE(pCam, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pCam);

        // Set up a frame receiver object which will fire up its own thread
        std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
                pCam, cam, nullptr, FrameHandler::eAutoReturn);
        EXPECT_NE(frameHandler, nullptr);

        // Start the camera's video stream
        nsecs_t start = systemTime(SYSTEM_TIME_MONOTONIC);
        ASSERT_TRUE(frameHandler->startStream());

        // Ensure the first frame arrived within the expected time
        frameHandler->waitForFrameCount(1);
        nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
        nsecs_t timeToFirstFrame = systemTime(SYSTEM_TIME_MONOTONIC) - start;

        // Extra delays are expected when we attempt to start a video stream on a logical
        // camera device. The delay is expected to be at most kMaxStreamStartMilliseconds
        // multiplied by the number of physical camera devices.
        EXPECT_LE(nanoseconds_to_milliseconds(timeToFirstFrame),
                  kMaxStreamStartMilliseconds * devices.size());
        printf("%s: Measured time to first frame %0.2f ms\n", cam.id.data(),
               timeToFirstFrame * kNanoToMilliseconds);
        LOG(INFO) << cam.id << ": Measured time to first frame " << std::scientific
                  << timeToFirstFrame * kNanoToMilliseconds << " ms.";

        // Check aspect ratio
        unsigned width = 0, height = 0;
        frameHandler->getFrameDimension(&width, &height);
        EXPECT_GE(width, height);

        // Wait a bit, then ensure we get at least the required minimum number of frames
        sleep(5);
        nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);

        // Even when the camera pointer goes out of scope, the FrameHandler object will
        // keep the stream alive unless we tell it to shut down.
        // Also note that the FrameHandler and the Camera have a mutual circular reference, so
        // we have to break that cycle in order for either of them to get cleaned up.
        frameHandler->shutdown();

        unsigned framesReceived = 0;
        frameHandler->getFramesCounters(&framesReceived, nullptr);
        framesReceived = framesReceived - 1;  // Back out the first frame we already waited for
        nsecs_t runTime = end - firstFrame;
        float framesPerSecond = framesReceived / (runTime * kNanoToSeconds);
        printf("Measured camera rate %3.2f fps\n", framesPerSecond);
        LOG(INFO) << "Measured camera rate " << std::scientific << framesPerSecond << " fps.";
        EXPECT_GE(framesPerSecond, kMinimumFramesPerSecond);

        // Explicitly release the camera
        ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
        mActiveCameras.clear();
    }
}

/*
 * CameraStreamBuffering:
 * Ensure the camera implementation behaves properly when the client holds onto buffers for more
 * than one frame time. The camera must cleanly skip frames until the client is ready again.
 */
TEST_P(EvsAidlTest, CameraStreamBuffering) {
    LOG(INFO) << "Starting CameraStreamBuffering test";

    // Arbitrary constant (should be > 1 and not too big)
    static const unsigned int kBuffersToHold = 6;

    // Get the camera list
    loadCameraList();

    // Test each reported camera
    for (auto&& cam : mCameraInfo) {
        bool isLogicalCam = false;
        getPhysicalCameraIds(cam.id, isLogicalCam);
        if (mIsHwModule && isLogicalCam) {
            LOG(INFO) << "Skip a logical device " << cam.id << " for HW target.";
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        std::shared_ptr<IEvsCamera> pCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
        EXPECT_NE(pCam, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pCam);

        // Ask for a very large number of buffers in flight to ensure it errors correctly
        auto badResult = pCam->setMaxFramesInFlight(std::numeric_limits<int32_t>::max());
        EXPECT_TRUE(!badResult.isOk() &&
                    badResult.getServiceSpecificError() ==
                            static_cast<int>(EvsResult::BUFFER_NOT_AVAILABLE));

        // Now ask for exactly kBuffersToHold buffers in flight as we'll test behavior in that case
        ASSERT_TRUE(pCam->setMaxFramesInFlight(kBuffersToHold).isOk());

        // Set up a frame receiver object which will fire up its own thread.
        std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
                pCam, cam, nullptr, FrameHandler::eNoAutoReturn);
        EXPECT_NE(frameHandler, nullptr);

        // Start the camera's video stream
        ASSERT_TRUE(frameHandler->startStream());

        // Check that the video stream stalls once we've gotten exactly the number of buffers
        // we requested since we told the frameHandler not to return them.
        sleep(1);  // 1 second should be enough for at least kBuffersToHold frames to be
                   // delivered worst case
        unsigned framesReceived = 0;
        frameHandler->getFramesCounters(&framesReceived, nullptr);
        ASSERT_EQ(kBuffersToHold, framesReceived) << "Stream didn't stall at expected buffer limit";

        // Give back one buffer
        ASSERT_TRUE(frameHandler->returnHeldBuffer());

        // Once we return a buffer, it shouldn't take more than 1/10 second to get a new one
        // filled since we require 10fps minimum -- but give a 10% allowance just in case.
        usleep(110 * kMillisecondsToMicroseconds);
        frameHandler->getFramesCounters(&framesReceived, nullptr);
        EXPECT_EQ(kBuffersToHold + 1, framesReceived) << "Stream should've resumed";

        // Even when the camera pointer goes out of scope, the FrameHandler object will
        // keep the stream alive unless we tell it to shut down.
        // Also note that the FrameHandler and the Camera have a mutual circular reference, so
        // we have to break that cycle in order for either of them to get cleaned up.
        frameHandler->shutdown();

        // Explicitly release the camera
        ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
        mActiveCameras.clear();
    }
}

/*
 * CameraToDisplayRoundTrip:
 * End to end test of data flowing from the camera to the display. Each delivered frame of camera
 * imagery is simply copied to the display buffer and presented on screen. This is the one test
 * which a human could observe to see the operation of the system on the physical display.
 */
TEST_P(EvsAidlTest, CameraToDisplayRoundTrip) {
    LOG(INFO) << "Starting CameraToDisplayRoundTrip test";

    // Get the camera list
    loadCameraList();

    // Request available display IDs
    uint8_t targetDisplayId = 0;
    std::vector<uint8_t> displayIds;
    ASSERT_TRUE(mEnumerator->getDisplayIdList(&displayIds).isOk());
    EXPECT_GT(displayIds.size(), 0);
    targetDisplayId = displayIds[0];

    // Test each reported camera
    for (auto&& cam : mCameraInfo) {
        // Request exclusive access to the first EVS display
        std::shared_ptr<IEvsDisplay> pDisplay;
        ASSERT_TRUE(mEnumerator->openDisplay(targetDisplayId, &pDisplay).isOk());
        EXPECT_NE(pDisplay, nullptr);
        LOG(INFO) << "Display " << static_cast<int>(targetDisplayId) << " is in use.";

        // Get the display descriptor
        DisplayDesc displayDesc;
        ASSERT_TRUE(pDisplay->getDisplayInfo(&displayDesc).isOk());
        LOG(INFO) << " Resolution: " << displayDesc.width << "x" << displayDesc.height;
        ASSERT_GT(displayDesc.width, 0);
        ASSERT_GT(displayDesc.height, 0);

        bool isLogicalCam = false;
        getPhysicalCameraIds(cam.id, isLogicalCam);
        if (mIsHwModule && isLogicalCam) {
            LOG(INFO) << "Skip a logical device " << cam.id << " for HW target.";
            ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        std::shared_ptr<IEvsCamera> pCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
        EXPECT_NE(pCam, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pCam);

        // Set up a frame receiver object which will fire up its own thread.
        std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
                pCam, cam, pDisplay, FrameHandler::eAutoReturn);
        EXPECT_NE(frameHandler, nullptr);

        // Activate the display
        ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME).isOk());

        // Start the camera's video stream
        ASSERT_TRUE(frameHandler->startStream());

        // Wait a while to let the data flow
        static const int kSecondsToWait = 5;
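        // Budget kMaxStreamStartMilliseconds for the stream to start and require the minimum
        // frame rate only over the remaining portion of the wait.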
        const int streamTimeMs =
                kSecondsToWait * kSecondsToMilliseconds - kMaxStreamStartMilliseconds;
        const unsigned minimumFramesExpected =
                streamTimeMs * kMinimumFramesPerSecond / kSecondsToMilliseconds;
        sleep(kSecondsToWait);
        unsigned framesReceived = 0;
        unsigned framesDisplayed = 0;
        frameHandler->getFramesCounters(&framesReceived, &framesDisplayed);
        EXPECT_EQ(framesReceived, framesDisplayed);
        EXPECT_GE(framesDisplayed, minimumFramesExpected);

        // Turn off the display (yes, before the stream stops -- it should be handled)
        ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::NOT_VISIBLE).isOk());

        // Shut down the streamer
        frameHandler->shutdown();

        // Explicitly release the camera
        ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
        mActiveCameras.clear();

        // Explicitly release the display
        ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
    }
}

/*
 * MultiCameraStream:
 * Verify that each client can start and stop video streams on the same
 * underlying camera.
 */
TEST_P(EvsAidlTest, MultiCameraStream) {
    LOG(INFO) << "Starting MultiCameraStream test";

    if (mIsHwModule) {
        // This test is not for HW module implementation.
        return;
    }

    // Get the camera list
    loadCameraList();

    // Test each reported camera
    for (auto&& cam : mCameraInfo) {
        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        // Create two camera clients.
        std::shared_ptr<IEvsCamera> pCam0;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam0).isOk());
        EXPECT_NE(pCam0, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pCam0);

        std::shared_ptr<IEvsCamera> pCam1;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());
        EXPECT_NE(pCam1, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pCam1);

        // Set up per-client frame receiver objects which will fire up their own threads
        std::shared_ptr<FrameHandler> frameHandler0 = ndk::SharedRefBase::make<FrameHandler>(
                pCam0, cam, nullptr, FrameHandler::eAutoReturn);
        std::shared_ptr<FrameHandler> frameHandler1 = ndk::SharedRefBase::make<FrameHandler>(
                pCam1, cam, nullptr, FrameHandler::eAutoReturn);
        EXPECT_NE(frameHandler0, nullptr);
        EXPECT_NE(frameHandler1, nullptr);

        // Start the camera's video stream via each client
        ASSERT_TRUE(frameHandler0->startStream());
        ASSERT_TRUE(frameHandler1->startStream());

        // Ensure the stream starts
        frameHandler0->waitForFrameCount(1);
        frameHandler1->waitForFrameCount(1);

        nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);

        // Wait a bit, then ensure both clients get at least the required minimum number of frames
        sleep(5);
        nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
        unsigned framesReceived0 = 0, framesReceived1 = 0;
        frameHandler0->getFramesCounters(&framesReceived0, nullptr);
        frameHandler1->getFramesCounters(&framesReceived1, nullptr);
        framesReceived0 = framesReceived0 - 1;  // Back out the first frame we already waited for
        framesReceived1 = framesReceived1 - 1;  // Back out the first frame we already waited for
        nsecs_t runTime = end - firstFrame;
        float framesPerSecond0 = framesReceived0 / (runTime * kNanoToSeconds);
        float framesPerSecond1 = framesReceived1 / (runTime * kNanoToSeconds);
        LOG(INFO) << "Measured camera rate " << std::scientific << framesPerSecond0 << " fps and "
                  << framesPerSecond1 << " fps";
        EXPECT_GE(framesPerSecond0, kMinimumFramesPerSecond);
        EXPECT_GE(framesPerSecond1, kMinimumFramesPerSecond);

        // Shut down one client
        frameHandler0->shutdown();

        // Read frame counters again
        frameHandler0->getFramesCounters(&framesReceived0, nullptr);
        frameHandler1->getFramesCounters(&framesReceived1, nullptr);

        // Wait a bit again
        sleep(5);
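        // The stopped client's frame counter must stay frozen while the other client keeps
        // receiving frames.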
        unsigned framesReceivedAfterStop0 = 0, framesReceivedAfterStop1 = 0;
        frameHandler0->getFramesCounters(&framesReceivedAfterStop0, nullptr);
        frameHandler1->getFramesCounters(&framesReceivedAfterStop1, nullptr);
        EXPECT_EQ(framesReceived0, framesReceivedAfterStop0);
        EXPECT_LT(framesReceived1, framesReceivedAfterStop1);

        // Shut down the other client
        frameHandler1->shutdown();

        // Explicitly release the cameras
        ASSERT_TRUE(mEnumerator->closeCamera(pCam0).isOk());
        ASSERT_TRUE(mEnumerator->closeCamera(pCam1).isOk());
        mActiveCameras.clear();

        // TODO(b/145459970, b/145457727): below sleep() is added to ensure the
        // destruction of active camera objects; this may be related to these two
        // issues.
        sleep(1);
    }
}

/*
 * CameraParameter:
 * Verify that a client can adjust a camera parameter.
 */
TEST_P(EvsAidlTest, CameraParameter) {
    LOG(INFO) << "Starting CameraParameter test";

    // Get the camera list
    loadCameraList();

    // Test each reported camera
    for (auto&& cam : mCameraInfo) {
        bool isLogicalCam = false;
        getPhysicalCameraIds(cam.id, isLogicalCam);
        if (isLogicalCam) {
            // TODO(b/145465724): Support camera parameter programming on
            // logical devices.
            LOG(INFO) << "Skip a logical device " << cam.id;
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        // Create a camera client
        std::shared_ptr<IEvsCamera> pCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
        EXPECT_NE(pCam, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pCam);

        // Get the parameter list
        std::vector<CameraParam> cmds;
        ASSERT_TRUE(pCam->getParameterList(&cmds).isOk());
        if (cmds.size() < 1) {
            continue;
        }

        // Set up a frame receiver object which will fire up its own thread
        std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
                pCam, cam, nullptr, FrameHandler::eAutoReturn);
        EXPECT_NE(frameHandler, nullptr);

        // Start the camera's video stream
        ASSERT_TRUE(frameHandler->startStream());

        // Ensure the stream starts
        frameHandler->waitForFrameCount(1);

        // Set the current client as the primary client
        ASSERT_TRUE(pCam->setPrimaryClient().isOk());
        for (auto& cmd : cmds) {
            // Get a valid parameter value range
            ParameterRange range;
            ASSERT_TRUE(pCam->getIntParameterRange(cmd, &range).isOk());

            std::vector<int32_t> values;
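            // Auto-focus is turned off before ABSOLUTE_FOCUS is programmed; manual focus values
            // are assumed to take effect only while auto-focus is disabled.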
            if (cmd == CameraParam::ABSOLUTE_FOCUS) {
                // Try to turn off auto-focus
                ASSERT_TRUE(pCam->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
                for (auto&& v : values) {
                    EXPECT_EQ(v, 0);
                }
            }

            // Try to program a parameter with a random value within [range.min, range.max]
            int32_t val0 = range.min + (std::rand() % (range.max - range.min));

            // Round down to the nearest multiple of the step size
            val0 = val0 - (val0 % range.step);
            values.clear();
            ASSERT_TRUE(pCam->setIntParameter(cmd, val0, &values).isOk());

            values.clear();
            ASSERT_TRUE(pCam->getIntParameter(cmd, &values).isOk());
            for (auto&& v : values) {
                EXPECT_EQ(val0, v) << "Values do not match.";
            }
        }
        ASSERT_TRUE(pCam->unsetPrimaryClient().isOk());

        // Shutdown
        frameHandler->shutdown();

        // Explicitly release the camera
        ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
        mActiveCameras.clear();
    }
}

/*
 * CameraPrimaryClientRelease
 * Verify that a non-primary client gets notified when the primary client either
 * terminates or releases its role.
 */
TEST_P(EvsAidlTest, CameraPrimaryClientRelease) {
    LOG(INFO) << "Starting CameraPrimaryClientRelease test";

    if (mIsHwModule) {
        // This test is not for HW module implementation.
        return;
    }

    // Get the camera list
    loadCameraList();

    // Test each reported camera
    for (auto&& cam : mCameraInfo) {
        bool isLogicalCam = false;
        getPhysicalCameraIds(cam.id, isLogicalCam);
        if (isLogicalCam) {
            // TODO(b/145465724): Support camera parameter programming on
            // logical devices.
            LOG(INFO) << "Skip a logical device " << cam.id;
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        // Create two camera clients.
        std::shared_ptr<IEvsCamera> pPrimaryCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pPrimaryCam).isOk());
        EXPECT_NE(pPrimaryCam, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pPrimaryCam);

        std::shared_ptr<IEvsCamera> pSecondaryCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pSecondaryCam).isOk());
        EXPECT_NE(pSecondaryCam, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pSecondaryCam);

        // Set up per-client frame receiver objects which will fire up their own threads
        std::shared_ptr<FrameHandler> frameHandlerPrimary = ndk::SharedRefBase::make<FrameHandler>(
                pPrimaryCam, cam, nullptr, FrameHandler::eAutoReturn);
        std::shared_ptr<FrameHandler> frameHandlerSecondary =
                ndk::SharedRefBase::make<FrameHandler>(pSecondaryCam, cam, nullptr,
                                                       FrameHandler::eAutoReturn);
        EXPECT_NE(frameHandlerPrimary, nullptr);
        EXPECT_NE(frameHandlerSecondary, nullptr);

        // Set one client as the primary client
        ASSERT_TRUE(pPrimaryCam->setPrimaryClient().isOk());

        // Try to set another client as the primary client.
        ASSERT_FALSE(pSecondaryCam->setPrimaryClient().isOk());

        // Start the camera's video stream via the primary client.
        ASSERT_TRUE(frameHandlerPrimary->startStream());

        // Ensure the stream starts
        frameHandlerPrimary->waitForFrameCount(1);

        // Start the camera's video stream via another client
        ASSERT_TRUE(frameHandlerSecondary->startStream());

        // Ensure the stream starts
        frameHandlerSecondary->waitForFrameCount(1);

        // The non-primary client expects to receive a primary-client-role-released
        // notification.
        EvsEventDesc aTargetEvent = {};
        EvsEventDesc aNotification = {};

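        // Start a listener thread and wait on the condition variable until it is actually
        // running, so the MASTER_RELEASED event cannot be missed.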
        bool listening = false;
        std::mutex eventLock;
        std::condition_variable eventCond;
        std::thread listener =
                std::thread([&aNotification, &frameHandlerSecondary, &listening, &eventCond]() {
                    // Notify that the listening thread is running.
                    listening = true;
                    eventCond.notify_all();

                    EvsEventDesc aTargetEvent;
                    aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
                    if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification, true)) {
                        LOG(WARNING) << "The timer expired before the target event was fired.";
                    }
                });

        // Wait until the listening thread starts.
        std::unique_lock<std::mutex> lock(eventLock);
        auto timer = std::chrono::system_clock::now();
        while (!listening) {
            timer += 1s;
            eventCond.wait_until(lock, timer);
        }
        lock.unlock();

        // Release the primary client role.
        ASSERT_TRUE(pPrimaryCam->unsetPrimaryClient().isOk());

        // Join the listening thread.
        if (listener.joinable()) {
            listener.join();
        }

        // Verify change notifications.
        ASSERT_EQ(EvsEventType::MASTER_RELEASED, static_cast<EvsEventType>(aNotification.aType));

        // The non-primary client becomes the primary client.
        ASSERT_TRUE(pSecondaryCam->setPrimaryClient().isOk());

        // The previous primary client fails to become the primary client.
        ASSERT_FALSE(pPrimaryCam->setPrimaryClient().isOk());

        listening = false;
        listener = std::thread([&aNotification, &frameHandlerPrimary, &listening, &eventCond]() {
            // Notify that the listening thread is running.
            listening = true;
            eventCond.notify_all();

            EvsEventDesc aTargetEvent;
            aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
            if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification, true)) {
                LOG(WARNING) << "The timer expired before the target event was fired.";
            }
        });

        // Wait until the listening thread starts.
        timer = std::chrono::system_clock::now();
        lock.lock();
        while (!listening) {
            eventCond.wait_until(lock, timer + 1s);
        }
        lock.unlock();

        // Close the current primary client.
        frameHandlerSecondary->shutdown();

        // Join the listening thread.
        if (listener.joinable()) {
            listener.join();
        }

        // Verify change notifications.
        ASSERT_EQ(EvsEventType::MASTER_RELEASED, static_cast<EvsEventType>(aNotification.aType));

        // Close the streams.
        frameHandlerPrimary->shutdown();

        // Explicitly release the cameras
        ASSERT_TRUE(mEnumerator->closeCamera(pPrimaryCam).isOk());
        ASSERT_TRUE(mEnumerator->closeCamera(pSecondaryCam).isOk());
        mActiveCameras.clear();
    }
}

/*
 * MultiCameraParameter:
 * Verify that primary and non-primary clients behave as expected when they try to adjust
 * camera parameters.
 */
TEST_P(EvsAidlTest, MultiCameraParameter) {
    LOG(INFO) << "Starting MultiCameraParameter test";

    if (mIsHwModule) {
        // This test is not for HW module implementation.
        return;
    }

    // Get the camera list
    loadCameraList();

    // Test each reported camera
    for (auto&& cam : mCameraInfo) {
        bool isLogicalCam = false;
        getPhysicalCameraIds(cam.id, isLogicalCam);
        if (isLogicalCam) {
            // TODO(b/145465724): Support camera parameter programming on
            // logical devices.
            LOG(INFO) << "Skip a logical device " << cam.id;
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        // Create two camera clients.
        std::shared_ptr<IEvsCamera> pPrimaryCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pPrimaryCam).isOk());
        EXPECT_NE(pPrimaryCam, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pPrimaryCam);

        std::shared_ptr<IEvsCamera> pSecondaryCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pSecondaryCam).isOk());
        EXPECT_NE(pSecondaryCam, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pSecondaryCam);

        // Get the parameter list
        std::vector<CameraParam> camPrimaryCmds, camSecondaryCmds;
        ASSERT_TRUE(pPrimaryCam->getParameterList(&camPrimaryCmds).isOk());
        ASSERT_TRUE(pSecondaryCam->getParameterList(&camSecondaryCmds).isOk());
        if (camPrimaryCmds.size() < 1 || camSecondaryCmds.size() < 1) {
            // Skip a camera device if it does not support any parameter.
            continue;
        }

        // Set up per-client frame receiver objects which will fire up their own threads
        std::shared_ptr<FrameHandler> frameHandlerPrimary = ndk::SharedRefBase::make<FrameHandler>(
                pPrimaryCam, cam, nullptr, FrameHandler::eAutoReturn);
        std::shared_ptr<FrameHandler> frameHandlerSecondary =
                ndk::SharedRefBase::make<FrameHandler>(pSecondaryCam, cam, nullptr,
                                                       FrameHandler::eAutoReturn);
        EXPECT_NE(frameHandlerPrimary, nullptr);
        EXPECT_NE(frameHandlerSecondary, nullptr);

        // Set one client as the primary client.
        ASSERT_TRUE(pPrimaryCam->setPrimaryClient().isOk());

        // Try to set another client as the primary client.
        ASSERT_FALSE(pSecondaryCam->setPrimaryClient().isOk());

        // Start the camera's video stream via the primary client.
        ASSERT_TRUE(frameHandlerPrimary->startStream());

        // Ensure the stream starts
        frameHandlerPrimary->waitForFrameCount(1);

        // Start the camera's video stream via another client
        ASSERT_TRUE(frameHandlerSecondary->startStream());

        // Ensure the stream starts
        frameHandlerSecondary->waitForFrameCount(1);

        int32_t val0 = 0;
        std::vector<int32_t> values;
        EvsEventDesc aNotification0 = {};
        EvsEventDesc aNotification1 = {};
        for (auto& cmd : camPrimaryCmds) {
            // Get a valid parameter value range
            ParameterRange range;
            ASSERT_TRUE(pPrimaryCam->getIntParameterRange(cmd, &range).isOk());
            if (cmd == CameraParam::ABSOLUTE_FOCUS) {
                // Try to turn off auto-focus
                values.clear();
                ASSERT_TRUE(
                        pPrimaryCam->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
                for (auto&& v : values) {
                    EXPECT_EQ(v, 0);
                }
            }

            // Calculate a parameter value to program.
            val0 = range.min + (std::rand() % (range.max - range.min));
            val0 = val0 - (val0 % range.step);

            // Prepare and start event listeners.
            bool listening0 = false;
            bool listening1 = false;
            std::condition_variable eventCond;
            std::thread listener0 = std::thread([cmd, val0, &aNotification0, &frameHandlerPrimary,
                                                 &listening0, &listening1, &eventCond]() {
                listening0 = true;
                if (listening1) {
                    eventCond.notify_all();
                }

                EvsEventDesc aTargetEvent;
                aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
                aTargetEvent.payload.push_back(static_cast<int32_t>(cmd));
                aTargetEvent.payload.push_back(val0);
                if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification0)) {
                    LOG(WARNING) << "The timer expired before the target event was fired.";
                }
            });
            std::thread listener1 = std::thread([cmd, val0, &aNotification1, &frameHandlerSecondary,
                                                 &listening0, &listening1, &eventCond]() {
                listening1 = true;
                if (listening0) {
                    eventCond.notify_all();
                }

                EvsEventDesc aTargetEvent;
                aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
                aTargetEvent.payload.push_back(static_cast<int32_t>(cmd));
                aTargetEvent.payload.push_back(val0);
                if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification1)) {
                    LOG(WARNING) << "The timer expired before the target event was fired.";
                }
            });

            // Wait until the listening threads start.
            std::mutex eventLock;
            std::unique_lock<std::mutex> lock(eventLock);
            auto timer = std::chrono::system_clock::now();
            while (!listening0 || !listening1) {
                eventCond.wait_until(lock, timer + 1s);
            }
            lock.unlock();

            // Try to program a parameter
            values.clear();
            ASSERT_TRUE(pPrimaryCam->setIntParameter(cmd, val0, &values).isOk());
            for (auto&& v : values) {
                EXPECT_EQ(val0, v) << "Values do not match.";
            }

            // Join the listening threads.
            if (listener0.joinable()) {
                listener0.join();
            }
            if (listener1.joinable()) {
                listener1.join();
            }

            // Verify a change notification
            ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
                      static_cast<EvsEventType>(aNotification0.aType));
            ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
                      static_cast<EvsEventType>(aNotification1.aType));
            ASSERT_GE(aNotification0.payload.size(), 2);
            ASSERT_GE(aNotification1.payload.size(), 2);
            ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification0.payload[0]));
            ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification1.payload[0]));
            for (auto&& v : values) {
                ASSERT_EQ(v, aNotification0.payload[1]);
                ASSERT_EQ(v, aNotification1.payload[1]);
            }

            // Clients expect to receive a parameter change notification
            // whenever the primary client adjusts it.
            values.clear();
            ASSERT_TRUE(pPrimaryCam->getIntParameter(cmd, &values).isOk());
            for (auto&& v : values) {
                EXPECT_EQ(val0, v) << "Values do not match.";
            }
        }

        // Try to adjust a parameter via the non-primary client
        values.clear();
        ASSERT_FALSE(pSecondaryCam->setIntParameter(camSecondaryCmds[0], val0, &values).isOk());

        // The non-primary client attempts to become the primary client
        ASSERT_FALSE(pSecondaryCam->setPrimaryClient().isOk());

        // The primary client retires from the primary client role
        bool listening = false;
        std::condition_variable eventCond;
        std::thread listener =
                std::thread([&aNotification0, &frameHandlerSecondary, &listening, &eventCond]() {
                    listening = true;
                    eventCond.notify_all();

                    EvsEventDesc aTargetEvent;
                    aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
                    if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification0, true)) {
                        LOG(WARNING) << "The timer expired before the target event was fired.";
                    }
                });

        std::mutex eventLock;
        auto timer = std::chrono::system_clock::now();
        std::unique_lock<std::mutex> lock(eventLock);
        while (!listening) {
            eventCond.wait_until(lock, timer + 1s);
        }
        lock.unlock();

        ASSERT_TRUE(pPrimaryCam->unsetPrimaryClient().isOk());

        if (listener.joinable()) {
            listener.join();
        }
        ASSERT_EQ(EvsEventType::MASTER_RELEASED, static_cast<EvsEventType>(aNotification0.aType));

        // Try to adjust a parameter after being retired
        values.clear();
        ASSERT_FALSE(pPrimaryCam->setIntParameter(camPrimaryCmds[0], val0, &values).isOk());

        // The non-primary client becomes the primary client
        ASSERT_TRUE(pSecondaryCam->setPrimaryClient().isOk());

        // Try to adjust a parameter via the new primary client
        for (auto& cmd : camSecondaryCmds) {
            // Get a valid parameter value range
            ParameterRange range;
            ASSERT_TRUE(pSecondaryCam->getIntParameterRange(cmd, &range).isOk());

            values.clear();
            if (cmd == CameraParam::ABSOLUTE_FOCUS) {
                // Try to turn off auto-focus
                values.clear();
                ASSERT_TRUE(
                        pSecondaryCam->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
                for (auto&& v : values) {
                    EXPECT_EQ(v, 0);
                }
            }

            // Calculate a parameter value to program, rounding down to the nearest step.
            val0 = range.min + (std::rand() % (range.max - range.min));
            val0 = val0 - (val0 % range.step);

            // Prepare and start event listeners.
            bool listening0 = false;
            bool listening1 = false;
            std::condition_variable eventCond;
            std::thread listener0 = std::thread([&]() {
                listening0 = true;
                if (listening1) {
                    eventCond.notify_all();
                }

                EvsEventDesc aTargetEvent;
                aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
                aTargetEvent.payload.push_back(static_cast<int32_t>(cmd));
                aTargetEvent.payload.push_back(val0);
                if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification0)) {
                    LOG(WARNING) << "The timer expired before the target event was fired.";
                }
            });
            std::thread listener1 = std::thread([&]() {
                listening1 = true;
                if (listening0) {
                    eventCond.notify_all();
                }

                EvsEventDesc aTargetEvent;
                aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
                aTargetEvent.payload.push_back(static_cast<int32_t>(cmd));
                aTargetEvent.payload.push_back(val0);
                if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification1)) {
                    LOG(WARNING) << "The timer expired before the target event was fired.";
                }
            });

            // Wait until the listening threads start.
            std::mutex eventLock;
            std::unique_lock<std::mutex> lock(eventLock);
            auto timer = std::chrono::system_clock::now();
            while (!listening0 || !listening1) {
                eventCond.wait_until(lock, timer + 1s);
            }
            lock.unlock();

            // Try to program a parameter
            values.clear();
            ASSERT_TRUE(pSecondaryCam->setIntParameter(cmd, val0, &values).isOk());

            // Clients expect to receive a parameter change notification
            // whenever the primary client adjusts it.
            values.clear();
            ASSERT_TRUE(pSecondaryCam->getIntParameter(cmd, &values).isOk());
            for (auto&& v : values) {
                EXPECT_EQ(val0, v) << "Values do not match.";
            }

            // Join the listening threads.
            if (listener0.joinable()) {
                listener0.join();
            }
            if (listener1.joinable()) {
                listener1.join();
            }

            // Verify a change notification
            ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
                      static_cast<EvsEventType>(aNotification0.aType));
            ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
                      static_cast<EvsEventType>(aNotification1.aType));
            ASSERT_GE(aNotification0.payload.size(), 2);
            ASSERT_GE(aNotification1.payload.size(), 2);
            ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification0.payload[0]));
            ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification1.payload[0]));
            for (auto&& v : values) {
                ASSERT_EQ(v, aNotification0.payload[1]);
                ASSERT_EQ(v, aNotification1.payload[1]);
            }
        }

        // The new primary client retires from the role
        ASSERT_TRUE(pSecondaryCam->unsetPrimaryClient().isOk());

        // Shutdown
        frameHandlerPrimary->shutdown();
        frameHandlerSecondary->shutdown();

        // Explicitly release the cameras
        ASSERT_TRUE(mEnumerator->closeCamera(pPrimaryCam).isOk());
        ASSERT_TRUE(mEnumerator->closeCamera(pSecondaryCam).isOk());
        mActiveCameras.clear();
    }
}
1396
1397/*
1398 * HighPriorityCameraClient:
1399 * EVS client, which owns the display, is priortized and therefore can take over
1400 * a primary client role from other EVS clients without the display.
1401 */
1402TEST_P(EvsAidlTest, HighPriorityCameraClient) {
1403 LOG(INFO) << "Starting HighPriorityCameraClient test";
1404
1405 if (mIsHwModule) {
1406 // This test is not for HW module implementation.
1407 return;
1408 }
1409
1410 // Get the camera list
1411 loadCameraList();
1412
Changyeon Jo80189012021-10-10 16:34:21 -07001413 // Test each reported camera
1414 for (auto&& cam : mCameraInfo) {
Changyeon Jo017cb982022-11-16 22:04:38 +00001415 // Request available display IDs
1416 uint8_t targetDisplayId = 0;
1417 std::vector<uint8_t> displayIds;
1418 ASSERT_TRUE(mEnumerator->getDisplayIdList(&displayIds).isOk());
1419 EXPECT_GT(displayIds.size(), 0);
1420 targetDisplayId = displayIds[0];
1421
1422 // Request exclusive access to the EVS display
1423 std::shared_ptr<IEvsDisplay> pDisplay;
1424 ASSERT_TRUE(mEnumerator->openDisplay(targetDisplayId, &pDisplay).isOk());
1425 EXPECT_NE(pDisplay, nullptr);
1426
Changyeon Jo80189012021-10-10 16:34:21 -07001427 // Read a target resolution from the metadata
1428 Stream targetCfg = getFirstStreamConfiguration(
1429 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
1430 ASSERT_GT(targetCfg.width, 0);
1431 ASSERT_GT(targetCfg.height, 0);
1432
1433 // Create two clients
1434 std::shared_ptr<IEvsCamera> pCam0;
1435 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam0).isOk());
1436 EXPECT_NE(pCam0, nullptr);
1437
1438 // Store a camera handle for a clean-up
1439 mActiveCameras.push_back(pCam0);
1440
1441 std::shared_ptr<IEvsCamera> pCam1;
1442 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());
1443 EXPECT_NE(pCam1, nullptr);
1444
1445 // Store a camera handle for a clean-up
1446 mActiveCameras.push_back(pCam1);
1447
1448 // Get the parameter list; this test will use the first command in both
1449 // lists.
1450 std::vector<CameraParam> cam0Cmds, cam1Cmds;
1451 ASSERT_TRUE(pCam0->getParameterList(&cam0Cmds).isOk());
1452 ASSERT_TRUE(pCam1->getParameterList(&cam1Cmds).isOk());
1453 if (cam0Cmds.size() < 1 || cam1Cmds.size() < 1) {
1454 // Cannot execute this test.
Changyeon Jo7793baa2023-01-19 13:18:47 -08001455 ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
1456 continue;
Changyeon Jo80189012021-10-10 16:34:21 -07001457 }
1458
1459        // Set up frame receiver objects, each of which will fire up its own thread.
Frederick Mayle7056b242022-03-29 02:38:12 +00001460 std::shared_ptr<FrameHandler> frameHandler0 = ndk::SharedRefBase::make<FrameHandler>(
1461 pCam0, cam, nullptr, FrameHandler::eAutoReturn);
1462 std::shared_ptr<FrameHandler> frameHandler1 = ndk::SharedRefBase::make<FrameHandler>(
1463 pCam1, cam, nullptr, FrameHandler::eAutoReturn);
Changyeon Jo80189012021-10-10 16:34:21 -07001464 EXPECT_NE(frameHandler0, nullptr);
1465 EXPECT_NE(frameHandler1, nullptr);
1466
1467 // Activate the display
1468 ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME).isOk());
1469
1470 // Start the camera's video stream
1471 ASSERT_TRUE(frameHandler0->startStream());
1472 ASSERT_TRUE(frameHandler1->startStream());
1473
1474 // Ensure the stream starts
1475 frameHandler0->waitForFrameCount(1);
1476 frameHandler1->waitForFrameCount(1);
1477
1478 // Client 1 becomes a primary client and programs a parameter.
1479
1480 // Get a valid parameter value range
1481 ParameterRange range;
1482 ASSERT_TRUE(pCam1->getIntParameterRange(cam1Cmds[0], &range).isOk());
1483
1484 // Client1 becomes a primary client
1485 ASSERT_TRUE(pCam1->setPrimaryClient().isOk());
1486
1487 std::vector<int32_t> values;
1488 EvsEventDesc aTargetEvent = {};
1489 EvsEventDesc aNotification = {};
1490 bool listening = false;
1491 std::mutex eventLock;
1492 std::condition_variable eventCond;
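        // A helper thread is used throughout this test to wait for the expected
        // PARAMETER_CHANGED (or MASTER_RELEASED) notification; the "listening" flag and
        // condition variable only make sure that thread is armed before the main thread
        // triggers the change, so the notification cannot be missed.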
1493 if (cam1Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
1494 std::thread listener =
1495 std::thread([&frameHandler0, &aNotification, &listening, &eventCond] {
1496 listening = true;
1497 eventCond.notify_all();
1498
1499 EvsEventDesc aTargetEvent;
1500 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
Changyeon Jo9f6f5922022-04-12 19:29:10 -07001501 aTargetEvent.payload.push_back(
1502 static_cast<int32_t>(CameraParam::AUTO_FOCUS));
1503 aTargetEvent.payload.push_back(0);
Changyeon Jo80189012021-10-10 16:34:21 -07001504 if (!frameHandler0->waitForEvent(aTargetEvent, aNotification)) {
1505                            LOG(WARNING) << "The timer expired before the target event was fired.";
1506 }
1507 });
1508
1509            // Wait until the listener starts.
1510 std::unique_lock<std::mutex> lock(eventLock);
1511 auto timer = std::chrono::system_clock::now();
1512 while (!listening) {
1513 eventCond.wait_until(lock, timer + 1s);
1514 }
1515 lock.unlock();
1516
1517 // Try to turn off auto-focus
1518 ASSERT_TRUE(pCam1->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
1519 for (auto&& v : values) {
1520 EXPECT_EQ(v, 0);
1521 }
1522
1523 // Join a listener
1524 if (listener.joinable()) {
1525 listener.join();
1526 }
1527
1528            // Make sure a PARAMETER_CHANGED notification was received for the AUTO_FOCUS change.
1529 ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
1530 EvsEventType::PARAMETER_CHANGED);
1531 }
1532
1533        // Try to program a parameter with a random value in [min, max) after
1534        // rounding it down to the nearest step boundary.
1535 int32_t val0 = range.min + (std::rand() % (range.max - range.min));
1536 val0 = val0 - (val0 % range.step);
1537
1538 std::thread listener = std::thread(
1539 [&frameHandler1, &aNotification, &listening, &eventCond, &cam1Cmds, val0] {
1540 listening = true;
1541 eventCond.notify_all();
1542
1543 EvsEventDesc aTargetEvent;
1544 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
Changyeon Jo9f6f5922022-04-12 19:29:10 -07001545 aTargetEvent.payload.push_back(static_cast<int32_t>(cam1Cmds[0]));
1546 aTargetEvent.payload.push_back(val0);
Changyeon Jo80189012021-10-10 16:34:21 -07001547 if (!frameHandler1->waitForEvent(aTargetEvent, aNotification)) {
1548                        LOG(WARNING) << "The timer expired before the target event was fired.";
1549 }
1550 });
1551
1552        // Wait until the listener starts.
1553 listening = false;
1554 std::unique_lock<std::mutex> lock(eventLock);
1555 auto timer = std::chrono::system_clock::now();
1556 while (!listening) {
1557 eventCond.wait_until(lock, timer + 1s);
1558 }
1559 lock.unlock();
1560
1561 values.clear();
1562 ASSERT_TRUE(pCam1->setIntParameter(cam1Cmds[0], val0, &values).isOk());
1563 for (auto&& v : values) {
1564 EXPECT_EQ(val0, v);
1565 }
1566
1567 // Join a listener
1568 if (listener.joinable()) {
1569 listener.join();
1570 }
1571
1572 // Verify a change notification
1573 ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType), EvsEventType::PARAMETER_CHANGED);
Changyeon Jo9f6f5922022-04-12 19:29:10 -07001574 ASSERT_GE(aNotification.payload.size(), 2);
Changyeon Jo80189012021-10-10 16:34:21 -07001575 ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]), cam1Cmds[0]);
1576 for (auto&& v : values) {
Changyeon Jo9f6f5922022-04-12 19:29:10 -07001577 ASSERT_EQ(v, aNotification.payload[1]);
Changyeon Jo80189012021-10-10 16:34:21 -07001578 }
1579
1580 listener = std::thread([&frameHandler1, &aNotification, &listening, &eventCond] {
1581 listening = true;
1582 eventCond.notify_all();
1583
1584 EvsEventDesc aTargetEvent;
1585 aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
1586 if (!frameHandler1->waitForEvent(aTargetEvent, aNotification, true)) {
1587                LOG(WARNING) << "The timer expired before the target event was fired.";
1588 }
1589 });
1590
1591        // Wait until the listener starts.
1592 listening = false;
1593 lock.lock();
1594 timer = std::chrono::system_clock::now();
1595 while (!listening) {
1596 eventCond.wait_until(lock, timer + 1s);
1597 }
1598 lock.unlock();
1599
1600 // Client 0 steals a primary client role
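        // As the test description above notes, ownership of the display is what grants
        // this client its higher priority; the display handle is passed here presumably
        // as proof of that ownership.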
1601 ASSERT_TRUE(pCam0->forcePrimaryClient(pDisplay).isOk());
1602
1603 // Join a listener
1604 if (listener.joinable()) {
1605 listener.join();
1606 }
1607
1608 ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType), EvsEventType::MASTER_RELEASED);
1609
1610 // Client 0 programs a parameter
1611 val0 = range.min + (std::rand() % (range.max - range.min));
1612
1613 // Rounding down
1614 val0 = val0 - (val0 % range.step);
1615
1616 if (cam0Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
1617 std::thread listener =
1618 std::thread([&frameHandler1, &aNotification, &listening, &eventCond] {
1619 listening = true;
1620 eventCond.notify_all();
1621
1622 EvsEventDesc aTargetEvent;
1623 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
Changyeon Jo9f6f5922022-04-12 19:29:10 -07001624 aTargetEvent.payload.push_back(
1625 static_cast<int32_t>(CameraParam::AUTO_FOCUS));
1626 aTargetEvent.payload.push_back(0);
Changyeon Jo80189012021-10-10 16:34:21 -07001627 if (!frameHandler1->waitForEvent(aTargetEvent, aNotification)) {
1628                            LOG(WARNING) << "The timer expired before the target event was fired.";
1629 }
1630 });
1631
1632            // Wait until the listener starts.
1633 std::unique_lock<std::mutex> lock(eventLock);
1634 auto timer = std::chrono::system_clock::now();
1635 while (!listening) {
1636 eventCond.wait_until(lock, timer + 1s);
1637 }
1638 lock.unlock();
1639
1640 // Try to turn off auto-focus
1641 values.clear();
1642 ASSERT_TRUE(pCam0->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
1643 for (auto&& v : values) {
1644 EXPECT_EQ(v, 0);
1645 }
1646
1647 // Join a listener
1648 if (listener.joinable()) {
1649 listener.join();
1650 }
1651
1652            // Make sure a PARAMETER_CHANGED notification was received for the AUTO_FOCUS change.
1653 ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
1654 EvsEventType::PARAMETER_CHANGED);
1655 }
1656
1657 listener = std::thread(
1658 [&frameHandler0, &aNotification, &listening, &eventCond, &cam0Cmds, val0] {
1659 listening = true;
1660 eventCond.notify_all();
1661
1662 EvsEventDesc aTargetEvent;
1663 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
Changyeon Jo9f6f5922022-04-12 19:29:10 -07001664 aTargetEvent.payload.push_back(static_cast<int32_t>(cam0Cmds[0]));
1665 aTargetEvent.payload.push_back(val0);
Changyeon Jo80189012021-10-10 16:34:21 -07001666 if (!frameHandler0->waitForEvent(aTargetEvent, aNotification)) {
1667                        LOG(WARNING) << "The timer expired before the target event was fired.";
1668 }
1669 });
1670
1671        // Wait until the listener starts.
1672 listening = false;
1673 timer = std::chrono::system_clock::now();
1674 lock.lock();
1675 while (!listening) {
1676 eventCond.wait_until(lock, timer + 1s);
1677 }
1678 lock.unlock();
1679
1680 values.clear();
1681 ASSERT_TRUE(pCam0->setIntParameter(cam0Cmds[0], val0, &values).isOk());
1682
1683 // Join a listener
1684 if (listener.joinable()) {
1685 listener.join();
1686 }
1687 // Verify a change notification
1688 ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType), EvsEventType::PARAMETER_CHANGED);
Changyeon Jo9f6f5922022-04-12 19:29:10 -07001689 ASSERT_GE(aNotification.payload.size(), 2);
Changyeon Jo80189012021-10-10 16:34:21 -07001690 ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]), cam0Cmds[0]);
1691 for (auto&& v : values) {
Changyeon Jo9f6f5922022-04-12 19:29:10 -07001692 ASSERT_EQ(v, aNotification.payload[1]);
Changyeon Jo80189012021-10-10 16:34:21 -07001693 }
1694
1695 // Turn off the display (yes, before the stream stops -- it should be handled)
1696 ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::NOT_VISIBLE).isOk());
1697
1698 // Shut down the streamer
1699 frameHandler0->shutdown();
1700 frameHandler1->shutdown();
1701
1702 // Explicitly release the camera
1703 ASSERT_TRUE(mEnumerator->closeCamera(pCam0).isOk());
1704 ASSERT_TRUE(mEnumerator->closeCamera(pCam1).isOk());
1705 mActiveCameras.clear();
Changyeon Jo80189012021-10-10 16:34:21 -07001706
Changyeon Jo017cb982022-11-16 22:04:38 +00001707 // Explicitly release the display
1708 ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
1709 }
Changyeon Jo80189012021-10-10 16:34:21 -07001710}
1711
1712/*
1713 * CameraUseStreamConfigToDisplay:
1714 * End-to-end test of data flowing from the camera to the display. Similar to
1715 * the CameraToDisplayRoundTrip test case, but this one retrieves the available stream
1716 * configurations from EVS and uses one of them to start a video stream.
1717 */
1718TEST_P(EvsAidlTest, CameraUseStreamConfigToDisplay) {
1719 LOG(INFO) << "Starting CameraUseStreamConfigToDisplay test";
1720
1721 // Get the camera list
1722 loadCameraList();
1723
1724 // Request available display IDs
1725 uint8_t targetDisplayId = 0;
1726 std::vector<uint8_t> displayIds;
1727 ASSERT_TRUE(mEnumerator->getDisplayIdList(&displayIds).isOk());
1728 EXPECT_GT(displayIds.size(), 0);
1729 targetDisplayId = displayIds[0];
1730
Changyeon Jo80189012021-10-10 16:34:21 -07001731 // Test each reported camera
1732 for (auto&& cam : mCameraInfo) {
Changyeon Jo017cb982022-11-16 22:04:38 +00001733 // Request exclusive access to the EVS display
1734 std::shared_ptr<IEvsDisplay> pDisplay;
1735 ASSERT_TRUE(mEnumerator->openDisplay(targetDisplayId, &pDisplay).isOk());
1736 EXPECT_NE(pDisplay, nullptr);
1737
Changyeon Jo80189012021-10-10 16:34:21 -07001738        // Choose a configuration whose frame rate is at least minReqFps.
1739 Stream targetCfg = {};
1740 const int32_t minReqFps = 15;
1741 int32_t maxArea = 0;
1742 camera_metadata_entry_t streamCfgs;
1743 bool foundCfg = false;
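        // The stream configurations are assumed to be packed as groups of kStreamCfgSz
        // int32 values that the RawStreamConfig struct (defined earlier in this file)
        // maps to width, height, format, direction, and frame rate; the loop below picks
        // the largest output configuration that meets minReqFps.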
1744 if (!find_camera_metadata_entry(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()),
1745 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
1746 &streamCfgs)) {
1747 // Stream configurations are found in metadata
1748 RawStreamConfig* ptr = reinterpret_cast<RawStreamConfig*>(streamCfgs.data.i32);
1749 for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
Changyeon Jo7f5ad612022-08-17 21:47:58 -07001750 if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
Changyeon Jo80189012021-10-10 16:34:21 -07001751 if (ptr->width * ptr->height > maxArea && ptr->framerate >= minReqFps) {
1752 targetCfg.width = ptr->width;
1753 targetCfg.height = ptr->height;
Changyeon Jo7f5ad612022-08-17 21:47:58 -07001754 targetCfg.format = static_cast<PixelFormat>(ptr->format);
Changyeon Jo80189012021-10-10 16:34:21 -07001755
1756 maxArea = ptr->width * ptr->height;
1757 foundCfg = true;
1758 }
1759 }
1760 ++ptr;
1761 }
1762 }
Changyeon Jo80189012021-10-10 16:34:21 -07001763
1764 if (!foundCfg) {
1765 // Current EVS camera does not provide stream configurations in the
1766 // metadata.
1767 continue;
1768 }
1769
1770 std::shared_ptr<IEvsCamera> pCam;
1771 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
1772 EXPECT_NE(pCam, nullptr);
1773
1774 // Store a camera handle for a clean-up
1775 mActiveCameras.push_back(pCam);
1776
1777 // Set up a frame receiver object which will fire up its own thread.
Frederick Mayle7056b242022-03-29 02:38:12 +00001778 std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
1779 pCam, cam, pDisplay, FrameHandler::eAutoReturn);
Changyeon Jo80189012021-10-10 16:34:21 -07001780 EXPECT_NE(frameHandler, nullptr);
1781
1782 // Activate the display
1783 ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME).isOk());
1784
1785 // Start the camera's video stream
1786 ASSERT_TRUE(frameHandler->startStream());
1787
1788 // Wait a while to let the data flow
1789 static const int kSecondsToWait = 5;
1790 const int streamTimeMs =
1791 kSecondsToWait * kSecondsToMilliseconds - kMaxStreamStartMilliseconds;
1792 const unsigned minimumFramesExpected =
1793 streamTimeMs * kMinimumFramesPerSecond / kSecondsToMilliseconds;
1794 sleep(kSecondsToWait);
1795 unsigned framesReceived = 0;
1796 unsigned framesDisplayed = 0;
1797 frameHandler->getFramesCounters(&framesReceived, &framesDisplayed);
1798 EXPECT_EQ(framesReceived, framesDisplayed);
1799 EXPECT_GE(framesDisplayed, minimumFramesExpected);
1800
1801 // Turn off the display (yes, before the stream stops -- it should be handled)
1802 ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::NOT_VISIBLE).isOk());
1803
1804 // Shut down the streamer
1805 frameHandler->shutdown();
1806
1807 // Explicitly release the camera
1808 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
1809 mActiveCameras.clear();
Changyeon Jo80189012021-10-10 16:34:21 -07001810
Changyeon Jo017cb982022-11-16 22:04:38 +00001811 // Explicitly release the display
1812 ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
1813 }
Changyeon Jo80189012021-10-10 16:34:21 -07001814}
1815
1816/*
1817 * MultiCameraStreamUseConfig:
1818 * Verify that each client can start and stop video streams on the same
1819 * underlying camera using the same stream configuration.
1820 */
1821TEST_P(EvsAidlTest, MultiCameraStreamUseConfig) {
1822    LOG(INFO) << "Starting MultiCameraStreamUseConfig test";
1823
1824 if (mIsHwModule) {
1825 // This test is not for HW module implementation.
1826 return;
1827 }
1828
1829 // Get the camera list
1830 loadCameraList();
1831
1832 // Test each reported camera
1833 for (auto&& cam : mCameraInfo) {
1834        // Choose a configuration whose frame rate is at least minReqFps.
1835 Stream targetCfg = {};
1836 const int32_t minReqFps = 15;
1837 int32_t maxArea = 0;
1838 camera_metadata_entry_t streamCfgs;
1839 bool foundCfg = false;
1840 if (!find_camera_metadata_entry(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()),
1841 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
1842 &streamCfgs)) {
1843 // Stream configurations are found in metadata
1844 RawStreamConfig* ptr = reinterpret_cast<RawStreamConfig*>(streamCfgs.data.i32);
1845 for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
Changyeon Jo7f5ad612022-08-17 21:47:58 -07001846 if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
Changyeon Jo80189012021-10-10 16:34:21 -07001847 if (ptr->width * ptr->height > maxArea && ptr->framerate >= minReqFps) {
1848 targetCfg.width = ptr->width;
1849 targetCfg.height = ptr->height;
Changyeon Jo7f5ad612022-08-17 21:47:58 -07001850 targetCfg.format = static_cast<PixelFormat>(ptr->format);
Changyeon Jo80189012021-10-10 16:34:21 -07001851
1852 maxArea = ptr->width * ptr->height;
1853 foundCfg = true;
1854 }
1855 }
1856 ++ptr;
1857 }
1858 }
Changyeon Jo80189012021-10-10 16:34:21 -07001859
1860 if (!foundCfg) {
1861 LOG(INFO) << "Device " << cam.id
1862 << " does not provide a list of supported stream configurations, skipped";
1863 continue;
1864 }
1865
1866 // Create the first camera client with a selected stream configuration.
1867 std::shared_ptr<IEvsCamera> pCam0;
1868 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam0).isOk());
1869 EXPECT_NE(pCam0, nullptr);
1870
1871 // Store a camera handle for a clean-up
1872 mActiveCameras.push_back(pCam0);
1873
1874 // Try to create the second camera client with different stream
1875 // configuration.
1876 int32_t id = targetCfg.id;
1877 targetCfg.id += 1; // EVS manager sees only the stream id.
1878 std::shared_ptr<IEvsCamera> pCam1;
1879 ASSERT_FALSE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());
1880
1881 // Try again with same stream configuration.
1882 targetCfg.id = id;
1883 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());
1884 EXPECT_NE(pCam1, nullptr);
1885
1886        // Set up per-client frame receiver objects, each of which will fire up its own thread
Frederick Mayle7056b242022-03-29 02:38:12 +00001887 std::shared_ptr<FrameHandler> frameHandler0 = ndk::SharedRefBase::make<FrameHandler>(
1888 pCam0, cam, nullptr, FrameHandler::eAutoReturn);
1889 std::shared_ptr<FrameHandler> frameHandler1 = ndk::SharedRefBase::make<FrameHandler>(
1890 pCam1, cam, nullptr, FrameHandler::eAutoReturn);
Changyeon Jo80189012021-10-10 16:34:21 -07001891 EXPECT_NE(frameHandler0, nullptr);
1892 EXPECT_NE(frameHandler1, nullptr);
1893
1894 // Start the camera's video stream via client 0
1895 ASSERT_TRUE(frameHandler0->startStream());
1896 ASSERT_TRUE(frameHandler1->startStream());
1897
1898 // Ensure the stream starts
1899 frameHandler0->waitForFrameCount(1);
1900 frameHandler1->waitForFrameCount(1);
1901
1902 nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
1903
1904 // Wait a bit, then ensure both clients get at least the required minimum number of frames
1905 sleep(5);
1906 nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
1907 unsigned framesReceived0 = 0, framesReceived1 = 0;
1908 frameHandler0->getFramesCounters(&framesReceived0, nullptr);
1909 frameHandler1->getFramesCounters(&framesReceived1, nullptr);
1910 framesReceived0 = framesReceived0 - 1; // Back out the first frame we already waited for
1911 framesReceived1 = framesReceived1 - 1; // Back out the first frame we already waited for
1912 nsecs_t runTime = end - firstFrame;
1913 float framesPerSecond0 = framesReceived0 / (runTime * kNanoToSeconds);
1914 float framesPerSecond1 = framesReceived1 / (runTime * kNanoToSeconds);
1915 LOG(INFO) << "Measured camera rate " << std::scientific << framesPerSecond0 << " fps and "
1916 << framesPerSecond1 << " fps";
1917 EXPECT_GE(framesPerSecond0, kMinimumFramesPerSecond);
1918 EXPECT_GE(framesPerSecond1, kMinimumFramesPerSecond);
1919
1920        // Shut down one client
1921 frameHandler0->shutdown();
1922
1923 // Read frame counters again
1924 frameHandler0->getFramesCounters(&framesReceived0, nullptr);
1925 frameHandler1->getFramesCounters(&framesReceived1, nullptr);
1926
1927 // Wait a bit again
1928 sleep(5);
1929 unsigned framesReceivedAfterStop0 = 0, framesReceivedAfterStop1 = 0;
1930 frameHandler0->getFramesCounters(&framesReceivedAfterStop0, nullptr);
1931 frameHandler1->getFramesCounters(&framesReceivedAfterStop1, nullptr);
1932 EXPECT_EQ(framesReceived0, framesReceivedAfterStop0);
1933 EXPECT_LT(framesReceived1, framesReceivedAfterStop1);
1934
1935        // Shut down the other client
1936 frameHandler1->shutdown();
1937
1938 // Explicitly release the camera
1939 ASSERT_TRUE(mEnumerator->closeCamera(pCam0).isOk());
1940 ASSERT_TRUE(mEnumerator->closeCamera(pCam1).isOk());
1941 mActiveCameras.clear();
1942 }
1943}
1944
1945/*
1946 * LogicalCameraMetadata:
1947 * Opens each logical camera reported by the enumerator and validates its metadata by
1948 * checking its capabilities and locating the supporting physical camera device
1949 * identifiers.
1950 */
1951TEST_P(EvsAidlTest, LogicalCameraMetadata) {
1952 LOG(INFO) << "Starting LogicalCameraMetadata test";
1953
1954 // Get the camera list
1955 loadCameraList();
1956
1957    // Verify the metadata of each reported camera
1958 for (auto&& cam : mCameraInfo) {
1959 bool isLogicalCam = false;
1960 auto devices = getPhysicalCameraIds(cam.id, isLogicalCam);
1961 if (isLogicalCam) {
1962 ASSERT_GE(devices.size(), 1) << "Logical camera device must have at least one physical "
1963 "camera device ID in its metadata.";
1964 }
1965 }
1966}
1967
1968/*
1969 * CameraStreamExternalBuffering:
1970 * This is the same as CameraStreamBuffering except that frame buffers are allocated by
1971 * the test client and then imported by the EVS framework.
1972 */
1973TEST_P(EvsAidlTest, CameraStreamExternalBuffering) {
1974 LOG(INFO) << "Starting CameraStreamExternalBuffering test";
1975
1976 // Arbitrary constant (should be > 1 and not too big)
1977 static const unsigned int kBuffersToHold = 3;
1978
1979 // Get the camera list
1980 loadCameraList();
1981
1982 // Acquire the graphics buffer allocator
1983 android::GraphicBufferAllocator& alloc(android::GraphicBufferAllocator::get());
1984 const auto usage =
1985 GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_SW_READ_RARELY | GRALLOC_USAGE_SW_WRITE_OFTEN;
1986
1987 // Test each reported camera
1988 for (auto&& cam : mCameraInfo) {
1989 // Read a target resolution from the metadata
1990 Stream targetCfg = getFirstStreamConfiguration(
1991 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
1992 ASSERT_GT(targetCfg.width, 0);
1993 ASSERT_GT(targetCfg.height, 0);
1994
1995 // Allocate buffers to use
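        // Each buffer is allocated through gralloc and wrapped in an AIDL BufferDesc
        // whose HardwareBufferDescription mirrors the allocation parameters so that EVS
        // can import it.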
1996 std::vector<BufferDesc> buffers;
1997 buffers.resize(kBuffersToHold);
1998 for (auto i = 0; i < kBuffersToHold; ++i) {
1999 unsigned pixelsPerLine;
2000 buffer_handle_t memHandle = nullptr;
2001 android::status_t result =
2002 alloc.allocate(targetCfg.width, targetCfg.height,
2003 static_cast<android::PixelFormat>(targetCfg.format),
2004 /* layerCount = */ 1, usage, &memHandle, &pixelsPerLine,
2005 /* graphicBufferId = */ 0,
2006 /* requestorName = */ "CameraStreamExternalBufferingTest");
2007 if (result != android::NO_ERROR) {
2008 LOG(ERROR) << __FUNCTION__ << " failed to allocate memory.";
2009                // Release previously allocated buffers
2010                for (auto j = 0; j < i; j++) {
2011                    alloc.free(::android::dupFromAidl(buffers[j].buffer.handle));
2012 }
2013 return;
2014 } else {
2015 BufferDesc buf;
2016 HardwareBufferDescription* pDesc =
2017 reinterpret_cast<HardwareBufferDescription*>(&buf.buffer.description);
2018 pDesc->width = targetCfg.width;
2019 pDesc->height = targetCfg.height;
2020 pDesc->layers = 1;
2021 pDesc->format = targetCfg.format;
2022 pDesc->usage = static_cast<BufferUsage>(usage);
2023 pDesc->stride = pixelsPerLine;
2024 buf.buffer.handle = ::android::dupToAidl(memHandle);
2025 buf.bufferId = i; // Unique number to identify this buffer
2026 buffers[i] = std::move(buf);
2027 }
2028 }
2029
2030 bool isLogicalCam = false;
2031 getPhysicalCameraIds(cam.id, isLogicalCam);
2032
2033 std::shared_ptr<IEvsCamera> pCam;
2034 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
2035 EXPECT_NE(pCam, nullptr);
2036
2037 // Store a camera handle for a clean-up
2038 mActiveCameras.push_back(pCam);
2039
2040 // Request to import buffers
2041 int delta = 0;
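        // delta is assumed to report how much the camera's buffer pool grew as a result
        // of the import; the test only requires it to be at least kBuffersToHold.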
2042 auto status = pCam->importExternalBuffers(buffers, &delta);
2043 if (isLogicalCam) {
2044 ASSERT_FALSE(status.isOk());
2045 continue;
2046 }
2047
2048 ASSERT_TRUE(status.isOk());
2049 EXPECT_GE(delta, kBuffersToHold);
2050
2051 // Set up a frame receiver object which will fire up its own thread.
Frederick Mayle7056b242022-03-29 02:38:12 +00002052 std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
2053 pCam, cam, nullptr, FrameHandler::eNoAutoReturn);
Changyeon Jo80189012021-10-10 16:34:21 -07002054 EXPECT_NE(frameHandler, nullptr);
2055
2056 // Start the camera's video stream
2057 ASSERT_TRUE(frameHandler->startStream());
2058
2059        // Check that the video stream stalls once we've gotten at least the number of buffers
2060        // we imported, since we told the frameHandler not to return them.
2061 sleep(1); // 1 second should be enough for at least 5 frames to be delivered worst case
2062 unsigned framesReceived = 0;
2063 frameHandler->getFramesCounters(&framesReceived, nullptr);
2064 ASSERT_LE(kBuffersToHold, framesReceived) << "Stream didn't stall at expected buffer limit";
2065
2066 // Give back one buffer
2067 EXPECT_TRUE(frameHandler->returnHeldBuffer());
2068
2069 // Once we return a buffer, it shouldn't take more than 1/10 second to get a new one
2070 // filled since we require 10fps minimum -- but give a 10% allowance just in case.
2071 unsigned framesReceivedAfter = 0;
2072 usleep(110 * kMillisecondsToMicroseconds);
2073 frameHandler->getFramesCounters(&framesReceivedAfter, nullptr);
2074 EXPECT_EQ(framesReceived + 1, framesReceivedAfter) << "Stream should've resumed";
2075
2076 // Even when the camera pointer goes out of scope, the FrameHandler object will
2077        // keep the stream alive unless we tell it to shut down.
2078        // Also note that the FrameHandler and the Camera have a mutual circular reference, so
2079 // we have to break that cycle in order for either of them to get cleaned up.
2080 frameHandler->shutdown();
2081
2082 // Explicitly release the camera
2083 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
2084 mActiveCameras.clear();
2085 // Release buffers
2086 for (auto& b : buffers) {
2087 alloc.free(::android::dupFromAidl(b.buffer.handle));
2088 }
2089 buffers.resize(0);
2090 }
2091}
2092
Changyeon Jodbcf52c2022-05-11 00:01:31 -07002093TEST_P(EvsAidlTest, DeviceStatusCallbackRegistration) {
2094 std::shared_ptr<IEvsEnumeratorStatusCallback> cb =
2095 ndk::SharedRefBase::make<DeviceStatusCallback>();
2096 ndk::ScopedAStatus status = mEnumerator->registerStatusCallback(cb);
2097 if (mIsHwModule) {
2098 ASSERT_TRUE(status.isOk());
2099 } else {
2100 // A callback registration may fail if a HIDL EVS HAL implementation is
2101 // running.
2102 ASSERT_TRUE(status.isOk() ||
2103 status.getServiceSpecificError() == static_cast<int>(EvsResult::NOT_SUPPORTED));
2104 }
2105}
2106
Changyeon Jo80189012021-10-10 16:34:21 -07002107/*
2108 * UltrasonicsArrayOpenClean:
2109 * Opens each ultrasonics array reported by the enumerator and then explicitly closes it via a
2110 * call to closeUltrasonicsArray. Then repeats the test to ensure all ultrasonics arrays
2111 * can be reopened.
2112 */
2113TEST_P(EvsAidlTest, UltrasonicsArrayOpenClean) {
2114 LOG(INFO) << "Starting UltrasonicsArrayOpenClean test";
2115
2116 // Get the ultrasonics array list
2117 loadUltrasonicsArrayList();
2118
2119 // Open and close each ultrasonics array twice
2120 for (auto&& ultraInfo : mUltrasonicsArraysInfo) {
2121 for (int pass = 0; pass < 2; pass++) {
2122 std::shared_ptr<IEvsUltrasonicsArray> pUltrasonicsArray;
2123 ASSERT_TRUE(
2124 mEnumerator
2125 ->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId, &pUltrasonicsArray)
2126 .isOk());
2127 EXPECT_NE(pUltrasonicsArray, nullptr);
2128
2129 // Verify that this ultrasonics array self-identifies correctly
2130 UltrasonicsArrayDesc desc;
2131 ASSERT_TRUE(pUltrasonicsArray->getUltrasonicArrayInfo(&desc).isOk());
2132 EXPECT_EQ(ultraInfo.ultrasonicsArrayId, desc.ultrasonicsArrayId);
2133 LOG(DEBUG) << "Found ultrasonics array " << ultraInfo.ultrasonicsArrayId;
2134
2135 // Explicitly close the ultrasonics array so resources are released right away
2136 ASSERT_TRUE(mEnumerator->closeUltrasonicsArray(pUltrasonicsArray).isOk());
2137 }
2138 }
2139}
2140
2141// Starts a stream and verifies all data received is valid.
2142TEST_P(EvsAidlTest, UltrasonicsVerifyStreamData) {
2143 LOG(INFO) << "Starting UltrasonicsVerifyStreamData";
2144
2145 // Get the ultrasonics array list
2146 loadUltrasonicsArrayList();
2147
2148 // For each ultrasonics array.
2149 for (auto&& ultraInfo : mUltrasonicsArraysInfo) {
2150 LOG(DEBUG) << "Testing ultrasonics array: " << ultraInfo.ultrasonicsArrayId;
2151
2152 std::shared_ptr<IEvsUltrasonicsArray> pUltrasonicsArray;
2153 ASSERT_TRUE(
2154 mEnumerator->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId, &pUltrasonicsArray)
2155 .isOk());
2156 EXPECT_NE(pUltrasonicsArray, nullptr);
2157
2158 std::shared_ptr<FrameHandlerUltrasonics> frameHandler =
Frederick Mayle7056b242022-03-29 02:38:12 +00002159 ndk::SharedRefBase::make<FrameHandlerUltrasonics>(pUltrasonicsArray);
Changyeon Jo80189012021-10-10 16:34:21 -07002160 EXPECT_NE(frameHandler, nullptr);
2161
2162 // Start stream.
2163 ASSERT_TRUE(pUltrasonicsArray->startStream(frameHandler).isOk());
2164
2165 // Wait 5 seconds to receive frames.
2166 sleep(5);
2167
2168 // Stop stream.
2169 ASSERT_TRUE(pUltrasonicsArray->stopStream().isOk());
2170
2171 EXPECT_GT(frameHandler->getReceiveFramesCount(), 0);
2172 EXPECT_TRUE(frameHandler->areAllFramesValid());
2173
2174 // Explicitly close the ultrasonics array so resources are released right away
2175 ASSERT_TRUE(mEnumerator->closeUltrasonicsArray(pUltrasonicsArray).isOk());
2176 }
2177}
2178
2179// Sets frames in flight before and after the start of the stream and verifies success.
2180TEST_P(EvsAidlTest, UltrasonicsSetFramesInFlight) {
2181 LOG(INFO) << "Starting UltrasonicsSetFramesInFlight";
2182
2183 // Get the ultrasonics array list
2184 loadUltrasonicsArrayList();
2185
2186 // For each ultrasonics array.
2187 for (auto&& ultraInfo : mUltrasonicsArraysInfo) {
2188 LOG(DEBUG) << "Testing ultrasonics array: " << ultraInfo.ultrasonicsArrayId;
2189
2190 std::shared_ptr<IEvsUltrasonicsArray> pUltrasonicsArray;
2191 ASSERT_TRUE(
2192 mEnumerator->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId, &pUltrasonicsArray)
2193 .isOk());
2194 EXPECT_NE(pUltrasonicsArray, nullptr);
2195
2196 ASSERT_TRUE(pUltrasonicsArray->setMaxFramesInFlight(10).isOk());
2197
2198 std::shared_ptr<FrameHandlerUltrasonics> frameHandler =
Frederick Mayle7056b242022-03-29 02:38:12 +00002199 ndk::SharedRefBase::make<FrameHandlerUltrasonics>(pUltrasonicsArray);
Changyeon Jo80189012021-10-10 16:34:21 -07002200 EXPECT_NE(frameHandler, nullptr);
2201
2202 // Start stream.
2203 ASSERT_TRUE(pUltrasonicsArray->startStream(frameHandler).isOk());
2204 ASSERT_TRUE(pUltrasonicsArray->setMaxFramesInFlight(5).isOk());
2205
2206 // Stop stream.
2207 ASSERT_TRUE(pUltrasonicsArray->stopStream().isOk());
2208
2209 // Explicitly close the ultrasonics array so resources are released right away
2210 ASSERT_TRUE(mEnumerator->closeUltrasonicsArray(pUltrasonicsArray).isOk());
2211 }
2212}
2213
Hao Chene708da82023-03-28 16:20:57 -07002214/*
2215 * DisplayOpen:
2216 * Test both clean shutdown and "aggressive open" device-stealing behavior.
2217 */
2218TEST_P(EvsAidlTest, DisplayOpen) {
2219 LOG(INFO) << "Starting DisplayOpen test";
2220
2221 // Request available display IDs.
2222 std::vector<uint8_t> displayIds;
2223 ASSERT_TRUE(mEnumerator->getDisplayIdList(&displayIds).isOk());
2224 EXPECT_GT(displayIds.size(), 0);
2225
2226 for (const auto displayId : displayIds) {
2227 std::shared_ptr<IEvsDisplay> pDisplay;
2228
2229 // Request exclusive access to each EVS display, then let it go.
2230 ASSERT_TRUE(mEnumerator->openDisplay(displayId, &pDisplay).isOk());
2231 ASSERT_NE(pDisplay, nullptr);
2232
2233 {
2234 // Ask the display what its name is.
2235 DisplayDesc desc;
2236 ASSERT_TRUE(pDisplay->getDisplayInfo(&desc).isOk());
2237 LOG(DEBUG) << "Found display " << desc.id;
2238 }
2239
2240 ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
2241
2242 // Ensure we can reopen the display after it has been closed.
2243 ASSERT_TRUE(mEnumerator->openDisplay(displayId, &pDisplay).isOk());
2244 ASSERT_NE(pDisplay, nullptr);
2245
2246        // Open the display while it's already open -- ownership should be transferred.
2247 std::shared_ptr<IEvsDisplay> pDisplay2;
2248 ASSERT_TRUE(mEnumerator->openDisplay(displayId, &pDisplay2).isOk());
2249 ASSERT_NE(pDisplay2, nullptr);
2250
2251 {
2252 // Ensure the old display properly reports its assassination.
2253 DisplayState badState;
2254 EXPECT_TRUE(pDisplay->getDisplayState(&badState).isOk());
2255 EXPECT_EQ(badState, DisplayState::DEAD);
2256 }
2257
2258 // Close only the newest display instance -- the other should already be a zombie.
2259 ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay2).isOk());
2260
2261 // Finally, validate that we can open the display after the provoked failure above.
2262 ASSERT_TRUE(mEnumerator->openDisplay(displayId, &pDisplay).isOk());
2263 ASSERT_NE(pDisplay, nullptr);
2264 ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
2265 }
2266}
2267
2268/*
2269 * DisplayStates:
2270 * Validate that display states transition as expected and can be queried from either the display
2271 * object itself or the owning enumerator.
2272 */
2273TEST_P(EvsAidlTest, DisplayStates) {
2274 using std::literals::chrono_literals::operator""ms;
2275
2276 LOG(INFO) << "Starting DisplayStates test";
2277
2278 // Request available display IDs.
2279 std::vector<uint8_t> displayIds;
2280 ASSERT_TRUE(mEnumerator->getDisplayIdList(&displayIds).isOk());
2281 EXPECT_GT(displayIds.size(), 0);
2282
2283 for (const auto displayId : displayIds) {
2284        // With no display open yet, state queries are expected to fail.
2285 {
2286 DisplayState state;
2287 EXPECT_FALSE(mEnumerator->getDisplayState(&state).isOk());
2288 }
Hao Chenbe819392023-03-29 16:18:47 -07002289 for (const auto displayIdToQuery : displayIds) {
2290 DisplayState state;
2291 EXPECT_FALSE(mEnumerator->getDisplayStateById(displayIdToQuery, &state).isOk());
2292 }
Hao Chene708da82023-03-28 16:20:57 -07002293
2294 // Scope to limit the lifetime of the pDisplay pointer, and thus the IEvsDisplay object.
2295 {
2296 // Request exclusive access to the EVS display.
2297 std::shared_ptr<IEvsDisplay> pDisplay;
2298 ASSERT_TRUE(mEnumerator->openDisplay(displayId, &pDisplay).isOk());
2299 ASSERT_NE(pDisplay, nullptr);
2300 {
2301 DisplayState state;
2302 EXPECT_TRUE(mEnumerator->getDisplayState(&state).isOk());
2303 EXPECT_EQ(state, DisplayState::NOT_VISIBLE);
2304 }
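            // getDisplayStateById is expected to fail for any id other than the display
            // that is currently open; for the open display, the state is verified
            // whenever the call succeeds.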
Hao Chenbe819392023-03-29 16:18:47 -07002305 for (const auto displayIdToQuery : displayIds) {
2306 DisplayState state;
Hao Chen6cfe1f82023-04-14 15:44:22 -07002307 bool get_state_ok =
2308 mEnumerator->getDisplayStateById(displayIdToQuery, &state).isOk();
2309 if (displayIdToQuery != displayId) {
2310 EXPECT_FALSE(get_state_ok);
2311 } else if (get_state_ok) {
Hao Chenbe819392023-03-29 16:18:47 -07002312 EXPECT_EQ(state, DisplayState::NOT_VISIBLE);
Hao Chenbe819392023-03-29 16:18:47 -07002313 }
2314 }
Hao Chene708da82023-03-28 16:20:57 -07002315
2316 // Activate the display.
2317 EXPECT_TRUE(pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME).isOk());
2318 {
2319 DisplayState state;
2320 EXPECT_TRUE(mEnumerator->getDisplayState(&state).isOk());
2321 EXPECT_EQ(state, DisplayState::VISIBLE_ON_NEXT_FRAME);
2322 }
2323 {
2324 DisplayState state;
2325 EXPECT_TRUE(pDisplay->getDisplayState(&state).isOk());
2326 EXPECT_EQ(state, DisplayState::VISIBLE_ON_NEXT_FRAME);
2327 }
Hao Chenbe819392023-03-29 16:18:47 -07002328 for (const auto displayIdToQuery : displayIds) {
2329 DisplayState state;
Hao Chen6cfe1f82023-04-14 15:44:22 -07002330 bool get_state_ok =
2331 mEnumerator->getDisplayStateById(displayIdToQuery, &state).isOk();
2332 if (displayIdToQuery != displayId) {
2333 EXPECT_FALSE(get_state_ok);
2334 } else if (get_state_ok) {
Hao Chenbe819392023-03-29 16:18:47 -07002335 EXPECT_EQ(state, DisplayState::VISIBLE_ON_NEXT_FRAME);
Hao Chenbe819392023-03-29 16:18:47 -07002336 }
2337 }
Hao Chene708da82023-03-28 16:20:57 -07002338
2339 // Get the output buffer we'd use to display the imagery.
2340 BufferDesc tgtBuffer;
2341 ASSERT_TRUE(pDisplay->getTargetBuffer(&tgtBuffer).isOk());
2342
2343 // Send the target buffer back for display (we didn't actually fill anything).
2344 EXPECT_TRUE(pDisplay->returnTargetBufferForDisplay(tgtBuffer).isOk());
2345
2346 // Sleep for a tenth of a second to ensure the driver has time to get the image
2347 // displayed.
2348 std::this_thread::sleep_for(100ms);
2349 {
2350 DisplayState state;
2351 EXPECT_TRUE(mEnumerator->getDisplayState(&state).isOk());
2352 EXPECT_EQ(state, DisplayState::VISIBLE);
2353 }
2354 {
2355 DisplayState state;
2356 EXPECT_TRUE(pDisplay->getDisplayState(&state).isOk());
2357 EXPECT_EQ(state, DisplayState::VISIBLE);
2358 }
Hao Chenbe819392023-03-29 16:18:47 -07002359 for (const auto displayIdToQuery : displayIds) {
2360 DisplayState state;
Hao Chen6cfe1f82023-04-14 15:44:22 -07002361 bool get_state_ok =
2362 mEnumerator->getDisplayStateById(displayIdToQuery, &state).isOk();
2363 if (displayIdToQuery != displayId) {
2364 EXPECT_FALSE(get_state_ok);
2365 } else if (get_state_ok) {
Hao Chenbe819392023-03-29 16:18:47 -07002366 EXPECT_EQ(state, DisplayState::VISIBLE);
Hao Chenbe819392023-03-29 16:18:47 -07002367 }
2368 }
Hao Chene708da82023-03-28 16:20:57 -07002369
2370 // Turn off the display.
2371 EXPECT_TRUE(pDisplay->setDisplayState(DisplayState::NOT_VISIBLE).isOk());
2372 std::this_thread::sleep_for(100ms);
2373 {
2374 DisplayState state;
2375 EXPECT_TRUE(mEnumerator->getDisplayState(&state).isOk());
2376 EXPECT_EQ(state, DisplayState::NOT_VISIBLE);
2377 }
2378 {
2379 DisplayState state;
2380 EXPECT_TRUE(pDisplay->getDisplayState(&state).isOk());
2381 EXPECT_EQ(state, DisplayState::NOT_VISIBLE);
2382 }
Hao Chenbe819392023-03-29 16:18:47 -07002383 for (const auto displayIdToQuery : displayIds) {
2384 DisplayState state;
Hao Chen6cfe1f82023-04-14 15:44:22 -07002385 bool get_state_ok =
2386 mEnumerator->getDisplayStateById(displayIdToQuery, &state).isOk();
2387 if (displayIdToQuery != displayId) {
2388 EXPECT_FALSE(get_state_ok);
2389 } else if (get_state_ok) {
Hao Chenbe819392023-03-29 16:18:47 -07002390 EXPECT_EQ(state, DisplayState::NOT_VISIBLE);
Hao Chenbe819392023-03-29 16:18:47 -07002391 }
2392 }
Hao Chene708da82023-03-28 16:20:57 -07002393
2394 // Close the display.
2395 mEnumerator->closeDisplay(pDisplay);
2396 }
2397
2398 // Now that the display pointer has gone out of scope, causing the IEvsDisplay interface
2399 // object to be destroyed, we should be back to the "not open" state.
2400 // NOTE: If we want this to pass without the sleep above, we'd have to add the
2401 // (now recommended) closeDisplay() call instead of relying on the smarter pointer
2402        // (now recommended) closeDisplay() call instead of relying on the smart pointer
2403 // of the object does actually clean up (eventually).
2404 {
2405 DisplayState state;
2406 EXPECT_FALSE(mEnumerator->getDisplayState(&state).isOk());
2407 }
Hao Chenbe819392023-03-29 16:18:47 -07002408 for (const auto displayIdToQuery : displayIds) {
2409 DisplayState state;
2410 EXPECT_FALSE(mEnumerator->getDisplayStateById(displayIdToQuery, &state).isOk());
2411 }
Hao Chene708da82023-03-28 16:20:57 -07002412 }
2413}
2414
Changyeon Jo80189012021-10-10 16:34:21 -07002415GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(EvsAidlTest);
2416INSTANTIATE_TEST_SUITE_P(
2417 PerInstance, EvsAidlTest,
2418 testing::ValuesIn(android::getAidlHalInstanceNames(IEvsEnumerator::descriptor)),
2419 android::PrintInstanceNameToString);
2420
2421int main(int argc, char** argv) {
2422 ::testing::InitGoogleTest(&argc, argv);
2423 ABinderProcess_setThreadPoolMaxThreadCount(1);
2424 ABinderProcess_startThreadPool();
2425 return RUN_ALL_TESTS();
2426}