1/*
2 * Copyright (C) 2022 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "FrameHandler.h"
18#include "FrameHandlerUltrasonics.h"
19
20#include <aidl/Gtest.h>
21#include <aidl/Vintf.h>
22#include <aidl/android/hardware/automotive/evs/BnEvsEnumeratorStatusCallback.h>
23#include <aidl/android/hardware/automotive/evs/BufferDesc.h>
24#include <aidl/android/hardware/automotive/evs/CameraDesc.h>
25#include <aidl/android/hardware/automotive/evs/CameraParam.h>
26#include <aidl/android/hardware/automotive/evs/DeviceStatus.h>
27#include <aidl/android/hardware/automotive/evs/DisplayDesc.h>
28#include <aidl/android/hardware/automotive/evs/DisplayState.h>
29#include <aidl/android/hardware/automotive/evs/EvsEventDesc.h>
30#include <aidl/android/hardware/automotive/evs/EvsEventType.h>
31#include <aidl/android/hardware/automotive/evs/EvsResult.h>
32#include <aidl/android/hardware/automotive/evs/IEvsCamera.h>
33#include <aidl/android/hardware/automotive/evs/IEvsDisplay.h>
34#include <aidl/android/hardware/automotive/evs/IEvsEnumerator.h>
35#include <aidl/android/hardware/automotive/evs/IEvsEnumeratorStatusCallback.h>
36#include <aidl/android/hardware/automotive/evs/IEvsUltrasonicsArray.h>
37#include <aidl/android/hardware/automotive/evs/ParameterRange.h>
38#include <aidl/android/hardware/automotive/evs/Stream.h>
39#include <aidl/android/hardware/automotive/evs/UltrasonicsArrayDesc.h>
40#include <aidl/android/hardware/common/NativeHandle.h>
41#include <aidl/android/hardware/graphics/common/HardwareBufferDescription.h>
42#include <aidl/android/hardware/graphics/common/PixelFormat.h>
43#include <aidlcommonsupport/NativeHandle.h>
44#include <android-base/logging.h>
45#include <android/binder_ibinder.h>
46#include <android/binder_manager.h>
47#include <android/binder_process.h>
48#include <android/binder_status.h>
49#include <system/camera_metadata.h>
50#include <ui/GraphicBuffer.h>
51#include <ui/GraphicBufferAllocator.h>
52#include <utils/Timers.h>
53
54#include <chrono>
55#include <deque>
56#include <thread>
57#include <unordered_set>
58
59namespace {
60
61// These values are called out in the EVS design doc (as of Mar 8, 2017)
62constexpr int kMaxStreamStartMilliseconds = 500;
63constexpr int kMinimumFramesPerSecond = 10;
64constexpr int kSecondsToMilliseconds = 1000;
65constexpr int kMillisecondsToMicroseconds = 1000;
66constexpr float kNanoToMilliseconds = 0.000001f;
67constexpr float kNanoToSeconds = 0.000000001f;
68
69/*
70 * Please note that this is different from what is defined in
71 * libhardware/modules/camera/3_4/metadata/types.h; this has one additional
72 * field to store a framerate.
73 */
74typedef struct {
75 int32_t id;
76 int32_t width;
77 int32_t height;
78 int32_t format;
79 int32_t direction;
80 int32_t framerate;
81} RawStreamConfig;
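// Number of int32_t entries that a single RawStreamConfig occupies in the flattened
// stream configuration metadata array walked by getFirstStreamConfiguration() below.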
82constexpr size_t kStreamCfgSz = sizeof(RawStreamConfig) / sizeof(int32_t);
83
84using ::aidl::android::hardware::automotive::evs::BnEvsEnumeratorStatusCallback;
85using ::aidl::android::hardware::automotive::evs::BufferDesc;
86using ::aidl::android::hardware::automotive::evs::CameraDesc;
87using ::aidl::android::hardware::automotive::evs::CameraParam;
88using ::aidl::android::hardware::automotive::evs::DeviceStatus;
89using ::aidl::android::hardware::automotive::evs::DisplayDesc;
90using ::aidl::android::hardware::automotive::evs::DisplayState;
91using ::aidl::android::hardware::automotive::evs::EvsEventDesc;
92using ::aidl::android::hardware::automotive::evs::EvsEventType;
93using ::aidl::android::hardware::automotive::evs::EvsResult;
94using ::aidl::android::hardware::automotive::evs::IEvsCamera;
95using ::aidl::android::hardware::automotive::evs::IEvsDisplay;
96using ::aidl::android::hardware::automotive::evs::IEvsEnumerator;
97using ::aidl::android::hardware::automotive::evs::IEvsEnumeratorStatusCallback;
98using ::aidl::android::hardware::automotive::evs::IEvsUltrasonicsArray;
99using ::aidl::android::hardware::automotive::evs::ParameterRange;
100using ::aidl::android::hardware::automotive::evs::Stream;
101using ::aidl::android::hardware::automotive::evs::UltrasonicsArrayDesc;
102using ::aidl::android::hardware::graphics::common::BufferUsage;
103using ::aidl::android::hardware::graphics::common::HardwareBufferDescription;
104using ::aidl::android::hardware::graphics::common::PixelFormat;
105using std::chrono_literals::operator""s;
106
107}  // namespace
108
109// The main test class for EVS
110class EvsAidlTest : public ::testing::TestWithParam<std::string> {
111 public:
112 virtual void SetUp() override {
113 // Make sure we can connect to the enumerator
114 std::string service_name = GetParam();
115 AIBinder* binder = AServiceManager_waitForService(service_name.data());
116 ASSERT_NE(binder, nullptr);
117 mEnumerator = IEvsEnumerator::fromBinder(::ndk::SpAIBinder(binder));
118 LOG(INFO) << "Test target service: " << service_name;
119
120 ASSERT_TRUE(mEnumerator->isHardware(&mIsHwModule).isOk());
121 }
122
123 virtual void TearDown() override {
124 // Attempt to close any active camera
125 for (auto&& cam : mActiveCameras) {
126 if (cam != nullptr) {
127 mEnumerator->closeCamera(cam);
128 }
129 }
130 mActiveCameras.clear();
131 }
132
133 protected:
134 void loadCameraList() {
135 // SetUp() must run first!
136 ASSERT_NE(mEnumerator, nullptr);
137
138 // Get the camera list
139 ASSERT_TRUE(mEnumerator->getCameraList(&mCameraInfo).isOk())
140 << "Failed to get a list of available cameras";
141 LOG(INFO) << "We have " << mCameraInfo.size() << " cameras.";
142 }
143
144 void loadUltrasonicsArrayList() {
145 // SetUp() must run first!
146 ASSERT_NE(mEnumerator, nullptr);
147
148 // Get the ultrasonics array list
149 auto result = mEnumerator->getUltrasonicsArrayList(&mUltrasonicsArraysInfo);
150 ASSERT_TRUE(result.isOk() ||
151 // TODO(b/149874793): Remove below conditions when
152 // getUltrasonicsArrayList() is implemented.
153 (!result.isOk() && result.getServiceSpecificError() ==
154 static_cast<int32_t>(EvsResult::NOT_IMPLEMENTED)))
155 << "Failed to get a list of available ultrasonics arrays";
156 LOG(INFO) << "We have " << mUltrasonicsArraysInfo.size() << " ultrasonics arrays.";
157 }
158
159 bool isLogicalCamera(const camera_metadata_t* metadata) {
160 if (metadata == nullptr) {
161 // A logical camera device must have a valid camera metadata.
162 return false;
163 }
164
165 // Looking for LOGICAL_MULTI_CAMERA capability from metadata.
166 camera_metadata_ro_entry_t entry;
167 int rc = find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
168 &entry);
169 if (rc != 0) {
170 // No capabilities are found.
171 return false;
172 }
173
174 for (size_t i = 0; i < entry.count; ++i) {
175 uint8_t cap = entry.data.u8[i];
176 if (cap == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA) {
177 return true;
178 }
179 }
180
181 return false;
182 }
183
184 std::unordered_set<std::string> getPhysicalCameraIds(const std::string& id, bool& flag) {
185 std::unordered_set<std::string> physicalCameras;
186 const auto it = std::find_if(mCameraInfo.begin(), mCameraInfo.end(),
187 [&id](const CameraDesc& desc) { return id == desc.id; });
188 if (it == mCameraInfo.end()) {
189 // Unknown camera is requested. Return an empty list.
190 return physicalCameras;
191 }
192
193 const camera_metadata_t* metadata = reinterpret_cast<camera_metadata_t*>(&it->metadata[0]);
194 flag = isLogicalCamera(metadata);
195 if (!flag) {
196 // EVS assumes that a device without valid metadata or without the
197 // logical multi-camera capability is a physical device.
198 LOG(INFO) << id << " is not a logical camera device.";
199 physicalCameras.insert(id);
200 return physicalCameras;
201 }
202
203 // Look for physical camera identifiers
204 camera_metadata_ro_entry entry;
205 int rc = find_camera_metadata_ro_entry(metadata, ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS,
206 &entry);
207 if (rc != 0) {
208 LOG(ERROR) << "No physical camera ID is found for a logical camera device";
209 }
210
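// The physical camera IDs are packed into a single NUL-separated byte array;
// split them into individual ID strings below.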
211 const uint8_t* ids = entry.data.u8;
212 size_t start = 0;
213 for (size_t i = 0; i < entry.count; ++i) {
214 if (ids[i] == '\0') {
215 if (start != i) {
216 std::string id(reinterpret_cast<const char*>(ids + start));
217 physicalCameras.insert(id);
218 }
219 start = i + 1;
220 }
221 }
222
223 LOG(INFO) << id << " consists of " << physicalCameras.size() << " physical camera devices";
224 return physicalCameras;
225 }
226
227 Stream getFirstStreamConfiguration(camera_metadata_t* metadata) {
228 Stream targetCfg = {};
229 camera_metadata_entry_t streamCfgs;
230 if (!find_camera_metadata_entry(metadata, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
231 &streamCfgs)) {
232 // Stream configurations are found in metadata
233 RawStreamConfig* ptr = reinterpret_cast<RawStreamConfig*>(streamCfgs.data.i32);
234 for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
235 if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
236 targetCfg.width = ptr->width;
237 targetCfg.height = ptr->height;
238 targetCfg.format = static_cast<PixelFormat>(ptr->format);
239 break;
240 }
241 ++ptr;
242 }
243 }
244
245 return targetCfg;
246 }
247
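// A no-op implementation of the enumerator device-status callback interface.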
248 class DeviceStatusCallback : public BnEvsEnumeratorStatusCallback {
249 ndk::ScopedAStatus deviceStatusChanged(const std::vector<DeviceStatus>&) override {
250 // This empty implementation returns always ok().
251 return ndk::ScopedAStatus::ok();
252 }
253 };
254
255 // Every test needs access to the service
256 std::shared_ptr<IEvsEnumerator> mEnumerator;
257 // Empty unless/until loadCameraList() is called
258 std::vector<CameraDesc> mCameraInfo;
259 // Boolean that tells whether the module under test is a HW module
260 // implementation or not
261 bool mIsHwModule;
262 // A list of active camera handles that need to be cleaned up
263 std::deque<std::shared_ptr<IEvsCamera>> mActiveCameras;
264 // Empty unless/until loadUltrasonicsArrayList() is called
265 std::vector<UltrasonicsArrayDesc> mUltrasonicsArraysInfo;
266 // A list of active ultrasonics array handles that are to be cleaned up
267 std::deque<std::weak_ptr<IEvsUltrasonicsArray>> mActiveUltrasonicsArrays;
268};
269
270// Test cases, their implementations, and corresponding requirements are
271// documented at go/aae-evs-public-api-test.
272
273/*
274 * CameraOpenClean:
275 * Opens each camera reported by the enumerator and then explicitly closes it via a
276 * call to closeCamera. Then repeats the test to ensure all cameras can be reopened.
277 */
278TEST_P(EvsAidlTest, CameraOpenClean) {
279 LOG(INFO) << "Starting CameraOpenClean test";
280
281 // Get the camera list
282 loadCameraList();
283
284 // Open and close each camera twice
285 for (auto&& cam : mCameraInfo) {
286 bool isLogicalCam = false;
287 auto devices = getPhysicalCameraIds(cam.id, isLogicalCam);
288 if (mIsHwModule && isLogicalCam) {
289 LOG(INFO) << "Skip a logical device, " << cam.id << " for HW target.";
290 continue;
291 }
292
293 // Read a target resolution from the metadata
294 Stream targetCfg = getFirstStreamConfiguration(
295 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
296 ASSERT_GT(targetCfg.width, 0);
297 ASSERT_GT(targetCfg.height, 0);
298
299 for (int pass = 0; pass < 2; pass++) {
300 std::shared_ptr<IEvsCamera> pCam;
301 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
302 ASSERT_NE(pCam, nullptr);
303
304 CameraDesc cameraInfo;
305 for (auto&& devName : devices) {
306 ASSERT_TRUE(pCam->getPhysicalCameraInfo(devName, &cameraInfo).isOk());
307 EXPECT_EQ(devName, cameraInfo.id);
308 }
309
310 // Store a camera handle for a clean-up
311 mActiveCameras.push_back(pCam);
312
313 // Verify that this camera self-identifies correctly
314 ASSERT_TRUE(pCam->getCameraInfo(&cameraInfo).isOk());
315 EXPECT_EQ(cam.id, cameraInfo.id);
316
317 // Verify methods for extended info
318 const auto id = 0xFFFFFFFF; // meaningless id
319 std::vector<uint8_t> values;
320 auto status = pCam->setExtendedInfo(id, values);
321 if (isLogicalCam) {
322 EXPECT_TRUE(!status.isOk() && status.getServiceSpecificError() ==
323 static_cast<int>(EvsResult::NOT_SUPPORTED));
324 } else {
325 EXPECT_TRUE(status.isOk());
326 }
327
328 status = pCam->getExtendedInfo(id, &values);
329 if (isLogicalCam) {
330 EXPECT_TRUE(!status.isOk() && status.getServiceSpecificError() ==
331 static_cast<int>(EvsResult::NOT_SUPPORTED));
332 } else {
333 EXPECT_TRUE(status.isOk());
334 }
335
336 // Explicitly close the camera so resources are released right away
337 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
338 mActiveCameras.clear();
339 }
340 }
341}
342
343/*
344 * CameraOpenAggressive:
345 * Opens each camera reported by the enumerator twice in a row without an intervening closeCamera
346 * call. This ensures that the intended "aggressive open" behavior works. This is necessary for
347 * the system to be tolerant of shutdown/restart race conditions.
348 */
349TEST_P(EvsAidlTest, CameraOpenAggressive) {
350 LOG(INFO) << "Starting CameraOpenAggressive test";
351
352 // Get the camera list
353 loadCameraList();
354
355 // Open and close each camera twice
356 for (auto&& cam : mCameraInfo) {
357 bool isLogicalCam = false;
358 getPhysicalCameraIds(cam.id, isLogicalCam);
359 if (mIsHwModule && isLogicalCam) {
360 LOG(INFO) << "Skip a logical device, " << cam.id << " for HW target.";
361 continue;
362 }
363
364 // Read a target resolution from the metadata
365 Stream targetCfg = getFirstStreamConfiguration(
366 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
367 ASSERT_GT(targetCfg.width, 0);
368 ASSERT_GT(targetCfg.height, 0);
369
370 mActiveCameras.clear();
371 std::shared_ptr<IEvsCamera> pCam;
372 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
373 EXPECT_NE(pCam, nullptr);
374
375 // Store a camera handle for a clean-up
376 mActiveCameras.push_back(pCam);
377
378 // Verify that this camera self-identifies correctly
379 CameraDesc cameraInfo;
380 ASSERT_TRUE(pCam->getCameraInfo(&cameraInfo).isOk());
381 EXPECT_EQ(cam.id, cameraInfo.id);
382
383 std::shared_ptr<IEvsCamera> pCam2;
384 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam2).isOk());
385 EXPECT_NE(pCam2, nullptr);
386 EXPECT_NE(pCam, pCam2);
387
388 // Store a camera handle for a clean-up
389 mActiveCameras.push_back(pCam2);
390
391 auto status = pCam->setMaxFramesInFlight(2);
392 if (mIsHwModule) {
393 // Verify that the old camera rejects calls via HW module.
394 EXPECT_TRUE(!status.isOk() && status.getServiceSpecificError() ==
395 static_cast<int>(EvsResult::OWNERSHIP_LOST));
396 } else {
397 // The default implementation supports multiple clients.
398 EXPECT_TRUE(status.isOk());
399 }
400
401 // Close the superseded camera
402 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
403 mActiveCameras.pop_front();
404
405 // Verify that the second camera instance self-identifies correctly
406 ASSERT_TRUE(pCam2->getCameraInfo(&cameraInfo).isOk());
407 EXPECT_EQ(cam.id, cameraInfo.id);
408
409 // Close the second camera instance
410 ASSERT_TRUE(mEnumerator->closeCamera(pCam2).isOk());
411 mActiveCameras.pop_front();
412 }
413
414 // Sleep here to ensure the destructor cleanup has time to run so we don't break follow-on tests
415 sleep(1); // I hate that this is an arbitrary time to wait. :( b/36122635
416}
417
418/*
419 * CameraStreamPerformance:
420 * Measure and qualify the stream start up time and streaming frame rate of each reported camera
421 */
422TEST_P(EvsAidlTest, CameraStreamPerformance) {
423 LOG(INFO) << "Starting CameraStreamPerformance test";
424
425 // Get the camera list
426 loadCameraList();
427
428 // Test each reported camera
429 for (auto&& cam : mCameraInfo) {
430 bool isLogicalCam = false;
431 auto devices = getPhysicalCameraIds(cam.id, isLogicalCam);
432 if (mIsHwModule && isLogicalCam) {
433 LOG(INFO) << "Skip a logical device " << cam.id;
434 continue;
435 }
436
437 // Read a target resolution from the metadata
438 Stream targetCfg = getFirstStreamConfiguration(
439 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
440 ASSERT_GT(targetCfg.width, 0);
441 ASSERT_GT(targetCfg.height, 0);
442
443 std::shared_ptr<IEvsCamera> pCam;
444 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
445 EXPECT_NE(pCam, nullptr);
446
447 // Store a camera handle for a clean-up
448 mActiveCameras.push_back(pCam);
449
450 // Set up a frame receiver object which will fire up its own thread
451 std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
452 pCam, cam, nullptr, FrameHandler::eAutoReturn);
453 EXPECT_NE(frameHandler, nullptr);
454
455 // Start the camera's video stream
456 nsecs_t start = systemTime(SYSTEM_TIME_MONOTONIC);
457 ASSERT_TRUE(frameHandler->startStream());
458
459 // Ensure the first frame arrived within the expected time
460 frameHandler->waitForFrameCount(1);
461 nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
462 nsecs_t timeToFirstFrame = systemTime(SYSTEM_TIME_MONOTONIC) - start;
463
464 // Extra delays are expected when we attempt to start a video stream on
465 // a logical camera device. The delay is expected to be at most the
466 // number of physical camera devices multiplied by
467 // kMaxStreamStartMilliseconds.
468 EXPECT_LE(nanoseconds_to_milliseconds(timeToFirstFrame),
469 kMaxStreamStartMilliseconds * devices.size());
470 printf("%s: Measured time to first frame %0.2f ms\n", cam.id.data(),
471 timeToFirstFrame * kNanoToMilliseconds);
472 LOG(INFO) << cam.id << ": Measured time to first frame " << std::scientific
473 << timeToFirstFrame * kNanoToMilliseconds << " ms.";
474
475 // Check aspect ratio
476 unsigned width = 0, height = 0;
477 frameHandler->getFrameDimension(&width, &height);
478 EXPECT_GE(width, height);
479
480 // Wait a bit, then ensure we get at least the required minimum number of frames
481 sleep(5);
482 nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
483
484 // Even when the camera pointer goes out of scope, the FrameHandler object will
485 // keep the stream alive unless we tell it to shutdown.
486 // Also note that the FrameHandler and the Camera have a mutual circular reference, so
487 // we have to break that cycle in order for either of them to get cleaned up.
488 frameHandler->shutdown();
489
490 unsigned framesReceived = 0;
491 frameHandler->getFramesCounters(&framesReceived, nullptr);
492 framesReceived = framesReceived - 1; // Back out the first frame we already waited for
493 nsecs_t runTime = end - firstFrame;
494 float framesPerSecond = framesReceived / (runTime * kNanoToSeconds);
495 printf("Measured camera rate %3.2f fps\n", framesPerSecond);
496 LOG(INFO) << "Measured camera rate " << std::scientific << framesPerSecond << " fps.";
497 EXPECT_GE(framesPerSecond, kMinimumFramesPerSecond);
498
499 // Explicitly release the camera
500 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
501 mActiveCameras.clear();
502 }
503}
504
505/*
506 * CameraStreamBuffering:
507 * Ensure the camera implementation behaves properly when the client holds onto buffers for more
508 * than one frame time. The camera must cleanly skip frames until the client is ready again.
509 */
510TEST_P(EvsAidlTest, CameraStreamBuffering) {
511 LOG(INFO) << "Starting CameraStreamBuffering test";
512
513 // Arbitrary constant (should be > 1 and not too big)
514 static const unsigned int kBuffersToHold = 6;
515
516 // Get the camera list
517 loadCameraList();
518
519 // Test each reported camera
520 for (auto&& cam : mCameraInfo) {
521 bool isLogicalCam = false;
522 getPhysicalCameraIds(cam.id, isLogicalCam);
523 if (mIsHwModule && isLogicalCam) {
524 LOG(INFO) << "Skip a logical device " << cam.id << " for HW target.";
525 continue;
526 }
527
528 // Read a target resolution from the metadata
529 Stream targetCfg = getFirstStreamConfiguration(
530 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
531 ASSERT_GT(targetCfg.width, 0);
532 ASSERT_GT(targetCfg.height, 0);
533
534 std::shared_ptr<IEvsCamera> pCam;
535 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
536 EXPECT_NE(pCam, nullptr);
537
538 // Store a camera handle for a clean-up
539 mActiveCameras.push_back(pCam);
540
541 // Ask for a very large number of buffers in flight to ensure it errors correctly
542 auto badResult = pCam->setMaxFramesInFlight(std::numeric_limits<int32_t>::max());
543 EXPECT_TRUE(!badResult.isOk() && badResult.getServiceSpecificError() ==
544 static_cast<int>(EvsResult::BUFFER_NOT_AVAILABLE));
545
546 // Now ask for exactly kBuffersToHold buffers in flight as we'll test behavior in that case
547 ASSERT_TRUE(pCam->setMaxFramesInFlight(kBuffersToHold).isOk());
548
549 // Set up a frame receiver object which will fire up its own thread.
550 std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
551 pCam, cam, nullptr, FrameHandler::eNoAutoReturn);
552 EXPECT_NE(frameHandler, nullptr);
553
554 // Start the camera's video stream
555 ASSERT_TRUE(frameHandler->startStream());
556
557 // Check that the video stream stalls once we've gotten exactly the number of buffers
558 // we requested since we told the frameHandler not to return them.
559 sleep(1); // 1 second should be enough for at least kBuffersToHold frames to be delivered worst case
560 unsigned framesReceived = 0;
561 frameHandler->getFramesCounters(&framesReceived, nullptr);
562 ASSERT_EQ(kBuffersToHold, framesReceived) << "Stream didn't stall at expected buffer limit";
563
564 // Give back one buffer
565 ASSERT_TRUE(frameHandler->returnHeldBuffer());
566
567 // Once we return a buffer, it shouldn't take more than 1/10 second to get a new one
568 // filled since we require 10fps minimum -- but give a 10% allowance just in case.
569 usleep(110 * kMillisecondsToMicroseconds);
570 frameHandler->getFramesCounters(&framesReceived, nullptr);
571 EXPECT_EQ(kBuffersToHold + 1, framesReceived) << "Stream should've resumed";
572
573 // Even when the camera pointer goes out of scope, the FrameHandler object will
574 // keep the stream alive unless we tell it to shutdown.
575 // Also note that the FrameHandler and the Camera have a mutual circular reference, so
576 // we have to break that cycle in order for either of them to get cleaned up.
577 frameHandler->shutdown();
578
579 // Explicitly release the camera
580 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
581 mActiveCameras.clear();
582 }
583}
584
585/*
586 * CameraToDisplayRoundTrip:
587 * End to end test of data flowing from the camera to the display. Each delivered frame of camera
588 * imagery is simply copied to the display buffer and presented on screen. This is the one test
589 * which a human could observe to see the operation of the system on the physical display.
590 */
591TEST_P(EvsAidlTest, CameraToDisplayRoundTrip) {
592 LOG(INFO) << "Starting CameraToDisplayRoundTrip test";
593
594 // Get the camera list
595 loadCameraList();
596
597 // Request available display IDs
598 uint8_t targetDisplayId = 0;
599 std::vector<uint8_t> displayIds;
600 ASSERT_TRUE(mEnumerator->getDisplayIdList(&displayIds).isOk());
601 EXPECT_GT(displayIds.size(), 0);
602 targetDisplayId = displayIds[0];
603
604 // Test each reported camera
605 for (auto&& cam : mCameraInfo) {
606 // Request exclusive access to the first EVS display
607 std::shared_ptr<IEvsDisplay> pDisplay;
608 ASSERT_TRUE(mEnumerator->openDisplay(targetDisplayId, &pDisplay).isOk());
609 EXPECT_NE(pDisplay, nullptr);
610 LOG(INFO) << "Display " << static_cast<int>(targetDisplayId) << " is in use.";
611
612 // Get the display descriptor
613 DisplayDesc displayDesc;
614 ASSERT_TRUE(pDisplay->getDisplayInfo(&displayDesc).isOk());
615 LOG(INFO) << " Resolution: " << displayDesc.width << "x" << displayDesc.height;
616 ASSERT_GT(displayDesc.width, 0);
617 ASSERT_GT(displayDesc.height, 0);
618
619 bool isLogicalCam = false;
620 getPhysicalCameraIds(cam.id, isLogicalCam);
621 if (mIsHwModule && isLogicalCam) {
622 LOG(INFO) << "Skip a logical device " << cam.id << " for HW target.";
623 ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
624 continue;
625 }
626
627 // Read a target resolution from the metadata
628 Stream targetCfg = getFirstStreamConfiguration(
629 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
630 ASSERT_GT(targetCfg.width, 0);
631 ASSERT_GT(targetCfg.height, 0);
632
633 std::shared_ptr<IEvsCamera> pCam;
634 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
635 EXPECT_NE(pCam, nullptr);
636
637 // Store a camera handle for a clean-up
638 mActiveCameras.push_back(pCam);
639
640 // Set up a frame receiver object which will fire up its own thread.
641 std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
642 pCam, cam, pDisplay, FrameHandler::eAutoReturn);
643 EXPECT_NE(frameHandler, nullptr);
644
645 // Activate the display
646 ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME).isOk());
647
648 // Start the camera's video stream
649 ASSERT_TRUE(frameHandler->startStream());
650
651 // Wait a while to let the data flow
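// Expect at least kMinimumFramesPerSecond over the whole wait period, after allowing
// kMaxStreamStartMilliseconds for the stream to start up.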
652 static const int kSecondsToWait = 5;
653 const int streamTimeMs =
654 kSecondsToWait * kSecondsToMilliseconds - kMaxStreamStartMilliseconds;
655 const unsigned minimumFramesExpected =
656 streamTimeMs * kMinimumFramesPerSecond / kSecondsToMilliseconds;
657 sleep(kSecondsToWait);
658 unsigned framesReceived = 0;
659 unsigned framesDisplayed = 0;
660 frameHandler->getFramesCounters(&framesReceived, &framesDisplayed);
661 EXPECT_EQ(framesReceived, framesDisplayed);
662 EXPECT_GE(framesDisplayed, minimumFramesExpected);
663
664 // Turn off the display (yes, before the stream stops -- it should be handled)
665 ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::NOT_VISIBLE).isOk());
666
667 // Shut down the streamer
668 frameHandler->shutdown();
669
670 // Explicitly release the camera
671 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
672 mActiveCameras.clear();
673
674 // Explicitly release the display
675 ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
676 }
677}
678
679/*
680 * MultiCameraStream:
681 * Verify that each client can start and stop video streams on the same
682 * underlying camera.
683 */
684TEST_P(EvsAidlTest, MultiCameraStream) {
685 LOG(INFO) << "Starting MultiCameraStream test";
686
687 if (mIsHwModule) {
688 // This test is not for HW module implementation.
689 return;
690 }
691
692 // Get the camera list
693 loadCameraList();
694
695 // Test each reported camera
696 for (auto&& cam : mCameraInfo) {
697 // Read a target resolution from the metadata
698 Stream targetCfg = getFirstStreamConfiguration(
699 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
700 ASSERT_GT(targetCfg.width, 0);
701 ASSERT_GT(targetCfg.height, 0);
702
703 // Create two camera clients.
704 std::shared_ptr<IEvsCamera> pCam0;
705 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam0).isOk());
706 EXPECT_NE(pCam0, nullptr);
707
708 // Store a camera handle for a clean-up
709 mActiveCameras.push_back(pCam0);
710
711 std::shared_ptr<IEvsCamera> pCam1;
712 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());
713 EXPECT_NE(pCam1, nullptr);
714
715 // Store a camera handle for a clean-up
716 mActiveCameras.push_back(pCam1);
717
718 // Set up per-client frame receiver objects which will fire up their own threads
719 std::shared_ptr<FrameHandler> frameHandler0 = ndk::SharedRefBase::make<FrameHandler>(
720 pCam0, cam, nullptr, FrameHandler::eAutoReturn);
721 std::shared_ptr<FrameHandler> frameHandler1 = ndk::SharedRefBase::make<FrameHandler>(
722 pCam1, cam, nullptr, FrameHandler::eAutoReturn);
723 EXPECT_NE(frameHandler0, nullptr);
724 EXPECT_NE(frameHandler1, nullptr);
725
726 // Start the camera's video stream via client 0
727 ASSERT_TRUE(frameHandler0->startStream());
728 ASSERT_TRUE(frameHandler1->startStream());
729
730 // Ensure the stream starts
731 frameHandler0->waitForFrameCount(1);
732 frameHandler1->waitForFrameCount(1);
733
734 nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
735
736 // Wait a bit, then ensure both clients get at least the required minimum number of frames
737 sleep(5);
738 nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
739 unsigned framesReceived0 = 0, framesReceived1 = 0;
740 frameHandler0->getFramesCounters(&framesReceived0, nullptr);
741 frameHandler1->getFramesCounters(&framesReceived1, nullptr);
742 framesReceived0 = framesReceived0 - 1; // Back out the first frame we already waited for
743 framesReceived1 = framesReceived1 - 1; // Back out the first frame we already waited for
744 nsecs_t runTime = end - firstFrame;
745 float framesPerSecond0 = framesReceived0 / (runTime * kNanoToSeconds);
746 float framesPerSecond1 = framesReceived1 / (runTime * kNanoToSeconds);
747 LOG(INFO) << "Measured camera rate " << std::scientific << framesPerSecond0 << " fps and "
748 << framesPerSecond1 << " fps";
749 EXPECT_GE(framesPerSecond0, kMinimumFramesPerSecond);
750 EXPECT_GE(framesPerSecond1, kMinimumFramesPerSecond);
751
752 // Shutdown one client
753 frameHandler0->shutdown();
754
755 // Read frame counters again
756 frameHandler0->getFramesCounters(&framesReceived0, nullptr);
757 frameHandler1->getFramesCounters(&framesReceived1, nullptr);
758
759 // Wait a bit again
760 sleep(5);
761 unsigned framesReceivedAfterStop0 = 0, framesReceivedAfterStop1 = 0;
762 frameHandler0->getFramesCounters(&framesReceivedAfterStop0, nullptr);
763 frameHandler1->getFramesCounters(&framesReceivedAfterStop1, nullptr);
764 EXPECT_EQ(framesReceived0, framesReceivedAfterStop0);
765 EXPECT_LT(framesReceived1, framesReceivedAfterStop1);
766
767 // Shutdown another
768 frameHandler1->shutdown();
769
770 // Explicitly release the camera
771 ASSERT_TRUE(mEnumerator->closeCamera(pCam0).isOk());
772 ASSERT_TRUE(mEnumerator->closeCamera(pCam1).isOk());
773 mActiveCameras.clear();
774
775 // TODO(b/145459970, b/145457727): below sleep() is added to ensure the
776 // destruction of active camera objects; this may be related with two
777 // issues.
778 sleep(1);
779 }
780}
781
782/*
783 * CameraParameter:
784 * Verify that a client can adjust a camera parameter.
785 */
786TEST_P(EvsAidlTest, CameraParameter) {
787 LOG(INFO) << "Starting CameraParameter test";
788
789 // Get the camera list
790 loadCameraList();
791
792 // Test each reported camera
793 for (auto&& cam : mCameraInfo) {
794 bool isLogicalCam = false;
795 getPhysicalCameraIds(cam.id, isLogicalCam);
796 if (isLogicalCam) {
797 // TODO(b/145465724): Support camera parameter programming on
798 // logical devices.
799 LOG(INFO) << "Skip a logical device " << cam.id;
800 continue;
801 }
802
803 // Read a target resolution from the metadata
804 Stream targetCfg = getFirstStreamConfiguration(
805 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
806 ASSERT_GT(targetCfg.width, 0);
807 ASSERT_GT(targetCfg.height, 0);
808
809 // Create a camera client
810 std::shared_ptr<IEvsCamera> pCam;
811 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
812 EXPECT_NE(pCam, nullptr);
813
814 // Store a camera
815 mActiveCameras.push_back(pCam);
816
817 // Get the parameter list
818 std::vector<CameraParam> cmds;
819 ASSERT_TRUE(pCam->getParameterList(&cmds).isOk());
820 if (cmds.size() < 1) {
821 continue;
822 }
823
824 // Set up a frame receiver object which will fire up its own thread
825 std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
826 pCam, cam, nullptr, FrameHandler::eAutoReturn);
827 EXPECT_NE(frameHandler, nullptr);
828
829 // Start the camera's video stream
830 ASSERT_TRUE(frameHandler->startStream());
831
832 // Ensure the stream starts
833 frameHandler->waitForFrameCount(1);
834
835 // Set the current client as the primary client
836 ASSERT_TRUE(pCam->setPrimaryClient().isOk());
837 for (auto& cmd : cmds) {
838 // Get a valid parameter value range
839 ParameterRange range;
840 ASSERT_TRUE(pCam->getIntParameterRange(cmd, &range).isOk());
841
842 std::vector<int32_t> values;
843 if (cmd == CameraParam::ABSOLUTE_FOCUS) {
844 // Try to turn off auto-focus
845 ASSERT_TRUE(pCam->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
846 for (auto&& v : values) {
847 EXPECT_EQ(v, 0);
848 }
849 }
850
851 // Try to program a parameter with a random value [minVal, maxVal]
852 int32_t val0 = range.min + (std::rand() % (range.max - range.min));
853
854 // Rounding down
855 val0 = val0 - (val0 % range.step);
856 values.clear();
857 ASSERT_TRUE(pCam->setIntParameter(cmd, val0, &values).isOk());
858
859 values.clear();
860 ASSERT_TRUE(pCam->getIntParameter(cmd, &values).isOk());
861 for (auto&& v : values) {
862 EXPECT_EQ(val0, v) << "Values are not matched.";
863 }
864 }
865 ASSERT_TRUE(pCam->unsetPrimaryClient().isOk());
866
867 // Shutdown
868 frameHandler->shutdown();
869
870 // Explicitly release the camera
871 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
872 mActiveCameras.clear();
873 }
874}
875
876/*
877 * CameraPrimaryClientRelease
878 * Verify that a non-primary client gets notified when the primary client either
879 * terminates or releases its role.
880 */
881TEST_P(EvsAidlTest, CameraPrimaryClientRelease) {
882 LOG(INFO) << "Starting CameraPrimaryClientRelease test";
883
884 if (mIsHwModule) {
885 // This test is not for HW module implementation.
886 return;
887 }
888
889 // Get the camera list
890 loadCameraList();
891
892 // Test each reported camera
893 for (auto&& cam : mCameraInfo) {
894 bool isLogicalCam = false;
895 getPhysicalCameraIds(cam.id, isLogicalCam);
896 if (isLogicalCam) {
897 // TODO(b/145465724): Support camera parameter programming on
898 // logical devices.
899 LOG(INFO) << "Skip a logical device " << cam.id;
900 continue;
901 }
902
903 // Read a target resolution from the metadata
904 Stream targetCfg = getFirstStreamConfiguration(
905 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
906 ASSERT_GT(targetCfg.width, 0);
907 ASSERT_GT(targetCfg.height, 0);
908
909 // Create two camera clients.
910 std::shared_ptr<IEvsCamera> pPrimaryCam;
911 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pPrimaryCam).isOk());
912 EXPECT_NE(pPrimaryCam, nullptr);
913
914 // Store a camera handle for a clean-up
915 mActiveCameras.push_back(pPrimaryCam);
916
917 std::shared_ptr<IEvsCamera> pSecondaryCam;
918 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pSecondaryCam).isOk());
919 EXPECT_NE(pSecondaryCam, nullptr);
920
921 // Store a camera handle for a clean-up
922 mActiveCameras.push_back(pSecondaryCam);
923
924 // Set up per-client frame receiver objects which will fire up their own threads
925 std::shared_ptr<FrameHandler> frameHandlerPrimary = ndk::SharedRefBase::make<FrameHandler>(
926 pPrimaryCam, cam, nullptr, FrameHandler::eAutoReturn);
927 std::shared_ptr<FrameHandler> frameHandlerSecondary =
928 ndk::SharedRefBase::make<FrameHandler>(pSecondaryCam, cam, nullptr,
929 FrameHandler::eAutoReturn);
930 EXPECT_NE(frameHandlerPrimary, nullptr);
931 EXPECT_NE(frameHandlerSecondary, nullptr);
932
933 // Set one client as the primary client
934 ASSERT_TRUE(pPrimaryCam->setPrimaryClient().isOk());
935
936 // Try to set another client as the primary client.
937 ASSERT_FALSE(pSecondaryCam->setPrimaryClient().isOk());
938
939 // Start the camera's video stream via the primary client.
940 ASSERT_TRUE(frameHandlerPrimary->startStream());
941
942 // Ensure the stream starts
943 frameHandlerPrimary->waitForFrameCount(1);
944
945 // Start the camera's video stream via another client
946 ASSERT_TRUE(frameHandlerSecondary->startStream());
947
948 // Ensure the stream starts
949 frameHandlerSecondary->waitForFrameCount(1);
950
951 // Non-primary client expects to receive a primary client role released
952 // notification.
953 EvsEventDesc aTargetEvent = {};
954 EvsEventDesc aNotification = {};
955
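// Spawn a listener thread that blocks until a MASTER_RELEASED event arrives; the
// condition variable below is only used to confirm the thread is running before
// the primary client releases its role.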
956 bool listening = false;
957 std::mutex eventLock;
958 std::condition_variable eventCond;
959 std::thread listener =
960 std::thread([&aNotification, &frameHandlerSecondary, &listening, &eventCond]() {
961 // Notify that a listening thread is running.
962 listening = true;
963 eventCond.notify_all();
964
965 EvsEventDesc aTargetEvent;
966 aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
967 if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification, true)) {
968 LOG(WARNING) << "A timer is expired before a target event is fired.";
969 }
970 });
971
972 // Wait until a listening thread starts.
973 std::unique_lock<std::mutex> lock(eventLock);
974 auto timer = std::chrono::system_clock::now();
975 while (!listening) {
976 timer += 1s;
977 eventCond.wait_until(lock, timer);
978 }
979 lock.unlock();
980
981 // Release a primary client role.
982 ASSERT_TRUE(pPrimaryCam->unsetPrimaryClient().isOk());
983
984 // Join a listening thread.
985 if (listener.joinable()) {
986 listener.join();
987 }
988
989 // Verify change notifications.
990 ASSERT_EQ(EvsEventType::MASTER_RELEASED, static_cast<EvsEventType>(aNotification.aType));
991
992 // Non-primary becomes a primary client.
993 ASSERT_TRUE(pSecondaryCam->setPrimaryClient().isOk());
994
995 // Previous primary client fails to become a primary client.
996 ASSERT_FALSE(pPrimaryCam->setPrimaryClient().isOk());
997
998 listening = false;
999 listener = std::thread([&aNotification, &frameHandlerPrimary, &listening, &eventCond]() {
1000 // Notify that a listening thread is running.
1001 listening = true;
1002 eventCond.notify_all();
1003
1004 EvsEventDesc aTargetEvent;
1005 aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
1006 if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification, true)) {
1007 LOG(WARNING) << "A timer is expired before a target event is fired.";
1008 }
1009 });
1010
1011 // Wait until a listening thread starts.
1012 timer = std::chrono::system_clock::now();
1013 lock.lock();
1014 while (!listening) {
1015 eventCond.wait_until(lock, timer + 1s);
1016 }
1017 lock.unlock();
1018
1019 // Closing current primary client.
1020 frameHandlerSecondary->shutdown();
1021
1022 // Join a listening thread.
1023 if (listener.joinable()) {
1024 listener.join();
1025 }
1026
1027 // Verify change notifications.
1028 ASSERT_EQ(EvsEventType::MASTER_RELEASED, static_cast<EvsEventType>(aNotification.aType));
1029
1030 // Closing streams.
1031 frameHandlerPrimary->shutdown();
1032
1033 // Explicitly release the camera
1034 ASSERT_TRUE(mEnumerator->closeCamera(pPrimaryCam).isOk());
1035 ASSERT_TRUE(mEnumerator->closeCamera(pSecondaryCam).isOk());
1036 mActiveCameras.clear();
1037 }
1038}
1039
1040/*
1041 * MultiCameraParameter:
1042 * Verify that primary and non-primary clients behave as expected when they try to adjust
1043 * camera parameters.
1044 */
1045TEST_P(EvsAidlTest, MultiCameraParameter) {
1046 LOG(INFO) << "Starting MultiCameraParameter test";
1047
1048 if (mIsHwModule) {
1049 // This test is not for HW module implementation.
1050 return;
1051 }
1052
1053 // Get the camera list
1054 loadCameraList();
1055
1056 // Test each reported camera
1057 for (auto&& cam : mCameraInfo) {
1058 bool isLogicalCam = false;
1059 getPhysicalCameraIds(cam.id, isLogicalCam);
1060 if (isLogicalCam) {
1061 // TODO(b/145465724): Support camera parameter programming on
1062 // logical devices.
1063 LOG(INFO) << "Skip a logical device " << cam.id;
1064 continue;
1065 }
1066
1067 // Read a target resolution from the metadata
1068 Stream targetCfg = getFirstStreamConfiguration(
1069 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
1070 ASSERT_GT(targetCfg.width, 0);
1071 ASSERT_GT(targetCfg.height, 0);
1072
1073 // Create two camera clients.
1074 std::shared_ptr<IEvsCamera> pPrimaryCam;
1075 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pPrimaryCam).isOk());
1076 EXPECT_NE(pPrimaryCam, nullptr);
1077
1078 // Store a camera handle for a clean-up
1079 mActiveCameras.push_back(pPrimaryCam);
1080
1081 std::shared_ptr<IEvsCamera> pSecondaryCam;
1082 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pSecondaryCam).isOk());
1083 EXPECT_NE(pSecondaryCam, nullptr);
1084
1085 // Store a camera handle for a clean-up
1086 mActiveCameras.push_back(pSecondaryCam);
1087
1088 // Get the parameter list
1089 std::vector<CameraParam> camPrimaryCmds, camSecondaryCmds;
1090 ASSERT_TRUE(pPrimaryCam->getParameterList(&camPrimaryCmds).isOk());
1091 ASSERT_TRUE(pSecondaryCam->getParameterList(&camSecondaryCmds).isOk());
1092 if (camPrimaryCmds.size() < 1 || camSecondaryCmds.size() < 1) {
1093 // Skip a camera device if it does not support any parameter.
1094 continue;
1095 }
1096
1097 // Set up per-client frame receiver objects which will fire up their own threads
1098 std::shared_ptr<FrameHandler> frameHandlerPrimary = ndk::SharedRefBase::make<FrameHandler>(
1099 pPrimaryCam, cam, nullptr, FrameHandler::eAutoReturn);
1100 std::shared_ptr<FrameHandler> frameHandlerSecondary =
1101 ndk::SharedRefBase::make<FrameHandler>(pSecondaryCam, cam, nullptr,
1102 FrameHandler::eAutoReturn);
1103 EXPECT_NE(frameHandlerPrimary, nullptr);
1104 EXPECT_NE(frameHandlerSecondary, nullptr);
1105
1106 // Set one client as the primary client.
1107 ASSERT_TRUE(pPrimaryCam->setPrimaryClient().isOk());
1108
1109 // Try to set another client as the primary client.
1110 ASSERT_FALSE(pSecondaryCam->setPrimaryClient().isOk());
1111
1112 // Start the camera's video stream via the primary client.
1113 ASSERT_TRUE(frameHandlerPrimary->startStream());
1114
1115 // Ensure the stream starts
1116 frameHandlerPrimary->waitForFrameCount(1);
1117
1118 // Start the camera's video stream via another client
1119 ASSERT_TRUE(frameHandlerSecondary->startStream());
1120
1121 // Ensure the stream starts
1122 frameHandlerSecondary->waitForFrameCount(1);
1123
1124 int32_t val0 = 0;
1125 std::vector<int32_t> values;
1126 EvsEventDesc aNotification0 = {};
1127 EvsEventDesc aNotification1 = {};
1128 for (auto& cmd : camPrimaryCmds) {
1129 // Get a valid parameter value range
1130 ParameterRange range;
1131 ASSERT_TRUE(pPrimaryCam->getIntParameterRange(cmd, &range).isOk());
1132 if (cmd == CameraParam::ABSOLUTE_FOCUS) {
1133 // Try to turn off auto-focus
1134 values.clear();
1135 ASSERT_TRUE(
1136 pPrimaryCam->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
1137 for (auto&& v : values) {
1138 EXPECT_EQ(v, 0);
1139 }
1140 }
1141
1142 // Calculate a parameter value to program.
1143 val0 = range.min + (std::rand() % (range.max - range.min));
1144 val0 = val0 - (val0 % range.step);
1145
1146 // Prepare and start event listeners.
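// Both listeners must be running before the parameter is programmed so that
// neither client misses the PARAMETER_CHANGED notification.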
1147 bool listening0 = false;
1148 bool listening1 = false;
1149 std::condition_variable eventCond;
1150 std::thread listener0 = std::thread([cmd, val0, &aNotification0, &frameHandlerPrimary,
1151 &listening0, &listening1, &eventCond]() {
1152 listening0 = true;
1153 if (listening1) {
1154 eventCond.notify_all();
1155 }
1156
1157 EvsEventDesc aTargetEvent;
1158 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1159 aTargetEvent.payload.push_back(static_cast<int32_t>(cmd));
1160 aTargetEvent.payload.push_back(val0);
1161 if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification0)) {
1162 LOG(WARNING) << "A timer is expired before a target event is fired.";
1163 }
1164 });
1165 std::thread listener1 = std::thread([cmd, val0, &aNotification1, &frameHandlerSecondary,
1166 &listening0, &listening1, &eventCond]() {
1167 listening1 = true;
1168 if (listening0) {
1169 eventCond.notify_all();
1170 }
1171
1172 EvsEventDesc aTargetEvent;
1173 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1174 aTargetEvent.payload.push_back(static_cast<int32_t>(cmd));
1175 aTargetEvent.payload.push_back(val0);
1176 if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification1)) {
1177 LOG(WARNING) << "A timer is expired before a target event is fired.";
1178 }
1179 });
1180
1181 // Wait until a listening thread starts.
1182 std::mutex eventLock;
1183 std::unique_lock<std::mutex> lock(eventLock);
1184 auto timer = std::chrono::system_clock::now();
1185 while (!listening0 || !listening1) {
1186 eventCond.wait_until(lock, timer + 1s);
1187 }
1188 lock.unlock();
1189
1190 // Try to program a parameter
1191 values.clear();
1192 ASSERT_TRUE(pPrimaryCam->setIntParameter(cmd, val0, &values).isOk());
1193 for (auto&& v : values) {
1194 EXPECT_EQ(val0, v) << "Values are not matched.";
1195 }
1196
1197 // Join a listening thread.
1198 if (listener0.joinable()) {
1199 listener0.join();
1200 }
1201 if (listener1.joinable()) {
1202 listener1.join();
1203 }
1204
1205 // Verify a change notification
1206 ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
1207 static_cast<EvsEventType>(aNotification0.aType));
1208 ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
1209 static_cast<EvsEventType>(aNotification1.aType));
1210 ASSERT_GE(aNotification0.payload.size(), 2);
1211 ASSERT_GE(aNotification1.payload.size(), 2);
1212 ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification0.payload[0]));
1213 ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification1.payload[0]));
1214 for (auto&& v : values) {
1215 ASSERT_EQ(v, aNotification0.payload[1]);
1216 ASSERT_EQ(v, aNotification1.payload[1]);
1217 }
1218
1219 // Clients expect to receive a parameter change notification
1220 // whenever the primary client adjusts it.
1221 values.clear();
1222 ASSERT_TRUE(pPrimaryCam->getIntParameter(cmd, &values).isOk());
1223 for (auto&& v : values) {
1224 EXPECT_EQ(val0, v) << "Values are not matched.";
1225 }
1226 }
1227
1228 // Try to adjust a parameter via non-primary client
1229 values.clear();
1230 ASSERT_FALSE(pSecondaryCam->setIntParameter(camSecondaryCmds[0], val0, &values).isOk());
1231
1232 // Non-primary client attempts to be a primary client
1233 ASSERT_FALSE(pSecondaryCam->setPrimaryClient().isOk());
1234
1235 // Primary client retires from a primary client role
1236 bool listening = false;
1237 std::condition_variable eventCond;
1238 std::thread listener =
1239 std::thread([&aNotification0, &frameHandlerSecondary, &listening, &eventCond]() {
1240 listening = true;
1241 eventCond.notify_all();
1242
1243 EvsEventDesc aTargetEvent;
1244 aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
1245 if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification0, true)) {
1246 LOG(WARNING) << "A timer is expired before a target event is fired.";
1247 }
1248 });
1249
1250 std::mutex eventLock;
1251 auto timer = std::chrono::system_clock::now();
1252 std::unique_lock<std::mutex> lock(eventLock);
1253 while (!listening) {
1254 eventCond.wait_until(lock, timer + 1s);
1255 }
1256 lock.unlock();
1257
1258 ASSERT_TRUE(pPrimaryCam->unsetPrimaryClient().isOk());
1259
1260 if (listener.joinable()) {
1261 listener.join();
1262 }
1263 ASSERT_EQ(EvsEventType::MASTER_RELEASED, static_cast<EvsEventType>(aNotification0.aType));
1264
1265 // Try to adjust a parameter after being retired
1266 values.clear();
1267 ASSERT_FALSE(pPrimaryCam->setIntParameter(camPrimaryCmds[0], val0, &values).isOk());
1268
1269 // Non-primary client becomes a primary client
1270 ASSERT_TRUE(pSecondaryCam->setPrimaryClient().isOk());
1271
1272 // Try to adjust a parameter via new primary client
1273 for (auto& cmd : camSecondaryCmds) {
1274 // Get a valid parameter value range
1275 ParameterRange range;
1276 ASSERT_TRUE(pSecondaryCam->getIntParameterRange(cmd, &range).isOk());
1277
1278 values.clear();
1279 if (cmd == CameraParam::ABSOLUTE_FOCUS) {
1280 // Try to turn off auto-focus
1281 values.clear();
1282 ASSERT_TRUE(
1283 pSecondaryCam->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
1284 for (auto&& v : values) {
1285 EXPECT_EQ(v, 0);
1286 }
1287 }
1288
1289 // Calculate a parameter value to program; it is rounded down to a valid step.
1290 val0 = range.min + (std::rand() % (range.max - range.min));
1291 val0 = val0 - (val0 % range.step);
1292
1293 // Prepare and start event listeners.
1294 bool listening0 = false;
1295 bool listening1 = false;
1296 std::condition_variable eventCond;
1297 std::thread listener0 = std::thread([&]() {
1298 listening0 = true;
1299 if (listening1) {
1300 eventCond.notify_all();
1301 }
1302
1303 EvsEventDesc aTargetEvent;
1304 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1305 aTargetEvent.payload.push_back(static_cast<int32_t>(cmd));
1306 aTargetEvent.payload.push_back(val0);
1307 if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification0)) {
1308 LOG(WARNING) << "A timer is expired before a target event is fired.";
1309 }
1310 });
1311 std::thread listener1 = std::thread([&]() {
1312 listening1 = true;
1313 if (listening0) {
1314 eventCond.notify_all();
1315 }
1316
1317 EvsEventDesc aTargetEvent;
1318 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1319 aTargetEvent.payload.push_back(static_cast<int32_t>(cmd));
1320 aTargetEvent.payload.push_back(val0);
1321 if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification1)) {
1322 LOG(WARNING) << "A timer is expired before a target event is fired.";
1323 }
1324 });
1325
1326 // Wait until a listening thread starts.
1327 std::mutex eventLock;
1328 std::unique_lock<std::mutex> lock(eventLock);
1329 auto timer = std::chrono::system_clock::now();
1330 while (!listening0 || !listening1) {
1331 eventCond.wait_until(lock, timer + 1s);
1332 }
1333 lock.unlock();
1334
1335 // Try to program a parameter
1336 values.clear();
1337 ASSERT_TRUE(pSecondaryCam->setIntParameter(cmd, val0, &values).isOk());
1338
1339 // Clients expect to receive a parameter change notification
1340 // whenever the primary client adjusts it.
1341 values.clear();
1342 ASSERT_TRUE(pSecondaryCam->getIntParameter(cmd, &values).isOk());
1343 for (auto&& v : values) {
1344 EXPECT_EQ(val0, v) << "Values are not matched.";
1345 }
1346
1347 // Join a listening thread.
1348 if (listener0.joinable()) {
1349 listener0.join();
1350 }
1351 if (listener1.joinable()) {
1352 listener1.join();
1353 }
1354
1355 // Verify a change notification
1356 ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
1357 static_cast<EvsEventType>(aNotification0.aType));
1358 ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
1359 static_cast<EvsEventType>(aNotification1.aType));
1360 ASSERT_GE(aNotification0.payload.size(), 2);
1361 ASSERT_GE(aNotification1.payload.size(), 2);
1362 ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification0.payload[0]));
1363 ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification1.payload[0]));
1364 for (auto&& v : values) {
1365 ASSERT_EQ(v, aNotification0.payload[1]);
1366 ASSERT_EQ(v, aNotification1.payload[1]);
1367 }
1368 }
1369
1370 // New primary client retires from the role
1371 ASSERT_TRUE(pSecondaryCam->unsetPrimaryClient().isOk());
1372
1373 // Shutdown
1374 frameHandlerPrimary->shutdown();
1375 frameHandlerSecondary->shutdown();
1376
1377 // Explicitly release the camera
1378 ASSERT_TRUE(mEnumerator->closeCamera(pPrimaryCam).isOk());
1379 ASSERT_TRUE(mEnumerator->closeCamera(pSecondaryCam).isOk());
1380 mActiveCameras.clear();
1381 }
1382}
1383
1384/*
1385 * HighPriorityCameraClient:
1386 * The EVS client that owns the display is prioritized and can therefore take over
1387 * the primary client role from other EVS clients that do not own the display.
1388 */
1389TEST_P(EvsAidlTest, HighPriorityCameraClient) {
1390 LOG(INFO) << "Starting HighPriorityCameraClient test";
1391
1392 if (mIsHwModule) {
1393 // This test is not for HW module implementation.
1394 return;
1395 }
1396
1397 // Get the camera list
1398 loadCameraList();
1399
1400 // Test each reported camera
1401 for (auto&& cam : mCameraInfo) {
1402 bool isLogicalCam = false;
1403 if (getPhysicalCameraIds(cam.id, isLogicalCam); isLogicalCam) {
1404 LOG(INFO) << "Skip a logical device, " << cam.id;
1405 continue;
1406 }
1407
1408 // Request available display IDs
1409 uint8_t targetDisplayId = 0;
1410 std::vector<uint8_t> displayIds;
1411 ASSERT_TRUE(mEnumerator->getDisplayIdList(&displayIds).isOk());
1412 EXPECT_GT(displayIds.size(), 0);
1413 targetDisplayId = displayIds[0];
1414
1415 // Request exclusive access to the EVS display
1416 std::shared_ptr<IEvsDisplay> pDisplay;
1417 ASSERT_TRUE(mEnumerator->openDisplay(targetDisplayId, &pDisplay).isOk());
1418 EXPECT_NE(pDisplay, nullptr);
1419
1420 // Read a target resolution from the metadata
1421 Stream targetCfg = getFirstStreamConfiguration(
1422 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
1423 ASSERT_GT(targetCfg.width, 0);
1424 ASSERT_GT(targetCfg.height, 0);
1425
1426 // Create two clients
1427 std::shared_ptr<IEvsCamera> pCam0;
1428 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam0).isOk());
1429 EXPECT_NE(pCam0, nullptr);
1430
1431 // Store a camera handle for a clean-up
1432 mActiveCameras.push_back(pCam0);
1433
1434 std::shared_ptr<IEvsCamera> pCam1;
1435 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());
1436 EXPECT_NE(pCam1, nullptr);
1437
1438 // Store a camera handle for a clean-up
1439 mActiveCameras.push_back(pCam1);
1440
1441 // Get the parameter list; this test will use the first command in both
1442 // lists.
1443 std::vector<CameraParam> cam0Cmds, cam1Cmds;
1444 ASSERT_TRUE(pCam0->getParameterList(&cam0Cmds).isOk());
1445 ASSERT_TRUE(pCam1->getParameterList(&cam1Cmds).isOk());
1446 if (cam0Cmds.size() < 1 || cam1Cmds.size() < 1) {
1447 // Cannot execute this test.
Changyeon Jo7793baa2023-01-19 13:18:47 -08001448 ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
1449 continue;
Changyeon Jo80189012021-10-10 16:34:21 -07001450 }
1451
1452        // Set up frame receiver objects, each of which will fire up its own thread.
Frederick Mayle7056b242022-03-29 02:38:12 +00001453 std::shared_ptr<FrameHandler> frameHandler0 = ndk::SharedRefBase::make<FrameHandler>(
1454 pCam0, cam, nullptr, FrameHandler::eAutoReturn);
1455 std::shared_ptr<FrameHandler> frameHandler1 = ndk::SharedRefBase::make<FrameHandler>(
1456 pCam1, cam, nullptr, FrameHandler::eAutoReturn);
Changyeon Jo80189012021-10-10 16:34:21 -07001457 EXPECT_NE(frameHandler0, nullptr);
1458 EXPECT_NE(frameHandler1, nullptr);
1459
1460 // Activate the display
1461 ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME).isOk());
1462
1463 // Start the camera's video stream
1464 ASSERT_TRUE(frameHandler0->startStream());
1465 ASSERT_TRUE(frameHandler1->startStream());
1466
1467 // Ensure the stream starts
1468 frameHandler0->waitForFrameCount(1);
1469 frameHandler1->waitForFrameCount(1);
1470
1471 // Client 1 becomes a primary client and programs a parameter.
1472
1473 // Get a valid parameter value range
1474 ParameterRange range;
1475 ASSERT_TRUE(pCam1->getIntParameterRange(cam1Cmds[0], &range).isOk());
1476
1477 // Client1 becomes a primary client
1478 ASSERT_TRUE(pCam1->setPrimaryClient().isOk());
1479
1480 std::vector<int32_t> values;
1481 EvsEventDesc aTargetEvent = {};
1482 EvsEventDesc aNotification = {};
1483 bool listening = false;
1484 std::mutex eventLock;
1485 std::condition_variable eventCond;
1486 if (cam1Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
1487 std::thread listener =
1488 std::thread([&frameHandler0, &aNotification, &listening, &eventCond] {
1489 listening = true;
1490 eventCond.notify_all();
1491
1492 EvsEventDesc aTargetEvent;
1493 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
Changyeon Jo9f6f5922022-04-12 19:29:10 -07001494 aTargetEvent.payload.push_back(
1495 static_cast<int32_t>(CameraParam::AUTO_FOCUS));
1496 aTargetEvent.payload.push_back(0);
Changyeon Jo80189012021-10-10 16:34:21 -07001497 if (!frameHandler0->waitForEvent(aTargetEvent, aNotification)) {
1498                            LOG(WARNING) << "The timer expired before the target event was fired.";
1499 }
1500 });
1501
1502            // Wait until a listener starts.
1503 std::unique_lock<std::mutex> lock(eventLock);
1504 auto timer = std::chrono::system_clock::now();
1505 while (!listening) {
1506 eventCond.wait_until(lock, timer + 1s);
1507 }
1508 lock.unlock();
1509
1510 // Try to turn off auto-focus
1511 ASSERT_TRUE(pCam1->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
1512 for (auto&& v : values) {
1513 EXPECT_EQ(v, 0);
1514 }
1515
1516 // Join a listener
1517 if (listener.joinable()) {
1518 listener.join();
1519 }
1520
1521 // Make sure AUTO_FOCUS is off.
1522 ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
1523 EvsEventType::PARAMETER_CHANGED);
1524 }
1525
1526        // Try to program a parameter with a random value drawn from [range.min, range.max)
1527        // and rounded down to a multiple of range.step.
1528 int32_t val0 = range.min + (std::rand() % (range.max - range.min));
1529 val0 = val0 - (val0 % range.step);
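        // For example, with hypothetical values range.min = 0, range.max = 255 and
        // range.step = 5, a raw draw of 133 becomes 133 - (133 % 5) = 130, a
        // step-aligned value inside the valid range.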
1530
1531 std::thread listener = std::thread(
1532 [&frameHandler1, &aNotification, &listening, &eventCond, &cam1Cmds, val0] {
1533 listening = true;
1534 eventCond.notify_all();
1535
1536 EvsEventDesc aTargetEvent;
1537 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
Changyeon Jo9f6f5922022-04-12 19:29:10 -07001538 aTargetEvent.payload.push_back(static_cast<int32_t>(cam1Cmds[0]));
1539 aTargetEvent.payload.push_back(val0);
Changyeon Jo80189012021-10-10 16:34:21 -07001540 if (!frameHandler1->waitForEvent(aTargetEvent, aNotification)) {
1541                        LOG(WARNING) << "The timer expired before the target event was fired.";
1542 }
1543 });
1544
1545        // Wait until a listener starts.
1546 listening = false;
1547 std::unique_lock<std::mutex> lock(eventLock);
1548 auto timer = std::chrono::system_clock::now();
1549 while (!listening) {
1550 eventCond.wait_until(lock, timer + 1s);
1551 }
1552 lock.unlock();
1553
1554 values.clear();
1555 ASSERT_TRUE(pCam1->setIntParameter(cam1Cmds[0], val0, &values).isOk());
1556 for (auto&& v : values) {
1557 EXPECT_EQ(val0, v);
1558 }
1559
1560 // Join a listener
1561 if (listener.joinable()) {
1562 listener.join();
1563 }
1564
1565 // Verify a change notification
1566 ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType), EvsEventType::PARAMETER_CHANGED);
Changyeon Jo9f6f5922022-04-12 19:29:10 -07001567 ASSERT_GE(aNotification.payload.size(), 2);
Changyeon Jo80189012021-10-10 16:34:21 -07001568 ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]), cam1Cmds[0]);
1569 for (auto&& v : values) {
Changyeon Jo9f6f5922022-04-12 19:29:10 -07001570 ASSERT_EQ(v, aNotification.payload[1]);
Changyeon Jo80189012021-10-10 16:34:21 -07001571 }
1572
1573 listener = std::thread([&frameHandler1, &aNotification, &listening, &eventCond] {
1574 listening = true;
1575 eventCond.notify_all();
1576
1577 EvsEventDesc aTargetEvent;
1578 aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
1579 if (!frameHandler1->waitForEvent(aTargetEvent, aNotification, true)) {
1580                LOG(WARNING) << "The timer expired before the target event was fired.";
1581 }
1582 });
1583
1584        // Wait until a listener starts.
1585 listening = false;
1586 lock.lock();
1587 timer = std::chrono::system_clock::now();
1588 while (!listening) {
1589 eventCond.wait_until(lock, timer + 1s);
1590 }
1591 lock.unlock();
1592
1593 // Client 0 steals a primary client role
1594 ASSERT_TRUE(pCam0->forcePrimaryClient(pDisplay).isOk());
1595
1596 // Join a listener
1597 if (listener.joinable()) {
1598 listener.join();
1599 }
1600
1601 ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType), EvsEventType::MASTER_RELEASED);
1602
1603 // Client 0 programs a parameter
1604 val0 = range.min + (std::rand() % (range.max - range.min));
1605
1606        // Round the value down to a multiple of range.step
1607 val0 = val0 - (val0 % range.step);
1608
1609 if (cam0Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
1610 std::thread listener =
1611 std::thread([&frameHandler1, &aNotification, &listening, &eventCond] {
1612 listening = true;
1613 eventCond.notify_all();
1614
1615 EvsEventDesc aTargetEvent;
1616 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
Changyeon Jo9f6f5922022-04-12 19:29:10 -07001617 aTargetEvent.payload.push_back(
1618 static_cast<int32_t>(CameraParam::AUTO_FOCUS));
1619 aTargetEvent.payload.push_back(0);
Changyeon Jo80189012021-10-10 16:34:21 -07001620 if (!frameHandler1->waitForEvent(aTargetEvent, aNotification)) {
1621                            LOG(WARNING) << "The timer expired before the target event was fired.";
1622 }
1623 });
1624
1625            // Wait until a listener starts.
1626 std::unique_lock<std::mutex> lock(eventLock);
1627 auto timer = std::chrono::system_clock::now();
1628 while (!listening) {
1629 eventCond.wait_until(lock, timer + 1s);
1630 }
1631 lock.unlock();
1632
1633 // Try to turn off auto-focus
1634 values.clear();
1635 ASSERT_TRUE(pCam0->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
1636 for (auto&& v : values) {
1637 EXPECT_EQ(v, 0);
1638 }
1639
1640 // Join a listener
1641 if (listener.joinable()) {
1642 listener.join();
1643 }
1644
1645 // Make sure AUTO_FOCUS is off.
1646 ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
1647 EvsEventType::PARAMETER_CHANGED);
1648 }
1649
1650 listener = std::thread(
1651 [&frameHandler0, &aNotification, &listening, &eventCond, &cam0Cmds, val0] {
1652 listening = true;
1653 eventCond.notify_all();
1654
1655 EvsEventDesc aTargetEvent;
1656 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
Changyeon Jo9f6f5922022-04-12 19:29:10 -07001657 aTargetEvent.payload.push_back(static_cast<int32_t>(cam0Cmds[0]));
1658 aTargetEvent.payload.push_back(val0);
Changyeon Jo80189012021-10-10 16:34:21 -07001659 if (!frameHandler0->waitForEvent(aTargetEvent, aNotification)) {
1660                        LOG(WARNING) << "The timer expired before the target event was fired.";
1661 }
1662 });
1663
1664        // Wait until a listener starts.
1665 listening = false;
1666 timer = std::chrono::system_clock::now();
1667 lock.lock();
1668 while (!listening) {
1669 eventCond.wait_until(lock, timer + 1s);
1670 }
1671 lock.unlock();
1672
1673 values.clear();
1674 ASSERT_TRUE(pCam0->setIntParameter(cam0Cmds[0], val0, &values).isOk());
1675
1676 // Join a listener
1677 if (listener.joinable()) {
1678 listener.join();
1679 }
1680 // Verify a change notification
1681 ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType), EvsEventType::PARAMETER_CHANGED);
Changyeon Jo9f6f5922022-04-12 19:29:10 -07001682 ASSERT_GE(aNotification.payload.size(), 2);
Changyeon Jo80189012021-10-10 16:34:21 -07001683 ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]), cam0Cmds[0]);
1684 for (auto&& v : values) {
Changyeon Jo9f6f5922022-04-12 19:29:10 -07001685 ASSERT_EQ(v, aNotification.payload[1]);
Changyeon Jo80189012021-10-10 16:34:21 -07001686 }
1687
1688 // Turn off the display (yes, before the stream stops -- it should be handled)
1689 ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::NOT_VISIBLE).isOk());
1690
1691 // Shut down the streamer
1692 frameHandler0->shutdown();
1693 frameHandler1->shutdown();
1694
1695 // Explicitly release the camera
1696 ASSERT_TRUE(mEnumerator->closeCamera(pCam0).isOk());
1697 ASSERT_TRUE(mEnumerator->closeCamera(pCam1).isOk());
1698 mActiveCameras.clear();
Changyeon Jo80189012021-10-10 16:34:21 -07001699
Changyeon Jo017cb982022-11-16 22:04:38 +00001700 // Explicitly release the display
1701 ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
1702 }
Changyeon Jo80189012021-10-10 16:34:21 -07001703}
1704
1705/*
1706 * CameraUseStreamConfigToDisplay:
1707 * End-to-end test of data flowing from the camera to the display. Similar to the
1708 * CameraToDisplayRoundTrip test case, but this case retrieves the available stream
1709 * configurations from EVS and uses one of them to start a video stream.
1710 */
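// Summary of the flow below: the largest-area configuration offering at least minReqFps
// is selected from the camera metadata and used to open the camera; the received frames
// are rendered to the EVS display, and the counts of frames received and displayed must
// match while meeting the minimum frame-rate requirement.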
1711TEST_P(EvsAidlTest, CameraUseStreamConfigToDisplay) {
1712 LOG(INFO) << "Starting CameraUseStreamConfigToDisplay test";
1713
1714 // Get the camera list
1715 loadCameraList();
1716
1717 // Request available display IDs
1718 uint8_t targetDisplayId = 0;
1719 std::vector<uint8_t> displayIds;
1720 ASSERT_TRUE(mEnumerator->getDisplayIdList(&displayIds).isOk());
1721 EXPECT_GT(displayIds.size(), 0);
1722 targetDisplayId = displayIds[0];
1723
Changyeon Jo80189012021-10-10 16:34:21 -07001724 // Test each reported camera
1725 for (auto&& cam : mCameraInfo) {
Changyeon Jo017cb982022-11-16 22:04:38 +00001726 // Request exclusive access to the EVS display
1727 std::shared_ptr<IEvsDisplay> pDisplay;
1728 ASSERT_TRUE(mEnumerator->openDisplay(targetDisplayId, &pDisplay).isOk());
1729 EXPECT_NE(pDisplay, nullptr);
1730
Changyeon Jo80189012021-10-10 16:34:21 -07001731        // Choose a configuration that has a frame rate of at least minReqFps.
1732 Stream targetCfg = {};
1733 const int32_t minReqFps = 15;
1734 int32_t maxArea = 0;
1735 camera_metadata_entry_t streamCfgs;
1736 bool foundCfg = false;
1737 if (!find_camera_metadata_entry(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()),
1738 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
1739 &streamCfgs)) {
1740 // Stream configurations are found in metadata
1741 RawStreamConfig* ptr = reinterpret_cast<RawStreamConfig*>(streamCfgs.data.i32);
1742 for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
Changyeon Jo7f5ad612022-08-17 21:47:58 -07001743 if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
Changyeon Jo80189012021-10-10 16:34:21 -07001744 if (ptr->width * ptr->height > maxArea && ptr->framerate >= minReqFps) {
1745 targetCfg.width = ptr->width;
1746 targetCfg.height = ptr->height;
Changyeon Jo7f5ad612022-08-17 21:47:58 -07001747 targetCfg.format = static_cast<PixelFormat>(ptr->format);
Changyeon Jo80189012021-10-10 16:34:21 -07001748
1749 maxArea = ptr->width * ptr->height;
1750 foundCfg = true;
1751 }
1752 }
1753 ++ptr;
1754 }
1755 }
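        // The loop above assumes RawStreamConfig (defined earlier in this file) overlays
        // each ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS entry as int32_t fields
        // covering direction, format, width, height and an additional framerate value,
        // with kStreamCfgSz int32_t values consumed per configuration.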
Changyeon Jo80189012021-10-10 16:34:21 -07001756
1757 if (!foundCfg) {
1758 // Current EVS camera does not provide stream configurations in the
1759 // metadata.
1760 continue;
1761 }
1762
1763 std::shared_ptr<IEvsCamera> pCam;
1764 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
1765 EXPECT_NE(pCam, nullptr);
1766
1767 // Store a camera handle for a clean-up
1768 mActiveCameras.push_back(pCam);
1769
1770 // Set up a frame receiver object which will fire up its own thread.
Frederick Mayle7056b242022-03-29 02:38:12 +00001771 std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
1772 pCam, cam, pDisplay, FrameHandler::eAutoReturn);
Changyeon Jo80189012021-10-10 16:34:21 -07001773 EXPECT_NE(frameHandler, nullptr);
1774
1775 // Activate the display
1776 ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME).isOk());
1777
1778 // Start the camera's video stream
1779 ASSERT_TRUE(frameHandler->startStream());
1780
1781 // Wait a while to let the data flow
1782 static const int kSecondsToWait = 5;
1783 const int streamTimeMs =
1784 kSecondsToWait * kSecondsToMilliseconds - kMaxStreamStartMilliseconds;
1785 const unsigned minimumFramesExpected =
1786 streamTimeMs * kMinimumFramesPerSecond / kSecondsToMilliseconds;
1787 sleep(kSecondsToWait);
1788 unsigned framesReceived = 0;
1789 unsigned framesDisplayed = 0;
1790 frameHandler->getFramesCounters(&framesReceived, &framesDisplayed);
1791 EXPECT_EQ(framesReceived, framesDisplayed);
1792 EXPECT_GE(framesDisplayed, minimumFramesExpected);
1793
1794 // Turn off the display (yes, before the stream stops -- it should be handled)
1795 ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::NOT_VISIBLE).isOk());
1796
1797 // Shut down the streamer
1798 frameHandler->shutdown();
1799
1800 // Explicitly release the camera
1801 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
1802 mActiveCameras.clear();
Changyeon Jo80189012021-10-10 16:34:21 -07001803
Changyeon Jo017cb982022-11-16 22:04:38 +00001804 // Explicitly release the display
1805 ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
1806 }
Changyeon Jo80189012021-10-10 16:34:21 -07001807}
1808
1809/*
1810 * MultiCameraStreamUseConfig:
1811 * Verify that each client can start and stop video streams on the same
1812 * underlying camera with the same configuration.
1813 */
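// Summary of the checks below: after opening the camera with a chosen stream
// configuration, opening a second client with a different stream id must fail while
// reopening with the same configuration must succeed; both clients must then stream at
// the required minimum frame rate, and shutting one handler down must not stall the other.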
1814TEST_P(EvsAidlTest, MultiCameraStreamUseConfig) {
1815    LOG(INFO) << "Starting MultiCameraStreamUseConfig test";
1816
1817 if (mIsHwModule) {
1818        // This test is not for the HW module implementation.
1819 return;
1820 }
1821
1822 // Get the camera list
1823 loadCameraList();
1824
1825 // Test each reported camera
1826 for (auto&& cam : mCameraInfo) {
1827        // Choose a configuration that has a frame rate of at least minReqFps.
1828 Stream targetCfg = {};
1829 const int32_t minReqFps = 15;
1830 int32_t maxArea = 0;
1831 camera_metadata_entry_t streamCfgs;
1832 bool foundCfg = false;
1833 if (!find_camera_metadata_entry(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()),
1834 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
1835 &streamCfgs)) {
1836 // Stream configurations are found in metadata
1837 RawStreamConfig* ptr = reinterpret_cast<RawStreamConfig*>(streamCfgs.data.i32);
1838 for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
Changyeon Jo7f5ad612022-08-17 21:47:58 -07001839 if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
Changyeon Jo80189012021-10-10 16:34:21 -07001840 if (ptr->width * ptr->height > maxArea && ptr->framerate >= minReqFps) {
1841 targetCfg.width = ptr->width;
1842 targetCfg.height = ptr->height;
Changyeon Jo7f5ad612022-08-17 21:47:58 -07001843 targetCfg.format = static_cast<PixelFormat>(ptr->format);
Changyeon Jo80189012021-10-10 16:34:21 -07001844
1845 maxArea = ptr->width * ptr->height;
1846 foundCfg = true;
1847 }
1848 }
1849 ++ptr;
1850 }
1851 }
Changyeon Jo80189012021-10-10 16:34:21 -07001852
1853 if (!foundCfg) {
1854 LOG(INFO) << "Device " << cam.id
1855 << " does not provide a list of supported stream configurations, skipped";
1856 continue;
1857 }
1858
1859 // Create the first camera client with a selected stream configuration.
1860 std::shared_ptr<IEvsCamera> pCam0;
1861 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam0).isOk());
1862 EXPECT_NE(pCam0, nullptr);
1863
1864 // Store a camera handle for a clean-up
1865 mActiveCameras.push_back(pCam0);
1866
1867        // Try to create a second camera client with a different stream
1868        // configuration.
1869 int32_t id = targetCfg.id;
1870 targetCfg.id += 1; // EVS manager sees only the stream id.
1871 std::shared_ptr<IEvsCamera> pCam1;
1872 ASSERT_FALSE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());
1873
1874        // Try again with the same stream configuration.
1875 targetCfg.id = id;
1876 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());
1877 EXPECT_NE(pCam1, nullptr);
1878
1879        // Set up per-client frame receiver objects, each of which will fire up its own thread
Frederick Mayle7056b242022-03-29 02:38:12 +00001880 std::shared_ptr<FrameHandler> frameHandler0 = ndk::SharedRefBase::make<FrameHandler>(
1881 pCam0, cam, nullptr, FrameHandler::eAutoReturn);
1882 std::shared_ptr<FrameHandler> frameHandler1 = ndk::SharedRefBase::make<FrameHandler>(
1883 pCam1, cam, nullptr, FrameHandler::eAutoReturn);
Changyeon Jo80189012021-10-10 16:34:21 -07001884 EXPECT_NE(frameHandler0, nullptr);
1885 EXPECT_NE(frameHandler1, nullptr);
1886
1887        // Start the camera's video stream via both clients
1888 ASSERT_TRUE(frameHandler0->startStream());
1889 ASSERT_TRUE(frameHandler1->startStream());
1890
1891 // Ensure the stream starts
1892 frameHandler0->waitForFrameCount(1);
1893 frameHandler1->waitForFrameCount(1);
1894
1895 nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
1896
1897 // Wait a bit, then ensure both clients get at least the required minimum number of frames
1898 sleep(5);
1899 nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
1900 unsigned framesReceived0 = 0, framesReceived1 = 0;
1901 frameHandler0->getFramesCounters(&framesReceived0, nullptr);
1902 frameHandler1->getFramesCounters(&framesReceived1, nullptr);
1903 framesReceived0 = framesReceived0 - 1; // Back out the first frame we already waited for
1904 framesReceived1 = framesReceived1 - 1; // Back out the first frame we already waited for
1905 nsecs_t runTime = end - firstFrame;
1906 float framesPerSecond0 = framesReceived0 / (runTime * kNanoToSeconds);
1907 float framesPerSecond1 = framesReceived1 / (runTime * kNanoToSeconds);
1908 LOG(INFO) << "Measured camera rate " << std::scientific << framesPerSecond0 << " fps and "
1909 << framesPerSecond1 << " fps";
1910 EXPECT_GE(framesPerSecond0, kMinimumFramesPerSecond);
1911 EXPECT_GE(framesPerSecond1, kMinimumFramesPerSecond);
1912
1913        // Shut down one client
1914 frameHandler0->shutdown();
1915
1916 // Read frame counters again
1917 frameHandler0->getFramesCounters(&framesReceived0, nullptr);
1918 frameHandler1->getFramesCounters(&framesReceived1, nullptr);
1919
1920 // Wait a bit again
1921 sleep(5);
1922 unsigned framesReceivedAfterStop0 = 0, framesReceivedAfterStop1 = 0;
1923 frameHandler0->getFramesCounters(&framesReceivedAfterStop0, nullptr);
1924 frameHandler1->getFramesCounters(&framesReceivedAfterStop1, nullptr);
1925 EXPECT_EQ(framesReceived0, framesReceivedAfterStop0);
1926 EXPECT_LT(framesReceived1, framesReceivedAfterStop1);
1927
1928        // Shut down the other client
1929 frameHandler1->shutdown();
1930
1931 // Explicitly release the camera
1932 ASSERT_TRUE(mEnumerator->closeCamera(pCam0).isOk());
1933 ASSERT_TRUE(mEnumerator->closeCamera(pCam1).isOk());
1934 mActiveCameras.clear();
1935 }
1936}
1937
1938/*
1939 * LogicalCameraMetadata:
1940 * Opens each logical camera reported by the enumerator and validates its metadata by
1941 * checking its capabilities and locating the supporting physical camera device
1942 * identifiers.
1943 */
1944TEST_P(EvsAidlTest, LogicalCameraMetadata) {
1945 LOG(INFO) << "Starting LogicalCameraMetadata test";
1946
1947 // Get the camera list
1948 loadCameraList();
1949
1950    // Check each reported camera; a logical camera must list its physical devices
1951 for (auto&& cam : mCameraInfo) {
1952 bool isLogicalCam = false;
1953 auto devices = getPhysicalCameraIds(cam.id, isLogicalCam);
1954 if (isLogicalCam) {
1955 ASSERT_GE(devices.size(), 1) << "Logical camera device must have at least one physical "
1956 "camera device ID in its metadata.";
1957 }
1958 }
1959}
1960
1961/*
1962 * CameraStreamExternalBuffering:
1963 * This is the same as CameraStreamBuffering except that frame buffers are allocated by
1964 * the test client and then imported by the EVS framework.
1965 */
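// Summary of the flow below: kBuffersToHold graphic buffers are allocated with
// GraphicBufferAllocator, wrapped into BufferDesc records, and handed to the camera via
// importExternalBuffers(); with FrameHandler::eNoAutoReturn the stream is expected to
// stall once the held buffers are exhausted and to resume as soon as one is returned.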
1966TEST_P(EvsAidlTest, CameraStreamExternalBuffering) {
1967 LOG(INFO) << "Starting CameraStreamExternalBuffering test";
1968
1969 // Arbitrary constant (should be > 1 and not too big)
1970 static const unsigned int kBuffersToHold = 3;
1971
1972 // Get the camera list
1973 loadCameraList();
1974
1975 // Acquire the graphics buffer allocator
1976 android::GraphicBufferAllocator& alloc(android::GraphicBufferAllocator::get());
1977 const auto usage =
1978 GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_SW_READ_RARELY | GRALLOC_USAGE_SW_WRITE_OFTEN;
1979
1980 // Test each reported camera
1981 for (auto&& cam : mCameraInfo) {
Changyeon Jo050b2ce2024-01-08 16:18:24 -08001982 bool isLogicalCam = false;
1983 getPhysicalCameraIds(cam.id, isLogicalCam);
1984 if (isLogicalCam) {
1985 LOG(INFO) << "Skip a logical device, " << cam.id;
1986 continue;
1987 }
1988
Changyeon Jo80189012021-10-10 16:34:21 -07001989 // Read a target resolution from the metadata
1990 Stream targetCfg = getFirstStreamConfiguration(
1991 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
1992 ASSERT_GT(targetCfg.width, 0);
1993 ASSERT_GT(targetCfg.height, 0);
1994
1995 // Allocate buffers to use
1996 std::vector<BufferDesc> buffers;
1997 buffers.resize(kBuffersToHold);
1998 for (auto i = 0; i < kBuffersToHold; ++i) {
1999 unsigned pixelsPerLine;
2000 buffer_handle_t memHandle = nullptr;
2001 android::status_t result =
2002 alloc.allocate(targetCfg.width, targetCfg.height,
2003 static_cast<android::PixelFormat>(targetCfg.format),
2004 /* layerCount = */ 1, usage, &memHandle, &pixelsPerLine,
2005 /* graphicBufferId = */ 0,
2006 /* requestorName = */ "CameraStreamExternalBufferingTest");
2007 if (result != android::NO_ERROR) {
2008 LOG(ERROR) << __FUNCTION__ << " failed to allocate memory.";
2009 // Release previous allocated buffers
2010 for (auto j = 0; j < i; j++) {
2011                    alloc.free(::android::dupFromAidl(buffers[j].buffer.handle));
2012 }
2013 return;
2014 } else {
2015 BufferDesc buf;
2016 HardwareBufferDescription* pDesc =
2017 reinterpret_cast<HardwareBufferDescription*>(&buf.buffer.description);
2018 pDesc->width = targetCfg.width;
2019 pDesc->height = targetCfg.height;
2020 pDesc->layers = 1;
2021 pDesc->format = targetCfg.format;
2022 pDesc->usage = static_cast<BufferUsage>(usage);
2023 pDesc->stride = pixelsPerLine;
2024 buf.buffer.handle = ::android::dupToAidl(memHandle);
2025 buf.bufferId = i; // Unique number to identify this buffer
2026 buffers[i] = std::move(buf);
2027 }
2028 }
2029
Changyeon Jo80189012021-10-10 16:34:21 -07002030 std::shared_ptr<IEvsCamera> pCam;
2031 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
2032 EXPECT_NE(pCam, nullptr);
2033
2034 // Store a camera handle for a clean-up
2035 mActiveCameras.push_back(pCam);
2036
2037 // Request to import buffers
2038 int delta = 0;
2039 auto status = pCam->importExternalBuffers(buffers, &delta);
Changyeon Jo80189012021-10-10 16:34:21 -07002040 ASSERT_TRUE(status.isOk());
2041 EXPECT_GE(delta, kBuffersToHold);
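        // delta is read here as the growth of the camera's buffer pool caused by the
        // import; importing kBuffersToHold external buffers is therefore expected to
        // increase it by at least that many (an assumption about the
        // importExternalBuffers() contract rather than something verified separately).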
2042
2043 // Set up a frame receiver object which will fire up its own thread.
Frederick Mayle7056b242022-03-29 02:38:12 +00002044 std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
2045 pCam, cam, nullptr, FrameHandler::eNoAutoReturn);
Changyeon Jo80189012021-10-10 16:34:21 -07002046 EXPECT_NE(frameHandler, nullptr);
2047
2048 // Start the camera's video stream
2049 ASSERT_TRUE(frameHandler->startStream());
2050
2051 // Check that the video stream stalls once we've gotten exactly the number of buffers
2052 // we requested since we told the frameHandler not to return them.
2053 sleep(1); // 1 second should be enough for at least 5 frames to be delivered worst case
2054 unsigned framesReceived = 0;
2055 frameHandler->getFramesCounters(&framesReceived, nullptr);
2056 ASSERT_LE(kBuffersToHold, framesReceived) << "Stream didn't stall at expected buffer limit";
2057
2058 // Give back one buffer
2059 EXPECT_TRUE(frameHandler->returnHeldBuffer());
2060
2061 // Once we return a buffer, it shouldn't take more than 1/10 second to get a new one
2062 // filled since we require 10fps minimum -- but give a 10% allowance just in case.
2063 unsigned framesReceivedAfter = 0;
2064 usleep(110 * kMillisecondsToMicroseconds);
2065 frameHandler->getFramesCounters(&framesReceivedAfter, nullptr);
2066 EXPECT_EQ(framesReceived + 1, framesReceivedAfter) << "Stream should've resumed";
2067
2068 // Even when the camera pointer goes out of scope, the FrameHandler object will
2069        // keep the stream alive unless we tell it to shut down.
2070        // Also note that the FrameHandler and the Camera have a mutual circular reference, so
2071 // we have to break that cycle in order for either of them to get cleaned up.
2072 frameHandler->shutdown();
2073
2074 // Explicitly release the camera
2075 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
2076 mActiveCameras.clear();
2077 // Release buffers
2078 for (auto& b : buffers) {
2079 alloc.free(::android::dupFromAidl(b.buffer.handle));
2080 }
2081 buffers.resize(0);
2082 }
2083}
2084
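/*
 * DeviceStatusCallbackRegistration below uses the DeviceStatusCallback helper defined
 * earlier in this file. A minimal sketch of such a callback, assuming the AIDL interface
 * exposes deviceStatusChanged(), would be:
 *
 *   class DeviceStatusCallback : public BnEvsEnumeratorStatusCallback {
 *       ndk::ScopedAStatus deviceStatusChanged(const std::vector<DeviceStatus>&) override {
 *           return ndk::ScopedAStatus::ok();
 *       }
 *   };
 */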
Changyeon Jodbcf52c2022-05-11 00:01:31 -07002085TEST_P(EvsAidlTest, DeviceStatusCallbackRegistration) {
2086 std::shared_ptr<IEvsEnumeratorStatusCallback> cb =
2087 ndk::SharedRefBase::make<DeviceStatusCallback>();
2088 ndk::ScopedAStatus status = mEnumerator->registerStatusCallback(cb);
2089 if (mIsHwModule) {
2090 ASSERT_TRUE(status.isOk());
2091 } else {
2092 // A callback registration may fail if a HIDL EVS HAL implementation is
2093 // running.
2094 ASSERT_TRUE(status.isOk() ||
2095 status.getServiceSpecificError() == static_cast<int>(EvsResult::NOT_SUPPORTED));
2096 }
2097}
2098
Changyeon Jo80189012021-10-10 16:34:21 -07002099/*
2100 * UltrasonicsArrayOpenClean:
2101 * Opens each ultrasonics array reported by the enumerator and then explicitly closes it via a
2102 * call to closeUltrasonicsArray. Then repeats the test to ensure all ultrasonics arrays
2103 * can be reopened.
2104 */
2105TEST_P(EvsAidlTest, UltrasonicsArrayOpenClean) {
2106 LOG(INFO) << "Starting UltrasonicsArrayOpenClean test";
2107
2108 // Get the ultrasonics array list
2109 loadUltrasonicsArrayList();
2110
2111 // Open and close each ultrasonics array twice
2112 for (auto&& ultraInfo : mUltrasonicsArraysInfo) {
2113 for (int pass = 0; pass < 2; pass++) {
2114 std::shared_ptr<IEvsUltrasonicsArray> pUltrasonicsArray;
2115 ASSERT_TRUE(
2116 mEnumerator
2117 ->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId, &pUltrasonicsArray)
2118 .isOk());
2119 EXPECT_NE(pUltrasonicsArray, nullptr);
2120
2121 // Verify that this ultrasonics array self-identifies correctly
2122 UltrasonicsArrayDesc desc;
2123 ASSERT_TRUE(pUltrasonicsArray->getUltrasonicArrayInfo(&desc).isOk());
2124 EXPECT_EQ(ultraInfo.ultrasonicsArrayId, desc.ultrasonicsArrayId);
2125 LOG(DEBUG) << "Found ultrasonics array " << ultraInfo.ultrasonicsArrayId;
2126
2127 // Explicitly close the ultrasonics array so resources are released right away
2128 ASSERT_TRUE(mEnumerator->closeUltrasonicsArray(pUltrasonicsArray).isOk());
2129 }
2130 }
2131}
2132
2133// Starts a stream and verifies all data received is valid.
2134TEST_P(EvsAidlTest, UltrasonicsVerifyStreamData) {
2135 LOG(INFO) << "Starting UltrasonicsVerifyStreamData";
2136
2137 // Get the ultrasonics array list
2138 loadUltrasonicsArrayList();
2139
2140 // For each ultrasonics array.
2141 for (auto&& ultraInfo : mUltrasonicsArraysInfo) {
2142 LOG(DEBUG) << "Testing ultrasonics array: " << ultraInfo.ultrasonicsArrayId;
2143
2144 std::shared_ptr<IEvsUltrasonicsArray> pUltrasonicsArray;
2145 ASSERT_TRUE(
2146 mEnumerator->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId, &pUltrasonicsArray)
2147 .isOk());
2148 EXPECT_NE(pUltrasonicsArray, nullptr);
2149
2150 std::shared_ptr<FrameHandlerUltrasonics> frameHandler =
Frederick Mayle7056b242022-03-29 02:38:12 +00002151 ndk::SharedRefBase::make<FrameHandlerUltrasonics>(pUltrasonicsArray);
Changyeon Jo80189012021-10-10 16:34:21 -07002152 EXPECT_NE(frameHandler, nullptr);
2153
2154 // Start stream.
2155 ASSERT_TRUE(pUltrasonicsArray->startStream(frameHandler).isOk());
2156
2157 // Wait 5 seconds to receive frames.
2158 sleep(5);
2159
2160 // Stop stream.
2161 ASSERT_TRUE(pUltrasonicsArray->stopStream().isOk());
2162
2163 EXPECT_GT(frameHandler->getReceiveFramesCount(), 0);
2164 EXPECT_TRUE(frameHandler->areAllFramesValid());
2165
2166 // Explicitly close the ultrasonics array so resources are released right away
2167 ASSERT_TRUE(mEnumerator->closeUltrasonicsArray(pUltrasonicsArray).isOk());
2168 }
2169}
2170
2171 // Sets frames in flight before and after the start of the stream and verifies success.
2172TEST_P(EvsAidlTest, UltrasonicsSetFramesInFlight) {
2173 LOG(INFO) << "Starting UltrasonicsSetFramesInFlight";
2174
2175 // Get the ultrasonics array list
2176 loadUltrasonicsArrayList();
2177
2178 // For each ultrasonics array.
2179 for (auto&& ultraInfo : mUltrasonicsArraysInfo) {
2180 LOG(DEBUG) << "Testing ultrasonics array: " << ultraInfo.ultrasonicsArrayId;
2181
2182 std::shared_ptr<IEvsUltrasonicsArray> pUltrasonicsArray;
2183 ASSERT_TRUE(
2184 mEnumerator->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId, &pUltrasonicsArray)
2185 .isOk());
2186 EXPECT_NE(pUltrasonicsArray, nullptr);
2187
2188 ASSERT_TRUE(pUltrasonicsArray->setMaxFramesInFlight(10).isOk());
2189
2190 std::shared_ptr<FrameHandlerUltrasonics> frameHandler =
Frederick Mayle7056b242022-03-29 02:38:12 +00002191 ndk::SharedRefBase::make<FrameHandlerUltrasonics>(pUltrasonicsArray);
Changyeon Jo80189012021-10-10 16:34:21 -07002192 EXPECT_NE(frameHandler, nullptr);
2193
2194 // Start stream.
2195 ASSERT_TRUE(pUltrasonicsArray->startStream(frameHandler).isOk());
2196 ASSERT_TRUE(pUltrasonicsArray->setMaxFramesInFlight(5).isOk());
2197
2198 // Stop stream.
2199 ASSERT_TRUE(pUltrasonicsArray->stopStream().isOk());
2200
2201 // Explicitly close the ultrasonics array so resources are released right away
2202 ASSERT_TRUE(mEnumerator->closeUltrasonicsArray(pUltrasonicsArray).isOk());
2203 }
2204}
2205
Hao Chene708da82023-03-28 16:20:57 -07002206/*
2207 * DisplayOpen:
2208 * Test both clean shut down and "aggressive open" device stealing behavior.
2209 */
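// Summary of the "aggressive open" behavior checked below: while a display is open, a
// second openDisplay() call on the same id must succeed and transfer ownership, after
// which the original handle must report DisplayState::DEAD and only the newest handle
// needs to be closed explicitly.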
2210TEST_P(EvsAidlTest, DisplayOpen) {
2211 LOG(INFO) << "Starting DisplayOpen test";
2212
2213 // Request available display IDs.
2214 std::vector<uint8_t> displayIds;
2215 ASSERT_TRUE(mEnumerator->getDisplayIdList(&displayIds).isOk());
2216 EXPECT_GT(displayIds.size(), 0);
2217
2218 for (const auto displayId : displayIds) {
2219 std::shared_ptr<IEvsDisplay> pDisplay;
2220
2221 // Request exclusive access to each EVS display, then let it go.
2222 ASSERT_TRUE(mEnumerator->openDisplay(displayId, &pDisplay).isOk());
2223 ASSERT_NE(pDisplay, nullptr);
2224
2225 {
2226 // Ask the display what its name is.
2227 DisplayDesc desc;
2228 ASSERT_TRUE(pDisplay->getDisplayInfo(&desc).isOk());
2229 LOG(DEBUG) << "Found display " << desc.id;
2230 }
2231
2232 ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
2233
2234 // Ensure we can reopen the display after it has been closed.
2235 ASSERT_TRUE(mEnumerator->openDisplay(displayId, &pDisplay).isOk());
2236 ASSERT_NE(pDisplay, nullptr);
2237
2238        // Open the display while it's already open -- ownership should be transferred.
2239 std::shared_ptr<IEvsDisplay> pDisplay2;
2240 ASSERT_TRUE(mEnumerator->openDisplay(displayId, &pDisplay2).isOk());
2241 ASSERT_NE(pDisplay2, nullptr);
2242
2243 {
2244 // Ensure the old display properly reports its assassination.
2245 DisplayState badState;
2246 EXPECT_TRUE(pDisplay->getDisplayState(&badState).isOk());
2247 EXPECT_EQ(badState, DisplayState::DEAD);
2248 }
2249
2250 // Close only the newest display instance -- the other should already be a zombie.
2251 ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay2).isOk());
2252
2253 // Finally, validate that we can open the display after the provoked failure above.
2254 ASSERT_TRUE(mEnumerator->openDisplay(displayId, &pDisplay).isOk());
2255 ASSERT_NE(pDisplay, nullptr);
2256 ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
2257 }
2258}
2259
2260/*
2261 * DisplayStates:
2262 * Validate that display states transition as expected and can be queried from either the display
2263 * object itself or the owning enumerator.
2264 */
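// Expected state sequence exercised below: state queries fail while no display is open;
// after openDisplay() the state is NOT_VISIBLE; setDisplayState() moves it to
// VISIBLE_ON_NEXT_FRAME; returning a target buffer makes it VISIBLE; turning the display
// off returns it to NOT_VISIBLE; and queries fail again once the display is closed or
// destroyed.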
2265TEST_P(EvsAidlTest, DisplayStates) {
2266 using std::literals::chrono_literals::operator""ms;
2267
2268 LOG(INFO) << "Starting DisplayStates test";
2269
2270 // Request available display IDs.
2271 std::vector<uint8_t> displayIds;
2272 ASSERT_TRUE(mEnumerator->getDisplayIdList(&displayIds).isOk());
2273 EXPECT_GT(displayIds.size(), 0);
2274
2275 for (const auto displayId : displayIds) {
2276 // Ensure the display starts in the expected state.
2277 {
2278 DisplayState state;
2279 EXPECT_FALSE(mEnumerator->getDisplayState(&state).isOk());
2280 }
Hao Chenbe819392023-03-29 16:18:47 -07002281 for (const auto displayIdToQuery : displayIds) {
2282 DisplayState state;
2283 EXPECT_FALSE(mEnumerator->getDisplayStateById(displayIdToQuery, &state).isOk());
2284 }
Hao Chene708da82023-03-28 16:20:57 -07002285
2286 // Scope to limit the lifetime of the pDisplay pointer, and thus the IEvsDisplay object.
2287 {
2288 // Request exclusive access to the EVS display.
2289 std::shared_ptr<IEvsDisplay> pDisplay;
2290 ASSERT_TRUE(mEnumerator->openDisplay(displayId, &pDisplay).isOk());
2291 ASSERT_NE(pDisplay, nullptr);
2292 {
2293 DisplayState state;
2294 EXPECT_TRUE(mEnumerator->getDisplayState(&state).isOk());
2295 EXPECT_EQ(state, DisplayState::NOT_VISIBLE);
2296 }
Hao Chenbe819392023-03-29 16:18:47 -07002297 for (const auto displayIdToQuery : displayIds) {
2298 DisplayState state;
Hao Chen6cfe1f82023-04-14 15:44:22 -07002299 bool get_state_ok =
2300 mEnumerator->getDisplayStateById(displayIdToQuery, &state).isOk();
2301 if (displayIdToQuery != displayId) {
2302 EXPECT_FALSE(get_state_ok);
2303 } else if (get_state_ok) {
Hao Chenbe819392023-03-29 16:18:47 -07002304 EXPECT_EQ(state, DisplayState::NOT_VISIBLE);
Hao Chenbe819392023-03-29 16:18:47 -07002305 }
2306 }
Hao Chene708da82023-03-28 16:20:57 -07002307
2308 // Activate the display.
2309 EXPECT_TRUE(pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME).isOk());
2310 {
2311 DisplayState state;
2312 EXPECT_TRUE(mEnumerator->getDisplayState(&state).isOk());
2313 EXPECT_EQ(state, DisplayState::VISIBLE_ON_NEXT_FRAME);
2314 }
2315 {
2316 DisplayState state;
2317 EXPECT_TRUE(pDisplay->getDisplayState(&state).isOk());
2318 EXPECT_EQ(state, DisplayState::VISIBLE_ON_NEXT_FRAME);
2319 }
Hao Chenbe819392023-03-29 16:18:47 -07002320 for (const auto displayIdToQuery : displayIds) {
2321 DisplayState state;
Hao Chen6cfe1f82023-04-14 15:44:22 -07002322 bool get_state_ok =
2323 mEnumerator->getDisplayStateById(displayIdToQuery, &state).isOk();
2324 if (displayIdToQuery != displayId) {
2325 EXPECT_FALSE(get_state_ok);
2326 } else if (get_state_ok) {
Hao Chenbe819392023-03-29 16:18:47 -07002327 EXPECT_EQ(state, DisplayState::VISIBLE_ON_NEXT_FRAME);
Hao Chenbe819392023-03-29 16:18:47 -07002328 }
2329 }
Hao Chene708da82023-03-28 16:20:57 -07002330
2331 // Get the output buffer we'd use to display the imagery.
2332 BufferDesc tgtBuffer;
2333 ASSERT_TRUE(pDisplay->getTargetBuffer(&tgtBuffer).isOk());
2334
2335 // Send the target buffer back for display (we didn't actually fill anything).
2336 EXPECT_TRUE(pDisplay->returnTargetBufferForDisplay(tgtBuffer).isOk());
2337
2338 // Sleep for a tenth of a second to ensure the driver has time to get the image
2339 // displayed.
2340 std::this_thread::sleep_for(100ms);
2341 {
2342 DisplayState state;
2343 EXPECT_TRUE(mEnumerator->getDisplayState(&state).isOk());
2344 EXPECT_EQ(state, DisplayState::VISIBLE);
2345 }
2346 {
2347 DisplayState state;
2348 EXPECT_TRUE(pDisplay->getDisplayState(&state).isOk());
2349 EXPECT_EQ(state, DisplayState::VISIBLE);
2350 }
Hao Chenbe819392023-03-29 16:18:47 -07002351 for (const auto displayIdToQuery : displayIds) {
2352 DisplayState state;
Hao Chen6cfe1f82023-04-14 15:44:22 -07002353 bool get_state_ok =
2354 mEnumerator->getDisplayStateById(displayIdToQuery, &state).isOk();
2355 if (displayIdToQuery != displayId) {
2356 EXPECT_FALSE(get_state_ok);
2357 } else if (get_state_ok) {
Hao Chenbe819392023-03-29 16:18:47 -07002358 EXPECT_EQ(state, DisplayState::VISIBLE);
Hao Chenbe819392023-03-29 16:18:47 -07002359 }
2360 }
Hao Chene708da82023-03-28 16:20:57 -07002361
2362 // Turn off the display.
2363 EXPECT_TRUE(pDisplay->setDisplayState(DisplayState::NOT_VISIBLE).isOk());
2364 std::this_thread::sleep_for(100ms);
2365 {
2366 DisplayState state;
2367 EXPECT_TRUE(mEnumerator->getDisplayState(&state).isOk());
2368 EXPECT_EQ(state, DisplayState::NOT_VISIBLE);
2369 }
2370 {
2371 DisplayState state;
2372 EXPECT_TRUE(pDisplay->getDisplayState(&state).isOk());
2373 EXPECT_EQ(state, DisplayState::NOT_VISIBLE);
2374 }
Hao Chenbe819392023-03-29 16:18:47 -07002375 for (const auto displayIdToQuery : displayIds) {
2376 DisplayState state;
Hao Chen6cfe1f82023-04-14 15:44:22 -07002377 bool get_state_ok =
2378 mEnumerator->getDisplayStateById(displayIdToQuery, &state).isOk();
2379 if (displayIdToQuery != displayId) {
2380 EXPECT_FALSE(get_state_ok);
2381 } else if (get_state_ok) {
Hao Chenbe819392023-03-29 16:18:47 -07002382 EXPECT_EQ(state, DisplayState::NOT_VISIBLE);
Hao Chenbe819392023-03-29 16:18:47 -07002383 }
2384 }
Hao Chene708da82023-03-28 16:20:57 -07002385
2386 // Close the display.
2387 mEnumerator->closeDisplay(pDisplay);
2388 }
2389
2390 // Now that the display pointer has gone out of scope, causing the IEvsDisplay interface
2391 // object to be destroyed, we should be back to the "not open" state.
2392 // NOTE: If we want this to pass without the sleep above, we'd have to add the
2393        // (now recommended) closeDisplay() call instead of relying on the smart pointer
2394 // going out of scope. I've not done that because I want to verify that the deletion
2395 // of the object does actually clean up (eventually).
2396 {
2397 DisplayState state;
2398 EXPECT_FALSE(mEnumerator->getDisplayState(&state).isOk());
2399 }
Hao Chenbe819392023-03-29 16:18:47 -07002400 for (const auto displayIdToQuery : displayIds) {
2401 DisplayState state;
2402 EXPECT_FALSE(mEnumerator->getDisplayStateById(displayIdToQuery, &state).isOk());
2403 }
Hao Chene708da82023-03-28 16:20:57 -07002404 }
2405}
2406
Changyeon Jo80189012021-10-10 16:34:21 -07002407GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(EvsAidlTest);
2408INSTANTIATE_TEST_SUITE_P(
2409 PerInstance, EvsAidlTest,
2410 testing::ValuesIn(android::getAidlHalInstanceNames(IEvsEnumerator::descriptor)),
2411 android::PrintInstanceNameToString);
2412
2413int main(int argc, char** argv) {
2414 ::testing::InitGoogleTest(&argc, argv);
2415 ABinderProcess_setThreadPoolMaxThreadCount(1);
2416 ABinderProcess_startThreadPool();
2417 return RUN_ALL_TESTS();
2418}