blob: 2706c499bad2a5b855fc9bab6813af1fdb14cf4d [file] [log] [blame]
Changyeon Jo80189012021-10-10 16:34:21 -07001/*
2 * Copyright (C) 2022 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "FrameHandler.h"
18#include "FrameHandlerUltrasonics.h"
19
20#include <aidl/Gtest.h>
21#include <aidl/Vintf.h>
Changyeon Jodbcf52c2022-05-11 00:01:31 -070022#include <aidl/android/hardware/automotive/evs/BnEvsEnumeratorStatusCallback.h>
Changyeon Jo80189012021-10-10 16:34:21 -070023#include <aidl/android/hardware/automotive/evs/BufferDesc.h>
24#include <aidl/android/hardware/automotive/evs/CameraDesc.h>
25#include <aidl/android/hardware/automotive/evs/CameraParam.h>
Changyeon Jodbcf52c2022-05-11 00:01:31 -070026#include <aidl/android/hardware/automotive/evs/DeviceStatus.h>
Changyeon Jo80189012021-10-10 16:34:21 -070027#include <aidl/android/hardware/automotive/evs/DisplayDesc.h>
28#include <aidl/android/hardware/automotive/evs/DisplayState.h>
29#include <aidl/android/hardware/automotive/evs/EvsEventDesc.h>
30#include <aidl/android/hardware/automotive/evs/EvsEventType.h>
31#include <aidl/android/hardware/automotive/evs/EvsResult.h>
32#include <aidl/android/hardware/automotive/evs/IEvsCamera.h>
33#include <aidl/android/hardware/automotive/evs/IEvsDisplay.h>
34#include <aidl/android/hardware/automotive/evs/IEvsEnumerator.h>
Changyeon Jodbcf52c2022-05-11 00:01:31 -070035#include <aidl/android/hardware/automotive/evs/IEvsEnumeratorStatusCallback.h>
Changyeon Jo80189012021-10-10 16:34:21 -070036#include <aidl/android/hardware/automotive/evs/IEvsUltrasonicsArray.h>
37#include <aidl/android/hardware/automotive/evs/ParameterRange.h>
38#include <aidl/android/hardware/automotive/evs/Stream.h>
39#include <aidl/android/hardware/automotive/evs/UltrasonicsArrayDesc.h>
40#include <aidl/android/hardware/common/NativeHandle.h>
41#include <aidl/android/hardware/graphics/common/HardwareBufferDescription.h>
42#include <aidl/android/hardware/graphics/common/PixelFormat.h>
43#include <aidlcommonsupport/NativeHandle.h>
44#include <android-base/logging.h>
45#include <android/binder_ibinder.h>
46#include <android/binder_manager.h>
47#include <android/binder_process.h>
48#include <android/binder_status.h>
49#include <system/camera_metadata.h>
50#include <ui/GraphicBuffer.h>
51#include <ui/GraphicBufferAllocator.h>
52#include <utils/Timers.h>
53
54#include <deque>
55#include <thread>
56#include <unordered_set>
57
58namespace {
59
60// These values are called out in the EVS design doc (as of Mar 8, 2017)
61constexpr int kMaxStreamStartMilliseconds = 500;
62constexpr int kMinimumFramesPerSecond = 10;
63constexpr int kSecondsToMilliseconds = 1000;
64constexpr int kMillisecondsToMicroseconds = 1000;
65constexpr float kNanoToMilliseconds = 0.000001f;
66constexpr float kNanoToSeconds = 0.000000001f;
67
68/*
69 * Please note that this is different from what is defined in
70 * libhardware/modules/camera/3_4/metadata/types.h; this has one additional
71 * field to store a framerate.
72 */
73typedef struct {
74 int32_t id;
75 int32_t width;
76 int32_t height;
77 int32_t format;
78 int32_t direction;
79 int32_t framerate;
80} RawStreamConfig;
81constexpr size_t kStreamCfgSz = sizeof(RawStreamConfig) / sizeof(int32_t);
82
Changyeon Jodbcf52c2022-05-11 00:01:31 -070083using ::aidl::android::hardware::automotive::evs::BnEvsEnumeratorStatusCallback;
Changyeon Jo80189012021-10-10 16:34:21 -070084using ::aidl::android::hardware::automotive::evs::BufferDesc;
85using ::aidl::android::hardware::automotive::evs::CameraDesc;
86using ::aidl::android::hardware::automotive::evs::CameraParam;
Changyeon Jodbcf52c2022-05-11 00:01:31 -070087using ::aidl::android::hardware::automotive::evs::DeviceStatus;
Changyeon Jo80189012021-10-10 16:34:21 -070088using ::aidl::android::hardware::automotive::evs::DisplayDesc;
89using ::aidl::android::hardware::automotive::evs::DisplayState;
90using ::aidl::android::hardware::automotive::evs::EvsEventDesc;
91using ::aidl::android::hardware::automotive::evs::EvsEventType;
92using ::aidl::android::hardware::automotive::evs::EvsResult;
93using ::aidl::android::hardware::automotive::evs::IEvsCamera;
94using ::aidl::android::hardware::automotive::evs::IEvsDisplay;
95using ::aidl::android::hardware::automotive::evs::IEvsEnumerator;
Changyeon Jodbcf52c2022-05-11 00:01:31 -070096using ::aidl::android::hardware::automotive::evs::IEvsEnumeratorStatusCallback;
Changyeon Jo80189012021-10-10 16:34:21 -070097using ::aidl::android::hardware::automotive::evs::IEvsUltrasonicsArray;
98using ::aidl::android::hardware::automotive::evs::ParameterRange;
99using ::aidl::android::hardware::automotive::evs::Stream;
100using ::aidl::android::hardware::automotive::evs::UltrasonicsArrayDesc;
101using ::aidl::android::hardware::graphics::common::BufferUsage;
102using ::aidl::android::hardware::graphics::common::HardwareBufferDescription;
103using ::aidl::android::hardware::graphics::common::PixelFormat;
104using std::chrono_literals::operator""s;
105
Changyeon Jodbcf52c2022-05-11 00:01:31 -0700106} // namespace
107
Changyeon Jo80189012021-10-10 16:34:21 -0700108// The main test class for EVS
109class EvsAidlTest : public ::testing::TestWithParam<std::string> {
110 public:
111 virtual void SetUp() override {
112 // Make sure we can connect to the enumerator
113 std::string service_name = GetParam();
114 AIBinder* binder = AServiceManager_waitForService(service_name.data());
115 ASSERT_NE(binder, nullptr);
116 mEnumerator = IEvsEnumerator::fromBinder(::ndk::SpAIBinder(binder));
117 LOG(INFO) << "Test target service: " << service_name;
118
119 ASSERT_TRUE(mEnumerator->isHardware(&mIsHwModule).isOk());
120 }
121
122 virtual void TearDown() override {
123 // Attempt to close any active camera
124 for (auto&& cam : mActiveCameras) {
125 if (cam != nullptr) {
126 mEnumerator->closeCamera(cam);
127 }
128 }
129 mActiveCameras.clear();
130 }
131
132 protected:
133 void loadCameraList() {
134 // SetUp() must run first!
135 ASSERT_NE(mEnumerator, nullptr);
136
137 // Get the camera list
138 ASSERT_TRUE(mEnumerator->getCameraList(&mCameraInfo).isOk())
139 << "Failed to get a list of available cameras";
140 LOG(INFO) << "We have " << mCameraInfo.size() << " cameras.";
141 }
142
143 void loadUltrasonicsArrayList() {
144 // SetUp() must run first!
145 ASSERT_NE(mEnumerator, nullptr);
146
147 // Get the ultrasonics array list
Changyeon Jo9f6f5922022-04-12 19:29:10 -0700148 auto result = mEnumerator->getUltrasonicsArrayList(&mUltrasonicsArraysInfo);
149 ASSERT_TRUE(result.isOk() ||
150 // TODO(b/149874793): Remove below conditions when
151 // getUltrasonicsArrayList() is implemented.
152 (!result.isOk() && result.getServiceSpecificError() ==
153 static_cast<int32_t>(EvsResult::NOT_IMPLEMENTED)))
Changyeon Jo80189012021-10-10 16:34:21 -0700154 << "Failed to get a list of available ultrasonics arrays";
155 LOG(INFO) << "We have " << mCameraInfo.size() << " ultrasonics arrays.";
156 }
157
158 bool isLogicalCamera(const camera_metadata_t* metadata) {
159 if (metadata == nullptr) {
160 // A logical camera device must have a valid camera metadata.
161 return false;
162 }
163
164 // Looking for LOGICAL_MULTI_CAMERA capability from metadata.
165 camera_metadata_ro_entry_t entry;
166 int rc = find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
167 &entry);
168 if (rc != 0) {
169 // No capabilities are found.
170 return false;
171 }
172
173 for (size_t i = 0; i < entry.count; ++i) {
174 uint8_t cap = entry.data.u8[i];
175 if (cap == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA) {
176 return true;
177 }
178 }
179
180 return false;
181 }
182
183 std::unordered_set<std::string> getPhysicalCameraIds(const std::string& id, bool& flag) {
184 std::unordered_set<std::string> physicalCameras;
185 const auto it = std::find_if(mCameraInfo.begin(), mCameraInfo.end(),
186 [&id](const CameraDesc& desc) { return id == desc.id; });
187 if (it == mCameraInfo.end()) {
188 // Unknown camera is requested. Return an empty list.
189 return physicalCameras;
190 }
191
192 const camera_metadata_t* metadata = reinterpret_cast<camera_metadata_t*>(&it->metadata[0]);
193 flag = isLogicalCamera(metadata);
194 if (!flag) {
195 // EVS assumes that the device w/o a valid metadata is a physical
196 // device.
197 LOG(INFO) << id << " is not a logical camera device.";
198 physicalCameras.insert(id);
199 return physicalCameras;
200 }
201
202 // Look for physical camera identifiers
203 camera_metadata_ro_entry entry;
204 int rc = find_camera_metadata_ro_entry(metadata, ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS,
205 &entry);
206 if (rc != 0) {
207 LOG(ERROR) << "No physical camera ID is found for a logical camera device";
208 }
209
210 const uint8_t* ids = entry.data.u8;
211 size_t start = 0;
212 for (size_t i = 0; i < entry.count; ++i) {
213 if (ids[i] == '\0') {
214 if (start != i) {
215 std::string id(reinterpret_cast<const char*>(ids + start));
216 physicalCameras.insert(id);
217 }
218 start = i + 1;
219 }
220 }
221
222 LOG(INFO) << id << " consists of " << physicalCameras.size() << " physical camera devices";
223 return physicalCameras;
224 }
225
226 Stream getFirstStreamConfiguration(camera_metadata_t* metadata) {
227 Stream targetCfg = {};
228 camera_metadata_entry_t streamCfgs;
229 if (!find_camera_metadata_entry(metadata, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
230 &streamCfgs)) {
231 // Stream configurations are found in metadata
232 RawStreamConfig* ptr = reinterpret_cast<RawStreamConfig*>(streamCfgs.data.i32);
233 for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
Changyeon Jo7f5ad612022-08-17 21:47:58 -0700234 if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
Changyeon Jo80189012021-10-10 16:34:21 -0700235 targetCfg.width = ptr->width;
236 targetCfg.height = ptr->height;
237 targetCfg.format = static_cast<PixelFormat>(ptr->format);
238 break;
239 }
240 ++ptr;
241 }
242 }
243
244 return targetCfg;
245 }
246
Changyeon Jodbcf52c2022-05-11 00:01:31 -0700247 class DeviceStatusCallback : public BnEvsEnumeratorStatusCallback {
248 ndk::ScopedAStatus deviceStatusChanged(const std::vector<DeviceStatus>&) override {
249 // This empty implementation returns always ok().
250 return ndk::ScopedAStatus::ok();
251 }
252 };
253
Changyeon Jo80189012021-10-10 16:34:21 -0700254 // Every test needs access to the service
255 std::shared_ptr<IEvsEnumerator> mEnumerator;
256 // Empty unless/util loadCameraList() is called
257 std::vector<CameraDesc> mCameraInfo;
258 // boolean to tell current module under testing is HW module implementation
259 // or not
260 bool mIsHwModule;
261 // A list of active camera handles that are need to be cleaned up
262 std::deque<std::shared_ptr<IEvsCamera>> mActiveCameras;
263 // Empty unless/util loadUltrasonicsArrayList() is called
264 std::vector<UltrasonicsArrayDesc> mUltrasonicsArraysInfo;
265 // A list of active ultrasonics array handles that are to be cleaned up
266 std::deque<std::weak_ptr<IEvsUltrasonicsArray>> mActiveUltrasonicsArrays;
267};
268
269// Test cases, their implementations, and corresponding requirements are
270// documented at go/aae-evs-public-api-test.
271
272/*
273 * CameraOpenClean:
274 * Opens each camera reported by the enumerator and then explicitly closes it via a
275 * call to closeCamera. Then repeats the test to ensure all cameras can be reopened.
276 */
TEST_P(EvsAidlTest, CameraOpenClean) {
    LOG(INFO) << "Starting CameraOpenClean test";

    // Get the camera list
    loadCameraList();

    // Open and close each camera twice
    for (auto&& cam : mCameraInfo) {
        bool isLogicalCam = false;
        auto devices = getPhysicalCameraIds(cam.id, isLogicalCam);
        if (mIsHwModule && isLogicalCam) {
            // Logical camera devices are not exercised against HW module
            // implementations.
            LOG(INFO) << "Skip a logical device, " << cam.id << " for HW target.";
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        // Two passes verify that a cleanly closed camera can be reopened.
        for (int pass = 0; pass < 2; pass++) {
            std::shared_ptr<IEvsCamera> pCam;
            ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
            ASSERT_NE(pCam, nullptr);

            // Each backing physical device must report its own identifier.
            CameraDesc cameraInfo;
            for (auto&& devName : devices) {
                ASSERT_TRUE(pCam->getPhysicalCameraInfo(devName, &cameraInfo).isOk());
                EXPECT_EQ(devName, cameraInfo.id);
            }

            // Store a camera handle for a clean-up
            mActiveCameras.push_back(pCam);

            // Verify that this camera self-identifies correctly
            ASSERT_TRUE(pCam->getCameraInfo(&cameraInfo).isOk());
            EXPECT_EQ(cam.id, cameraInfo.id);

            // Verify methods for extended info; logical cameras are expected
            // to reject both calls with NOT_SUPPORTED.
            const auto id = 0xFFFFFFFF;  // meaningless id
            std::vector<uint8_t> values;
            auto status = pCam->setExtendedInfo(id, values);
            if (isLogicalCam) {
                EXPECT_TRUE(!status.isOk() && status.getServiceSpecificError() ==
                                                      static_cast<int>(EvsResult::NOT_SUPPORTED));
            } else {
                EXPECT_TRUE(status.isOk());
            }

            status = pCam->getExtendedInfo(id, &values);
            if (isLogicalCam) {
                EXPECT_TRUE(!status.isOk() && status.getServiceSpecificError() ==
                                                      static_cast<int>(EvsResult::NOT_SUPPORTED));
            } else {
                EXPECT_TRUE(status.isOk());
            }

            // Explicitly close the camera so resources are released right away
            ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
            mActiveCameras.clear();
        }
    }
}
341
342/*
343 * CameraOpenAggressive:
344 * Opens each camera reported by the enumerator twice in a row without an intervening closeCamera
345 * call. This ensures that the intended "aggressive open" behavior works. This is necessary for
346 * the system to be tolerant of shutdown/restart race conditions.
347 */
TEST_P(EvsAidlTest, CameraOpenAggressive) {
    LOG(INFO) << "Starting CameraOpenAggressive test";

    // Get the camera list
    loadCameraList();

    // Open and close each camera twice
    for (auto&& cam : mCameraInfo) {
        bool isLogicalCam = false;
        getPhysicalCameraIds(cam.id, isLogicalCam);
        if (mIsHwModule && isLogicalCam) {
            // Logical camera devices are not exercised against HW module
            // implementations.
            LOG(INFO) << "Skip a logical device, " << cam.id << " for HW target.";
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        mActiveCameras.clear();
        std::shared_ptr<IEvsCamera> pCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
        EXPECT_NE(pCam, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pCam);

        // Verify that this camera self-identifies correctly
        CameraDesc cameraInfo;
        ASSERT_TRUE(pCam->getCameraInfo(&cameraInfo).isOk());
        EXPECT_EQ(cam.id, cameraInfo.id);

        // Open the same camera again WITHOUT closing the first instance; the
        // second open must succeed and yield a distinct handle.
        std::shared_ptr<IEvsCamera> pCam2;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam2).isOk());
        EXPECT_NE(pCam2, nullptr);
        EXPECT_NE(pCam, pCam2);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pCam2);

        auto status = pCam->setMaxFramesInFlight(2);
        if (mIsHwModule) {
            // Verify that the old camera rejects calls via HW module.
            EXPECT_TRUE(!status.isOk() && status.getServiceSpecificError() ==
                                                  static_cast<int>(EvsResult::OWNERSHIP_LOST));
        } else {
            // default implementation supports multiple clients.
            EXPECT_TRUE(status.isOk());
        }

        // Close the superseded camera
        ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
        mActiveCameras.pop_front();

        // Verify that the second camera instance self-identifies correctly
        ASSERT_TRUE(pCam2->getCameraInfo(&cameraInfo).isOk());
        EXPECT_EQ(cam.id, cameraInfo.id);

        // Close the second camera instance
        ASSERT_TRUE(mEnumerator->closeCamera(pCam2).isOk());
        mActiveCameras.pop_front();
    }

    // Sleep here to ensure the destructor cleanup has time to run so we don't break follow on tests
    sleep(1);  // I hate that this is an arbitrary time to wait. :( b/36122635
}
416
417/*
418 * CameraStreamPerformance:
419 * Measure and qualify the stream start up time and streaming frame rate of each reported camera
420 */
421TEST_P(EvsAidlTest, CameraStreamPerformance) {
422 LOG(INFO) << "Starting CameraStreamPerformance test";
423
424 // Get the camera list
425 loadCameraList();
426
427 // Test each reported camera
428 for (auto&& cam : mCameraInfo) {
429 bool isLogicalCam = false;
430 auto devices = getPhysicalCameraIds(cam.id, isLogicalCam);
431 if (mIsHwModule && isLogicalCam) {
432 LOG(INFO) << "Skip a logical device " << cam.id;
433 continue;
434 }
435
436 // Read a target resolution from the metadata
437 Stream targetCfg = getFirstStreamConfiguration(
438 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
439 ASSERT_GT(targetCfg.width, 0);
440 ASSERT_GT(targetCfg.height, 0);
441
442 std::shared_ptr<IEvsCamera> pCam;
443 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
444 EXPECT_NE(pCam, nullptr);
445
446 // Store a camera handle for a clean-up
447 mActiveCameras.push_back(pCam);
448
449 // Set up a frame receiver object which will fire up its own thread
Frederick Mayle7056b242022-03-29 02:38:12 +0000450 std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
451 pCam, cam, nullptr, FrameHandler::eAutoReturn);
Changyeon Jo80189012021-10-10 16:34:21 -0700452 EXPECT_NE(frameHandler, nullptr);
453
454 // Start the camera's video stream
455 nsecs_t start = systemTime(SYSTEM_TIME_MONOTONIC);
456 ASSERT_TRUE(frameHandler->startStream());
457
458 // Ensure the first frame arrived within the expected time
459 frameHandler->waitForFrameCount(1);
460 nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
461 nsecs_t timeToFirstFrame = systemTime(SYSTEM_TIME_MONOTONIC) - start;
462
463 // Extra delays are expected when we attempt to start a video stream on
464 // the logical camera device. The amount of delay is expected the
465 // number of physical camera devices multiplied by
466 // kMaxStreamStartMilliseconds at most.
467 EXPECT_LE(nanoseconds_to_milliseconds(timeToFirstFrame),
468 kMaxStreamStartMilliseconds * devices.size());
469 printf("%s: Measured time to first frame %0.2f ms\n", cam.id.data(),
470 timeToFirstFrame * kNanoToMilliseconds);
471 LOG(INFO) << cam.id << ": Measured time to first frame " << std::scientific
472 << timeToFirstFrame * kNanoToMilliseconds << " ms.";
473
474 // Check aspect ratio
475 unsigned width = 0, height = 0;
476 frameHandler->getFrameDimension(&width, &height);
477 EXPECT_GE(width, height);
478
479 // Wait a bit, then ensure we get at least the required minimum number of frames
480 sleep(5);
481 nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
482
483 // Even when the camera pointer goes out of scope, the FrameHandler object will
484 // keep the stream alive unless we tell it to shutdown.
485 // Also note that the FrameHandle and the Camera have a mutual circular reference, so
486 // we have to break that cycle in order for either of them to get cleaned up.
487 frameHandler->shutdown();
488
489 unsigned framesReceived = 0;
490 frameHandler->getFramesCounters(&framesReceived, nullptr);
491 framesReceived = framesReceived - 1; // Back out the first frame we already waited for
492 nsecs_t runTime = end - firstFrame;
493 float framesPerSecond = framesReceived / (runTime * kNanoToSeconds);
494 printf("Measured camera rate %3.2f fps\n", framesPerSecond);
495 LOG(INFO) << "Measured camera rate " << std::scientific << framesPerSecond << " fps.";
496 EXPECT_GE(framesPerSecond, kMinimumFramesPerSecond);
497
498 // Explicitly release the camera
499 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
500 mActiveCameras.clear();
501 }
502}
503
504/*
505 * CameraStreamBuffering:
506 * Ensure the camera implementation behaves properly when the client holds onto buffers for more
507 * than one frame time. The camera must cleanly skip frames until the client is ready again.
508 */
TEST_P(EvsAidlTest, CameraStreamBuffering) {
    LOG(INFO) << "Starting CameraStreamBuffering test";

    // Arbitrary constant (should be > 1 and not too big)
    static const unsigned int kBuffersToHold = 6;

    // Get the camera list
    loadCameraList();

    // Test each reported camera
    for (auto&& cam : mCameraInfo) {
        bool isLogicalCam = false;
        getPhysicalCameraIds(cam.id, isLogicalCam);
        if (mIsHwModule && isLogicalCam) {
            LOG(INFO) << "Skip a logical device " << cam.id << " for HW target.";
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        std::shared_ptr<IEvsCamera> pCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
        EXPECT_NE(pCam, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pCam);

        // Ask for a very large number of buffers in flight to ensure it errors correctly
        auto badResult = pCam->setMaxFramesInFlight(std::numeric_limits<int32_t>::max());
        EXPECT_TRUE(!badResult.isOk() && badResult.getServiceSpecificError() ==
                                                 static_cast<int>(EvsResult::BUFFER_NOT_AVAILABLE));

        // Now ask for exactly kBuffersToHold buffers in flight as we'll test behavior in that case
        ASSERT_TRUE(pCam->setMaxFramesInFlight(kBuffersToHold).isOk());

        // Set up a frame receiver object which will fire up its own thread.
        // eNoAutoReturn means held buffers accumulate until returned manually.
        std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
                pCam, cam, nullptr, FrameHandler::eNoAutoReturn);
        EXPECT_NE(frameHandler, nullptr);

        // Start the camera's video stream
        ASSERT_TRUE(frameHandler->startStream());

        // Check that the video stream stalls once we've gotten exactly the number of buffers
        // we requested since we told the frameHandler not to return them.
        sleep(1);  // 1 second should be enough for at least 5 frames to be delivered worst case
        unsigned framesReceived = 0;
        frameHandler->getFramesCounters(&framesReceived, nullptr);
        ASSERT_EQ(kBuffersToHold, framesReceived) << "Stream didn't stall at expected buffer limit";

        // Give back one buffer
        ASSERT_TRUE(frameHandler->returnHeldBuffer());

        // Once we return a buffer, it shouldn't take more than 1/10 second to get a new one
        // filled since we require 10fps minimum -- but give a 10% allowance just in case.
        usleep(110 * kMillisecondsToMicroseconds);
        frameHandler->getFramesCounters(&framesReceived, nullptr);
        EXPECT_EQ(kBuffersToHold + 1, framesReceived) << "Stream should've resumed";

        // Even when the camera pointer goes out of scope, the FrameHandler object will
        // keep the stream alive unless we tell it to shutdown.
        // Also note that the FrameHandle and the Camera have a mutual circular reference, so
        // we have to break that cycle in order for either of them to get cleaned up.
        frameHandler->shutdown();

        // Explicitly release the camera
        ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
        mActiveCameras.clear();
    }
}
583
584/*
585 * CameraToDisplayRoundTrip:
586 * End to end test of data flowing from the camera to the display. Each delivered frame of camera
587 * imagery is simply copied to the display buffer and presented on screen. This is the one test
588 * which a human could observe to see the operation of the system on the physical display.
589 */
TEST_P(EvsAidlTest, CameraToDisplayRoundTrip) {
    LOG(INFO) << "Starting CameraToDisplayRoundTrip test";

    // Get the camera list
    loadCameraList();

    // Request available display IDs
    uint8_t targetDisplayId = 0;
    std::vector<uint8_t> displayIds;
    ASSERT_TRUE(mEnumerator->getDisplayIdList(&displayIds).isOk());
    EXPECT_GT(displayIds.size(), 0);
    targetDisplayId = displayIds[0];

    // Test each reported camera
    for (auto&& cam : mCameraInfo) {
        // Request exclusive access to the first EVS display; the display is
        // re-opened for every camera and closed before the next iteration.
        std::shared_ptr<IEvsDisplay> pDisplay;
        ASSERT_TRUE(mEnumerator->openDisplay(targetDisplayId, &pDisplay).isOk());
        EXPECT_NE(pDisplay, nullptr);
        LOG(INFO) << "Display " << static_cast<int>(targetDisplayId) << " is in use.";

        // Get the display descriptor
        DisplayDesc displayDesc;
        ASSERT_TRUE(pDisplay->getDisplayInfo(&displayDesc).isOk());
        LOG(INFO) << "    Resolution: " << displayDesc.width << "x" << displayDesc.height;
        ASSERT_GT(displayDesc.width, 0);
        ASSERT_GT(displayDesc.height, 0);

        bool isLogicalCam = false;
        getPhysicalCameraIds(cam.id, isLogicalCam);
        if (mIsHwModule && isLogicalCam) {
            LOG(INFO) << "Skip a logical device " << cam.id << " for HW target.";
            // Release the display before skipping this camera.
            ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        std::shared_ptr<IEvsCamera> pCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
        EXPECT_NE(pCam, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pCam);

        // Set up a frame receiver object which will fire up its own thread.
        // Passing the display handle makes it render each received frame.
        std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
                pCam, cam, pDisplay, FrameHandler::eAutoReturn);
        EXPECT_NE(frameHandler, nullptr);

        // Activate the display
        ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME).isOk());

        // Start the camera's video stream
        ASSERT_TRUE(frameHandler->startStream());

        // Wait a while to let the data flow
        static const int kSecondsToWait = 5;
        const int streamTimeMs =
                kSecondsToWait * kSecondsToMilliseconds - kMaxStreamStartMilliseconds;
        const unsigned minimumFramesExpected =
                streamTimeMs * kMinimumFramesPerSecond / kSecondsToMilliseconds;
        sleep(kSecondsToWait);
        unsigned framesReceived = 0;
        unsigned framesDisplayed = 0;
        frameHandler->getFramesCounters(&framesReceived, &framesDisplayed);
        EXPECT_EQ(framesReceived, framesDisplayed);
        EXPECT_GE(framesDisplayed, minimumFramesExpected);

        // Turn off the display (yes, before the stream stops -- it should be handled)
        ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::NOT_VISIBLE).isOk());

        // Shut down the streamer
        frameHandler->shutdown();

        // Explicitly release the camera
        ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
        mActiveCameras.clear();

        // Explicitly release the display
        ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
    }
}
677
678/*
679 * MultiCameraStream:
680 * Verify that each client can start and stop video streams on the same
681 * underlying camera.
682 */
TEST_P(EvsAidlTest, MultiCameraStream) {
    LOG(INFO) << "Starting MultiCameraStream test";

    if (mIsHwModule) {
        // This test is not for HW module implementation.
        return;
    }

    // Get the camera list
    loadCameraList();

    // Test each reported camera
    for (auto&& cam : mCameraInfo) {
        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        // Create two camera clients.
        std::shared_ptr<IEvsCamera> pCam0;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam0).isOk());
        EXPECT_NE(pCam0, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pCam0);

        std::shared_ptr<IEvsCamera> pCam1;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());
        EXPECT_NE(pCam1, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pCam1);

        // Set up per-client frame receiver objects which will fire up its own thread
        std::shared_ptr<FrameHandler> frameHandler0 = ndk::SharedRefBase::make<FrameHandler>(
                pCam0, cam, nullptr, FrameHandler::eAutoReturn);
        std::shared_ptr<FrameHandler> frameHandler1 = ndk::SharedRefBase::make<FrameHandler>(
                pCam1, cam, nullptr, FrameHandler::eAutoReturn);
        EXPECT_NE(frameHandler0, nullptr);
        EXPECT_NE(frameHandler1, nullptr);

        // Start the camera's video stream via client 0
        ASSERT_TRUE(frameHandler0->startStream());
        ASSERT_TRUE(frameHandler1->startStream());

        // Ensure the stream starts
        frameHandler0->waitForFrameCount(1);
        frameHandler1->waitForFrameCount(1);

        nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);

        // Wait a bit, then ensure both clients get at least the required minimum number of frames
        sleep(5);
        nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
        unsigned framesReceived0 = 0, framesReceived1 = 0;
        frameHandler0->getFramesCounters(&framesReceived0, nullptr);
        frameHandler1->getFramesCounters(&framesReceived1, nullptr);
        framesReceived0 = framesReceived0 - 1;  // Back out the first frame we already waited for
        framesReceived1 = framesReceived1 - 1;  // Back out the first frame we already waited for
        nsecs_t runTime = end - firstFrame;
        float framesPerSecond0 = framesReceived0 / (runTime * kNanoToSeconds);
        float framesPerSecond1 = framesReceived1 / (runTime * kNanoToSeconds);
        LOG(INFO) << "Measured camera rate " << std::scientific << framesPerSecond0 << " fps and "
                  << framesPerSecond1 << " fps";
        EXPECT_GE(framesPerSecond0, kMinimumFramesPerSecond);
        EXPECT_GE(framesPerSecond1, kMinimumFramesPerSecond);

        // Shutdown one client
        frameHandler0->shutdown();

        // Read frame counters again
        frameHandler0->getFramesCounters(&framesReceived0, nullptr);
        frameHandler1->getFramesCounters(&framesReceived1, nullptr);

        // Wait a bit again; the stopped client's counter must not advance
        // while the still-running client keeps receiving frames.
        sleep(5);
        unsigned framesReceivedAfterStop0 = 0, framesReceivedAfterStop1 = 0;
        frameHandler0->getFramesCounters(&framesReceivedAfterStop0, nullptr);
        frameHandler1->getFramesCounters(&framesReceivedAfterStop1, nullptr);
        EXPECT_EQ(framesReceived0, framesReceivedAfterStop0);
        EXPECT_LT(framesReceived1, framesReceivedAfterStop1);

        // Shutdown another
        frameHandler1->shutdown();

        // Explicitly release the camera
        ASSERT_TRUE(mEnumerator->closeCamera(pCam0).isOk());
        ASSERT_TRUE(mEnumerator->closeCamera(pCam1).isOk());
        mActiveCameras.clear();

        // TODO(b/145459970, b/145457727): below sleep() is added to ensure the
        // destruction of active camera objects; this may be related with two
        // issues.
        sleep(1);
    }
}
780
781/*
782 * CameraParameter:
783 * Verify that a client can adjust a camera parameter.
784 */
785TEST_P(EvsAidlTest, CameraParameter) {
786 LOG(INFO) << "Starting CameraParameter test";
787
788 // Get the camera list
789 loadCameraList();
790
791 // Test each reported camera
792 for (auto&& cam : mCameraInfo) {
793 bool isLogicalCam = false;
794 getPhysicalCameraIds(cam.id, isLogicalCam);
795 if (isLogicalCam) {
796 // TODO(b/145465724): Support camera parameter programming on
797 // logical devices.
798 LOG(INFO) << "Skip a logical device " << cam.id;
799 continue;
800 }
801
802 // Read a target resolution from the metadata
803 Stream targetCfg = getFirstStreamConfiguration(
804 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
805 ASSERT_GT(targetCfg.width, 0);
806 ASSERT_GT(targetCfg.height, 0);
807
808 // Create a camera client
809 std::shared_ptr<IEvsCamera> pCam;
810 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
811 EXPECT_NE(pCam, nullptr);
812
813 // Store a camera
814 mActiveCameras.push_back(pCam);
815
816 // Get the parameter list
817 std::vector<CameraParam> cmds;
818 ASSERT_TRUE(pCam->getParameterList(&cmds).isOk());
819 if (cmds.size() < 1) {
820 continue;
821 }
822
823 // Set up per-client frame receiver objects which will fire up its own thread
Frederick Mayle7056b242022-03-29 02:38:12 +0000824 std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
825 pCam, cam, nullptr, FrameHandler::eAutoReturn);
Changyeon Jo80189012021-10-10 16:34:21 -0700826 EXPECT_NE(frameHandler, nullptr);
827
828 // Start the camera's video stream
829 ASSERT_TRUE(frameHandler->startStream());
830
831 // Ensure the stream starts
832 frameHandler->waitForFrameCount(1);
833
834 // Set current client is the primary client
835 ASSERT_TRUE(pCam->setPrimaryClient().isOk());
836 for (auto& cmd : cmds) {
837 // Get a valid parameter value range
838 ParameterRange range;
839 ASSERT_TRUE(pCam->getIntParameterRange(cmd, &range).isOk());
840
841 std::vector<int32_t> values;
842 if (cmd == CameraParam::ABSOLUTE_FOCUS) {
843 // Try to turn off auto-focus
844 ASSERT_TRUE(pCam->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
845 for (auto&& v : values) {
846 EXPECT_EQ(v, 0);
847 }
848 }
849
850 // Try to program a parameter with a random value [minVal, maxVal]
851 int32_t val0 = range.min + (std::rand() % (range.max - range.min));
852
853 // Rounding down
854 val0 = val0 - (val0 % range.step);
855 values.clear();
856 ASSERT_TRUE(pCam->setIntParameter(cmd, val0, &values).isOk());
857
858 values.clear();
859 ASSERT_TRUE(pCam->getIntParameter(cmd, &values).isOk());
860 for (auto&& v : values) {
861 EXPECT_EQ(val0, v) << "Values are not matched.";
862 }
863 }
864 ASSERT_TRUE(pCam->unsetPrimaryClient().isOk());
865
866 // Shutdown
867 frameHandler->shutdown();
868
869 // Explicitly release the camera
870 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
871 mActiveCameras.clear();
872 }
873}
874
875/*
876 * CameraPrimaryClientRelease
877 * Verify that non-primary client gets notified when the primary client either
878 * terminates or releases a role.
879 */
880TEST_P(EvsAidlTest, CameraPrimaryClientRelease) {
881 LOG(INFO) << "Starting CameraPrimaryClientRelease test";
882
883 if (mIsHwModule) {
884 // This test is not for HW module implementation.
885 return;
886 }
887
888 // Get the camera list
889 loadCameraList();
890
891 // Test each reported camera
892 for (auto&& cam : mCameraInfo) {
893 bool isLogicalCam = false;
894 getPhysicalCameraIds(cam.id, isLogicalCam);
895 if (isLogicalCam) {
896 // TODO(b/145465724): Support camera parameter programming on
897 // logical devices.
898 LOG(INFO) << "Skip a logical device " << cam.id;
899 continue;
900 }
901
902 // Read a target resolution from the metadata
903 Stream targetCfg = getFirstStreamConfiguration(
904 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
905 ASSERT_GT(targetCfg.width, 0);
906 ASSERT_GT(targetCfg.height, 0);
907
908 // Create two camera clients.
909 std::shared_ptr<IEvsCamera> pPrimaryCam;
910 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pPrimaryCam).isOk());
911 EXPECT_NE(pPrimaryCam, nullptr);
912
913 // Store a camera handle for a clean-up
914 mActiveCameras.push_back(pPrimaryCam);
915
916 std::shared_ptr<IEvsCamera> pSecondaryCam;
917 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pSecondaryCam).isOk());
918 EXPECT_NE(pSecondaryCam, nullptr);
919
920 // Store a camera handle for a clean-up
921 mActiveCameras.push_back(pSecondaryCam);
922
923 // Set up per-client frame receiver objects which will fire up its own thread
Frederick Mayle7056b242022-03-29 02:38:12 +0000924 std::shared_ptr<FrameHandler> frameHandlerPrimary = ndk::SharedRefBase::make<FrameHandler>(
Changyeon Jo80189012021-10-10 16:34:21 -0700925 pPrimaryCam, cam, nullptr, FrameHandler::eAutoReturn);
Frederick Mayle7056b242022-03-29 02:38:12 +0000926 std::shared_ptr<FrameHandler> frameHandlerSecondary =
927 ndk::SharedRefBase::make<FrameHandler>(pSecondaryCam, cam, nullptr,
928 FrameHandler::eAutoReturn);
Changyeon Jo80189012021-10-10 16:34:21 -0700929 EXPECT_NE(frameHandlerPrimary, nullptr);
930 EXPECT_NE(frameHandlerSecondary, nullptr);
931
932 // Set one client as the primary client
933 ASSERT_TRUE(pPrimaryCam->setPrimaryClient().isOk());
934
935 // Try to set another client as the primary client.
936 ASSERT_FALSE(pSecondaryCam->setPrimaryClient().isOk());
937
938 // Start the camera's video stream via a primary client client.
939 ASSERT_TRUE(frameHandlerPrimary->startStream());
940
941 // Ensure the stream starts
942 frameHandlerPrimary->waitForFrameCount(1);
943
944 // Start the camera's video stream via another client
945 ASSERT_TRUE(frameHandlerSecondary->startStream());
946
947 // Ensure the stream starts
948 frameHandlerSecondary->waitForFrameCount(1);
949
950 // Non-primary client expects to receive a primary client role relesed
951 // notification.
952 EvsEventDesc aTargetEvent = {};
953 EvsEventDesc aNotification = {};
954
955 bool listening = false;
956 std::mutex eventLock;
957 std::condition_variable eventCond;
958 std::thread listener =
959 std::thread([&aNotification, &frameHandlerSecondary, &listening, &eventCond]() {
960 // Notify that a listening thread is running.
961 listening = true;
962 eventCond.notify_all();
963
964 EvsEventDesc aTargetEvent;
965 aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
966 if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification, true)) {
967 LOG(WARNING) << "A timer is expired before a target event is fired.";
968 }
969 });
970
971 // Wait until a listening thread starts.
972 std::unique_lock<std::mutex> lock(eventLock);
973 auto timer = std::chrono::system_clock::now();
974 while (!listening) {
975 timer += 1s;
976 eventCond.wait_until(lock, timer);
977 }
978 lock.unlock();
979
980 // Release a primary client role.
981 ASSERT_TRUE(pPrimaryCam->unsetPrimaryClient().isOk());
982
983 // Join a listening thread.
984 if (listener.joinable()) {
985 listener.join();
986 }
987
988 // Verify change notifications.
989 ASSERT_EQ(EvsEventType::MASTER_RELEASED, static_cast<EvsEventType>(aNotification.aType));
990
991 // Non-primary becomes a primary client.
992 ASSERT_TRUE(pSecondaryCam->setPrimaryClient().isOk());
993
994 // Previous primary client fails to become a primary client.
995 ASSERT_FALSE(pPrimaryCam->setPrimaryClient().isOk());
996
997 listening = false;
998 listener = std::thread([&aNotification, &frameHandlerPrimary, &listening, &eventCond]() {
999 // Notify that a listening thread is running.
1000 listening = true;
1001 eventCond.notify_all();
1002
1003 EvsEventDesc aTargetEvent;
1004 aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
1005 if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification, true)) {
1006 LOG(WARNING) << "A timer is expired before a target event is fired.";
1007 }
1008 });
1009
1010 // Wait until a listening thread starts.
1011 timer = std::chrono::system_clock::now();
1012 lock.lock();
1013 while (!listening) {
1014 eventCond.wait_until(lock, timer + 1s);
1015 }
1016 lock.unlock();
1017
1018 // Closing current primary client.
1019 frameHandlerSecondary->shutdown();
1020
1021 // Join a listening thread.
1022 if (listener.joinable()) {
1023 listener.join();
1024 }
1025
1026 // Verify change notifications.
1027 ASSERT_EQ(EvsEventType::MASTER_RELEASED, static_cast<EvsEventType>(aNotification.aType));
1028
1029 // Closing streams.
1030 frameHandlerPrimary->shutdown();
1031
1032 // Explicitly release the camera
1033 ASSERT_TRUE(mEnumerator->closeCamera(pPrimaryCam).isOk());
1034 ASSERT_TRUE(mEnumerator->closeCamera(pSecondaryCam).isOk());
1035 mActiveCameras.clear();
1036 }
1037}
1038
1039/*
1040 * MultiCameraParameter:
1041 * Verify that primary and non-primary clients behave as expected when they try to adjust
1042 * camera parameters.
1043 */
1044TEST_P(EvsAidlTest, MultiCameraParameter) {
1045 LOG(INFO) << "Starting MultiCameraParameter test";
1046
1047 if (mIsHwModule) {
1048 // This test is not for HW module implementation.
1049 return;
1050 }
1051
1052 // Get the camera list
1053 loadCameraList();
1054
1055 // Test each reported camera
1056 for (auto&& cam : mCameraInfo) {
1057 bool isLogicalCam = false;
1058 getPhysicalCameraIds(cam.id, isLogicalCam);
1059 if (isLogicalCam) {
1060 // TODO(b/145465724): Support camera parameter programming on
1061 // logical devices.
1062 LOG(INFO) << "Skip a logical device " << cam.id;
1063 continue;
1064 }
1065
1066 // Read a target resolution from the metadata
1067 Stream targetCfg = getFirstStreamConfiguration(
1068 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
1069 ASSERT_GT(targetCfg.width, 0);
1070 ASSERT_GT(targetCfg.height, 0);
1071
1072 // Create two camera clients.
1073 std::shared_ptr<IEvsCamera> pPrimaryCam;
1074 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pPrimaryCam).isOk());
1075 EXPECT_NE(pPrimaryCam, nullptr);
1076
1077 // Store a camera handle for a clean-up
1078 mActiveCameras.push_back(pPrimaryCam);
1079
1080 std::shared_ptr<IEvsCamera> pSecondaryCam;
1081 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pSecondaryCam).isOk());
1082 EXPECT_NE(pSecondaryCam, nullptr);
1083
1084 // Store a camera handle for a clean-up
1085 mActiveCameras.push_back(pSecondaryCam);
1086
1087 // Get the parameter list
1088 std::vector<CameraParam> camPrimaryCmds, camSecondaryCmds;
1089 ASSERT_TRUE(pPrimaryCam->getParameterList(&camPrimaryCmds).isOk());
1090 ASSERT_TRUE(pSecondaryCam->getParameterList(&camSecondaryCmds).isOk());
1091 if (camPrimaryCmds.size() < 1 || camSecondaryCmds.size() < 1) {
1092 // Skip a camera device if it does not support any parameter.
1093 continue;
1094 }
1095
1096 // Set up per-client frame receiver objects which will fire up its own thread
Frederick Mayle7056b242022-03-29 02:38:12 +00001097 std::shared_ptr<FrameHandler> frameHandlerPrimary = ndk::SharedRefBase::make<FrameHandler>(
Changyeon Jo80189012021-10-10 16:34:21 -07001098 pPrimaryCam, cam, nullptr, FrameHandler::eAutoReturn);
Frederick Mayle7056b242022-03-29 02:38:12 +00001099 std::shared_ptr<FrameHandler> frameHandlerSecondary =
1100 ndk::SharedRefBase::make<FrameHandler>(pSecondaryCam, cam, nullptr,
1101 FrameHandler::eAutoReturn);
Changyeon Jo80189012021-10-10 16:34:21 -07001102 EXPECT_NE(frameHandlerPrimary, nullptr);
1103 EXPECT_NE(frameHandlerSecondary, nullptr);
1104
1105 // Set one client as the primary client.
1106 ASSERT_TRUE(pPrimaryCam->setPrimaryClient().isOk());
1107
1108 // Try to set another client as the primary client.
1109 ASSERT_FALSE(pSecondaryCam->setPrimaryClient().isOk());
1110
1111 // Start the camera's video stream via a primary client client.
1112 ASSERT_TRUE(frameHandlerPrimary->startStream());
1113
1114 // Ensure the stream starts
1115 frameHandlerPrimary->waitForFrameCount(1);
1116
1117 // Start the camera's video stream via another client
1118 ASSERT_TRUE(frameHandlerSecondary->startStream());
1119
1120 // Ensure the stream starts
1121 frameHandlerSecondary->waitForFrameCount(1);
1122
1123 int32_t val0 = 0;
1124 std::vector<int32_t> values;
1125 EvsEventDesc aNotification0 = {};
1126 EvsEventDesc aNotification1 = {};
1127 for (auto& cmd : camPrimaryCmds) {
1128 // Get a valid parameter value range
1129 ParameterRange range;
1130 ASSERT_TRUE(pPrimaryCam->getIntParameterRange(cmd, &range).isOk());
1131 if (cmd == CameraParam::ABSOLUTE_FOCUS) {
1132 // Try to turn off auto-focus
1133 values.clear();
1134 ASSERT_TRUE(
1135 pPrimaryCam->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
1136 for (auto&& v : values) {
1137 EXPECT_EQ(v, 0);
1138 }
1139 }
1140
1141 // Calculate a parameter value to program.
1142 val0 = range.min + (std::rand() % (range.max - range.min));
1143 val0 = val0 - (val0 % range.step);
1144
1145 // Prepare and start event listeners.
1146 bool listening0 = false;
1147 bool listening1 = false;
1148 std::condition_variable eventCond;
1149 std::thread listener0 = std::thread([cmd, val0, &aNotification0, &frameHandlerPrimary,
1150 &listening0, &listening1, &eventCond]() {
1151 listening0 = true;
1152 if (listening1) {
1153 eventCond.notify_all();
1154 }
1155
1156 EvsEventDesc aTargetEvent;
1157 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
Changyeon Jo9f6f5922022-04-12 19:29:10 -07001158 aTargetEvent.payload.push_back(static_cast<int32_t>(cmd));
1159 aTargetEvent.payload.push_back(val0);
Changyeon Jo80189012021-10-10 16:34:21 -07001160 if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification0)) {
1161 LOG(WARNING) << "A timer is expired before a target event is fired.";
1162 }
1163 });
1164 std::thread listener1 = std::thread([cmd, val0, &aNotification1, &frameHandlerSecondary,
1165 &listening0, &listening1, &eventCond]() {
1166 listening1 = true;
1167 if (listening0) {
1168 eventCond.notify_all();
1169 }
1170
1171 EvsEventDesc aTargetEvent;
1172 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
Changyeon Jo9f6f5922022-04-12 19:29:10 -07001173 aTargetEvent.payload.push_back(static_cast<int32_t>(cmd));
1174 aTargetEvent.payload.push_back(val0);
Changyeon Jo80189012021-10-10 16:34:21 -07001175 if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification1)) {
1176 LOG(WARNING) << "A timer is expired before a target event is fired.";
1177 }
1178 });
1179
1180 // Wait until a listening thread starts.
1181 std::mutex eventLock;
1182 std::unique_lock<std::mutex> lock(eventLock);
1183 auto timer = std::chrono::system_clock::now();
1184 while (!listening0 || !listening1) {
1185 eventCond.wait_until(lock, timer + 1s);
1186 }
1187 lock.unlock();
1188
1189 // Try to program a parameter
1190 values.clear();
1191 ASSERT_TRUE(pPrimaryCam->setIntParameter(cmd, val0, &values).isOk());
1192 for (auto&& v : values) {
1193 EXPECT_EQ(val0, v) << "Values are not matched.";
1194 }
1195
1196 // Join a listening thread.
1197 if (listener0.joinable()) {
1198 listener0.join();
1199 }
1200 if (listener1.joinable()) {
1201 listener1.join();
1202 }
1203
1204 // Verify a change notification
1205 ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
1206 static_cast<EvsEventType>(aNotification0.aType));
1207 ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
1208 static_cast<EvsEventType>(aNotification1.aType));
Changyeon Jo9f6f5922022-04-12 19:29:10 -07001209 ASSERT_GE(aNotification0.payload.size(), 2);
1210 ASSERT_GE(aNotification1.payload.size(), 2);
Changyeon Jo80189012021-10-10 16:34:21 -07001211 ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification0.payload[0]));
1212 ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification1.payload[0]));
1213 for (auto&& v : values) {
Changyeon Jo9f6f5922022-04-12 19:29:10 -07001214 ASSERT_EQ(v, aNotification0.payload[1]);
1215 ASSERT_EQ(v, aNotification1.payload[1]);
Changyeon Jo80189012021-10-10 16:34:21 -07001216 }
1217
1218 // Clients expects to receive a parameter change notification
1219 // whenever a primary client client adjusts it.
1220 values.clear();
1221 ASSERT_TRUE(pPrimaryCam->getIntParameter(cmd, &values).isOk());
1222 for (auto&& v : values) {
1223 EXPECT_EQ(val0, v) << "Values are not matched.";
1224 }
1225 }
1226
1227 // Try to adjust a parameter via non-primary client
1228 values.clear();
1229 ASSERT_FALSE(pSecondaryCam->setIntParameter(camSecondaryCmds[0], val0, &values).isOk());
1230
1231 // Non-primary client attempts to be a primary client
1232 ASSERT_FALSE(pSecondaryCam->setPrimaryClient().isOk());
1233
1234 // Primary client retires from a primary client role
1235 bool listening = false;
1236 std::condition_variable eventCond;
1237 std::thread listener =
1238 std::thread([&aNotification0, &frameHandlerSecondary, &listening, &eventCond]() {
1239 listening = true;
1240 eventCond.notify_all();
1241
1242 EvsEventDesc aTargetEvent;
1243 aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
1244 if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification0, true)) {
1245 LOG(WARNING) << "A timer is expired before a target event is fired.";
1246 }
1247 });
1248
1249 std::mutex eventLock;
1250 auto timer = std::chrono::system_clock::now();
1251 std::unique_lock<std::mutex> lock(eventLock);
1252 while (!listening) {
1253 eventCond.wait_until(lock, timer + 1s);
1254 }
1255 lock.unlock();
1256
1257 ASSERT_TRUE(pPrimaryCam->unsetPrimaryClient().isOk());
1258
1259 if (listener.joinable()) {
1260 listener.join();
1261 }
1262 ASSERT_EQ(EvsEventType::MASTER_RELEASED, static_cast<EvsEventType>(aNotification0.aType));
1263
1264 // Try to adjust a parameter after being retired
1265 values.clear();
1266 ASSERT_FALSE(pPrimaryCam->setIntParameter(camPrimaryCmds[0], val0, &values).isOk());
1267
1268 // Non-primary client becomes a primary client
1269 ASSERT_TRUE(pSecondaryCam->setPrimaryClient().isOk());
1270
1271 // Try to adjust a parameter via new primary client
1272 for (auto& cmd : camSecondaryCmds) {
1273 // Get a valid parameter value range
1274 ParameterRange range;
1275 ASSERT_TRUE(pSecondaryCam->getIntParameterRange(cmd, &range).isOk());
1276
1277 values.clear();
1278 if (cmd == CameraParam::ABSOLUTE_FOCUS) {
1279 // Try to turn off auto-focus
1280 values.clear();
1281 ASSERT_TRUE(
1282 pSecondaryCam->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
1283 for (auto&& v : values) {
1284 EXPECT_EQ(v, 0);
1285 }
1286 }
1287
1288 // Calculate a parameter value to program. This is being rounding down.
1289 val0 = range.min + (std::rand() % (range.max - range.min));
1290 val0 = val0 - (val0 % range.step);
1291
1292 // Prepare and start event listeners.
1293 bool listening0 = false;
1294 bool listening1 = false;
1295 std::condition_variable eventCond;
1296 std::thread listener0 = std::thread([&]() {
1297 listening0 = true;
1298 if (listening1) {
1299 eventCond.notify_all();
1300 }
1301
1302 EvsEventDesc aTargetEvent;
1303 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
Changyeon Jo9f6f5922022-04-12 19:29:10 -07001304 aTargetEvent.payload.push_back(static_cast<int32_t>(cmd));
1305 aTargetEvent.payload.push_back(val0);
Changyeon Jo80189012021-10-10 16:34:21 -07001306 if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification0)) {
1307 LOG(WARNING) << "A timer is expired before a target event is fired.";
1308 }
1309 });
1310 std::thread listener1 = std::thread([&]() {
1311 listening1 = true;
1312 if (listening0) {
1313 eventCond.notify_all();
1314 }
1315
1316 EvsEventDesc aTargetEvent;
1317 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
Changyeon Jo9f6f5922022-04-12 19:29:10 -07001318 aTargetEvent.payload.push_back(static_cast<int32_t>(cmd));
1319 aTargetEvent.payload.push_back(val0);
Changyeon Jo80189012021-10-10 16:34:21 -07001320 if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification1)) {
1321 LOG(WARNING) << "A timer is expired before a target event is fired.";
1322 }
1323 });
1324
1325 // Wait until a listening thread starts.
1326 std::mutex eventLock;
1327 std::unique_lock<std::mutex> lock(eventLock);
1328 auto timer = std::chrono::system_clock::now();
1329 while (!listening0 || !listening1) {
1330 eventCond.wait_until(lock, timer + 1s);
1331 }
1332 lock.unlock();
1333
1334 // Try to program a parameter
1335 values.clear();
1336 ASSERT_TRUE(pSecondaryCam->setIntParameter(cmd, val0, &values).isOk());
1337
1338 // Clients expects to receive a parameter change notification
1339 // whenever a primary client client adjusts it.
1340 values.clear();
1341 ASSERT_TRUE(pSecondaryCam->getIntParameter(cmd, &values).isOk());
1342 for (auto&& v : values) {
1343 EXPECT_EQ(val0, v) << "Values are not matched.";
1344 }
1345
1346 // Join a listening thread.
1347 if (listener0.joinable()) {
1348 listener0.join();
1349 }
1350 if (listener1.joinable()) {
1351 listener1.join();
1352 }
1353
1354 // Verify a change notification
1355 ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
1356 static_cast<EvsEventType>(aNotification0.aType));
1357 ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
1358 static_cast<EvsEventType>(aNotification1.aType));
Changyeon Jo9f6f5922022-04-12 19:29:10 -07001359 ASSERT_GE(aNotification0.payload.size(), 2);
1360 ASSERT_GE(aNotification1.payload.size(), 2);
Changyeon Jo80189012021-10-10 16:34:21 -07001361 ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification0.payload[0]));
1362 ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification1.payload[0]));
1363 for (auto&& v : values) {
Changyeon Jo9f6f5922022-04-12 19:29:10 -07001364 ASSERT_EQ(v, aNotification0.payload[1]);
1365 ASSERT_EQ(v, aNotification1.payload[1]);
Changyeon Jo80189012021-10-10 16:34:21 -07001366 }
1367 }
1368
1369 // New primary client retires from the role
1370 ASSERT_TRUE(pSecondaryCam->unsetPrimaryClient().isOk());
1371
1372 // Shutdown
1373 frameHandlerPrimary->shutdown();
1374 frameHandlerSecondary->shutdown();
1375
1376 // Explicitly release the camera
1377 ASSERT_TRUE(mEnumerator->closeCamera(pPrimaryCam).isOk());
1378 ASSERT_TRUE(mEnumerator->closeCamera(pSecondaryCam).isOk());
1379 mActiveCameras.clear();
1380 }
1381}
1382
1383/*
1384 * HighPriorityCameraClient:
 * EVS client, which owns the display, is prioritized and therefore can take over
1386 * a primary client role from other EVS clients without the display.
1387 */
1388TEST_P(EvsAidlTest, HighPriorityCameraClient) {
1389 LOG(INFO) << "Starting HighPriorityCameraClient test";
1390
1391 if (mIsHwModule) {
1392 // This test is not for HW module implementation.
1393 return;
1394 }
1395
1396 // Get the camera list
1397 loadCameraList();
1398
Changyeon Jo80189012021-10-10 16:34:21 -07001399 // Test each reported camera
1400 for (auto&& cam : mCameraInfo) {
Changyeon Jo017cb982022-11-16 22:04:38 +00001401 // Request available display IDs
1402 uint8_t targetDisplayId = 0;
1403 std::vector<uint8_t> displayIds;
1404 ASSERT_TRUE(mEnumerator->getDisplayIdList(&displayIds).isOk());
1405 EXPECT_GT(displayIds.size(), 0);
1406 targetDisplayId = displayIds[0];
1407
1408 // Request exclusive access to the EVS display
1409 std::shared_ptr<IEvsDisplay> pDisplay;
1410 ASSERT_TRUE(mEnumerator->openDisplay(targetDisplayId, &pDisplay).isOk());
1411 EXPECT_NE(pDisplay, nullptr);
1412
Changyeon Jo80189012021-10-10 16:34:21 -07001413 // Read a target resolution from the metadata
1414 Stream targetCfg = getFirstStreamConfiguration(
1415 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
1416 ASSERT_GT(targetCfg.width, 0);
1417 ASSERT_GT(targetCfg.height, 0);
1418
1419 // Create two clients
1420 std::shared_ptr<IEvsCamera> pCam0;
1421 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam0).isOk());
1422 EXPECT_NE(pCam0, nullptr);
1423
1424 // Store a camera handle for a clean-up
1425 mActiveCameras.push_back(pCam0);
1426
1427 std::shared_ptr<IEvsCamera> pCam1;
1428 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());
1429 EXPECT_NE(pCam1, nullptr);
1430
1431 // Store a camera handle for a clean-up
1432 mActiveCameras.push_back(pCam1);
1433
1434 // Get the parameter list; this test will use the first command in both
1435 // lists.
1436 std::vector<CameraParam> cam0Cmds, cam1Cmds;
1437 ASSERT_TRUE(pCam0->getParameterList(&cam0Cmds).isOk());
1438 ASSERT_TRUE(pCam1->getParameterList(&cam1Cmds).isOk());
1439 if (cam0Cmds.size() < 1 || cam1Cmds.size() < 1) {
1440 // Cannot execute this test.
Changyeon Jo7793baa2023-01-19 13:18:47 -08001441 ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
1442 continue;
Changyeon Jo80189012021-10-10 16:34:21 -07001443 }
1444
1445 // Set up a frame receiver object which will fire up its own thread.
Frederick Mayle7056b242022-03-29 02:38:12 +00001446 std::shared_ptr<FrameHandler> frameHandler0 = ndk::SharedRefBase::make<FrameHandler>(
1447 pCam0, cam, nullptr, FrameHandler::eAutoReturn);
1448 std::shared_ptr<FrameHandler> frameHandler1 = ndk::SharedRefBase::make<FrameHandler>(
1449 pCam1, cam, nullptr, FrameHandler::eAutoReturn);
Changyeon Jo80189012021-10-10 16:34:21 -07001450 EXPECT_NE(frameHandler0, nullptr);
1451 EXPECT_NE(frameHandler1, nullptr);
1452
1453 // Activate the display
1454 ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME).isOk());
1455
1456 // Start the camera's video stream
1457 ASSERT_TRUE(frameHandler0->startStream());
1458 ASSERT_TRUE(frameHandler1->startStream());
1459
1460 // Ensure the stream starts
1461 frameHandler0->waitForFrameCount(1);
1462 frameHandler1->waitForFrameCount(1);
1463
1464 // Client 1 becomes a primary client and programs a parameter.
1465
1466 // Get a valid parameter value range
1467 ParameterRange range;
1468 ASSERT_TRUE(pCam1->getIntParameterRange(cam1Cmds[0], &range).isOk());
1469
1470 // Client1 becomes a primary client
1471 ASSERT_TRUE(pCam1->setPrimaryClient().isOk());
1472
1473 std::vector<int32_t> values;
1474 EvsEventDesc aTargetEvent = {};
1475 EvsEventDesc aNotification = {};
1476 bool listening = false;
1477 std::mutex eventLock;
1478 std::condition_variable eventCond;
1479 if (cam1Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
1480 std::thread listener =
1481 std::thread([&frameHandler0, &aNotification, &listening, &eventCond] {
1482 listening = true;
1483 eventCond.notify_all();
1484
1485 EvsEventDesc aTargetEvent;
1486 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
Changyeon Jo9f6f5922022-04-12 19:29:10 -07001487 aTargetEvent.payload.push_back(
1488 static_cast<int32_t>(CameraParam::AUTO_FOCUS));
1489 aTargetEvent.payload.push_back(0);
Changyeon Jo80189012021-10-10 16:34:21 -07001490 if (!frameHandler0->waitForEvent(aTargetEvent, aNotification)) {
1491 LOG(WARNING) << "A timer is expired before a target event is fired.";
1492 }
1493 });
1494
1495 // Wait until a lister starts.
1496 std::unique_lock<std::mutex> lock(eventLock);
1497 auto timer = std::chrono::system_clock::now();
1498 while (!listening) {
1499 eventCond.wait_until(lock, timer + 1s);
1500 }
1501 lock.unlock();
1502
1503 // Try to turn off auto-focus
1504 ASSERT_TRUE(pCam1->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
1505 for (auto&& v : values) {
1506 EXPECT_EQ(v, 0);
1507 }
1508
1509 // Join a listener
1510 if (listener.joinable()) {
1511 listener.join();
1512 }
1513
1514 // Make sure AUTO_FOCUS is off.
1515 ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
1516 EvsEventType::PARAMETER_CHANGED);
1517 }
1518
1519 // Try to program a parameter with a random value [minVal, maxVal] after
1520 // rounding it down.
1521 int32_t val0 = range.min + (std::rand() % (range.max - range.min));
1522 val0 = val0 - (val0 % range.step);
1523
1524 std::thread listener = std::thread(
1525 [&frameHandler1, &aNotification, &listening, &eventCond, &cam1Cmds, val0] {
1526 listening = true;
1527 eventCond.notify_all();
1528
1529 EvsEventDesc aTargetEvent;
1530 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
Changyeon Jo9f6f5922022-04-12 19:29:10 -07001531 aTargetEvent.payload.push_back(static_cast<int32_t>(cam1Cmds[0]));
1532 aTargetEvent.payload.push_back(val0);
Changyeon Jo80189012021-10-10 16:34:21 -07001533 if (!frameHandler1->waitForEvent(aTargetEvent, aNotification)) {
1534 LOG(WARNING) << "A timer is expired before a target event is fired.";
1535 }
1536 });
1537
1538 // Wait until a lister starts.
1539 listening = false;
1540 std::unique_lock<std::mutex> lock(eventLock);
1541 auto timer = std::chrono::system_clock::now();
1542 while (!listening) {
1543 eventCond.wait_until(lock, timer + 1s);
1544 }
1545 lock.unlock();
1546
1547 values.clear();
1548 ASSERT_TRUE(pCam1->setIntParameter(cam1Cmds[0], val0, &values).isOk());
1549 for (auto&& v : values) {
1550 EXPECT_EQ(val0, v);
1551 }
1552
1553 // Join a listener
1554 if (listener.joinable()) {
1555 listener.join();
1556 }
1557
1558 // Verify a change notification
1559 ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType), EvsEventType::PARAMETER_CHANGED);
Changyeon Jo9f6f5922022-04-12 19:29:10 -07001560 ASSERT_GE(aNotification.payload.size(), 2);
Changyeon Jo80189012021-10-10 16:34:21 -07001561 ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]), cam1Cmds[0]);
1562 for (auto&& v : values) {
Changyeon Jo9f6f5922022-04-12 19:29:10 -07001563 ASSERT_EQ(v, aNotification.payload[1]);
Changyeon Jo80189012021-10-10 16:34:21 -07001564 }
1565
1566 listener = std::thread([&frameHandler1, &aNotification, &listening, &eventCond] {
1567 listening = true;
1568 eventCond.notify_all();
1569
1570 EvsEventDesc aTargetEvent;
1571 aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
1572 if (!frameHandler1->waitForEvent(aTargetEvent, aNotification, true)) {
1573 LOG(WARNING) << "A timer is expired before a target event is fired.";
1574 }
1575 });
1576
1577 // Wait until a lister starts.
1578 listening = false;
1579 lock.lock();
1580 timer = std::chrono::system_clock::now();
1581 while (!listening) {
1582 eventCond.wait_until(lock, timer + 1s);
1583 }
1584 lock.unlock();
1585
1586 // Client 0 steals a primary client role
1587 ASSERT_TRUE(pCam0->forcePrimaryClient(pDisplay).isOk());
1588
1589 // Join a listener
1590 if (listener.joinable()) {
1591 listener.join();
1592 }
1593
1594 ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType), EvsEventType::MASTER_RELEASED);
1595
1596 // Client 0 programs a parameter
1597 val0 = range.min + (std::rand() % (range.max - range.min));
1598
1599 // Rounding down
1600 val0 = val0 - (val0 % range.step);
1601
1602 if (cam0Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
1603 std::thread listener =
1604 std::thread([&frameHandler1, &aNotification, &listening, &eventCond] {
1605 listening = true;
1606 eventCond.notify_all();
1607
1608 EvsEventDesc aTargetEvent;
1609 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
Changyeon Jo9f6f5922022-04-12 19:29:10 -07001610 aTargetEvent.payload.push_back(
1611 static_cast<int32_t>(CameraParam::AUTO_FOCUS));
1612 aTargetEvent.payload.push_back(0);
Changyeon Jo80189012021-10-10 16:34:21 -07001613 if (!frameHandler1->waitForEvent(aTargetEvent, aNotification)) {
1614 LOG(WARNING) << "A timer is expired before a target event is fired.";
1615 }
1616 });
1617
1618 // Wait until a lister starts.
1619 std::unique_lock<std::mutex> lock(eventLock);
1620 auto timer = std::chrono::system_clock::now();
1621 while (!listening) {
1622 eventCond.wait_until(lock, timer + 1s);
1623 }
1624 lock.unlock();
1625
1626 // Try to turn off auto-focus
1627 values.clear();
1628 ASSERT_TRUE(pCam0->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
1629 for (auto&& v : values) {
1630 EXPECT_EQ(v, 0);
1631 }
1632
1633 // Join a listener
1634 if (listener.joinable()) {
1635 listener.join();
1636 }
1637
1638 // Make sure AUTO_FOCUS is off.
1639 ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
1640 EvsEventType::PARAMETER_CHANGED);
1641 }
1642
1643 listener = std::thread(
1644 [&frameHandler0, &aNotification, &listening, &eventCond, &cam0Cmds, val0] {
1645 listening = true;
1646 eventCond.notify_all();
1647
1648 EvsEventDesc aTargetEvent;
1649 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
Changyeon Jo9f6f5922022-04-12 19:29:10 -07001650 aTargetEvent.payload.push_back(static_cast<int32_t>(cam0Cmds[0]));
1651 aTargetEvent.payload.push_back(val0);
Changyeon Jo80189012021-10-10 16:34:21 -07001652 if (!frameHandler0->waitForEvent(aTargetEvent, aNotification)) {
1653 LOG(WARNING) << "A timer is expired before a target event is fired.";
1654 }
1655 });
1656
1657 // Wait until a lister starts.
1658 listening = false;
1659 timer = std::chrono::system_clock::now();
1660 lock.lock();
1661 while (!listening) {
1662 eventCond.wait_until(lock, timer + 1s);
1663 }
1664 lock.unlock();
1665
1666 values.clear();
1667 ASSERT_TRUE(pCam0->setIntParameter(cam0Cmds[0], val0, &values).isOk());
1668
1669 // Join a listener
1670 if (listener.joinable()) {
1671 listener.join();
1672 }
1673 // Verify a change notification
1674 ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType), EvsEventType::PARAMETER_CHANGED);
Changyeon Jo9f6f5922022-04-12 19:29:10 -07001675 ASSERT_GE(aNotification.payload.size(), 2);
Changyeon Jo80189012021-10-10 16:34:21 -07001676 ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]), cam0Cmds[0]);
1677 for (auto&& v : values) {
Changyeon Jo9f6f5922022-04-12 19:29:10 -07001678 ASSERT_EQ(v, aNotification.payload[1]);
Changyeon Jo80189012021-10-10 16:34:21 -07001679 }
1680
1681 // Turn off the display (yes, before the stream stops -- it should be handled)
1682 ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::NOT_VISIBLE).isOk());
1683
1684 // Shut down the streamer
1685 frameHandler0->shutdown();
1686 frameHandler1->shutdown();
1687
1688 // Explicitly release the camera
1689 ASSERT_TRUE(mEnumerator->closeCamera(pCam0).isOk());
1690 ASSERT_TRUE(mEnumerator->closeCamera(pCam1).isOk());
1691 mActiveCameras.clear();
Changyeon Jo80189012021-10-10 16:34:21 -07001692
Changyeon Jo017cb982022-11-16 22:04:38 +00001693 // Explicitly release the display
1694 ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
1695 }
Changyeon Jo80189012021-10-10 16:34:21 -07001696}
1697
1698/*
1699 * CameraUseStreamConfigToDisplay:
1700 * End to end test of data flowing from the camera to the display. Similar to
1701 * CameraToDisplayRoundTrip test case but this case retrieves available stream
1702 * configurations from EVS and uses one of them to start a video stream.
1703 */
1704TEST_P(EvsAidlTest, CameraUseStreamConfigToDisplay) {
1705 LOG(INFO) << "Starting CameraUseStreamConfigToDisplay test";
1706
1707 // Get the camera list
1708 loadCameraList();
1709
1710 // Request available display IDs
1711 uint8_t targetDisplayId = 0;
1712 std::vector<uint8_t> displayIds;
1713 ASSERT_TRUE(mEnumerator->getDisplayIdList(&displayIds).isOk());
1714 EXPECT_GT(displayIds.size(), 0);
1715 targetDisplayId = displayIds[0];
1716
Changyeon Jo80189012021-10-10 16:34:21 -07001717 // Test each reported camera
1718 for (auto&& cam : mCameraInfo) {
Changyeon Jo017cb982022-11-16 22:04:38 +00001719 // Request exclusive access to the EVS display
1720 std::shared_ptr<IEvsDisplay> pDisplay;
1721 ASSERT_TRUE(mEnumerator->openDisplay(targetDisplayId, &pDisplay).isOk());
1722 EXPECT_NE(pDisplay, nullptr);
1723
Changyeon Jo80189012021-10-10 16:34:21 -07001724 // choose a configuration that has a frame rate faster than minReqFps.
1725 Stream targetCfg = {};
1726 const int32_t minReqFps = 15;
1727 int32_t maxArea = 0;
1728 camera_metadata_entry_t streamCfgs;
1729 bool foundCfg = false;
1730 if (!find_camera_metadata_entry(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()),
1731 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
1732 &streamCfgs)) {
1733 // Stream configurations are found in metadata
1734 RawStreamConfig* ptr = reinterpret_cast<RawStreamConfig*>(streamCfgs.data.i32);
1735 for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
Changyeon Jo7f5ad612022-08-17 21:47:58 -07001736 if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
Changyeon Jo80189012021-10-10 16:34:21 -07001737 if (ptr->width * ptr->height > maxArea && ptr->framerate >= minReqFps) {
1738 targetCfg.width = ptr->width;
1739 targetCfg.height = ptr->height;
Changyeon Jo7f5ad612022-08-17 21:47:58 -07001740 targetCfg.format = static_cast<PixelFormat>(ptr->format);
Changyeon Jo80189012021-10-10 16:34:21 -07001741
1742 maxArea = ptr->width * ptr->height;
1743 foundCfg = true;
1744 }
1745 }
1746 ++ptr;
1747 }
1748 }
Changyeon Jo80189012021-10-10 16:34:21 -07001749
1750 if (!foundCfg) {
1751 // Current EVS camera does not provide stream configurations in the
1752 // metadata.
1753 continue;
1754 }
1755
1756 std::shared_ptr<IEvsCamera> pCam;
1757 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
1758 EXPECT_NE(pCam, nullptr);
1759
1760 // Store a camera handle for a clean-up
1761 mActiveCameras.push_back(pCam);
1762
1763 // Set up a frame receiver object which will fire up its own thread.
Frederick Mayle7056b242022-03-29 02:38:12 +00001764 std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
1765 pCam, cam, pDisplay, FrameHandler::eAutoReturn);
Changyeon Jo80189012021-10-10 16:34:21 -07001766 EXPECT_NE(frameHandler, nullptr);
1767
1768 // Activate the display
1769 ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME).isOk());
1770
1771 // Start the camera's video stream
1772 ASSERT_TRUE(frameHandler->startStream());
1773
1774 // Wait a while to let the data flow
1775 static const int kSecondsToWait = 5;
1776 const int streamTimeMs =
1777 kSecondsToWait * kSecondsToMilliseconds - kMaxStreamStartMilliseconds;
1778 const unsigned minimumFramesExpected =
1779 streamTimeMs * kMinimumFramesPerSecond / kSecondsToMilliseconds;
1780 sleep(kSecondsToWait);
1781 unsigned framesReceived = 0;
1782 unsigned framesDisplayed = 0;
1783 frameHandler->getFramesCounters(&framesReceived, &framesDisplayed);
1784 EXPECT_EQ(framesReceived, framesDisplayed);
1785 EXPECT_GE(framesDisplayed, minimumFramesExpected);
1786
1787 // Turn off the display (yes, before the stream stops -- it should be handled)
1788 ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::NOT_VISIBLE).isOk());
1789
1790 // Shut down the streamer
1791 frameHandler->shutdown();
1792
1793 // Explicitly release the camera
1794 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
1795 mActiveCameras.clear();
Changyeon Jo80189012021-10-10 16:34:21 -07001796
Changyeon Jo017cb982022-11-16 22:04:38 +00001797 // Explicitly release the display
1798 ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
1799 }
Changyeon Jo80189012021-10-10 16:34:21 -07001800}
1801
1802/*
1803 * MultiCameraStreamUseConfig:
1804 * Verify that each client can start and stop video streams on the same
1805 * underlying camera with same configuration.
1806 */
1807TEST_P(EvsAidlTest, MultiCameraStreamUseConfig) {
1808 LOG(INFO) << "Starting MultiCameraStream test";
1809
1810 if (mIsHwModule) {
1811 // This test is not for HW module implementation.
1812 return;
1813 }
1814
1815 // Get the camera list
1816 loadCameraList();
1817
1818 // Test each reported camera
1819 for (auto&& cam : mCameraInfo) {
1820 // choose a configuration that has a frame rate faster than minReqFps.
1821 Stream targetCfg = {};
1822 const int32_t minReqFps = 15;
1823 int32_t maxArea = 0;
1824 camera_metadata_entry_t streamCfgs;
1825 bool foundCfg = false;
1826 if (!find_camera_metadata_entry(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()),
1827 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
1828 &streamCfgs)) {
1829 // Stream configurations are found in metadata
1830 RawStreamConfig* ptr = reinterpret_cast<RawStreamConfig*>(streamCfgs.data.i32);
1831 for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
Changyeon Jo7f5ad612022-08-17 21:47:58 -07001832 if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
Changyeon Jo80189012021-10-10 16:34:21 -07001833 if (ptr->width * ptr->height > maxArea && ptr->framerate >= minReqFps) {
1834 targetCfg.width = ptr->width;
1835 targetCfg.height = ptr->height;
Changyeon Jo7f5ad612022-08-17 21:47:58 -07001836 targetCfg.format = static_cast<PixelFormat>(ptr->format);
Changyeon Jo80189012021-10-10 16:34:21 -07001837
1838 maxArea = ptr->width * ptr->height;
1839 foundCfg = true;
1840 }
1841 }
1842 ++ptr;
1843 }
1844 }
Changyeon Jo80189012021-10-10 16:34:21 -07001845
1846 if (!foundCfg) {
1847 LOG(INFO) << "Device " << cam.id
1848 << " does not provide a list of supported stream configurations, skipped";
1849 continue;
1850 }
1851
1852 // Create the first camera client with a selected stream configuration.
1853 std::shared_ptr<IEvsCamera> pCam0;
1854 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam0).isOk());
1855 EXPECT_NE(pCam0, nullptr);
1856
1857 // Store a camera handle for a clean-up
1858 mActiveCameras.push_back(pCam0);
1859
1860 // Try to create the second camera client with different stream
1861 // configuration.
1862 int32_t id = targetCfg.id;
1863 targetCfg.id += 1; // EVS manager sees only the stream id.
1864 std::shared_ptr<IEvsCamera> pCam1;
1865 ASSERT_FALSE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());
1866
1867 // Try again with same stream configuration.
1868 targetCfg.id = id;
1869 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());
1870 EXPECT_NE(pCam1, nullptr);
1871
1872 // Set up per-client frame receiver objects which will fire up its own thread
Frederick Mayle7056b242022-03-29 02:38:12 +00001873 std::shared_ptr<FrameHandler> frameHandler0 = ndk::SharedRefBase::make<FrameHandler>(
1874 pCam0, cam, nullptr, FrameHandler::eAutoReturn);
1875 std::shared_ptr<FrameHandler> frameHandler1 = ndk::SharedRefBase::make<FrameHandler>(
1876 pCam1, cam, nullptr, FrameHandler::eAutoReturn);
Changyeon Jo80189012021-10-10 16:34:21 -07001877 EXPECT_NE(frameHandler0, nullptr);
1878 EXPECT_NE(frameHandler1, nullptr);
1879
1880 // Start the camera's video stream via client 0
1881 ASSERT_TRUE(frameHandler0->startStream());
1882 ASSERT_TRUE(frameHandler1->startStream());
1883
1884 // Ensure the stream starts
1885 frameHandler0->waitForFrameCount(1);
1886 frameHandler1->waitForFrameCount(1);
1887
1888 nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
1889
1890 // Wait a bit, then ensure both clients get at least the required minimum number of frames
1891 sleep(5);
1892 nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
1893 unsigned framesReceived0 = 0, framesReceived1 = 0;
1894 frameHandler0->getFramesCounters(&framesReceived0, nullptr);
1895 frameHandler1->getFramesCounters(&framesReceived1, nullptr);
1896 framesReceived0 = framesReceived0 - 1; // Back out the first frame we already waited for
1897 framesReceived1 = framesReceived1 - 1; // Back out the first frame we already waited for
1898 nsecs_t runTime = end - firstFrame;
1899 float framesPerSecond0 = framesReceived0 / (runTime * kNanoToSeconds);
1900 float framesPerSecond1 = framesReceived1 / (runTime * kNanoToSeconds);
1901 LOG(INFO) << "Measured camera rate " << std::scientific << framesPerSecond0 << " fps and "
1902 << framesPerSecond1 << " fps";
1903 EXPECT_GE(framesPerSecond0, kMinimumFramesPerSecond);
1904 EXPECT_GE(framesPerSecond1, kMinimumFramesPerSecond);
1905
1906 // Shutdown one client
1907 frameHandler0->shutdown();
1908
1909 // Read frame counters again
1910 frameHandler0->getFramesCounters(&framesReceived0, nullptr);
1911 frameHandler1->getFramesCounters(&framesReceived1, nullptr);
1912
1913 // Wait a bit again
1914 sleep(5);
1915 unsigned framesReceivedAfterStop0 = 0, framesReceivedAfterStop1 = 0;
1916 frameHandler0->getFramesCounters(&framesReceivedAfterStop0, nullptr);
1917 frameHandler1->getFramesCounters(&framesReceivedAfterStop1, nullptr);
1918 EXPECT_EQ(framesReceived0, framesReceivedAfterStop0);
1919 EXPECT_LT(framesReceived1, framesReceivedAfterStop1);
1920
1921 // Shutdown another
1922 frameHandler1->shutdown();
1923
1924 // Explicitly release the camera
1925 ASSERT_TRUE(mEnumerator->closeCamera(pCam0).isOk());
1926 ASSERT_TRUE(mEnumerator->closeCamera(pCam1).isOk());
1927 mActiveCameras.clear();
1928 }
1929}
1930
1931/*
1932 * LogicalCameraMetadata:
1933 * Opens logical camera reported by the enumerator and validate its metadata by
1934 * checking its capability and locating supporting physical camera device
1935 * identifiers.
1936 */
1937TEST_P(EvsAidlTest, LogicalCameraMetadata) {
1938 LOG(INFO) << "Starting LogicalCameraMetadata test";
1939
1940 // Get the camera list
1941 loadCameraList();
1942
1943 // Open and close each camera twice
1944 for (auto&& cam : mCameraInfo) {
1945 bool isLogicalCam = false;
1946 auto devices = getPhysicalCameraIds(cam.id, isLogicalCam);
1947 if (isLogicalCam) {
1948 ASSERT_GE(devices.size(), 1) << "Logical camera device must have at least one physical "
1949 "camera device ID in its metadata.";
1950 }
1951 }
1952}
1953
1954/*
1955 * CameraStreamExternalBuffering:
1956 * This is same with CameraStreamBuffering except frame buffers are allocated by
1957 * the test client and then imported by EVS framework.
1958 */
1959TEST_P(EvsAidlTest, CameraStreamExternalBuffering) {
1960 LOG(INFO) << "Starting CameraStreamExternalBuffering test";
1961
1962 // Arbitrary constant (should be > 1 and not too big)
1963 static const unsigned int kBuffersToHold = 3;
1964
1965 // Get the camera list
1966 loadCameraList();
1967
1968 // Acquire the graphics buffer allocator
1969 android::GraphicBufferAllocator& alloc(android::GraphicBufferAllocator::get());
1970 const auto usage =
1971 GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_SW_READ_RARELY | GRALLOC_USAGE_SW_WRITE_OFTEN;
1972
1973 // Test each reported camera
1974 for (auto&& cam : mCameraInfo) {
1975 // Read a target resolution from the metadata
1976 Stream targetCfg = getFirstStreamConfiguration(
1977 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
1978 ASSERT_GT(targetCfg.width, 0);
1979 ASSERT_GT(targetCfg.height, 0);
1980
1981 // Allocate buffers to use
1982 std::vector<BufferDesc> buffers;
1983 buffers.resize(kBuffersToHold);
1984 for (auto i = 0; i < kBuffersToHold; ++i) {
1985 unsigned pixelsPerLine;
1986 buffer_handle_t memHandle = nullptr;
1987 android::status_t result =
1988 alloc.allocate(targetCfg.width, targetCfg.height,
1989 static_cast<android::PixelFormat>(targetCfg.format),
1990 /* layerCount = */ 1, usage, &memHandle, &pixelsPerLine,
1991 /* graphicBufferId = */ 0,
1992 /* requestorName = */ "CameraStreamExternalBufferingTest");
1993 if (result != android::NO_ERROR) {
1994 LOG(ERROR) << __FUNCTION__ << " failed to allocate memory.";
1995 // Release previous allocated buffers
1996 for (auto j = 0; j < i; j++) {
1997 alloc.free(::android::dupFromAidl(buffers[i].buffer.handle));
1998 }
1999 return;
2000 } else {
2001 BufferDesc buf;
2002 HardwareBufferDescription* pDesc =
2003 reinterpret_cast<HardwareBufferDescription*>(&buf.buffer.description);
2004 pDesc->width = targetCfg.width;
2005 pDesc->height = targetCfg.height;
2006 pDesc->layers = 1;
2007 pDesc->format = targetCfg.format;
2008 pDesc->usage = static_cast<BufferUsage>(usage);
2009 pDesc->stride = pixelsPerLine;
2010 buf.buffer.handle = ::android::dupToAidl(memHandle);
2011 buf.bufferId = i; // Unique number to identify this buffer
2012 buffers[i] = std::move(buf);
2013 }
2014 }
2015
2016 bool isLogicalCam = false;
2017 getPhysicalCameraIds(cam.id, isLogicalCam);
2018
2019 std::shared_ptr<IEvsCamera> pCam;
2020 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
2021 EXPECT_NE(pCam, nullptr);
2022
2023 // Store a camera handle for a clean-up
2024 mActiveCameras.push_back(pCam);
2025
2026 // Request to import buffers
2027 int delta = 0;
2028 auto status = pCam->importExternalBuffers(buffers, &delta);
2029 if (isLogicalCam) {
2030 ASSERT_FALSE(status.isOk());
2031 continue;
2032 }
2033
2034 ASSERT_TRUE(status.isOk());
2035 EXPECT_GE(delta, kBuffersToHold);
2036
2037 // Set up a frame receiver object which will fire up its own thread.
Frederick Mayle7056b242022-03-29 02:38:12 +00002038 std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
2039 pCam, cam, nullptr, FrameHandler::eNoAutoReturn);
Changyeon Jo80189012021-10-10 16:34:21 -07002040 EXPECT_NE(frameHandler, nullptr);
2041
2042 // Start the camera's video stream
2043 ASSERT_TRUE(frameHandler->startStream());
2044
2045 // Check that the video stream stalls once we've gotten exactly the number of buffers
2046 // we requested since we told the frameHandler not to return them.
2047 sleep(1); // 1 second should be enough for at least 5 frames to be delivered worst case
2048 unsigned framesReceived = 0;
2049 frameHandler->getFramesCounters(&framesReceived, nullptr);
2050 ASSERT_LE(kBuffersToHold, framesReceived) << "Stream didn't stall at expected buffer limit";
2051
2052 // Give back one buffer
2053 EXPECT_TRUE(frameHandler->returnHeldBuffer());
2054
2055 // Once we return a buffer, it shouldn't take more than 1/10 second to get a new one
2056 // filled since we require 10fps minimum -- but give a 10% allowance just in case.
2057 unsigned framesReceivedAfter = 0;
2058 usleep(110 * kMillisecondsToMicroseconds);
2059 frameHandler->getFramesCounters(&framesReceivedAfter, nullptr);
2060 EXPECT_EQ(framesReceived + 1, framesReceivedAfter) << "Stream should've resumed";
2061
2062 // Even when the camera pointer goes out of scope, the FrameHandler object will
2063 // keep the stream alive unless we tell it to shutdown.
2064 // Also note that the FrameHandle and the Camera have a mutual circular reference, so
2065 // we have to break that cycle in order for either of them to get cleaned up.
2066 frameHandler->shutdown();
2067
2068 // Explicitly release the camera
2069 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
2070 mActiveCameras.clear();
2071 // Release buffers
2072 for (auto& b : buffers) {
2073 alloc.free(::android::dupFromAidl(b.buffer.handle));
2074 }
2075 buffers.resize(0);
2076 }
2077}
2078
Changyeon Jodbcf52c2022-05-11 00:01:31 -07002079TEST_P(EvsAidlTest, DeviceStatusCallbackRegistration) {
2080 std::shared_ptr<IEvsEnumeratorStatusCallback> cb =
2081 ndk::SharedRefBase::make<DeviceStatusCallback>();
2082 ndk::ScopedAStatus status = mEnumerator->registerStatusCallback(cb);
2083 if (mIsHwModule) {
2084 ASSERT_TRUE(status.isOk());
2085 } else {
2086 // A callback registration may fail if a HIDL EVS HAL implementation is
2087 // running.
2088 ASSERT_TRUE(status.isOk() ||
2089 status.getServiceSpecificError() == static_cast<int>(EvsResult::NOT_SUPPORTED));
2090 }
2091}
2092
Changyeon Jo80189012021-10-10 16:34:21 -07002093/*
2094 * UltrasonicsArrayOpenClean:
2095 * Opens each ultrasonics arrays reported by the enumerator and then explicitly closes it via a
2096 * call to closeUltrasonicsArray. Then repeats the test to ensure all ultrasonics arrays
2097 * can be reopened.
2098 */
2099TEST_P(EvsAidlTest, UltrasonicsArrayOpenClean) {
2100 LOG(INFO) << "Starting UltrasonicsArrayOpenClean test";
2101
2102 // Get the ultrasonics array list
2103 loadUltrasonicsArrayList();
2104
2105 // Open and close each ultrasonics array twice
2106 for (auto&& ultraInfo : mUltrasonicsArraysInfo) {
2107 for (int pass = 0; pass < 2; pass++) {
2108 std::shared_ptr<IEvsUltrasonicsArray> pUltrasonicsArray;
2109 ASSERT_TRUE(
2110 mEnumerator
2111 ->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId, &pUltrasonicsArray)
2112 .isOk());
2113 EXPECT_NE(pUltrasonicsArray, nullptr);
2114
2115 // Verify that this ultrasonics array self-identifies correctly
2116 UltrasonicsArrayDesc desc;
2117 ASSERT_TRUE(pUltrasonicsArray->getUltrasonicArrayInfo(&desc).isOk());
2118 EXPECT_EQ(ultraInfo.ultrasonicsArrayId, desc.ultrasonicsArrayId);
2119 LOG(DEBUG) << "Found ultrasonics array " << ultraInfo.ultrasonicsArrayId;
2120
2121 // Explicitly close the ultrasonics array so resources are released right away
2122 ASSERT_TRUE(mEnumerator->closeUltrasonicsArray(pUltrasonicsArray).isOk());
2123 }
2124 }
2125}
2126
2127// Starts a stream and verifies all data received is valid.
2128TEST_P(EvsAidlTest, UltrasonicsVerifyStreamData) {
2129 LOG(INFO) << "Starting UltrasonicsVerifyStreamData";
2130
2131 // Get the ultrasonics array list
2132 loadUltrasonicsArrayList();
2133
2134 // For each ultrasonics array.
2135 for (auto&& ultraInfo : mUltrasonicsArraysInfo) {
2136 LOG(DEBUG) << "Testing ultrasonics array: " << ultraInfo.ultrasonicsArrayId;
2137
2138 std::shared_ptr<IEvsUltrasonicsArray> pUltrasonicsArray;
2139 ASSERT_TRUE(
2140 mEnumerator->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId, &pUltrasonicsArray)
2141 .isOk());
2142 EXPECT_NE(pUltrasonicsArray, nullptr);
2143
2144 std::shared_ptr<FrameHandlerUltrasonics> frameHandler =
Frederick Mayle7056b242022-03-29 02:38:12 +00002145 ndk::SharedRefBase::make<FrameHandlerUltrasonics>(pUltrasonicsArray);
Changyeon Jo80189012021-10-10 16:34:21 -07002146 EXPECT_NE(frameHandler, nullptr);
2147
2148 // Start stream.
2149 ASSERT_TRUE(pUltrasonicsArray->startStream(frameHandler).isOk());
2150
2151 // Wait 5 seconds to receive frames.
2152 sleep(5);
2153
2154 // Stop stream.
2155 ASSERT_TRUE(pUltrasonicsArray->stopStream().isOk());
2156
2157 EXPECT_GT(frameHandler->getReceiveFramesCount(), 0);
2158 EXPECT_TRUE(frameHandler->areAllFramesValid());
2159
2160 // Explicitly close the ultrasonics array so resources are released right away
2161 ASSERT_TRUE(mEnumerator->closeUltrasonicsArray(pUltrasonicsArray).isOk());
2162 }
2163}
2164
2165// Sets frames in flight before and after start of stream and verfies success.
2166TEST_P(EvsAidlTest, UltrasonicsSetFramesInFlight) {
2167 LOG(INFO) << "Starting UltrasonicsSetFramesInFlight";
2168
2169 // Get the ultrasonics array list
2170 loadUltrasonicsArrayList();
2171
2172 // For each ultrasonics array.
2173 for (auto&& ultraInfo : mUltrasonicsArraysInfo) {
2174 LOG(DEBUG) << "Testing ultrasonics array: " << ultraInfo.ultrasonicsArrayId;
2175
2176 std::shared_ptr<IEvsUltrasonicsArray> pUltrasonicsArray;
2177 ASSERT_TRUE(
2178 mEnumerator->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId, &pUltrasonicsArray)
2179 .isOk());
2180 EXPECT_NE(pUltrasonicsArray, nullptr);
2181
2182 ASSERT_TRUE(pUltrasonicsArray->setMaxFramesInFlight(10).isOk());
2183
2184 std::shared_ptr<FrameHandlerUltrasonics> frameHandler =
Frederick Mayle7056b242022-03-29 02:38:12 +00002185 ndk::SharedRefBase::make<FrameHandlerUltrasonics>(pUltrasonicsArray);
Changyeon Jo80189012021-10-10 16:34:21 -07002186 EXPECT_NE(frameHandler, nullptr);
2187
2188 // Start stream.
2189 ASSERT_TRUE(pUltrasonicsArray->startStream(frameHandler).isOk());
2190 ASSERT_TRUE(pUltrasonicsArray->setMaxFramesInFlight(5).isOk());
2191
2192 // Stop stream.
2193 ASSERT_TRUE(pUltrasonicsArray->stopStream().isOk());
2194
2195 // Explicitly close the ultrasonics array so resources are released right away
2196 ASSERT_TRUE(mEnumerator->closeUltrasonicsArray(pUltrasonicsArray).isOk());
2197 }
2198}
2199
// Some run configurations may expose no AIDL IEvsEnumerator instance; allow
// this parameterized suite to remain uninstantiated in that case instead of
// failing the run.
GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(EvsAidlTest);
// Instantiate the suite once per registered IEvsEnumerator AIDL HAL instance.
INSTANTIATE_TEST_SUITE_P(
        PerInstance, EvsAidlTest,
        testing::ValuesIn(android::getAidlHalInstanceNames(IEvsEnumerator::descriptor)),
        android::PrintInstanceNameToString);
2205
// Test entry point: initializes gtest, then starts a binder thread pool so
// HAL callbacks (frame delivery, status events) can be serviced while tests
// block on the main thread.
int main(int argc, char** argv) {
    ::testing::InitGoogleTest(&argc, argv);
    // A single binder thread is used here for servicing incoming transactions.
    ABinderProcess_setThreadPoolMaxThreadCount(1);
    ABinderProcess_startThreadPool();
    return RUN_ALL_TESTS();
}