/*
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "FrameHandler.h"
#include "FrameHandlerUltrasonics.h"

#include <aidl/Gtest.h>
#include <aidl/Vintf.h>
#include <aidl/android/hardware/automotive/evs/BnEvsEnumeratorStatusCallback.h>
#include <aidl/android/hardware/automotive/evs/BufferDesc.h>
#include <aidl/android/hardware/automotive/evs/CameraDesc.h>
#include <aidl/android/hardware/automotive/evs/CameraParam.h>
#include <aidl/android/hardware/automotive/evs/DeviceStatus.h>
#include <aidl/android/hardware/automotive/evs/DisplayDesc.h>
#include <aidl/android/hardware/automotive/evs/DisplayState.h>
#include <aidl/android/hardware/automotive/evs/EvsEventDesc.h>
#include <aidl/android/hardware/automotive/evs/EvsEventType.h>
#include <aidl/android/hardware/automotive/evs/EvsResult.h>
#include <aidl/android/hardware/automotive/evs/IEvsCamera.h>
#include <aidl/android/hardware/automotive/evs/IEvsDisplay.h>
#include <aidl/android/hardware/automotive/evs/IEvsEnumerator.h>
#include <aidl/android/hardware/automotive/evs/IEvsEnumeratorStatusCallback.h>
#include <aidl/android/hardware/automotive/evs/IEvsUltrasonicsArray.h>
#include <aidl/android/hardware/automotive/evs/ParameterRange.h>
#include <aidl/android/hardware/automotive/evs/Stream.h>
#include <aidl/android/hardware/automotive/evs/UltrasonicsArrayDesc.h>
#include <aidl/android/hardware/common/NativeHandle.h>
#include <aidl/android/hardware/graphics/common/HardwareBufferDescription.h>
#include <aidl/android/hardware/graphics/common/PixelFormat.h>
#include <aidlcommonsupport/NativeHandle.h>
#include <android-base/logging.h>
#include <android/binder_ibinder.h>
#include <android/binder_manager.h>
#include <android/binder_process.h>
#include <android/binder_status.h>
#include <system/camera_metadata.h>
#include <ui/GraphicBuffer.h>
#include <ui/GraphicBufferAllocator.h>
#include <utils/Timers.h>

#include <deque>
#include <thread>
#include <unordered_set>
namespace {

// These values are called out in the EVS design doc (as of Mar 8, 2017)
constexpr int kMaxStreamStartMilliseconds = 500;
constexpr int kMinimumFramesPerSecond = 10;
constexpr int kSecondsToMilliseconds = 1000;
constexpr int kMillisecondsToMicroseconds = 1000;
constexpr float kNanoToMilliseconds = 0.000001f;
constexpr float kNanoToSeconds = 0.000000001f;

/*
 * Please note that this is different from what is defined in
 * libhardware/modules/camera/3_4/metadata/types.h; this has one additional
 * field to store a framerate.
 */
typedef struct {
    int32_t id;
    int32_t width;
    int32_t height;
    int32_t format;
    int32_t direction;
    int32_t framerate;
} RawStreamConfig;
constexpr size_t kStreamCfgSz = sizeof(RawStreamConfig) / sizeof(int32_t);
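// Note: the ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS metadata entry is assumed
// to be a flat int32_t array whose length is a multiple of kStreamCfgSz, so it can be
// walked as consecutive RawStreamConfig records (see getFirstStreamConfiguration()).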

using ::aidl::android::hardware::automotive::evs::BnEvsEnumeratorStatusCallback;
using ::aidl::android::hardware::automotive::evs::BufferDesc;
using ::aidl::android::hardware::automotive::evs::CameraDesc;
using ::aidl::android::hardware::automotive::evs::CameraParam;
using ::aidl::android::hardware::automotive::evs::DeviceStatus;
using ::aidl::android::hardware::automotive::evs::DisplayDesc;
using ::aidl::android::hardware::automotive::evs::DisplayState;
using ::aidl::android::hardware::automotive::evs::EvsEventDesc;
using ::aidl::android::hardware::automotive::evs::EvsEventType;
using ::aidl::android::hardware::automotive::evs::EvsResult;
using ::aidl::android::hardware::automotive::evs::IEvsCamera;
using ::aidl::android::hardware::automotive::evs::IEvsDisplay;
using ::aidl::android::hardware::automotive::evs::IEvsEnumerator;
using ::aidl::android::hardware::automotive::evs::IEvsEnumeratorStatusCallback;
using ::aidl::android::hardware::automotive::evs::IEvsUltrasonicsArray;
using ::aidl::android::hardware::automotive::evs::ParameterRange;
using ::aidl::android::hardware::automotive::evs::Stream;
using ::aidl::android::hardware::automotive::evs::UltrasonicsArrayDesc;
using ::aidl::android::hardware::graphics::common::BufferUsage;
using ::aidl::android::hardware::graphics::common::HardwareBufferDescription;
using ::aidl::android::hardware::graphics::common::PixelFormat;
using std::chrono_literals::operator""s;

}  // namespace

// The main test class for EVS
class EvsAidlTest : public ::testing::TestWithParam<std::string> {
  public:
    virtual void SetUp() override {
        // Make sure we can connect to the enumerator
        std::string service_name = GetParam();
        AIBinder* binder = AServiceManager_waitForService(service_name.data());
        ASSERT_NE(binder, nullptr);
        mEnumerator = IEvsEnumerator::fromBinder(::ndk::SpAIBinder(binder));
        LOG(INFO) << "Test target service: " << service_name;

        ASSERT_TRUE(mEnumerator->isHardware(&mIsHwModule).isOk());
    }

    virtual void TearDown() override {
        // Attempt to close any active camera
        for (auto&& cam : mActiveCameras) {
            if (cam != nullptr) {
                mEnumerator->closeCamera(cam);
            }
        }
        mActiveCameras.clear();
    }

  protected:
    void loadCameraList() {
        // SetUp() must run first!
        ASSERT_NE(mEnumerator, nullptr);

        // Get the camera list
        ASSERT_TRUE(mEnumerator->getCameraList(&mCameraInfo).isOk())
                << "Failed to get a list of available cameras";
        LOG(INFO) << "We have " << mCameraInfo.size() << " cameras.";
    }

    void loadUltrasonicsArrayList() {
        // SetUp() must run first!
        ASSERT_NE(mEnumerator, nullptr);

        // Get the ultrasonics array list
        auto result = mEnumerator->getUltrasonicsArrayList(&mUltrasonicsArraysInfo);
        ASSERT_TRUE(result.isOk() ||
                    // TODO(b/149874793): Remove below conditions when
                    // getUltrasonicsArrayList() is implemented.
                    (!result.isOk() && result.getServiceSpecificError() ==
                                               static_cast<int32_t>(EvsResult::NOT_IMPLEMENTED)))
                << "Failed to get a list of available ultrasonics arrays";
        LOG(INFO) << "We have " << mUltrasonicsArraysInfo.size() << " ultrasonics arrays.";
    }

    bool isLogicalCamera(const camera_metadata_t* metadata) {
        if (metadata == nullptr) {
            // A logical camera device must have a valid camera metadata.
            return false;
        }

        // Looking for LOGICAL_MULTI_CAMERA capability from metadata.
        camera_metadata_ro_entry_t entry;
        int rc = find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
                                               &entry);
        if (rc != 0) {
            // No capabilities are found.
            return false;
        }

        for (size_t i = 0; i < entry.count; ++i) {
            uint8_t cap = entry.data.u8[i];
            if (cap == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA) {
                return true;
            }
        }

        return false;
    }

    std::unordered_set<std::string> getPhysicalCameraIds(const std::string& id, bool& flag) {
        std::unordered_set<std::string> physicalCameras;
        const auto it = std::find_if(mCameraInfo.begin(), mCameraInfo.end(),
                                     [&id](const CameraDesc& desc) { return id == desc.id; });
        if (it == mCameraInfo.end()) {
            // Unknown camera is requested. Return an empty list.
            return physicalCameras;
        }

        const camera_metadata_t* metadata = reinterpret_cast<camera_metadata_t*>(&it->metadata[0]);
        flag = isLogicalCamera(metadata);
        if (!flag) {
            // EVS assumes that a device without valid metadata is a physical
            // device.
            LOG(INFO) << id << " is not a logical camera device.";
            physicalCameras.insert(id);
            return physicalCameras;
        }

        // Look for physical camera identifiers
        camera_metadata_ro_entry entry;
        int rc = find_camera_metadata_ro_entry(metadata, ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS,
                                               &entry);
        if (rc != 0) {
            LOG(ERROR) << "No physical camera ID is found for a logical camera device";
            // Return early; 'entry' is not valid when the metadata lookup fails.
            return physicalCameras;
        }
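        // ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS is expected to hold the physical
        // camera identifiers as a sequence of NUL-terminated strings; e.g. "0\01\0"
        // yields {"0", "1"}.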

        const uint8_t* ids = entry.data.u8;
        size_t start = 0;
        for (size_t i = 0; i < entry.count; ++i) {
            if (ids[i] == '\0') {
                if (start != i) {
                    std::string id(reinterpret_cast<const char*>(ids + start));
                    physicalCameras.insert(id);
                }
                start = i + 1;
            }
        }

        LOG(INFO) << id << " consists of " << physicalCameras.size() << " physical camera devices";
        return physicalCameras;
    }

    Stream getFirstStreamConfiguration(camera_metadata_t* metadata) {
        Stream targetCfg = {};
        camera_metadata_entry_t streamCfgs;
        if (!find_camera_metadata_entry(metadata, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
                                        &streamCfgs)) {
            // Stream configurations are found in metadata
            RawStreamConfig* ptr = reinterpret_cast<RawStreamConfig*>(streamCfgs.data.i32);
            for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
                if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
                    targetCfg.width = ptr->width;
                    targetCfg.height = ptr->height;
                    targetCfg.format = static_cast<PixelFormat>(ptr->format);
                    break;
                }
                ++ptr;
            }
        }

        return targetCfg;
    }

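    // A deliberately inert status callback: deviceStatusChanged() ignores every
    // update, so instances are useful only where a valid
    // IEvsEnumeratorStatusCallback binder object is needed.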
    class DeviceStatusCallback : public BnEvsEnumeratorStatusCallback {
        ndk::ScopedAStatus deviceStatusChanged(const std::vector<DeviceStatus>&) override {
            // This empty implementation always returns ok().
            return ndk::ScopedAStatus::ok();
        }
    };

    // Every test needs access to the service
    std::shared_ptr<IEvsEnumerator> mEnumerator;
    // Empty unless/until loadCameraList() is called
    std::vector<CameraDesc> mCameraInfo;
    // Boolean telling whether the module under test is a HW module
    // implementation or not
    bool mIsHwModule;
    // A list of active camera handles that need to be cleaned up
    std::deque<std::shared_ptr<IEvsCamera>> mActiveCameras;
    // Empty unless/until loadUltrasonicsArrayList() is called
    std::vector<UltrasonicsArrayDesc> mUltrasonicsArraysInfo;
    // A list of active ultrasonics array handles that are to be cleaned up
    std::deque<std::weak_ptr<IEvsUltrasonicsArray>> mActiveUltrasonicsArrays;
};

// Test cases, their implementations, and corresponding requirements are
// documented at go/aae-evs-public-api-test.

/*
 * CameraOpenClean:
 * Opens each camera reported by the enumerator and then explicitly closes it via a
 * call to closeCamera. Then repeats the test to ensure all cameras can be reopened.
 */
TEST_P(EvsAidlTest, CameraOpenClean) {
    LOG(INFO) << "Starting CameraOpenClean test";

    // Get the camera list
    loadCameraList();

    // Open and close each camera twice
    for (auto&& cam : mCameraInfo) {
        bool isLogicalCam = false;
        auto devices = getPhysicalCameraIds(cam.id, isLogicalCam);
        if (mIsHwModule && isLogicalCam) {
            LOG(INFO) << "Skip a logical device, " << cam.id << " for HW target.";
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        for (int pass = 0; pass < 2; pass++) {
            std::shared_ptr<IEvsCamera> pCam;
            ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
            ASSERT_NE(pCam, nullptr);

            CameraDesc cameraInfo;
            for (auto&& devName : devices) {
                ASSERT_TRUE(pCam->getPhysicalCameraInfo(devName, &cameraInfo).isOk());
                EXPECT_EQ(devName, cameraInfo.id);
            }

            // Store a camera handle for a clean-up
            mActiveCameras.push_back(pCam);

            // Verify that this camera self-identifies correctly
            ASSERT_TRUE(pCam->getCameraInfo(&cameraInfo).isOk());
            EXPECT_EQ(cam.id, cameraInfo.id);

            // Verify methods for extended info
            const auto id = 0xFFFFFFFF;  // meaningless id
            std::vector<uint8_t> values;
            auto status = pCam->setExtendedInfo(id, values);
            if (isLogicalCam) {
                EXPECT_TRUE(!status.isOk() && status.getServiceSpecificError() ==
                                                      static_cast<int>(EvsResult::NOT_SUPPORTED));
            } else {
                EXPECT_TRUE(status.isOk());
            }

            status = pCam->getExtendedInfo(id, &values);
            if (isLogicalCam) {
                EXPECT_TRUE(!status.isOk() && status.getServiceSpecificError() ==
                                                      static_cast<int>(EvsResult::NOT_SUPPORTED));
            } else {
                EXPECT_TRUE(status.isOk());
            }

            // Explicitly close the camera so resources are released right away
            ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
            mActiveCameras.clear();
        }
    }
}

/*
 * CameraOpenAggressive:
 * Opens each camera reported by the enumerator twice in a row without an intervening closeCamera
 * call. This ensures that the intended "aggressive open" behavior works. This is necessary for
 * the system to be tolerant of shutdown/restart race conditions.
 */
TEST_P(EvsAidlTest, CameraOpenAggressive) {
    LOG(INFO) << "Starting CameraOpenAggressive test";

    // Get the camera list
    loadCameraList();

    // Open each camera twice in a row without closing it in between
    for (auto&& cam : mCameraInfo) {
        bool isLogicalCam = false;
        getPhysicalCameraIds(cam.id, isLogicalCam);
        if (mIsHwModule && isLogicalCam) {
            LOG(INFO) << "Skip a logical device, " << cam.id << " for HW target.";
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        mActiveCameras.clear();
        std::shared_ptr<IEvsCamera> pCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
        EXPECT_NE(pCam, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pCam);

        // Verify that this camera self-identifies correctly
        CameraDesc cameraInfo;
        ASSERT_TRUE(pCam->getCameraInfo(&cameraInfo).isOk());
        EXPECT_EQ(cam.id, cameraInfo.id);

        std::shared_ptr<IEvsCamera> pCam2;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam2).isOk());
        EXPECT_NE(pCam2, nullptr);
        EXPECT_NE(pCam, pCam2);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pCam2);

        auto status = pCam->setMaxFramesInFlight(2);
        if (mIsHwModule) {
            // Verify that the old camera rejects calls via HW module.
            EXPECT_TRUE(!status.isOk() && status.getServiceSpecificError() ==
                                                  static_cast<int>(EvsResult::OWNERSHIP_LOST));
        } else {
            // The default implementation supports multiple clients.
            EXPECT_TRUE(status.isOk());
        }

        // Close the superseded camera
        ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
        mActiveCameras.pop_front();

        // Verify that the second camera instance self-identifies correctly
        ASSERT_TRUE(pCam2->getCameraInfo(&cameraInfo).isOk());
        EXPECT_EQ(cam.id, cameraInfo.id);

        // Close the second camera instance
        ASSERT_TRUE(mEnumerator->closeCamera(pCam2).isOk());
        mActiveCameras.pop_front();
    }

    // Sleep here to ensure the destructor cleanup has time to run so we don't break follow-on tests
    sleep(1);  // I hate that this is an arbitrary time to wait. :( b/36122635
}

/*
 * CameraStreamPerformance:
 * Measure and qualify the stream start up time and streaming frame rate of each reported camera
 */
TEST_P(EvsAidlTest, CameraStreamPerformance) {
    LOG(INFO) << "Starting CameraStreamPerformance test";

    // Get the camera list
    loadCameraList();

    // Test each reported camera
    for (auto&& cam : mCameraInfo) {
        bool isLogicalCam = false;
        auto devices = getPhysicalCameraIds(cam.id, isLogicalCam);
        if (mIsHwModule && isLogicalCam) {
            LOG(INFO) << "Skip a logical device " << cam.id;
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        std::shared_ptr<IEvsCamera> pCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
        EXPECT_NE(pCam, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pCam);

        // Set up a frame receiver object which will fire up its own thread
        std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
                pCam, cam, nullptr, FrameHandler::eAutoReturn);
        EXPECT_NE(frameHandler, nullptr);

        // Start the camera's video stream
        nsecs_t start = systemTime(SYSTEM_TIME_MONOTONIC);
        ASSERT_TRUE(frameHandler->startStream());

        // Ensure the first frame arrived within the expected time
        frameHandler->waitForFrameCount(1);
        nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
        nsecs_t timeToFirstFrame = systemTime(SYSTEM_TIME_MONOTONIC) - start;

        // Extra delays are expected when we attempt to start a video stream on
        // a logical camera device. The delay is expected to be at most
        // kMaxStreamStartMilliseconds multiplied by the number of physical
        // camera devices.
        EXPECT_LE(nanoseconds_to_milliseconds(timeToFirstFrame),
                  kMaxStreamStartMilliseconds * devices.size());
        printf("%s: Measured time to first frame %0.2f ms\n", cam.id.data(),
               timeToFirstFrame * kNanoToMilliseconds);
        LOG(INFO) << cam.id << ": Measured time to first frame " << std::scientific
                  << timeToFirstFrame * kNanoToMilliseconds << " ms.";

        // Check aspect ratio
        unsigned width = 0, height = 0;
        frameHandler->getFrameDimension(&width, &height);
        EXPECT_GE(width, height);

        // Wait a bit, then ensure we get at least the required minimum number of frames
        sleep(5);
        nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);

        // Even when the camera pointer goes out of scope, the FrameHandler object will
        // keep the stream alive unless we tell it to shutdown.
        // Also note that the FrameHandler and the Camera have a mutual circular reference, so
        // we have to break that cycle in order for either of them to get cleaned up.
        frameHandler->shutdown();

        unsigned framesReceived = 0;
        frameHandler->getFramesCounters(&framesReceived, nullptr);
        framesReceived = framesReceived - 1;  // Back out the first frame we already waited for
        nsecs_t runTime = end - firstFrame;
        float framesPerSecond = framesReceived / (runTime * kNanoToSeconds);
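        // runTime is in nanoseconds; multiplying by kNanoToSeconds converts it to
        // seconds, so the quotient above is frames per second.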
        printf("Measured camera rate %3.2f fps\n", framesPerSecond);
        LOG(INFO) << "Measured camera rate " << std::scientific << framesPerSecond << " fps.";
        EXPECT_GE(framesPerSecond, kMinimumFramesPerSecond);

        // Explicitly release the camera
        ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
        mActiveCameras.clear();
    }
}

/*
 * CameraStreamBuffering:
 * Ensure the camera implementation behaves properly when the client holds onto buffers for more
 * than one frame time. The camera must cleanly skip frames until the client is ready again.
 */
TEST_P(EvsAidlTest, CameraStreamBuffering) {
    LOG(INFO) << "Starting CameraStreamBuffering test";

    // Arbitrary constant (should be > 1 and not too big)
    static const unsigned int kBuffersToHold = 6;

    // Get the camera list
    loadCameraList();

    // Test each reported camera
    for (auto&& cam : mCameraInfo) {
        bool isLogicalCam = false;
        getPhysicalCameraIds(cam.id, isLogicalCam);
        if (mIsHwModule && isLogicalCam) {
            LOG(INFO) << "Skip a logical device " << cam.id << " for HW target.";
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        std::shared_ptr<IEvsCamera> pCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
        EXPECT_NE(pCam, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pCam);

        // Ask for a very large number of buffers in flight to ensure it errors correctly
        auto badResult = pCam->setMaxFramesInFlight(std::numeric_limits<int32_t>::max());
        EXPECT_TRUE(!badResult.isOk() && badResult.getServiceSpecificError() ==
                                                 static_cast<int>(EvsResult::BUFFER_NOT_AVAILABLE));

        // Now ask for exactly kBuffersToHold buffers in flight as we'll test behavior in that case
        ASSERT_TRUE(pCam->setMaxFramesInFlight(kBuffersToHold).isOk());

        // Set up a frame receiver object which will fire up its own thread.
        std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
                pCam, cam, nullptr, FrameHandler::eNoAutoReturn);
        EXPECT_NE(frameHandler, nullptr);

        // Start the camera's video stream
        ASSERT_TRUE(frameHandler->startStream());

        // Check that the video stream stalls once we've gotten exactly the number of buffers
        // we requested since we told the frameHandler not to return them.
        sleep(1);  // 1 second should be enough for at least 5 frames to be delivered worst case
        unsigned framesReceived = 0;
        frameHandler->getFramesCounters(&framesReceived, nullptr);
        ASSERT_EQ(kBuffersToHold, framesReceived) << "Stream didn't stall at expected buffer limit";

        // Give back one buffer
        ASSERT_TRUE(frameHandler->returnHeldBuffer());

        // Once we return a buffer, it shouldn't take more than 1/10 second to get a new one
        // filled since we require 10fps minimum -- but give a 10% allowance just in case.
        usleep(110 * kMillisecondsToMicroseconds);
        frameHandler->getFramesCounters(&framesReceived, nullptr);
        EXPECT_EQ(kBuffersToHold + 1, framesReceived) << "Stream should've resumed";

        // Even when the camera pointer goes out of scope, the FrameHandler object will
        // keep the stream alive unless we tell it to shutdown.
        // Also note that the FrameHandler and the Camera have a mutual circular reference, so
        // we have to break that cycle in order for either of them to get cleaned up.
        frameHandler->shutdown();

        // Explicitly release the camera
        ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
        mActiveCameras.clear();
    }
}

/*
 * CameraToDisplayRoundTrip:
 * End to end test of data flowing from the camera to the display. Each delivered frame of camera
 * imagery is simply copied to the display buffer and presented on screen. This is the one test
 * which a human could observe to see the operation of the system on the physical display.
 */
TEST_P(EvsAidlTest, CameraToDisplayRoundTrip) {
    LOG(INFO) << "Starting CameraToDisplayRoundTrip test";

    // Get the camera list
    loadCameraList();

    // Request available display IDs
    uint8_t targetDisplayId = 0;
    std::vector<uint8_t> displayIds;
    ASSERT_TRUE(mEnumerator->getDisplayIdList(&displayIds).isOk());
    EXPECT_GT(displayIds.size(), 0);
    targetDisplayId = displayIds[0];

    // Test each reported camera
    for (auto&& cam : mCameraInfo) {
        // Request exclusive access to the first EVS display
        std::shared_ptr<IEvsDisplay> pDisplay;
        ASSERT_TRUE(mEnumerator->openDisplay(targetDisplayId, &pDisplay).isOk());
        EXPECT_NE(pDisplay, nullptr);
        LOG(INFO) << "Display " << static_cast<int>(targetDisplayId) << " is in use.";

        // Get the display descriptor
        DisplayDesc displayDesc;
        ASSERT_TRUE(pDisplay->getDisplayInfo(&displayDesc).isOk());
        LOG(INFO) << "    Resolution: " << displayDesc.width << "x" << displayDesc.height;
        ASSERT_GT(displayDesc.width, 0);
        ASSERT_GT(displayDesc.height, 0);

        bool isLogicalCam = false;
        getPhysicalCameraIds(cam.id, isLogicalCam);
        if (mIsHwModule && isLogicalCam) {
            LOG(INFO) << "Skip a logical device " << cam.id << " for HW target.";
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        std::shared_ptr<IEvsCamera> pCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
        EXPECT_NE(pCam, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pCam);

        // Set up a frame receiver object which will fire up its own thread.
        std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
                pCam, cam, pDisplay, FrameHandler::eAutoReturn);
        EXPECT_NE(frameHandler, nullptr);

        // Activate the display
        ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME).isOk());

        // Start the camera's video stream
        ASSERT_TRUE(frameHandler->startStream());

        // Wait a while to let the data flow
        static const int kSecondsToWait = 5;
        const int streamTimeMs =
                kSecondsToWait * kSecondsToMilliseconds - kMaxStreamStartMilliseconds;
        const unsigned minimumFramesExpected =
                streamTimeMs * kMinimumFramesPerSecond / kSecondsToMilliseconds;
        sleep(kSecondsToWait);
        unsigned framesReceived = 0;
        unsigned framesDisplayed = 0;
        frameHandler->getFramesCounters(&framesReceived, &framesDisplayed);
        EXPECT_EQ(framesReceived, framesDisplayed);
        EXPECT_GE(framesDisplayed, minimumFramesExpected);

        // Turn off the display (yes, before the stream stops -- it should be handled)
        ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::NOT_VISIBLE).isOk());

        // Shut down the streamer
        frameHandler->shutdown();

        // Explicitly release the camera
        ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
        mActiveCameras.clear();

        // Explicitly release the display
        ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
    }
}

/*
 * MultiCameraStream:
 * Verify that each client can start and stop video streams on the same
 * underlying camera.
 */
TEST_P(EvsAidlTest, MultiCameraStream) {
    LOG(INFO) << "Starting MultiCameraStream test";

    if (mIsHwModule) {
        // This test is not for HW module implementation.
        return;
    }

    // Get the camera list
    loadCameraList();

    // Test each reported camera
    for (auto&& cam : mCameraInfo) {
        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        // Create two camera clients.
        std::shared_ptr<IEvsCamera> pCam0;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam0).isOk());
        EXPECT_NE(pCam0, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pCam0);

        std::shared_ptr<IEvsCamera> pCam1;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());
        EXPECT_NE(pCam1, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pCam1);

        // Set up per-client frame receiver objects which will fire up their own threads
        std::shared_ptr<FrameHandler> frameHandler0 = ndk::SharedRefBase::make<FrameHandler>(
                pCam0, cam, nullptr, FrameHandler::eAutoReturn);
        std::shared_ptr<FrameHandler> frameHandler1 = ndk::SharedRefBase::make<FrameHandler>(
                pCam1, cam, nullptr, FrameHandler::eAutoReturn);
        EXPECT_NE(frameHandler0, nullptr);
        EXPECT_NE(frameHandler1, nullptr);

        // Start the camera's video stream via client 0
        ASSERT_TRUE(frameHandler0->startStream());
        ASSERT_TRUE(frameHandler1->startStream());

        // Ensure the stream starts
        frameHandler0->waitForFrameCount(1);
        frameHandler1->waitForFrameCount(1);

        nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);

        // Wait a bit, then ensure both clients get at least the required minimum number of frames
        sleep(5);
        nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
        unsigned framesReceived0 = 0, framesReceived1 = 0;
        frameHandler0->getFramesCounters(&framesReceived0, nullptr);
        frameHandler1->getFramesCounters(&framesReceived1, nullptr);
        framesReceived0 = framesReceived0 - 1;  // Back out the first frame we already waited for
        framesReceived1 = framesReceived1 - 1;  // Back out the first frame we already waited for
        nsecs_t runTime = end - firstFrame;
        float framesPerSecond0 = framesReceived0 / (runTime * kNanoToSeconds);
        float framesPerSecond1 = framesReceived1 / (runTime * kNanoToSeconds);
        LOG(INFO) << "Measured camera rate " << std::scientific << framesPerSecond0 << " fps and "
                  << framesPerSecond1 << " fps";
        EXPECT_GE(framesPerSecond0, kMinimumFramesPerSecond);
        EXPECT_GE(framesPerSecond1, kMinimumFramesPerSecond);

        // Shut down one client
        frameHandler0->shutdown();

        // Read frame counters again
        frameHandler0->getFramesCounters(&framesReceived0, nullptr);
        frameHandler1->getFramesCounters(&framesReceived1, nullptr);

        // Wait a bit again
        sleep(5);
        unsigned framesReceivedAfterStop0 = 0, framesReceivedAfterStop1 = 0;
        frameHandler0->getFramesCounters(&framesReceivedAfterStop0, nullptr);
        frameHandler1->getFramesCounters(&framesReceivedAfterStop1, nullptr);
        EXPECT_EQ(framesReceived0, framesReceivedAfterStop0);
        EXPECT_LT(framesReceived1, framesReceivedAfterStop1);

        // Shut down the other client
        frameHandler1->shutdown();

        // Explicitly release the cameras
        ASSERT_TRUE(mEnumerator->closeCamera(pCam0).isOk());
        ASSERT_TRUE(mEnumerator->closeCamera(pCam1).isOk());
        mActiveCameras.clear();

        // TODO(b/145459970, b/145457727): below sleep() is added to ensure the
        // destruction of active camera objects; this may be related with two
        // issues.
        sleep(1);
    }
}

/*
 * CameraParameter:
 * Verify that a client can adjust a camera parameter.
 */
TEST_P(EvsAidlTest, CameraParameter) {
    LOG(INFO) << "Starting CameraParameter test";

    // Get the camera list
    loadCameraList();

    // Test each reported camera
    for (auto&& cam : mCameraInfo) {
        bool isLogicalCam = false;
        getPhysicalCameraIds(cam.id, isLogicalCam);
        if (isLogicalCam) {
            // TODO(b/145465724): Support camera parameter programming on
            // logical devices.
            LOG(INFO) << "Skip a logical device " << cam.id;
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        // Create a camera client
        std::shared_ptr<IEvsCamera> pCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
        EXPECT_NE(pCam, nullptr);

        // Store a camera
        mActiveCameras.push_back(pCam);

        // Get the parameter list
        std::vector<CameraParam> cmds;
        ASSERT_TRUE(pCam->getParameterList(&cmds).isOk());
        if (cmds.size() < 1) {
            continue;
        }

        // Set up a frame receiver object which will fire up its own thread
        std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
                pCam, cam, nullptr, FrameHandler::eAutoReturn);
        EXPECT_NE(frameHandler, nullptr);

        // Start the camera's video stream
        ASSERT_TRUE(frameHandler->startStream());

        // Ensure the stream starts
        frameHandler->waitForFrameCount(1);

        // Make the current client the primary client
        ASSERT_TRUE(pCam->setPrimaryClient().isOk());
        for (auto& cmd : cmds) {
            // Get a valid parameter value range
            ParameterRange range;
            ASSERT_TRUE(pCam->getIntParameterRange(cmd, &range).isOk());

            std::vector<int32_t> values;
            if (cmd == CameraParam::ABSOLUTE_FOCUS) {
                // Try to turn off auto-focus
                ASSERT_TRUE(pCam->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
                for (auto&& v : values) {
                    EXPECT_EQ(v, 0);
                }
            }

            // Try to program a parameter with a random value in [range.min, range.max]
            int32_t val0 = range.min + (std::rand() % (range.max - range.min));

            // Round down to a multiple of the step size
            val0 = val0 - (val0 % range.step);
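            // e.g. with range.step == 4, a raw value of 10 is snapped down to 8;
            // note the snap is relative to zero, not to range.min.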
            values.clear();
            ASSERT_TRUE(pCam->setIntParameter(cmd, val0, &values).isOk());

            values.clear();
            ASSERT_TRUE(pCam->getIntParameter(cmd, &values).isOk());
            for (auto&& v : values) {
                EXPECT_EQ(val0, v) << "Values are not matched.";
            }
        }
        ASSERT_TRUE(pCam->unsetPrimaryClient().isOk());

        // Shutdown
        frameHandler->shutdown();

        // Explicitly release the camera
        ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
        mActiveCameras.clear();
    }
}

/*
 * CameraPrimaryClientRelease
 * Verify that the non-primary client gets notified when the primary client either
 * terminates or releases its role.
 */
TEST_P(EvsAidlTest, CameraPrimaryClientRelease) {
    LOG(INFO) << "Starting CameraPrimaryClientRelease test";

    if (mIsHwModule) {
        // This test is not for HW module implementation.
        return;
    }

    // Get the camera list
    loadCameraList();

    // Test each reported camera
    for (auto&& cam : mCameraInfo) {
        bool isLogicalCam = false;
        getPhysicalCameraIds(cam.id, isLogicalCam);
        if (isLogicalCam) {
            // TODO(b/145465724): Support camera parameter programming on
            // logical devices.
            LOG(INFO) << "Skip a logical device " << cam.id;
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        // Create two camera clients.
        std::shared_ptr<IEvsCamera> pPrimaryCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pPrimaryCam).isOk());
        EXPECT_NE(pPrimaryCam, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pPrimaryCam);

        std::shared_ptr<IEvsCamera> pSecondaryCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pSecondaryCam).isOk());
        EXPECT_NE(pSecondaryCam, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pSecondaryCam);

        // Set up per-client frame receiver objects which will fire up their own threads
        std::shared_ptr<FrameHandler> frameHandlerPrimary = ndk::SharedRefBase::make<FrameHandler>(
                pPrimaryCam, cam, nullptr, FrameHandler::eAutoReturn);
        std::shared_ptr<FrameHandler> frameHandlerSecondary =
                ndk::SharedRefBase::make<FrameHandler>(pSecondaryCam, cam, nullptr,
                                                       FrameHandler::eAutoReturn);
        EXPECT_NE(frameHandlerPrimary, nullptr);
        EXPECT_NE(frameHandlerSecondary, nullptr);

        // Set one client as the primary client
        ASSERT_TRUE(pPrimaryCam->setPrimaryClient().isOk());

        // Try to set another client as the primary client.
        ASSERT_FALSE(pSecondaryCam->setPrimaryClient().isOk());

        // Start the camera's video stream via the primary client.
        ASSERT_TRUE(frameHandlerPrimary->startStream());

        // Ensure the stream starts
        frameHandlerPrimary->waitForFrameCount(1);

        // Start the camera's video stream via another client
        ASSERT_TRUE(frameHandlerSecondary->startStream());

        // Ensure the stream starts
        frameHandlerSecondary->waitForFrameCount(1);

        // The non-primary client expects to receive a notification that the
        // primary client role has been released.
        EvsEventDesc aTargetEvent = {};
        EvsEventDesc aNotification = {};

        bool listening = false;
        std::mutex eventLock;
        std::condition_variable eventCond;
        std::thread listener =
                std::thread([&aNotification, &frameHandlerSecondary, &listening, &eventCond]() {
                    // Notify that a listening thread is running.
                    listening = true;
                    eventCond.notify_all();

                    EvsEventDesc aTargetEvent;
                    aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
                    if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification, true)) {
                        LOG(WARNING) << "A timer is expired before a target event is fired.";
                    }
                });

        // Wait until a listening thread starts.
        std::unique_lock<std::mutex> lock(eventLock);
        auto timer = std::chrono::system_clock::now();
        while (!listening) {
            timer += 1s;
            eventCond.wait_until(lock, timer);
        }
        lock.unlock();

        // Release the primary client role.
        ASSERT_TRUE(pPrimaryCam->unsetPrimaryClient().isOk());

        // Join a listening thread.
        if (listener.joinable()) {
            listener.join();
        }

        // Verify change notifications.
        ASSERT_EQ(EvsEventType::MASTER_RELEASED, static_cast<EvsEventType>(aNotification.aType));

        // The non-primary client becomes the primary client.
        ASSERT_TRUE(pSecondaryCam->setPrimaryClient().isOk());

        // The previous primary client fails to become a primary client.
        ASSERT_FALSE(pPrimaryCam->setPrimaryClient().isOk());

        listening = false;
        listener = std::thread([&aNotification, &frameHandlerPrimary, &listening, &eventCond]() {
            // Notify that a listening thread is running.
            listening = true;
            eventCond.notify_all();

            EvsEventDesc aTargetEvent;
            aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
            if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification, true)) {
                LOG(WARNING) << "A timer is expired before a target event is fired.";
            }
        });

        // Wait until a listening thread starts.
        timer = std::chrono::system_clock::now();
        lock.lock();
        while (!listening) {
            eventCond.wait_until(lock, timer + 1s);
        }
        lock.unlock();

        // Close the current primary client.
        frameHandlerSecondary->shutdown();

        // Join a listening thread.
        if (listener.joinable()) {
            listener.join();
        }

        // Verify change notifications.
        ASSERT_EQ(EvsEventType::MASTER_RELEASED, static_cast<EvsEventType>(aNotification.aType));

        // Close the remaining stream.
        frameHandlerPrimary->shutdown();

        // Explicitly release the cameras
        ASSERT_TRUE(mEnumerator->closeCamera(pPrimaryCam).isOk());
        ASSERT_TRUE(mEnumerator->closeCamera(pSecondaryCam).isOk());
        mActiveCameras.clear();
    }
}

/*
 * MultiCameraParameter:
 * Verify that primary and non-primary clients behave as expected when they try to adjust
 * camera parameters.
 */
TEST_P(EvsAidlTest, MultiCameraParameter) {
    LOG(INFO) << "Starting MultiCameraParameter test";

    if (mIsHwModule) {
        // This test is not for HW module implementation.
        return;
    }

    // Get the camera list
    loadCameraList();

    // Test each reported camera
    for (auto&& cam : mCameraInfo) {
        bool isLogicalCam = false;
        getPhysicalCameraIds(cam.id, isLogicalCam);
        if (isLogicalCam) {
            // TODO(b/145465724): Support camera parameter programming on
            // logical devices.
            LOG(INFO) << "Skip a logical device " << cam.id;
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        // Create two camera clients.
        std::shared_ptr<IEvsCamera> pPrimaryCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pPrimaryCam).isOk());
        EXPECT_NE(pPrimaryCam, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pPrimaryCam);

        std::shared_ptr<IEvsCamera> pSecondaryCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pSecondaryCam).isOk());
        EXPECT_NE(pSecondaryCam, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pSecondaryCam);

        // Get the parameter list
        std::vector<CameraParam> camPrimaryCmds, camSecondaryCmds;
        ASSERT_TRUE(pPrimaryCam->getParameterList(&camPrimaryCmds).isOk());
        ASSERT_TRUE(pSecondaryCam->getParameterList(&camSecondaryCmds).isOk());
        if (camPrimaryCmds.size() < 1 || camSecondaryCmds.size() < 1) {
            // Skip a camera device if it does not support any parameter.
            continue;
        }

        // Set up per-client frame receiver objects which will fire up their own threads
        std::shared_ptr<FrameHandler> frameHandlerPrimary = ndk::SharedRefBase::make<FrameHandler>(
                pPrimaryCam, cam, nullptr, FrameHandler::eAutoReturn);
        std::shared_ptr<FrameHandler> frameHandlerSecondary =
                ndk::SharedRefBase::make<FrameHandler>(pSecondaryCam, cam, nullptr,
                                                       FrameHandler::eAutoReturn);
        EXPECT_NE(frameHandlerPrimary, nullptr);
        EXPECT_NE(frameHandlerSecondary, nullptr);

        // Set one client as the primary client.
        ASSERT_TRUE(pPrimaryCam->setPrimaryClient().isOk());

        // Try to set another client as the primary client.
        ASSERT_FALSE(pSecondaryCam->setPrimaryClient().isOk());

        // Start the camera's video stream via the primary client.
        ASSERT_TRUE(frameHandlerPrimary->startStream());

        // Ensure the stream starts
        frameHandlerPrimary->waitForFrameCount(1);

        // Start the camera's video stream via another client
        ASSERT_TRUE(frameHandlerSecondary->startStream());

        // Ensure the stream starts
        frameHandlerSecondary->waitForFrameCount(1);

        int32_t val0 = 0;
        std::vector<int32_t> values;
        EvsEventDesc aNotification0 = {};
        EvsEventDesc aNotification1 = {};
        for (auto& cmd : camPrimaryCmds) {
            // Get a valid parameter value range
            ParameterRange range;
            ASSERT_TRUE(pPrimaryCam->getIntParameterRange(cmd, &range).isOk());
            if (cmd == CameraParam::ABSOLUTE_FOCUS) {
                // Try to turn off auto-focus
                values.clear();
                ASSERT_TRUE(
                        pPrimaryCam->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
                for (auto&& v : values) {
                    EXPECT_EQ(v, 0);
                }
            }

            // Calculate a parameter value to program.
            val0 = range.min + (std::rand() % (range.max - range.min));
            val0 = val0 - (val0 % range.step);

            // Prepare and start event listeners.
            bool listening0 = false;
            bool listening1 = false;
            std::condition_variable eventCond;
            std::thread listener0 = std::thread([cmd, val0, &aNotification0, &frameHandlerPrimary,
                                                 &listening0, &listening1, &eventCond]() {
                listening0 = true;
                if (listening1) {
                    eventCond.notify_all();
                }

                EvsEventDesc aTargetEvent;
                aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
                aTargetEvent.payload.push_back(static_cast<int32_t>(cmd));
                aTargetEvent.payload.push_back(val0);
                if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification0)) {
                    LOG(WARNING) << "A timer is expired before a target event is fired.";
                }
            });
            std::thread listener1 = std::thread([cmd, val0, &aNotification1, &frameHandlerSecondary,
                                                 &listening0, &listening1, &eventCond]() {
                listening1 = true;
                if (listening0) {
                    eventCond.notify_all();
                }

                EvsEventDesc aTargetEvent;
                aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
                aTargetEvent.payload.push_back(static_cast<int32_t>(cmd));
                aTargetEvent.payload.push_back(val0);
                if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification1)) {
                    LOG(WARNING) << "A timer is expired before a target event is fired.";
                }
            });

            // Wait until the listening threads start.
            std::mutex eventLock;
            std::unique_lock<std::mutex> lock(eventLock);
            auto timer = std::chrono::system_clock::now();
            while (!listening0 || !listening1) {
                eventCond.wait_until(lock, timer + 1s);
            }
            lock.unlock();

            // Try to program a parameter
            values.clear();
            ASSERT_TRUE(pPrimaryCam->setIntParameter(cmd, val0, &values).isOk());
            for (auto&& v : values) {
                EXPECT_EQ(val0, v) << "Values are not matched.";
            }

            // Join the listening threads.
            if (listener0.joinable()) {
                listener0.join();
            }
            if (listener1.joinable()) {
                listener1.join();
            }

            // Verify a change notification
            ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
                      static_cast<EvsEventType>(aNotification0.aType));
            ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
                      static_cast<EvsEventType>(aNotification1.aType));
            ASSERT_GE(aNotification0.payload.size(), 2);
            ASSERT_GE(aNotification1.payload.size(), 2);
            ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification0.payload[0]));
            ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification1.payload[0]));
            for (auto&& v : values) {
                ASSERT_EQ(v, aNotification0.payload[1]);
                ASSERT_EQ(v, aNotification1.payload[1]);
            }

            // Clients expect to receive a parameter change notification
            // whenever the primary client adjusts it.
            values.clear();
            ASSERT_TRUE(pPrimaryCam->getIntParameter(cmd, &values).isOk());
            for (auto&& v : values) {
                EXPECT_EQ(val0, v) << "Values are not matched.";
            }
        }

        // Try to adjust a parameter via the non-primary client
        values.clear();
        ASSERT_FALSE(pSecondaryCam->setIntParameter(camSecondaryCmds[0], val0, &values).isOk());

        // The non-primary client attempts to become the primary client
        ASSERT_FALSE(pSecondaryCam->setPrimaryClient().isOk());

        // The primary client retires from its primary client role
        bool listening = false;
        std::condition_variable eventCond;
        std::thread listener =
                std::thread([&aNotification0, &frameHandlerSecondary, &listening, &eventCond]() {
                    listening = true;
                    eventCond.notify_all();

                    EvsEventDesc aTargetEvent;
                    aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
                    if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification0, true)) {
                        LOG(WARNING) << "A timer is expired before a target event is fired.";
                    }
                });

        std::mutex eventLock;
        auto timer = std::chrono::system_clock::now();
        std::unique_lock<std::mutex> lock(eventLock);
        while (!listening) {
            eventCond.wait_until(lock, timer + 1s);
        }
        lock.unlock();

        ASSERT_TRUE(pPrimaryCam->unsetPrimaryClient().isOk());

        if (listener.joinable()) {
            listener.join();
        }
        ASSERT_EQ(EvsEventType::MASTER_RELEASED, static_cast<EvsEventType>(aNotification0.aType));

        // Try to adjust a parameter after being retired
        values.clear();
        ASSERT_FALSE(pPrimaryCam->setIntParameter(camPrimaryCmds[0], val0, &values).isOk());

        // The non-primary client becomes the primary client
        ASSERT_TRUE(pSecondaryCam->setPrimaryClient().isOk());

        // Try to adjust a parameter via the new primary client
        for (auto& cmd : camSecondaryCmds) {
            // Get a valid parameter value range
            ParameterRange range;
            ASSERT_TRUE(pSecondaryCam->getIntParameterRange(cmd, &range).isOk());

            values.clear();
            if (cmd == CameraParam::ABSOLUTE_FOCUS) {
                // Try to turn off auto-focus
                values.clear();
                ASSERT_TRUE(
                        pSecondaryCam->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
                for (auto&& v : values) {
                    EXPECT_EQ(v, 0);
                }
            }

            // Calculate a parameter value to program, rounded down to the step size.
            val0 = range.min + (std::rand() % (range.max - range.min));
            val0 = val0 - (val0 % range.step);

            // Prepare and start event listeners.
            bool listening0 = false;
            bool listening1 = false;
            std::condition_variable eventCond;
            std::thread listener0 = std::thread([&]() {
                listening0 = true;
                if (listening1) {
                    eventCond.notify_all();
                }

                EvsEventDesc aTargetEvent;
                aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
                aTargetEvent.payload.push_back(static_cast<int32_t>(cmd));
                aTargetEvent.payload.push_back(val0);
                if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification0)) {
                    LOG(WARNING) << "A timer is expired before a target event is fired.";
                }
            });
            std::thread listener1 = std::thread([&]() {
                listening1 = true;
                if (listening0) {
                    eventCond.notify_all();
                }

                EvsEventDesc aTargetEvent;
                aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
                aTargetEvent.payload.push_back(static_cast<int32_t>(cmd));
                aTargetEvent.payload.push_back(val0);
                if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification1)) {
                    LOG(WARNING) << "A timer is expired before a target event is fired.";
                }
            });

            // Wait until the listening threads start.
            std::mutex eventLock;
            std::unique_lock<std::mutex> lock(eventLock);
            auto timer = std::chrono::system_clock::now();
            while (!listening0 || !listening1) {
                eventCond.wait_until(lock, timer + 1s);
            }
            lock.unlock();

            // Try to program a parameter
            values.clear();
            ASSERT_TRUE(pSecondaryCam->setIntParameter(cmd, val0, &values).isOk());

            // Clients expect to receive a parameter change notification
            // whenever the primary client adjusts it.
            values.clear();
            ASSERT_TRUE(pSecondaryCam->getIntParameter(cmd, &values).isOk());
            for (auto&& v : values) {
                EXPECT_EQ(val0, v) << "Values are not matched.";
            }

            // Join the listening threads.
            if (listener0.joinable()) {
                listener0.join();
            }
            if (listener1.joinable()) {
                listener1.join();
            }

            // Verify a change notification
            ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
                      static_cast<EvsEventType>(aNotification0.aType));
            ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
                      static_cast<EvsEventType>(aNotification1.aType));
            ASSERT_GE(aNotification0.payload.size(), 2);
            ASSERT_GE(aNotification1.payload.size(), 2);
            ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification0.payload[0]));
            ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification1.payload[0]));
            for (auto&& v : values) {
                ASSERT_EQ(v, aNotification0.payload[1]);
                ASSERT_EQ(v, aNotification1.payload[1]);
            }
        }

        // The new primary client retires from the role
        ASSERT_TRUE(pSecondaryCam->unsetPrimaryClient().isOk());

        // Shutdown
        frameHandlerPrimary->shutdown();
        frameHandlerSecondary->shutdown();

        // Explicitly release the cameras
        ASSERT_TRUE(mEnumerator->closeCamera(pPrimaryCam).isOk());
        ASSERT_TRUE(mEnumerator->closeCamera(pSecondaryCam).isOk());
        mActiveCameras.clear();
    }
}

/*
 * HighPriorityCameraClient:
 * The EVS client that owns the display is prioritized and therefore can take over
 * the primary client role from other EVS clients that do not own the display.
 */
TEST_P(EvsAidlTest, HighPriorityCameraClient) {
    LOG(INFO) << "Starting HighPriorityCameraClient test";

    if (mIsHwModule) {
        // This test is not for HW module implementation.
        return;
    }

    // Get the camera list
    loadCameraList();

Changyeon Jo80189012021-10-10 16:34:21 -07001398 // Test each reported camera
1399 for (auto&& cam : mCameraInfo) {
Changyeon Jo017cb982022-11-16 22:04:38 +00001400 // Request available display IDs
1401 uint8_t targetDisplayId = 0;
1402 std::vector<uint8_t> displayIds;
1403 ASSERT_TRUE(mEnumerator->getDisplayIdList(&displayIds).isOk());
1404 EXPECT_GT(displayIds.size(), 0);
1405 targetDisplayId = displayIds[0];
1406
1407 // Request exclusive access to the EVS display
1408 std::shared_ptr<IEvsDisplay> pDisplay;
1409 ASSERT_TRUE(mEnumerator->openDisplay(targetDisplayId, &pDisplay).isOk());
1410 EXPECT_NE(pDisplay, nullptr);
1411
Changyeon Jo80189012021-10-10 16:34:21 -07001412 // Read a target resolution from the metadata
1413 Stream targetCfg = getFirstStreamConfiguration(
1414 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
1415 ASSERT_GT(targetCfg.width, 0);
1416 ASSERT_GT(targetCfg.height, 0);
1417
1418 // Create two clients
1419 std::shared_ptr<IEvsCamera> pCam0;
1420 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam0).isOk());
1421 EXPECT_NE(pCam0, nullptr);
1422
1423 // Store a camera handle for a clean-up
1424 mActiveCameras.push_back(pCam0);
1425
1426 std::shared_ptr<IEvsCamera> pCam1;
1427 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());
1428 EXPECT_NE(pCam1, nullptr);
1429
1430 // Store a camera handle for a clean-up
1431 mActiveCameras.push_back(pCam1);
1432
1433 // Get the parameter list; this test will use the first command in both
1434 // lists.
1435 std::vector<CameraParam> cam0Cmds, cam1Cmds;
1436 ASSERT_TRUE(pCam0->getParameterList(&cam0Cmds).isOk());
1437 ASSERT_TRUE(pCam1->getParameterList(&cam1Cmds).isOk());
1438 if (cam0Cmds.size() < 1 || cam1Cmds.size() < 1) {
1439 // Cannot execute this test.
1440 return;
1441 }
1442
1443 // Set up frame receiver objects, each of which will fire up its own thread.
1444 std::shared_ptr<FrameHandler> frameHandler0 = ndk::SharedRefBase::make<FrameHandler>(
1445 pCam0, cam, nullptr, FrameHandler::eAutoReturn);
1446 std::shared_ptr<FrameHandler> frameHandler1 = ndk::SharedRefBase::make<FrameHandler>(
1447 pCam1, cam, nullptr, FrameHandler::eAutoReturn);
1448 EXPECT_NE(frameHandler0, nullptr);
1449 EXPECT_NE(frameHandler1, nullptr);
1450
1451 // Activate the display
1452 ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME).isOk());
1453
1454 // Start the camera's video stream
1455 ASSERT_TRUE(frameHandler0->startStream());
1456 ASSERT_TRUE(frameHandler1->startStream());
1457
1458 // Ensure the stream starts
1459 frameHandler0->waitForFrameCount(1);
1460 frameHandler1->waitForFrameCount(1);
1461
1462 // Client 1 becomes a primary client and programs a parameter.
1463
1464 // Get a valid parameter value range
1465 ParameterRange range;
1466 ASSERT_TRUE(pCam1->getIntParameterRange(cam1Cmds[0], &range).isOk());
1467
1468 // Client1 becomes a primary client
1469 ASSERT_TRUE(pCam1->setPrimaryClient().isOk());
1470
1471 std::vector<int32_t> values;
1472 EvsEventDesc aTargetEvent = {};
1473 EvsEventDesc aNotification = {};
1474 bool listening = false;
1475 std::mutex eventLock;
1476 std::condition_variable eventCond;
1477 if (cam1Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
1478 std::thread listener =
1479 std::thread([&frameHandler0, &aNotification, &listening, &eventCond] {
1480 listening = true;
1481 eventCond.notify_all();
1482
1483 EvsEventDesc aTargetEvent;
1484 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1485 aTargetEvent.payload.push_back(
1486 static_cast<int32_t>(CameraParam::AUTO_FOCUS));
1487 aTargetEvent.payload.push_back(0);
1488 if (!frameHandler0->waitForEvent(aTargetEvent, aNotification)) {
1489 LOG(WARNING) << "The timer expired before the target event was fired.";
1490 }
1491 });
1492
1493 // Wait until the listener starts.
1494 std::unique_lock<std::mutex> lock(eventLock);
1495 auto timer = std::chrono::system_clock::now();
1496 while (!listening) {
1497 eventCond.wait_until(lock, timer + 1s);
1498 }
1499 lock.unlock();
1500
1501 // Try to turn off auto-focus
1502 ASSERT_TRUE(pCam1->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
1503 for (auto&& v : values) {
1504 EXPECT_EQ(v, 0);
1505 }
1506
1507 // Join a listener
1508 if (listener.joinable()) {
1509 listener.join();
1510 }
1511
1512 // Make sure AUTO_FOCUS is off.
1513 ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
1514 EvsEventType::PARAMETER_CHANGED);
1515 }
1516
1517 // Try to program a parameter with a random value in [range.min, range.max],
1518 // rounded down to the parameter's step size.
1519 int32_t val0 = range.min + (std::rand() % (range.max - range.min));
1520 val0 = val0 - (val0 % range.step);
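// For example, with an assumed range of min = 0, max = 255, step = 8, a draw of 77
// becomes 77 - (77 % 8) = 72, i.e. the value is snapped down onto the parameter's step grid.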
1521
1522 std::thread listener = std::thread(
1523 [&frameHandler1, &aNotification, &listening, &eventCond, &cam1Cmds, val0] {
1524 listening = true;
1525 eventCond.notify_all();
1526
1527 EvsEventDesc aTargetEvent;
1528 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1529 aTargetEvent.payload.push_back(static_cast<int32_t>(cam1Cmds[0]));
1530 aTargetEvent.payload.push_back(val0);
1531 if (!frameHandler1->waitForEvent(aTargetEvent, aNotification)) {
1532 LOG(WARNING) << "The timer expired before the target event was fired.";
1533 }
1534 });
1535
1536 // Wait until the listener starts.
1537 listening = false;
1538 std::unique_lock<std::mutex> lock(eventLock);
1539 auto timer = std::chrono::system_clock::now();
1540 while (!listening) {
1541 eventCond.wait_until(lock, timer + 1s);
1542 }
1543 lock.unlock();
1544
1545 values.clear();
1546 ASSERT_TRUE(pCam1->setIntParameter(cam1Cmds[0], val0, &values).isOk());
1547 for (auto&& v : values) {
1548 EXPECT_EQ(val0, v);
1549 }
1550
1551 // Join a listener
1552 if (listener.joinable()) {
1553 listener.join();
1554 }
1555
1556 // Verify a change notification
1557 ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType), EvsEventType::PARAMETER_CHANGED);
1558 ASSERT_GE(aNotification.payload.size(), 2);
1559 ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]), cam1Cmds[0]);
1560 for (auto&& v : values) {
1561 ASSERT_EQ(v, aNotification.payload[1]);
1562 }
1563
1564 listener = std::thread([&frameHandler1, &aNotification, &listening, &eventCond] {
1565 listening = true;
1566 eventCond.notify_all();
1567
1568 EvsEventDesc aTargetEvent;
1569 aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
1570 if (!frameHandler1->waitForEvent(aTargetEvent, aNotification, true)) {
1571 LOG(WARNING) << "The timer expired before the target event was fired.";
1572 }
1573 });
1574
1575 // Wait until the listener starts.
1576 listening = false;
1577 lock.lock();
1578 timer = std::chrono::system_clock::now();
1579 while (!listening) {
1580 eventCond.wait_until(lock, timer + 1s);
1581 }
1582 lock.unlock();
1583
1584 // Client 0 steals a primary client role
1585 ASSERT_TRUE(pCam0->forcePrimaryClient(pDisplay).isOk());
1586
1587 // Join a listener
1588 if (listener.joinable()) {
1589 listener.join();
1590 }
1591
1592 ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType), EvsEventType::MASTER_RELEASED);
1593
1594 // Client 0 programs a parameter
1595 val0 = range.min + (std::rand() % (range.max - range.min));
1596
1597 // Rounding down
1598 val0 = val0 - (val0 % range.step);
1599
1600 if (cam0Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
1601 std::thread listener =
1602 std::thread([&frameHandler1, &aNotification, &listening, &eventCond] {
1603 listening = true;
1604 eventCond.notify_all();
1605
1606 EvsEventDesc aTargetEvent;
1607 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1608 aTargetEvent.payload.push_back(
1609 static_cast<int32_t>(CameraParam::AUTO_FOCUS));
1610 aTargetEvent.payload.push_back(0);
1611 if (!frameHandler1->waitForEvent(aTargetEvent, aNotification)) {
1612 LOG(WARNING) << "The timer expired before the target event was fired.";
1613 }
1614 });
1615
1616 // Wait until the listener starts.
1617 std::unique_lock<std::mutex> lock(eventLock);
1618 auto timer = std::chrono::system_clock::now();
1619 while (!listening) {
1620 eventCond.wait_until(lock, timer + 1s);
1621 }
1622 lock.unlock();
1623
1624 // Try to turn off auto-focus
1625 values.clear();
1626 ASSERT_TRUE(pCam0->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
1627 for (auto&& v : values) {
1628 EXPECT_EQ(v, 0);
1629 }
1630
1631 // Join a listener
1632 if (listener.joinable()) {
1633 listener.join();
1634 }
1635
1636 // Make sure AUTO_FOCUS is off.
1637 ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
1638 EvsEventType::PARAMETER_CHANGED);
1639 }
1640
1641 listener = std::thread(
1642 [&frameHandler0, &aNotification, &listening, &eventCond, &cam0Cmds, val0] {
1643 listening = true;
1644 eventCond.notify_all();
1645
1646 EvsEventDesc aTargetEvent;
1647 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1648 aTargetEvent.payload.push_back(static_cast<int32_t>(cam0Cmds[0]));
1649 aTargetEvent.payload.push_back(val0);
1650 if (!frameHandler0->waitForEvent(aTargetEvent, aNotification)) {
1651 LOG(WARNING) << "The timer expired before the target event was fired.";
1652 }
1653 });
1654
1655 // Wait until the listener starts.
1656 listening = false;
1657 timer = std::chrono::system_clock::now();
1658 lock.lock();
1659 while (!listening) {
1660 eventCond.wait_until(lock, timer + 1s);
1661 }
1662 lock.unlock();
1663
1664 values.clear();
1665 ASSERT_TRUE(pCam0->setIntParameter(cam0Cmds[0], val0, &values).isOk());
1666
1667 // Join a listener
1668 if (listener.joinable()) {
1669 listener.join();
1670 }
1671 // Verify a change notification
1672 ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType), EvsEventType::PARAMETER_CHANGED);
1673 ASSERT_GE(aNotification.payload.size(), 2);
1674 ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]), cam0Cmds[0]);
1675 for (auto&& v : values) {
1676 ASSERT_EQ(v, aNotification.payload[1]);
1677 }
1678
1679 // Turn off the display (yes, before the stream stops -- it should be handled)
1680 ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::NOT_VISIBLE).isOk());
1681
1682 // Shut down the streamer
1683 frameHandler0->shutdown();
1684 frameHandler1->shutdown();
1685
1686 // Explicitly release the camera
1687 ASSERT_TRUE(mEnumerator->closeCamera(pCam0).isOk());
1688 ASSERT_TRUE(mEnumerator->closeCamera(pCam1).isOk());
1689 mActiveCameras.clear();
1690
1691 // Explicitly release the display
1692 ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
1693 }
1694}
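
// The following is a minimal, illustrative sketch (never called by the tests) of the
// primary-client arbitration sequence that the HighPriorityCameraClient test above
// exercises. It relies on the EVS AIDL types already used unqualified in this file and
// omits error handling; the helper name itself is hypothetical.
[[maybe_unused]] static void sketchPrimaryClientTakeover(
        const std::shared_ptr<IEvsCamera>& ordinaryClient,
        const std::shared_ptr<IEvsCamera>& displayOwner,
        const std::shared_ptr<IEvsDisplay>& display) {
    // An ordinary client volunteers for the primary role.
    ordinaryClient->setPrimaryClient();
    // A client that owns the display may take the role by force; the previous primary
    // client is expected to be notified with a MASTER_RELEASED event.
    displayOwner->forcePrimaryClient(display);
    // The new primary client eventually retires from the role.
    displayOwner->unsetPrimaryClient();
}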
1695
1696/*
1697 * CameraUseStreamConfigToDisplay:
1698 * End-to-end test of data flowing from the camera to the display. Similar to
1699 * the CameraToDisplayRoundTrip test case, but this case retrieves the available stream
1700 * configurations from EVS and uses one of them to start a video stream.
1701 */
1702TEST_P(EvsAidlTest, CameraUseStreamConfigToDisplay) {
1703 LOG(INFO) << "Starting CameraUseStreamConfigToDisplay test";
1704
1705 // Get the camera list
1706 loadCameraList();
1707
1708 // Request available display IDs
1709 uint8_t targetDisplayId = 0;
1710 std::vector<uint8_t> displayIds;
1711 ASSERT_TRUE(mEnumerator->getDisplayIdList(&displayIds).isOk());
1712 EXPECT_GT(displayIds.size(), 0);
1713 targetDisplayId = displayIds[0];
1714
1715 // Test each reported camera
1716 for (auto&& cam : mCameraInfo) {
1717 // Request exclusive access to the EVS display
1718 std::shared_ptr<IEvsDisplay> pDisplay;
1719 ASSERT_TRUE(mEnumerator->openDisplay(targetDisplayId, &pDisplay).isOk());
1720 EXPECT_NE(pDisplay, nullptr);
1721
1722 // choose a configuration that has a frame rate faster than minReqFps.
1723 Stream targetCfg = {};
1724 const int32_t minReqFps = 15;
1725 int32_t maxArea = 0;
1726 camera_metadata_entry_t streamCfgs;
1727 bool foundCfg = false;
1728 if (!find_camera_metadata_entry(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()),
1729 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
1730 &streamCfgs)) {
1731 // Stream configurations are found in metadata
1732 RawStreamConfig* ptr = reinterpret_cast<RawStreamConfig*>(streamCfgs.data.i32);
1733 for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
1734 if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
1735 if (ptr->width * ptr->height > maxArea && ptr->framerate >= minReqFps) {
1736 targetCfg.width = ptr->width;
1737 targetCfg.height = ptr->height;
1738 targetCfg.format = static_cast<PixelFormat>(ptr->format);
1739
1740 maxArea = ptr->width * ptr->height;
1741 foundCfg = true;
1742 }
1743 }
1744 ++ptr;
1745 }
1746 }
1747
1748 if (!foundCfg) {
1749 // Current EVS camera does not provide stream configurations in the
1750 // metadata.
1751 continue;
1752 }
1753
1754 std::shared_ptr<IEvsCamera> pCam;
1755 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
1756 EXPECT_NE(pCam, nullptr);
1757
1758 // Store a camera handle for a clean-up
1759 mActiveCameras.push_back(pCam);
1760
1761 // Set up a frame receiver object which will fire up its own thread.
1762 std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
1763 pCam, cam, pDisplay, FrameHandler::eAutoReturn);
1764 EXPECT_NE(frameHandler, nullptr);
1765
1766 // Activate the display
1767 ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME).isOk());
1768
1769 // Start the camera's video stream
1770 ASSERT_TRUE(frameHandler->startStream());
1771
1772 // Wait a while to let the data flow
1773 static const int kSecondsToWait = 5;
1774 const int streamTimeMs =
1775 kSecondsToWait * kSecondsToMilliseconds - kMaxStreamStartMilliseconds;
1776 const unsigned minimumFramesExpected =
1777 streamTimeMs * kMinimumFramesPerSecond / kSecondsToMilliseconds;
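// With kSecondsToWait = 5, this allows kMaxStreamStartMilliseconds (500 ms) for start-up
// and then expects at least 4500 ms * 10 fps / 1000 = 45 frames from the stream.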
1778 sleep(kSecondsToWait);
1779 unsigned framesReceived = 0;
1780 unsigned framesDisplayed = 0;
1781 frameHandler->getFramesCounters(&framesReceived, &framesDisplayed);
1782 EXPECT_EQ(framesReceived, framesDisplayed);
1783 EXPECT_GE(framesDisplayed, minimumFramesExpected);
1784
1785 // Turn off the display (yes, before the stream stops -- it should be handled)
1786 ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::NOT_VISIBLE).isOk());
1787
1788 // Shut down the streamer
1789 frameHandler->shutdown();
1790
1791 // Explicitly release the camera
1792 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
1793 mActiveCameras.clear();
1794
1795 // Explicitly release the display
1796 ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
1797 }
1798}
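
// A factored-out sketch of the stream-configuration scan that CameraUseStreamConfigToDisplay
// above and MultiCameraStreamUseConfig below perform inline: walk
// ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS and keep the largest output entry whose
// frame rate meets the minimum. The helper is illustrative only and is not called by the tests.
[[maybe_unused]] static bool sketchSelectLargestOutputConfig(camera_metadata_t* metadata,
                                                             int32_t minReqFps,
                                                             Stream* targetCfg) {
    camera_metadata_entry_t streamCfgs;
    if (find_camera_metadata_entry(metadata, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
                                   &streamCfgs)) {
        return false;  // No stream configurations were reported in the metadata.
    }
    int32_t maxArea = 0;
    bool foundCfg = false;
    RawStreamConfig* ptr = reinterpret_cast<RawStreamConfig*>(streamCfgs.data.i32);
    for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
        if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
            ptr->width * ptr->height > maxArea && ptr->framerate >= minReqFps) {
            targetCfg->width = ptr->width;
            targetCfg->height = ptr->height;
            targetCfg->format = static_cast<PixelFormat>(ptr->format);
            maxArea = ptr->width * ptr->height;
            foundCfg = true;
        }
        ++ptr;
    }
    return foundCfg;
}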
1799
1800/*
1801 * MultiCameraStreamUseConfig:
1802 * Verify that each client can start and stop video streams on the same
1803 * underlying camera with the same configuration.
1804 */
1805TEST_P(EvsAidlTest, MultiCameraStreamUseConfig) {
1806 LOG(INFO) << "Starting MultiCameraStreamUseConfig test";
1807
1808 if (mIsHwModule) {
1809 // This test is not for HW module implementation.
1810 return;
1811 }
1812
1813 // Get the camera list
1814 loadCameraList();
1815
1816 // Test each reported camera
1817 for (auto&& cam : mCameraInfo) {
1818 // choose a configuration that has a frame rate faster than minReqFps.
1819 Stream targetCfg = {};
1820 const int32_t minReqFps = 15;
1821 int32_t maxArea = 0;
1822 camera_metadata_entry_t streamCfgs;
1823 bool foundCfg = false;
1824 if (!find_camera_metadata_entry(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()),
1825 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
1826 &streamCfgs)) {
1827 // Stream configurations are found in metadata
1828 RawStreamConfig* ptr = reinterpret_cast<RawStreamConfig*>(streamCfgs.data.i32);
1829 for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
1830 if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
1831 if (ptr->width * ptr->height > maxArea && ptr->framerate >= minReqFps) {
1832 targetCfg.width = ptr->width;
1833 targetCfg.height = ptr->height;
1834 targetCfg.format = static_cast<PixelFormat>(ptr->format);
1835
1836 maxArea = ptr->width * ptr->height;
1837 foundCfg = true;
1838 }
1839 }
1840 ++ptr;
1841 }
1842 }
1843
1844 if (!foundCfg) {
1845 LOG(INFO) << "Device " << cam.id
1846 << " does not provide a list of supported stream configurations, skipped";
1847 continue;
1848 }
1849
1850 // Create the first camera client with a selected stream configuration.
1851 std::shared_ptr<IEvsCamera> pCam0;
1852 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam0).isOk());
1853 EXPECT_NE(pCam0, nullptr);
1854
1855 // Store a camera handle for a clean-up
1856 mActiveCameras.push_back(pCam0);
1857
1858 // Try to create a second camera client with a different stream
1859 // configuration; this is expected to be rejected.
1860 int32_t id = targetCfg.id;
1861 targetCfg.id += 1; // The EVS manager distinguishes configurations only by the stream id.
1862 std::shared_ptr<IEvsCamera> pCam1;
1863 ASSERT_FALSE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());
1864
1865 // Try again with the same stream configuration.
1866 targetCfg.id = id;
1867 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());
1868 EXPECT_NE(pCam1, nullptr);
1869
1870 // Set up per-client frame receiver objects, each of which will fire up its own thread
1871 std::shared_ptr<FrameHandler> frameHandler0 = ndk::SharedRefBase::make<FrameHandler>(
1872 pCam0, cam, nullptr, FrameHandler::eAutoReturn);
1873 std::shared_ptr<FrameHandler> frameHandler1 = ndk::SharedRefBase::make<FrameHandler>(
1874 pCam1, cam, nullptr, FrameHandler::eAutoReturn);
1875 EXPECT_NE(frameHandler0, nullptr);
1876 EXPECT_NE(frameHandler1, nullptr);
1877
1878 // Start the camera's video stream via each client
1879 ASSERT_TRUE(frameHandler0->startStream());
1880 ASSERT_TRUE(frameHandler1->startStream());
1881
1882 // Ensure the stream starts
1883 frameHandler0->waitForFrameCount(1);
1884 frameHandler1->waitForFrameCount(1);
1885
1886 nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
1887
1888 // Wait a bit, then ensure both clients get at least the required minimum number of frames
1889 sleep(5);
1890 nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
1891 unsigned framesReceived0 = 0, framesReceived1 = 0;
1892 frameHandler0->getFramesCounters(&framesReceived0, nullptr);
1893 frameHandler1->getFramesCounters(&framesReceived1, nullptr);
1894 framesReceived0 = framesReceived0 - 1; // Back out the first frame we already waited for
1895 framesReceived1 = framesReceived1 - 1; // Back out the first frame we already waited for
1896 nsecs_t runTime = end - firstFrame;
1897 float framesPerSecond0 = framesReceived0 / (runTime * kNanoToSeconds);
1898 float framesPerSecond1 = framesReceived1 / (runTime * kNanoToSeconds);
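// For example, roughly 150 frames over a 5 s monotonic run time works out to ~30 fps;
// both clients are expected to stay at or above kMinimumFramesPerSecond below.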
1899 LOG(INFO) << "Measured camera rate " << std::scientific << framesPerSecond0 << " fps and "
1900 << framesPerSecond1 << " fps";
1901 EXPECT_GE(framesPerSecond0, kMinimumFramesPerSecond);
1902 EXPECT_GE(framesPerSecond1, kMinimumFramesPerSecond);
1903
1904 // Shutdown one client
1905 frameHandler0->shutdown();
1906
1907 // Read frame counters again
1908 frameHandler0->getFramesCounters(&framesReceived0, nullptr);
1909 frameHandler1->getFramesCounters(&framesReceived1, nullptr);
1910
1911 // Wait a bit again
1912 sleep(5);
1913 unsigned framesReceivedAfterStop0 = 0, framesReceivedAfterStop1 = 0;
1914 frameHandler0->getFramesCounters(&framesReceivedAfterStop0, nullptr);
1915 frameHandler1->getFramesCounters(&framesReceivedAfterStop1, nullptr);
1916 EXPECT_EQ(framesReceived0, framesReceivedAfterStop0);
1917 EXPECT_LT(framesReceived1, framesReceivedAfterStop1);
1918
1919 // Shutdown another
1920 frameHandler1->shutdown();
1921
1922 // Explicitly release the camera
1923 ASSERT_TRUE(mEnumerator->closeCamera(pCam0).isOk());
1924 ASSERT_TRUE(mEnumerator->closeCamera(pCam1).isOk());
1925 mActiveCameras.clear();
1926 }
1927}
1928
1929/*
1930 * LogicalCameraMetadata:
1931 * Opens each logical camera reported by the enumerator and validates its metadata
1932 * by checking its capabilities and locating the supporting physical camera device
1933 * identifiers.
1934 */
1935TEST_P(EvsAidlTest, LogicalCameraMetadata) {
1936 LOG(INFO) << "Starting LogicalCameraMetadata test";
1937
1938 // Get the camera list
1939 loadCameraList();
1940
1941 // Verify the reported metadata for each camera
1942 for (auto&& cam : mCameraInfo) {
1943 bool isLogicalCam = false;
1944 auto devices = getPhysicalCameraIds(cam.id, isLogicalCam);
1945 if (isLogicalCam) {
1946 ASSERT_GE(devices.size(), 1) << "Logical camera device must have at least one physical "
1947 "camera device ID in its metadata.";
1948 }
1949 }
1950}
1951
1952/*
1953 * CameraStreamExternalBuffering:
1954 * This is the same as CameraStreamBuffering except that the frame buffers are allocated
1955 * by the test client and then imported by the EVS framework.
1956 */
1957TEST_P(EvsAidlTest, CameraStreamExternalBuffering) {
1958 LOG(INFO) << "Starting CameraStreamExternalBuffering test";
1959
1960 // Arbitrary constant (should be > 1 and not too big)
1961 static const unsigned int kBuffersToHold = 3;
1962
1963 // Get the camera list
1964 loadCameraList();
1965
1966 // Acquire the graphics buffer allocator
1967 android::GraphicBufferAllocator& alloc(android::GraphicBufferAllocator::get());
1968 const auto usage =
1969 GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_SW_READ_RARELY | GRALLOC_USAGE_SW_WRITE_OFTEN;
1970
1971 // Test each reported camera
1972 for (auto&& cam : mCameraInfo) {
1973 // Read a target resolution from the metadata
1974 Stream targetCfg = getFirstStreamConfiguration(
1975 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
1976 ASSERT_GT(targetCfg.width, 0);
1977 ASSERT_GT(targetCfg.height, 0);
1978
1979 // Allocate buffers to use
1980 std::vector<BufferDesc> buffers;
1981 buffers.resize(kBuffersToHold);
1982 for (auto i = 0; i < kBuffersToHold; ++i) {
1983 unsigned pixelsPerLine;
1984 buffer_handle_t memHandle = nullptr;
1985 android::status_t result =
1986 alloc.allocate(targetCfg.width, targetCfg.height,
1987 static_cast<android::PixelFormat>(targetCfg.format),
1988 /* layerCount = */ 1, usage, &memHandle, &pixelsPerLine,
1989 /* graphicBufferId = */ 0,
1990 /* requestorName = */ "CameraStreamExternalBufferingTest");
1991 if (result != android::NO_ERROR) {
1992 LOG(ERROR) << __FUNCTION__ << " failed to allocate memory.";
1993 // Release previously allocated buffers
1994 for (auto j = 0; j < i; j++) {
1995 alloc.free(::android::dupFromAidl(buffers[j].buffer.handle));
1996 }
1997 return;
1998 } else {
1999 BufferDesc buf;
2000 HardwareBufferDescription* pDesc =
2001 reinterpret_cast<HardwareBufferDescription*>(&buf.buffer.description);
2002 pDesc->width = targetCfg.width;
2003 pDesc->height = targetCfg.height;
2004 pDesc->layers = 1;
2005 pDesc->format = targetCfg.format;
2006 pDesc->usage = static_cast<BufferUsage>(usage);
2007 pDesc->stride = pixelsPerLine;
2008 buf.buffer.handle = ::android::dupToAidl(memHandle);
2009 buf.bufferId = i; // Unique number to identify this buffer
2010 buffers[i] = std::move(buf);
2011 }
2012 }
2013
2014 bool isLogicalCam = false;
2015 getPhysicalCameraIds(cam.id, isLogicalCam);
2016
2017 std::shared_ptr<IEvsCamera> pCam;
2018 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
2019 EXPECT_NE(pCam, nullptr);
2020
2021 // Store a camera handle for a clean-up
2022 mActiveCameras.push_back(pCam);
2023
2024 // Request to import buffers
2025 int delta = 0;
2026 auto status = pCam->importExternalBuffers(buffers, &delta);
2027 if (isLogicalCam) {
2028 ASSERT_FALSE(status.isOk());
2029 continue;
2030 }
2031
2032 ASSERT_TRUE(status.isOk());
2033 EXPECT_GE(delta, kBuffersToHold);
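// 'delta' reports how much the camera's buffer budget grew as a result of the import;
// handing it kBuffersToHold external buffers should raise that budget by at least as many.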
2034
2035 // Set up a frame receiver object which will fire up its own thread.
2036 std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
2037 pCam, cam, nullptr, FrameHandler::eNoAutoReturn);
2038 EXPECT_NE(frameHandler, nullptr);
2039
2040 // Start the camera's video stream
2041 ASSERT_TRUE(frameHandler->startStream());
2042
2043 // Check that the video stream stalls once we've gotten exactly the number of buffers
2044 // we requested since we told the frameHandler not to return them.
2045 sleep(1); // 1 second should be enough for at least 5 frames to be delivered worst case
2046 unsigned framesReceived = 0;
2047 frameHandler->getFramesCounters(&framesReceived, nullptr);
2048 ASSERT_LE(kBuffersToHold, framesReceived) << "Stream didn't stall at expected buffer limit";
2049
2050 // Give back one buffer
2051 EXPECT_TRUE(frameHandler->returnHeldBuffer());
2052
2053 // Once we return a buffer, it shouldn't take more than 1/10 second to get a new one
2054 // filled since we require 10fps minimum -- but give a 10% allowance just in case.
2055 unsigned framesReceivedAfter = 0;
2056 usleep(110 * kMillisecondsToMicroseconds);
2057 frameHandler->getFramesCounters(&framesReceivedAfter, nullptr);
2058 EXPECT_EQ(framesReceived + 1, framesReceivedAfter) << "Stream should've resumed";
2059
2060 // Even when the camera pointer goes out of scope, the FrameHandler object will
2061 // keep the stream alive unless we tell it to shutdown.
2062 // Also note that the FrameHandler and the Camera have a mutual circular reference, so
2063 // we have to break that cycle in order for either of them to get cleaned up.
2064 frameHandler->shutdown();
2065
2066 // Explicitly release the camera
2067 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
2068 mActiveCameras.clear();
2069 // Release buffers
2070 for (auto& b : buffers) {
2071 alloc.free(::android::dupFromAidl(b.buffer.handle));
2072 }
2073 buffers.resize(0);
2074 }
2075}
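
// A sketch of the buffer-wrapping step used by CameraStreamExternalBuffering above: a
// client-allocated gralloc buffer is described via HardwareBufferDescription and its handle
// duplicated into an AIDL NativeHandle so it can be handed to importExternalBuffers().
// The helper is illustrative only and is not called by the test.
[[maybe_unused]] static BufferDesc sketchWrapExternalBuffer(buffer_handle_t memHandle,
                                                            const Stream& cfg, uint64_t usage,
                                                            int32_t stride, int32_t bufferId) {
    BufferDesc buf;
    auto* pDesc = reinterpret_cast<HardwareBufferDescription*>(&buf.buffer.description);
    pDesc->width = cfg.width;
    pDesc->height = cfg.height;
    pDesc->layers = 1;
    pDesc->format = cfg.format;
    pDesc->usage = static_cast<BufferUsage>(usage);
    pDesc->stride = stride;
    buf.buffer.handle = ::android::dupToAidl(memHandle);
    buf.bufferId = bufferId;  // Unique number identifying this buffer to the camera
    return buf;
}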
2076
2077TEST_P(EvsAidlTest, DeviceStatusCallbackRegistration) {
2078 std::shared_ptr<IEvsEnumeratorStatusCallback> cb =
2079 ndk::SharedRefBase::make<DeviceStatusCallback>();
2080 ndk::ScopedAStatus status = mEnumerator->registerStatusCallback(cb);
2081 if (mIsHwModule) {
2082 ASSERT_TRUE(status.isOk());
2083 } else {
2084 // A callback registration may fail if a HIDL EVS HAL implementation is
2085 // running.
2086 ASSERT_TRUE(status.isOk() ||
2087 status.getServiceSpecificError() == static_cast<int>(EvsResult::NOT_SUPPORTED));
2088 }
2089}
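
// A minimal sketch of an enumerator status callback similar to the DeviceStatusCallback used
// above. It assumes that the callback interface declares a single oneway method,
// deviceStatusChanged(), as in the EVS AIDL headers included in this file; adjust the override
// if the interface differs. The class is illustrative only and is not used by the tests.
class LoggingStatusCallback
    : public ::aidl::android::hardware::automotive::evs::BnEvsEnumeratorStatusCallback {
  public:
    ndk::ScopedAStatus deviceStatusChanged(
            const std::vector<::aidl::android::hardware::automotive::evs::DeviceStatus>& status)
            override {
        // Simply log how many device status updates arrived.
        LOG(INFO) << "Received a status update for " << status.size() << " device(s).";
        return ndk::ScopedAStatus::ok();
    }
};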
2090
2091/*
2092 * UltrasonicsArrayOpenClean:
2093 * Opens each ultrasonics array reported by the enumerator and then explicitly closes it via a
2094 * call to closeUltrasonicsArray. Then repeats the test to ensure all ultrasonics arrays
2095 * can be reopened.
2096 */
2097TEST_P(EvsAidlTest, UltrasonicsArrayOpenClean) {
2098 LOG(INFO) << "Starting UltrasonicsArrayOpenClean test";
2099
2100 // Get the ultrasonics array list
2101 loadUltrasonicsArrayList();
2102
2103 // Open and close each ultrasonics array twice
2104 for (auto&& ultraInfo : mUltrasonicsArraysInfo) {
2105 for (int pass = 0; pass < 2; pass++) {
2106 std::shared_ptr<IEvsUltrasonicsArray> pUltrasonicsArray;
2107 ASSERT_TRUE(
2108 mEnumerator
2109 ->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId, &pUltrasonicsArray)
2110 .isOk());
2111 EXPECT_NE(pUltrasonicsArray, nullptr);
2112
2113 // Verify that this ultrasonics array self-identifies correctly
2114 UltrasonicsArrayDesc desc;
2115 ASSERT_TRUE(pUltrasonicsArray->getUltrasonicArrayInfo(&desc).isOk());
2116 EXPECT_EQ(ultraInfo.ultrasonicsArrayId, desc.ultrasonicsArrayId);
2117 LOG(DEBUG) << "Found ultrasonics array " << ultraInfo.ultrasonicsArrayId;
2118
2119 // Explicitly close the ultrasonics array so resources are released right away
2120 ASSERT_TRUE(mEnumerator->closeUltrasonicsArray(pUltrasonicsArray).isOk());
2121 }
2122 }
2123}
2124
2125// Starts a stream and verifies all data received is valid.
2126TEST_P(EvsAidlTest, UltrasonicsVerifyStreamData) {
2127 LOG(INFO) << "Starting UltrasonicsVerifyStreamData";
2128
2129 // Get the ultrasonics array list
2130 loadUltrasonicsArrayList();
2131
2132 // For each ultrasonics array.
2133 for (auto&& ultraInfo : mUltrasonicsArraysInfo) {
2134 LOG(DEBUG) << "Testing ultrasonics array: " << ultraInfo.ultrasonicsArrayId;
2135
2136 std::shared_ptr<IEvsUltrasonicsArray> pUltrasonicsArray;
2137 ASSERT_TRUE(
2138 mEnumerator->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId, &pUltrasonicsArray)
2139 .isOk());
2140 EXPECT_NE(pUltrasonicsArray, nullptr);
2141
2142 std::shared_ptr<FrameHandlerUltrasonics> frameHandler =
2143 ndk::SharedRefBase::make<FrameHandlerUltrasonics>(pUltrasonicsArray);
2144 EXPECT_NE(frameHandler, nullptr);
2145
2146 // Start stream.
2147 ASSERT_TRUE(pUltrasonicsArray->startStream(frameHandler).isOk());
2148
2149 // Wait 5 seconds to receive frames.
2150 sleep(5);
2151
2152 // Stop stream.
2153 ASSERT_TRUE(pUltrasonicsArray->stopStream().isOk());
2154
2155 EXPECT_GT(frameHandler->getReceiveFramesCount(), 0);
2156 EXPECT_TRUE(frameHandler->areAllFramesValid());
2157
2158 // Explicitly close the ultrasonics array so resources are released right away
2159 ASSERT_TRUE(mEnumerator->closeUltrasonicsArray(pUltrasonicsArray).isOk());
2160 }
2161}
2162
2163 // Sets frames in flight before and after the start of the stream and verifies success.
2164TEST_P(EvsAidlTest, UltrasonicsSetFramesInFlight) {
2165 LOG(INFO) << "Starting UltrasonicsSetFramesInFlight";
2166
2167 // Get the ultrasonics array list
2168 loadUltrasonicsArrayList();
2169
2170 // For each ultrasonics array.
2171 for (auto&& ultraInfo : mUltrasonicsArraysInfo) {
2172 LOG(DEBUG) << "Testing ultrasonics array: " << ultraInfo.ultrasonicsArrayId;
2173
2174 std::shared_ptr<IEvsUltrasonicsArray> pUltrasonicsArray;
2175 ASSERT_TRUE(
2176 mEnumerator->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId, &pUltrasonicsArray)
2177 .isOk());
2178 EXPECT_NE(pUltrasonicsArray, nullptr);
2179
2180 ASSERT_TRUE(pUltrasonicsArray->setMaxFramesInFlight(10).isOk());
2181
2182 std::shared_ptr<FrameHandlerUltrasonics> frameHandler =
2183 ndk::SharedRefBase::make<FrameHandlerUltrasonics>(pUltrasonicsArray);
2184 EXPECT_NE(frameHandler, nullptr);
2185
2186 // Start stream.
2187 ASSERT_TRUE(pUltrasonicsArray->startStream(frameHandler).isOk());
2188 ASSERT_TRUE(pUltrasonicsArray->setMaxFramesInFlight(5).isOk());
2189
2190 // Stop stream.
2191 ASSERT_TRUE(pUltrasonicsArray->stopStream().isOk());
2192
2193 // Explicitly close the ultrasonics array so resources are released right away
2194 ASSERT_TRUE(mEnumerator->closeUltrasonicsArray(pUltrasonicsArray).isOk());
2195 }
2196}
2197
2198GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(EvsAidlTest);
2199INSTANTIATE_TEST_SUITE_P(
2200 PerInstance, EvsAidlTest,
2201 testing::ValuesIn(android::getAidlHalInstanceNames(IEvsEnumerator::descriptor)),
2202 android::PrintInstanceNameToString);
2203
2204int main(int argc, char** argv) {
2205 ::testing::InitGoogleTest(&argc, argv);
2206 ABinderProcess_setThreadPoolMaxThreadCount(1);
2207 ABinderProcess_startThreadPool();
2208 return RUN_ALL_TESTS();
2209}