/*
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
#include "FrameHandler.h"
#include "FrameHandlerUltrasonics.h"

#include <aidl/Gtest.h>
#include <aidl/Vintf.h>
#include <aidl/android/hardware/automotive/evs/BufferDesc.h>
#include <aidl/android/hardware/automotive/evs/CameraDesc.h>
#include <aidl/android/hardware/automotive/evs/CameraParam.h>
#include <aidl/android/hardware/automotive/evs/DisplayDesc.h>
#include <aidl/android/hardware/automotive/evs/DisplayState.h>
#include <aidl/android/hardware/automotive/evs/EvsEventDesc.h>
#include <aidl/android/hardware/automotive/evs/EvsEventType.h>
#include <aidl/android/hardware/automotive/evs/EvsResult.h>
#include <aidl/android/hardware/automotive/evs/IEvsCamera.h>
#include <aidl/android/hardware/automotive/evs/IEvsDisplay.h>
#include <aidl/android/hardware/automotive/evs/IEvsEnumerator.h>
#include <aidl/android/hardware/automotive/evs/IEvsUltrasonicsArray.h>
#include <aidl/android/hardware/automotive/evs/ParameterRange.h>
#include <aidl/android/hardware/automotive/evs/Stream.h>
#include <aidl/android/hardware/automotive/evs/UltrasonicsArrayDesc.h>
#include <aidl/android/hardware/common/NativeHandle.h>
#include <aidl/android/hardware/graphics/common/HardwareBufferDescription.h>
#include <aidl/android/hardware/graphics/common/PixelFormat.h>
#include <aidlcommonsupport/NativeHandle.h>
#include <android-base/logging.h>
#include <android/binder_ibinder.h>
#include <android/binder_manager.h>
#include <android/binder_process.h>
#include <android/binder_status.h>
#include <system/camera_metadata.h>
#include <ui/GraphicBuffer.h>
#include <ui/GraphicBufferAllocator.h>
#include <utils/Timers.h>

#include <unistd.h>

#include <cstdio>
#include <cstdlib>
#include <deque>
#include <thread>
#include <unordered_set>
54
namespace {

// These values are called out in the EVS design doc (as of Mar 8, 2017)
constexpr int kMaxStreamStartMilliseconds = 500;
constexpr int kMinimumFramesPerSecond = 10;
constexpr int kSecondsToMilliseconds = 1000;
constexpr int kMillisecondsToMicroseconds = 1000;
constexpr float kNanoToMilliseconds = 0.000001f;
constexpr float kNanoToSeconds = 0.000000001f;

/*
 * Please note that this is different from what is defined in
 * libhardware/modules/camera/3_4/metadata/types.h; this has one additional
 * field to store a framerate.
 */
struct RawStreamConfig {
    int32_t id;
    int32_t width;
    int32_t height;
    int32_t format;
    int32_t direction;
    int32_t framerate;
};
// Number of int32_t words occupied by a single raw stream configuration
// entry; used to step through the flat metadata array.
constexpr size_t kStreamCfgSz = sizeof(RawStreamConfig) / sizeof(int32_t);

}  // namespace
81
82using ::aidl::android::hardware::automotive::evs::BufferDesc;
83using ::aidl::android::hardware::automotive::evs::CameraDesc;
84using ::aidl::android::hardware::automotive::evs::CameraParam;
85using ::aidl::android::hardware::automotive::evs::DisplayDesc;
86using ::aidl::android::hardware::automotive::evs::DisplayState;
87using ::aidl::android::hardware::automotive::evs::EvsEventDesc;
88using ::aidl::android::hardware::automotive::evs::EvsEventType;
89using ::aidl::android::hardware::automotive::evs::EvsResult;
90using ::aidl::android::hardware::automotive::evs::IEvsCamera;
91using ::aidl::android::hardware::automotive::evs::IEvsDisplay;
92using ::aidl::android::hardware::automotive::evs::IEvsEnumerator;
93using ::aidl::android::hardware::automotive::evs::IEvsUltrasonicsArray;
94using ::aidl::android::hardware::automotive::evs::ParameterRange;
95using ::aidl::android::hardware::automotive::evs::Stream;
96using ::aidl::android::hardware::automotive::evs::UltrasonicsArrayDesc;
97using ::aidl::android::hardware::graphics::common::BufferUsage;
98using ::aidl::android::hardware::graphics::common::HardwareBufferDescription;
99using ::aidl::android::hardware::graphics::common::PixelFormat;
100using std::chrono_literals::operator""s;
101
102// The main test class for EVS
103class EvsAidlTest : public ::testing::TestWithParam<std::string> {
104 public:
105 virtual void SetUp() override {
106 // Make sure we can connect to the enumerator
107 std::string service_name = GetParam();
108 AIBinder* binder = AServiceManager_waitForService(service_name.data());
109 ASSERT_NE(binder, nullptr);
110 mEnumerator = IEvsEnumerator::fromBinder(::ndk::SpAIBinder(binder));
111 LOG(INFO) << "Test target service: " << service_name;
112
113 ASSERT_TRUE(mEnumerator->isHardware(&mIsHwModule).isOk());
114 }
115
116 virtual void TearDown() override {
117 // Attempt to close any active camera
118 for (auto&& cam : mActiveCameras) {
119 if (cam != nullptr) {
120 mEnumerator->closeCamera(cam);
121 }
122 }
123 mActiveCameras.clear();
124 }
125
126 protected:
127 void loadCameraList() {
128 // SetUp() must run first!
129 ASSERT_NE(mEnumerator, nullptr);
130
131 // Get the camera list
132 ASSERT_TRUE(mEnumerator->getCameraList(&mCameraInfo).isOk())
133 << "Failed to get a list of available cameras";
134 LOG(INFO) << "We have " << mCameraInfo.size() << " cameras.";
135 }
136
137 void loadUltrasonicsArrayList() {
138 // SetUp() must run first!
139 ASSERT_NE(mEnumerator, nullptr);
140
141 // Get the ultrasonics array list
142 ASSERT_TRUE(mEnumerator->getUltrasonicsArrayList(&mUltrasonicsArraysInfo).isOk())
143 << "Failed to get a list of available ultrasonics arrays";
144 LOG(INFO) << "We have " << mCameraInfo.size() << " ultrasonics arrays.";
145 }
146
147 bool isLogicalCamera(const camera_metadata_t* metadata) {
148 if (metadata == nullptr) {
149 // A logical camera device must have a valid camera metadata.
150 return false;
151 }
152
153 // Looking for LOGICAL_MULTI_CAMERA capability from metadata.
154 camera_metadata_ro_entry_t entry;
155 int rc = find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
156 &entry);
157 if (rc != 0) {
158 // No capabilities are found.
159 return false;
160 }
161
162 for (size_t i = 0; i < entry.count; ++i) {
163 uint8_t cap = entry.data.u8[i];
164 if (cap == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA) {
165 return true;
166 }
167 }
168
169 return false;
170 }
171
172 std::unordered_set<std::string> getPhysicalCameraIds(const std::string& id, bool& flag) {
173 std::unordered_set<std::string> physicalCameras;
174 const auto it = std::find_if(mCameraInfo.begin(), mCameraInfo.end(),
175 [&id](const CameraDesc& desc) { return id == desc.id; });
176 if (it == mCameraInfo.end()) {
177 // Unknown camera is requested. Return an empty list.
178 return physicalCameras;
179 }
180
181 const camera_metadata_t* metadata = reinterpret_cast<camera_metadata_t*>(&it->metadata[0]);
182 flag = isLogicalCamera(metadata);
183 if (!flag) {
184 // EVS assumes that the device w/o a valid metadata is a physical
185 // device.
186 LOG(INFO) << id << " is not a logical camera device.";
187 physicalCameras.insert(id);
188 return physicalCameras;
189 }
190
191 // Look for physical camera identifiers
192 camera_metadata_ro_entry entry;
193 int rc = find_camera_metadata_ro_entry(metadata, ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS,
194 &entry);
195 if (rc != 0) {
196 LOG(ERROR) << "No physical camera ID is found for a logical camera device";
197 }
198
199 const uint8_t* ids = entry.data.u8;
200 size_t start = 0;
201 for (size_t i = 0; i < entry.count; ++i) {
202 if (ids[i] == '\0') {
203 if (start != i) {
204 std::string id(reinterpret_cast<const char*>(ids + start));
205 physicalCameras.insert(id);
206 }
207 start = i + 1;
208 }
209 }
210
211 LOG(INFO) << id << " consists of " << physicalCameras.size() << " physical camera devices";
212 return physicalCameras;
213 }
214
215 Stream getFirstStreamConfiguration(camera_metadata_t* metadata) {
216 Stream targetCfg = {};
217 camera_metadata_entry_t streamCfgs;
218 if (!find_camera_metadata_entry(metadata, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
219 &streamCfgs)) {
220 // Stream configurations are found in metadata
221 RawStreamConfig* ptr = reinterpret_cast<RawStreamConfig*>(streamCfgs.data.i32);
222 for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
223 if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
224 ptr->format == HAL_PIXEL_FORMAT_RGBA_8888) {
225 targetCfg.width = ptr->width;
226 targetCfg.height = ptr->height;
227 targetCfg.format = static_cast<PixelFormat>(ptr->format);
228 break;
229 }
230 ++ptr;
231 }
232 }
233
234 return targetCfg;
235 }
236
237 // Every test needs access to the service
238 std::shared_ptr<IEvsEnumerator> mEnumerator;
239 // Empty unless/util loadCameraList() is called
240 std::vector<CameraDesc> mCameraInfo;
241 // boolean to tell current module under testing is HW module implementation
242 // or not
243 bool mIsHwModule;
244 // A list of active camera handles that are need to be cleaned up
245 std::deque<std::shared_ptr<IEvsCamera>> mActiveCameras;
246 // Empty unless/util loadUltrasonicsArrayList() is called
247 std::vector<UltrasonicsArrayDesc> mUltrasonicsArraysInfo;
248 // A list of active ultrasonics array handles that are to be cleaned up
249 std::deque<std::weak_ptr<IEvsUltrasonicsArray>> mActiveUltrasonicsArrays;
250};
251
// Test cases, their implementations, and corresponding requirements are
// documented at go/aae-evs-public-api-test.
254
/*
 * CameraOpenClean:
 * Opens each camera reported by the enumerator and then explicitly closes it via a
 * call to closeCamera. Then repeats the test to ensure all cameras can be reopened.
 */
TEST_P(EvsAidlTest, CameraOpenClean) {
    LOG(INFO) << "Starting CameraOpenClean test";

    // Get the camera list
    loadCameraList();

    // Open and close each camera twice
    for (auto&& cam : mCameraInfo) {
        bool isLogicalCam = false;
        auto devices = getPhysicalCameraIds(cam.id, isLogicalCam);
        if (mIsHwModule && isLogicalCam) {
            LOG(INFO) << "Skip a logical device, " << cam.id << " for HW target.";
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        for (int pass = 0; pass < 2; pass++) {
            std::shared_ptr<IEvsCamera> pCam;
            ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
            ASSERT_NE(pCam, nullptr);

            // For a logical camera, verify that each backing physical device
            // reports its own identifier correctly.
            CameraDesc cameraInfo;
            for (auto&& devName : devices) {
                ASSERT_TRUE(pCam->getPhysicalCameraInfo(devName, &cameraInfo).isOk());
                EXPECT_EQ(devName, cameraInfo.id);
            }

            // Store a camera handle for a clean-up
            mActiveCameras.push_back(pCam);

            // Verify that this camera self-identifies correctly
            ASSERT_TRUE(pCam->getCameraInfo(&cameraInfo).isOk());
            EXPECT_EQ(cam.id, cameraInfo.id);

            // Verify methods for extended info. Logical camera devices are
            // expected to reject extended-info calls with NOT_SUPPORTED.
            const auto id = 0xFFFFFFFF;  // meaningless id
            std::vector<uint8_t> values;
            auto status = pCam->setExtendedInfo(id, values);
            if (isLogicalCam) {
                EXPECT_TRUE(!status.isOk() && status.getServiceSpecificError() ==
                                                      static_cast<int>(EvsResult::NOT_SUPPORTED));
            } else {
                EXPECT_TRUE(status.isOk());
            }

            status = pCam->getExtendedInfo(id, &values);
            if (isLogicalCam) {
                EXPECT_TRUE(!status.isOk() && status.getServiceSpecificError() ==
                                                      static_cast<int>(EvsResult::NOT_SUPPORTED));
            } else {
                EXPECT_TRUE(status.isOk());
            }

            // Explicitly close the camera so resources are released right away
            ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
            mActiveCameras.clear();
        }
    }
}
324
/*
 * CameraOpenAggressive:
 * Opens each camera reported by the enumerator twice in a row without an intervening closeCamera
 * call. This ensures that the intended "aggressive open" behavior works. This is necessary for
 * the system to be tolerant of shutdown/restart race conditions.
 */
TEST_P(EvsAidlTest, CameraOpenAggressive) {
    LOG(INFO) << "Starting CameraOpenAggressive test";

    // Get the camera list
    loadCameraList();

    // Open and close each camera twice
    for (auto&& cam : mCameraInfo) {
        bool isLogicalCam = false;
        getPhysicalCameraIds(cam.id, isLogicalCam);
        if (mIsHwModule && isLogicalCam) {
            LOG(INFO) << "Skip a logical device, " << cam.id << " for HW target.";
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        mActiveCameras.clear();
        std::shared_ptr<IEvsCamera> pCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
        EXPECT_NE(pCam, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pCam);

        // Verify that this camera self-identifies correctly
        CameraDesc cameraInfo;
        ASSERT_TRUE(pCam->getCameraInfo(&cameraInfo).isOk());
        EXPECT_EQ(cam.id, cameraInfo.id);

        // Open the same camera again without closing the first handle; the
        // new client must receive a distinct handle that supersedes pCam.
        std::shared_ptr<IEvsCamera> pCam2;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam2).isOk());
        EXPECT_NE(pCam2, nullptr);
        EXPECT_NE(pCam, pCam2);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pCam2);

        auto status = pCam->setMaxFramesInFlight(2);
        if (mIsHwModule) {
            // Verify that the old camera rejects calls via HW module.
            EXPECT_TRUE(!status.isOk() && status.getServiceSpecificError() ==
                                                  static_cast<int>(EvsResult::OWNERSHIP_LOST));
        } else {
            // default implementation supports multiple clients.
            EXPECT_TRUE(status.isOk());
        }

        // Close the superseded camera
        ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
        mActiveCameras.pop_front();

        // Verify that the second camera instance self-identifies correctly
        ASSERT_TRUE(pCam2->getCameraInfo(&cameraInfo).isOk());
        EXPECT_EQ(cam.id, cameraInfo.id);

        // Close the second camera instance
        ASSERT_TRUE(mEnumerator->closeCamera(pCam2).isOk());
        mActiveCameras.pop_front();
    }

    // Sleep here to ensure the destructor cleanup has time to run so we don't break follow on tests
    sleep(1);  // I hate that this is an arbitrary time to wait. :( b/36122635
}
399
400/*
401 * CameraStreamPerformance:
402 * Measure and qualify the stream start up time and streaming frame rate of each reported camera
403 */
404TEST_P(EvsAidlTest, CameraStreamPerformance) {
405 LOG(INFO) << "Starting CameraStreamPerformance test";
406
407 // Get the camera list
408 loadCameraList();
409
410 // Test each reported camera
411 for (auto&& cam : mCameraInfo) {
412 bool isLogicalCam = false;
413 auto devices = getPhysicalCameraIds(cam.id, isLogicalCam);
414 if (mIsHwModule && isLogicalCam) {
415 LOG(INFO) << "Skip a logical device " << cam.id;
416 continue;
417 }
418
419 // Read a target resolution from the metadata
420 Stream targetCfg = getFirstStreamConfiguration(
421 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
422 ASSERT_GT(targetCfg.width, 0);
423 ASSERT_GT(targetCfg.height, 0);
424
425 std::shared_ptr<IEvsCamera> pCam;
426 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
427 EXPECT_NE(pCam, nullptr);
428
429 // Store a camera handle for a clean-up
430 mActiveCameras.push_back(pCam);
431
432 // Set up a frame receiver object which will fire up its own thread
433 std::shared_ptr<FrameHandler> frameHandler =
434 std::make_shared<FrameHandler>(pCam, cam, nullptr, FrameHandler::eAutoReturn);
435 EXPECT_NE(frameHandler, nullptr);
436
437 // Start the camera's video stream
438 nsecs_t start = systemTime(SYSTEM_TIME_MONOTONIC);
439 ASSERT_TRUE(frameHandler->startStream());
440
441 // Ensure the first frame arrived within the expected time
442 frameHandler->waitForFrameCount(1);
443 nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
444 nsecs_t timeToFirstFrame = systemTime(SYSTEM_TIME_MONOTONIC) - start;
445
446 // Extra delays are expected when we attempt to start a video stream on
447 // the logical camera device. The amount of delay is expected the
448 // number of physical camera devices multiplied by
449 // kMaxStreamStartMilliseconds at most.
450 EXPECT_LE(nanoseconds_to_milliseconds(timeToFirstFrame),
451 kMaxStreamStartMilliseconds * devices.size());
452 printf("%s: Measured time to first frame %0.2f ms\n", cam.id.data(),
453 timeToFirstFrame * kNanoToMilliseconds);
454 LOG(INFO) << cam.id << ": Measured time to first frame " << std::scientific
455 << timeToFirstFrame * kNanoToMilliseconds << " ms.";
456
457 // Check aspect ratio
458 unsigned width = 0, height = 0;
459 frameHandler->getFrameDimension(&width, &height);
460 EXPECT_GE(width, height);
461
462 // Wait a bit, then ensure we get at least the required minimum number of frames
463 sleep(5);
464 nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
465
466 // Even when the camera pointer goes out of scope, the FrameHandler object will
467 // keep the stream alive unless we tell it to shutdown.
468 // Also note that the FrameHandle and the Camera have a mutual circular reference, so
469 // we have to break that cycle in order for either of them to get cleaned up.
470 frameHandler->shutdown();
471
472 unsigned framesReceived = 0;
473 frameHandler->getFramesCounters(&framesReceived, nullptr);
474 framesReceived = framesReceived - 1; // Back out the first frame we already waited for
475 nsecs_t runTime = end - firstFrame;
476 float framesPerSecond = framesReceived / (runTime * kNanoToSeconds);
477 printf("Measured camera rate %3.2f fps\n", framesPerSecond);
478 LOG(INFO) << "Measured camera rate " << std::scientific << framesPerSecond << " fps.";
479 EXPECT_GE(framesPerSecond, kMinimumFramesPerSecond);
480
481 // Explicitly release the camera
482 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
483 mActiveCameras.clear();
484 }
485}
486
/*
 * CameraStreamBuffering:
 * Ensure the camera implementation behaves properly when the client holds onto buffers for more
 * than one frame time. The camera must cleanly skip frames until the client is ready again.
 */
TEST_P(EvsAidlTest, CameraStreamBuffering) {
    LOG(INFO) << "Starting CameraStreamBuffering test";

    // Arbitrary constant (should be > 1 and not too big)
    static const unsigned int kBuffersToHold = 6;

    // Get the camera list
    loadCameraList();

    // Test each reported camera
    for (auto&& cam : mCameraInfo) {
        bool isLogicalCam = false;
        getPhysicalCameraIds(cam.id, isLogicalCam);
        if (mIsHwModule && isLogicalCam) {
            LOG(INFO) << "Skip a logical device " << cam.id << " for HW target.";
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        std::shared_ptr<IEvsCamera> pCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
        EXPECT_NE(pCam, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pCam);

        // Ask for a very large number of buffers in flight to ensure it errors correctly
        auto badResult = pCam->setMaxFramesInFlight(0xFFFFFFFF);
        EXPECT_TRUE(!badResult.isOk() && badResult.getServiceSpecificError() ==
                                                 static_cast<int>(EvsResult::BUFFER_NOT_AVAILABLE));

        // Now ask for exactly kBuffersToHold buffers in flight as we'll test
        // behavior in that case.  (NOTE: stale comment previously said "two".)
        ASSERT_TRUE(pCam->setMaxFramesInFlight(kBuffersToHold).isOk());

        // Set up a frame receiver object which will fire up its own thread.
        // eNoAutoReturn: the handler holds every delivered buffer until we
        // explicitly return one, so we can observe the stall below.
        std::shared_ptr<FrameHandler> frameHandler =
                std::make_shared<FrameHandler>(pCam, cam, nullptr, FrameHandler::eNoAutoReturn);
        EXPECT_NE(frameHandler, nullptr);

        // Start the camera's video stream
        ASSERT_TRUE(frameHandler->startStream());

        // Check that the video stream stalls once we've gotten exactly the number of buffers
        // we requested since we told the frameHandler not to return them.
        sleep(1);  // 1 second should be enough for at least 5 frames to be delivered worst case
        unsigned framesReceived = 0;
        frameHandler->getFramesCounters(&framesReceived, nullptr);
        ASSERT_EQ(kBuffersToHold, framesReceived) << "Stream didn't stall at expected buffer limit";

        // Give back one buffer
        ASSERT_TRUE(frameHandler->returnHeldBuffer());

        // Once we return a buffer, it shouldn't take more than 1/10 second to get a new one
        // filled since we require 10fps minimum -- but give a 10% allowance just in case.
        usleep(110 * kMillisecondsToMicroseconds);
        frameHandler->getFramesCounters(&framesReceived, nullptr);
        EXPECT_EQ(kBuffersToHold + 1, framesReceived) << "Stream should've resumed";

        // Even when the camera pointer goes out of scope, the FrameHandler object will
        // keep the stream alive unless we tell it to shutdown.
        // Also note that the FrameHandle and the Camera have a mutual circular reference, so
        // we have to break that cycle in order for either of them to get cleaned up.
        frameHandler->shutdown();

        // Explicitly release the camera
        ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
        mActiveCameras.clear();
    }
}
566
567/*
568 * CameraToDisplayRoundTrip:
569 * End to end test of data flowing from the camera to the display. Each delivered frame of camera
570 * imagery is simply copied to the display buffer and presented on screen. This is the one test
571 * which a human could observe to see the operation of the system on the physical display.
572 */
573TEST_P(EvsAidlTest, CameraToDisplayRoundTrip) {
574 LOG(INFO) << "Starting CameraToDisplayRoundTrip test";
575
576 // Get the camera list
577 loadCameraList();
578
579 // Request available display IDs
580 uint8_t targetDisplayId = 0;
581 std::vector<uint8_t> displayIds;
582 ASSERT_TRUE(mEnumerator->getDisplayIdList(&displayIds).isOk());
583 EXPECT_GT(displayIds.size(), 0);
584 targetDisplayId = displayIds[0];
585
586 // Request exclusive access to the first EVS display
587 std::shared_ptr<IEvsDisplay> pDisplay;
588 ASSERT_TRUE(mEnumerator->openDisplay(targetDisplayId, &pDisplay).isOk());
589 EXPECT_NE(pDisplay, nullptr);
590 LOG(INFO) << "Display " << targetDisplayId << " is in use.";
591
592 // Get the display descriptor
593 DisplayDesc displayDesc;
594 ASSERT_TRUE(pDisplay->getDisplayInfo(&displayDesc).isOk());
595 LOG(INFO) << " Resolution: " << displayDesc.width << "x" << displayDesc.height;
596 ASSERT_GT(displayDesc.width, 0);
597 ASSERT_GT(displayDesc.height, 0);
598
599 // Test each reported camera
600 for (auto&& cam : mCameraInfo) {
601 bool isLogicalCam = false;
602 getPhysicalCameraIds(cam.id, isLogicalCam);
603 if (mIsHwModule && isLogicalCam) {
604 LOG(INFO) << "Skip a logical device " << cam.id << " for HW target.";
605 continue;
606 }
607
608 // Read a target resolution from the metadata
609 Stream targetCfg = getFirstStreamConfiguration(
610 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
611 ASSERT_GT(targetCfg.width, 0);
612 ASSERT_GT(targetCfg.height, 0);
613
614 std::shared_ptr<IEvsCamera> pCam;
615 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
616 EXPECT_NE(pCam, nullptr);
617
618 // Store a camera handle for a clean-up
619 mActiveCameras.push_back(pCam);
620
621 // Set up a frame receiver object which will fire up its own thread.
622 std::shared_ptr<FrameHandler> frameHandler =
623 std::make_shared<FrameHandler>(pCam, cam, pDisplay, FrameHandler::eAutoReturn);
624 EXPECT_NE(frameHandler, nullptr);
625
626 // Activate the display
627 ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME).isOk());
628
629 // Start the camera's video stream
630 ASSERT_TRUE(frameHandler->startStream());
631
632 // Wait a while to let the data flow
633 static const int kSecondsToWait = 5;
634 const int streamTimeMs =
635 kSecondsToWait * kSecondsToMilliseconds - kMaxStreamStartMilliseconds;
636 const unsigned minimumFramesExpected =
637 streamTimeMs * kMinimumFramesPerSecond / kSecondsToMilliseconds;
638 sleep(kSecondsToWait);
639 unsigned framesReceived = 0;
640 unsigned framesDisplayed = 0;
641 frameHandler->getFramesCounters(&framesReceived, &framesDisplayed);
642 EXPECT_EQ(framesReceived, framesDisplayed);
643 EXPECT_GE(framesDisplayed, minimumFramesExpected);
644
645 // Turn off the display (yes, before the stream stops -- it should be handled)
646 ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::NOT_VISIBLE).isOk());
647
648 // Shut down the streamer
649 frameHandler->shutdown();
650
651 // Explicitly release the camera
652 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
653 mActiveCameras.clear();
654 }
655
656 // Explicitly release the display
657 ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
658}
659
/*
 * MultiCameraStream:
 * Verify that each client can start and stop video streams on the same
 * underlying camera.
 */
TEST_P(EvsAidlTest, MultiCameraStream) {
    LOG(INFO) << "Starting MultiCameraStream test";

    if (mIsHwModule) {
        // This test is not for HW module implementation.
        return;
    }

    // Get the camera list
    loadCameraList();

    // Test each reported camera
    for (auto&& cam : mCameraInfo) {
        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        // Create two camera clients.
        std::shared_ptr<IEvsCamera> pCam0;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam0).isOk());
        EXPECT_NE(pCam0, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pCam0);

        std::shared_ptr<IEvsCamera> pCam1;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());
        EXPECT_NE(pCam1, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pCam1);

        // Set up per-client frame receiver objects which will fire up its own thread
        std::shared_ptr<FrameHandler> frameHandler0 =
                std::make_shared<FrameHandler>(pCam0, cam, nullptr, FrameHandler::eAutoReturn);
        std::shared_ptr<FrameHandler> frameHandler1 =
                std::make_shared<FrameHandler>(pCam1, cam, nullptr, FrameHandler::eAutoReturn);
        EXPECT_NE(frameHandler0, nullptr);
        EXPECT_NE(frameHandler1, nullptr);

        // Start the camera's video stream via client 0
        ASSERT_TRUE(frameHandler0->startStream());
        ASSERT_TRUE(frameHandler1->startStream());

        // Ensure the stream starts
        frameHandler0->waitForFrameCount(1);
        frameHandler1->waitForFrameCount(1);

        nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);

        // Wait a bit, then ensure both clients get at least the required minimum number of frames
        sleep(5);
        nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
        unsigned framesReceived0 = 0, framesReceived1 = 0;
        frameHandler0->getFramesCounters(&framesReceived0, nullptr);
        frameHandler1->getFramesCounters(&framesReceived1, nullptr);
        framesReceived0 = framesReceived0 - 1;  // Back out the first frame we already waited for
        framesReceived1 = framesReceived1 - 1;  // Back out the first frame we already waited for
        nsecs_t runTime = end - firstFrame;
        float framesPerSecond0 = framesReceived0 / (runTime * kNanoToSeconds);
        float framesPerSecond1 = framesReceived1 / (runTime * kNanoToSeconds);
        LOG(INFO) << "Measured camera rate " << std::scientific << framesPerSecond0 << " fps and "
                  << framesPerSecond1 << " fps";
        EXPECT_GE(framesPerSecond0, kMinimumFramesPerSecond);
        EXPECT_GE(framesPerSecond1, kMinimumFramesPerSecond);

        // Shutdown one client
        frameHandler0->shutdown();

        // Read frame counters again
        frameHandler0->getFramesCounters(&framesReceived0, nullptr);
        frameHandler1->getFramesCounters(&framesReceived1, nullptr);

        // Wait a bit again; client 0 must stay frozen while client 1 keeps
        // receiving frames.
        sleep(5);
        unsigned framesReceivedAfterStop0 = 0, framesReceivedAfterStop1 = 0;
        frameHandler0->getFramesCounters(&framesReceivedAfterStop0, nullptr);
        frameHandler1->getFramesCounters(&framesReceivedAfterStop1, nullptr);
        EXPECT_EQ(framesReceived0, framesReceivedAfterStop0);
        EXPECT_LT(framesReceived1, framesReceivedAfterStop1);

        // Shutdown another
        frameHandler1->shutdown();

        // Explicitly release the camera
        ASSERT_TRUE(mEnumerator->closeCamera(pCam0).isOk());
        ASSERT_TRUE(mEnumerator->closeCamera(pCam1).isOk());
        mActiveCameras.clear();

        // TODO(b/145459970, b/145457727): below sleep() is added to ensure the
        // destruction of active camera objects; this may be related with two
        // issues.
        sleep(1);
    }
}
762
763/*
764 * CameraParameter:
765 * Verify that a client can adjust a camera parameter.
766 */
767TEST_P(EvsAidlTest, CameraParameter) {
768 LOG(INFO) << "Starting CameraParameter test";
769
770 // Get the camera list
771 loadCameraList();
772
773 // Test each reported camera
774 for (auto&& cam : mCameraInfo) {
775 bool isLogicalCam = false;
776 getPhysicalCameraIds(cam.id, isLogicalCam);
777 if (isLogicalCam) {
778 // TODO(b/145465724): Support camera parameter programming on
779 // logical devices.
780 LOG(INFO) << "Skip a logical device " << cam.id;
781 continue;
782 }
783
784 // Read a target resolution from the metadata
785 Stream targetCfg = getFirstStreamConfiguration(
786 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
787 ASSERT_GT(targetCfg.width, 0);
788 ASSERT_GT(targetCfg.height, 0);
789
790 // Create a camera client
791 std::shared_ptr<IEvsCamera> pCam;
792 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
793 EXPECT_NE(pCam, nullptr);
794
795 // Store a camera
796 mActiveCameras.push_back(pCam);
797
798 // Get the parameter list
799 std::vector<CameraParam> cmds;
800 ASSERT_TRUE(pCam->getParameterList(&cmds).isOk());
801 if (cmds.size() < 1) {
802 continue;
803 }
804
805 // Set up per-client frame receiver objects which will fire up its own thread
806 std::shared_ptr<FrameHandler> frameHandler =
807 std::make_shared<FrameHandler>(pCam, cam, nullptr, FrameHandler::eAutoReturn);
808 EXPECT_NE(frameHandler, nullptr);
809
810 // Start the camera's video stream
811 ASSERT_TRUE(frameHandler->startStream());
812
813 // Ensure the stream starts
814 frameHandler->waitForFrameCount(1);
815
816 // Set current client is the primary client
817 ASSERT_TRUE(pCam->setPrimaryClient().isOk());
818 for (auto& cmd : cmds) {
819 // Get a valid parameter value range
820 ParameterRange range;
821 ASSERT_TRUE(pCam->getIntParameterRange(cmd, &range).isOk());
822
823 std::vector<int32_t> values;
824 if (cmd == CameraParam::ABSOLUTE_FOCUS) {
825 // Try to turn off auto-focus
826 ASSERT_TRUE(pCam->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
827 for (auto&& v : values) {
828 EXPECT_EQ(v, 0);
829 }
830 }
831
832 // Try to program a parameter with a random value [minVal, maxVal]
833 int32_t val0 = range.min + (std::rand() % (range.max - range.min));
834
835 // Rounding down
836 val0 = val0 - (val0 % range.step);
837 values.clear();
838 ASSERT_TRUE(pCam->setIntParameter(cmd, val0, &values).isOk());
839
840 values.clear();
841 ASSERT_TRUE(pCam->getIntParameter(cmd, &values).isOk());
842 for (auto&& v : values) {
843 EXPECT_EQ(val0, v) << "Values are not matched.";
844 }
845 }
846 ASSERT_TRUE(pCam->unsetPrimaryClient().isOk());
847
848 // Shutdown
849 frameHandler->shutdown();
850
851 // Explicitly release the camera
852 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
853 mActiveCameras.clear();
854 }
855}
856
857/*
858 * CameraPrimaryClientRelease
859 * Verify that non-primary client gets notified when the primary client either
860 * terminates or releases a role.
861 */
TEST_P(EvsAidlTest, CameraPrimaryClientRelease) {
    LOG(INFO) << "Starting CameraPrimaryClientRelease test";

    if (mIsHwModule) {
        // This test exercises service-side client arbitration and is
        // not applicable to a HW module implementation.
        return;
    }

    // Get the camera list
    loadCameraList();

    // Test each reported camera
    for (auto&& cam : mCameraInfo) {
        bool isLogicalCam = false;
        getPhysicalCameraIds(cam.id, isLogicalCam);
        if (isLogicalCam) {
            // TODO(b/145465724): Support camera parameter programming on
            // logical devices.
            LOG(INFO) << "Skip a logical device " << cam.id;
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        // Create two camera clients on the same underlying device.
        std::shared_ptr<IEvsCamera> pPrimaryCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pPrimaryCam).isOk());
        EXPECT_NE(pPrimaryCam, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pPrimaryCam);

        std::shared_ptr<IEvsCamera> pSecondaryCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pSecondaryCam).isOk());
        EXPECT_NE(pSecondaryCam, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pSecondaryCam);

        // Set up per-client frame receiver objects; each fires up its own thread.
        std::shared_ptr<FrameHandler> frameHandlerPrimary = std::make_shared<FrameHandler>(
                pPrimaryCam, cam, nullptr, FrameHandler::eAutoReturn);
        std::shared_ptr<FrameHandler> frameHandlerSecondary = std::make_shared<FrameHandler>(
                pSecondaryCam, cam, nullptr, FrameHandler::eAutoReturn);
        EXPECT_NE(frameHandlerPrimary, nullptr);
        EXPECT_NE(frameHandlerSecondary, nullptr);

        // Set one client as the primary client
        ASSERT_TRUE(pPrimaryCam->setPrimaryClient().isOk());

        // Try to set another client as the primary client; this must be
        // rejected while the first client holds the role.
        ASSERT_FALSE(pSecondaryCam->setPrimaryClient().isOk());

        // Start the camera's video stream via the primary client.
        ASSERT_TRUE(frameHandlerPrimary->startStream());

        // Ensure the stream starts
        frameHandlerPrimary->waitForFrameCount(1);

        // Start the camera's video stream via another client
        ASSERT_TRUE(frameHandlerSecondary->startStream());

        // Ensure the stream starts
        frameHandlerSecondary->waitForFrameCount(1);

        // Non-primary client expects to receive a primary client role released
        // notification.
        EvsEventDesc aTargetEvent = {};
        EvsEventDesc aNotification = {};

        // NOTE(review): `listening` is written by the listener thread and read
        // by this thread without holding `eventLock`, and notify_all() is
        // called without the lock held — formally a data race / potential lost
        // wakeup. It is tolerated here because the waiter below uses a 1s
        // timed wait, but consider std::atomic<bool> or notifying under the
        // lock.
        bool listening = false;
        std::mutex eventLock;
        std::condition_variable eventCond;
        std::thread listener =
                std::thread([&aNotification, &frameHandlerSecondary, &listening, &eventCond]() {
                    // Notify that a listening thread is running.
                    listening = true;
                    eventCond.notify_all();

                    // Block until the secondary client observes MASTER_RELEASED.
                    EvsEventDesc aTargetEvent;
                    aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
                    if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification, true)) {
                        LOG(WARNING) << "A timer is expired before a target event is fired.";
                    }
                });

        // Wait until a listening thread starts.
        std::unique_lock<std::mutex> lock(eventLock);
        auto timer = std::chrono::system_clock::now();
        while (!listening) {
            // Re-arm a 1-second deadline on each (possibly spurious) wakeup.
            timer += 1s;
            eventCond.wait_until(lock, timer);
        }
        lock.unlock();

        // Release a primary client role.
        ASSERT_TRUE(pPrimaryCam->unsetPrimaryClient().isOk());

        // Join a listening thread.
        if (listener.joinable()) {
            listener.join();
        }

        // Verify change notifications.
        ASSERT_EQ(EvsEventType::MASTER_RELEASED, static_cast<EvsEventType>(aNotification.aType));

        // Non-primary becomes a primary client.
        ASSERT_TRUE(pSecondaryCam->setPrimaryClient().isOk());

        // Previous primary client fails to become a primary client.
        ASSERT_FALSE(pPrimaryCam->setPrimaryClient().isOk());

        // Repeat the handshake: now the former primary client listens for the
        // release event that its peer's shutdown will trigger.
        listening = false;
        listener = std::thread([&aNotification, &frameHandlerPrimary, &listening, &eventCond]() {
            // Notify that a listening thread is running.
            listening = true;
            eventCond.notify_all();

            EvsEventDesc aTargetEvent;
            aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
            if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification, true)) {
                LOG(WARNING) << "A timer is expired before a target event is fired.";
            }
        });

        // Wait until a listening thread starts.
        timer = std::chrono::system_clock::now();
        lock.lock();
        while (!listening) {
            eventCond.wait_until(lock, timer + 1s);
        }
        lock.unlock();

        // Closing current primary client; its termination must implicitly
        // release the primary role and notify the remaining client.
        frameHandlerSecondary->shutdown();

        // Join a listening thread.
        if (listener.joinable()) {
            listener.join();
        }

        // Verify change notifications.
        ASSERT_EQ(EvsEventType::MASTER_RELEASED, static_cast<EvsEventType>(aNotification.aType));

        // Closing streams.
        frameHandlerPrimary->shutdown();

        // Explicitly release the camera
        ASSERT_TRUE(mEnumerator->closeCamera(pPrimaryCam).isOk());
        ASSERT_TRUE(mEnumerator->closeCamera(pSecondaryCam).isOk());
        mActiveCameras.clear();
    }
}
1019
1020/*
1021 * MultiCameraParameter:
1022 * Verify that primary and non-primary clients behave as expected when they try to adjust
1023 * camera parameters.
1024 */
TEST_P(EvsAidlTest, MultiCameraParameter) {
    LOG(INFO) << "Starting MultiCameraParameter test";

    if (mIsHwModule) {
        // This test exercises service-side client arbitration and is
        // not applicable to a HW module implementation.
        return;
    }

    // Get the camera list
    loadCameraList();

    // Test each reported camera
    for (auto&& cam : mCameraInfo) {
        bool isLogicalCam = false;
        getPhysicalCameraIds(cam.id, isLogicalCam);
        if (isLogicalCam) {
            // TODO(b/145465724): Support camera parameter programming on
            // logical devices.
            LOG(INFO) << "Skip a logical device " << cam.id;
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        // Create two camera clients on the same underlying device.
        std::shared_ptr<IEvsCamera> pPrimaryCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pPrimaryCam).isOk());
        EXPECT_NE(pPrimaryCam, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pPrimaryCam);

        std::shared_ptr<IEvsCamera> pSecondaryCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pSecondaryCam).isOk());
        EXPECT_NE(pSecondaryCam, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pSecondaryCam);

        // Get the parameter list
        std::vector<CameraParam> camPrimaryCmds, camSecondaryCmds;
        ASSERT_TRUE(pPrimaryCam->getParameterList(&camPrimaryCmds).isOk());
        ASSERT_TRUE(pSecondaryCam->getParameterList(&camSecondaryCmds).isOk());
        if (camPrimaryCmds.size() < 1 || camSecondaryCmds.size() < 1) {
            // Skip a camera device if it does not support any parameter.
            continue;
        }

        // Set up per-client frame receiver objects; each fires up its own thread.
        std::shared_ptr<FrameHandler> frameHandlerPrimary = std::make_shared<FrameHandler>(
                pPrimaryCam, cam, nullptr, FrameHandler::eAutoReturn);
        std::shared_ptr<FrameHandler> frameHandlerSecondary = std::make_shared<FrameHandler>(
                pSecondaryCam, cam, nullptr, FrameHandler::eAutoReturn);
        EXPECT_NE(frameHandlerPrimary, nullptr);
        EXPECT_NE(frameHandlerSecondary, nullptr);

        // Set one client as the primary client.
        ASSERT_TRUE(pPrimaryCam->setPrimaryClient().isOk());

        // Try to set another client as the primary client; this must be
        // rejected while the first client holds the role.
        ASSERT_FALSE(pSecondaryCam->setPrimaryClient().isOk());

        // Start the camera's video stream via the primary client.
        ASSERT_TRUE(frameHandlerPrimary->startStream());

        // Ensure the stream starts
        frameHandlerPrimary->waitForFrameCount(1);

        // Start the camera's video stream via another client
        ASSERT_TRUE(frameHandlerSecondary->startStream());

        // Ensure the stream starts
        frameHandlerSecondary->waitForFrameCount(1);

        // Phase 1: the primary client programs every supported parameter and
        // both clients must observe a PARAMETER_CHANGED notification.
        int32_t val0 = 0;
        std::vector<int32_t> values;
        EvsEventDesc aNotification0 = {};
        EvsEventDesc aNotification1 = {};
        for (auto& cmd : camPrimaryCmds) {
            // Get a valid parameter value range
            ParameterRange range;
            ASSERT_TRUE(pPrimaryCam->getIntParameterRange(cmd, &range).isOk());
            if (cmd == CameraParam::ABSOLUTE_FOCUS) {
                // Try to turn off auto-focus so a manual focus value can be set.
                values.clear();
                ASSERT_TRUE(
                        pPrimaryCam->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
                for (auto&& v : values) {
                    EXPECT_EQ(v, 0);
                }
            }

            // Calculate a parameter value to program.
            // NOTE(review): this divides by (range.max - range.min) and
            // rounds with (val0 % range.step); a device reporting
            // max == min or step == 0 would cause a division by zero.
            // Confirm whether the HAL guarantees non-degenerate ranges.
            val0 = range.min + (std::rand() % (range.max - range.min));
            val0 = val0 - (val0 % range.step);

            // Prepare and start event listeners. Each listener announces it
            // is running (last one to start performs the notify) and then
            // blocks waiting for the expected PARAMETER_CHANGED event.
            // NOTE(review): listening0/listening1 are shared without a lock
            // and notify_all() runs without eventLock held — tolerated only
            // because the waiter below uses a 1s timed wait.
            bool listening0 = false;
            bool listening1 = false;
            std::condition_variable eventCond;
            std::thread listener0 = std::thread([cmd, val0, &aNotification0, &frameHandlerPrimary,
                                                 &listening0, &listening1, &eventCond]() {
                listening0 = true;
                if (listening1) {
                    eventCond.notify_all();
                }

                EvsEventDesc aTargetEvent;
                aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
                aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
                aTargetEvent.payload[1] = val0;
                if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification0)) {
                    LOG(WARNING) << "A timer is expired before a target event is fired.";
                }
            });
            std::thread listener1 = std::thread([cmd, val0, &aNotification1, &frameHandlerSecondary,
                                                 &listening0, &listening1, &eventCond]() {
                listening1 = true;
                if (listening0) {
                    eventCond.notify_all();
                }

                EvsEventDesc aTargetEvent;
                aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
                aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
                aTargetEvent.payload[1] = val0;
                if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification1)) {
                    LOG(WARNING) << "A timer is expired before a target event is fired.";
                }
            });

            // Wait until both listening threads start.
            std::mutex eventLock;
            std::unique_lock<std::mutex> lock(eventLock);
            auto timer = std::chrono::system_clock::now();
            while (!listening0 || !listening1) {
                eventCond.wait_until(lock, timer + 1s);
            }
            lock.unlock();

            // Try to program a parameter
            values.clear();
            ASSERT_TRUE(pPrimaryCam->setIntParameter(cmd, val0, &values).isOk());
            for (auto&& v : values) {
                EXPECT_EQ(val0, v) << "Values are not matched.";
            }

            // Join the listening threads.
            if (listener0.joinable()) {
                listener0.join();
            }
            if (listener1.joinable()) {
                listener1.join();
            }

            // Verify a change notification was delivered to both clients with
            // the programmed command and value.
            ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
                      static_cast<EvsEventType>(aNotification0.aType));
            ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
                      static_cast<EvsEventType>(aNotification1.aType));
            ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification0.payload[0]));
            ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification1.payload[0]));
            for (auto&& v : values) {
                ASSERT_EQ(v, static_cast<int32_t>(aNotification0.payload[1]));
                ASSERT_EQ(v, static_cast<int32_t>(aNotification1.payload[1]));
            }

            // Clients expect to receive a parameter change notification
            // whenever the primary client adjusts it.
            values.clear();
            ASSERT_TRUE(pPrimaryCam->getIntParameter(cmd, &values).isOk());
            for (auto&& v : values) {
                EXPECT_EQ(val0, v) << "Values are not matched.";
            }
        }

        // Try to adjust a parameter via non-primary client; must be rejected.
        values.clear();
        ASSERT_FALSE(pSecondaryCam->setIntParameter(camSecondaryCmds[0], val0, &values).isOk());

        // Non-primary client attempts to be a primary client
        ASSERT_FALSE(pSecondaryCam->setPrimaryClient().isOk());

        // Primary client retires from a primary client role; the secondary
        // client must observe a MASTER_RELEASED event.
        bool listening = false;
        std::condition_variable eventCond;
        std::thread listener =
                std::thread([&aNotification0, &frameHandlerSecondary, &listening, &eventCond]() {
                    listening = true;
                    eventCond.notify_all();

                    EvsEventDesc aTargetEvent;
                    aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
                    if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification0, true)) {
                        LOG(WARNING) << "A timer is expired before a target event is fired.";
                    }
                });

        std::mutex eventLock;
        auto timer = std::chrono::system_clock::now();
        std::unique_lock<std::mutex> lock(eventLock);
        while (!listening) {
            eventCond.wait_until(lock, timer + 1s);
        }
        lock.unlock();

        ASSERT_TRUE(pPrimaryCam->unsetPrimaryClient().isOk());

        if (listener.joinable()) {
            listener.join();
        }
        ASSERT_EQ(EvsEventType::MASTER_RELEASED, static_cast<EvsEventType>(aNotification0.aType));

        // Try to adjust a parameter after being retired
        values.clear();
        ASSERT_FALSE(pPrimaryCam->setIntParameter(camPrimaryCmds[0], val0, &values).isOk());

        // Non-primary client becomes a primary client
        ASSERT_TRUE(pSecondaryCam->setPrimaryClient().isOk());

        // Phase 2: repeat the parameter programming via the new primary
        // client (the former secondary) and verify both clients are notified.
        for (auto& cmd : camSecondaryCmds) {
            // Get a valid parameter value range
            ParameterRange range;
            ASSERT_TRUE(pSecondaryCam->getIntParameterRange(cmd, &range).isOk());

            values.clear();
            if (cmd == CameraParam::ABSOLUTE_FOCUS) {
                // Try to turn off auto-focus so a manual focus value can be set.
                values.clear();
                ASSERT_TRUE(
                        pSecondaryCam->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
                for (auto&& v : values) {
                    EXPECT_EQ(v, 0);
                }
            }

            // Calculate a parameter value to program; it is rounded down to
            // the nearest multiple of range.step.
            val0 = range.min + (std::rand() % (range.max - range.min));
            val0 = val0 - (val0 % range.step);

            // Prepare and start event listeners (same rendezvous pattern as
            // the first phase above).
            bool listening0 = false;
            bool listening1 = false;
            std::condition_variable eventCond;
            std::thread listener0 = std::thread([&]() {
                listening0 = true;
                if (listening1) {
                    eventCond.notify_all();
                }

                EvsEventDesc aTargetEvent;
                aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
                aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
                aTargetEvent.payload[1] = val0;
                if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification0)) {
                    LOG(WARNING) << "A timer is expired before a target event is fired.";
                }
            });
            std::thread listener1 = std::thread([&]() {
                listening1 = true;
                if (listening0) {
                    eventCond.notify_all();
                }

                EvsEventDesc aTargetEvent;
                aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
                aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
                aTargetEvent.payload[1] = val0;
                if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification1)) {
                    LOG(WARNING) << "A timer is expired before a target event is fired.";
                }
            });

            // Wait until both listening threads start.
            std::mutex eventLock;
            std::unique_lock<std::mutex> lock(eventLock);
            auto timer = std::chrono::system_clock::now();
            while (!listening0 || !listening1) {
                eventCond.wait_until(lock, timer + 1s);
            }
            lock.unlock();

            // Try to program a parameter
            values.clear();
            ASSERT_TRUE(pSecondaryCam->setIntParameter(cmd, val0, &values).isOk());

            // Clients expect to receive a parameter change notification
            // whenever the primary client adjusts it.
            values.clear();
            ASSERT_TRUE(pSecondaryCam->getIntParameter(cmd, &values).isOk());
            for (auto&& v : values) {
                EXPECT_EQ(val0, v) << "Values are not matched.";
            }

            // Join the listening threads.
            if (listener0.joinable()) {
                listener0.join();
            }
            if (listener1.joinable()) {
                listener1.join();
            }

            // Verify a change notification was delivered to both clients with
            // the programmed command and value.
            ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
                      static_cast<EvsEventType>(aNotification0.aType));
            ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
                      static_cast<EvsEventType>(aNotification1.aType));
            ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification0.payload[0]));
            ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification1.payload[0]));
            for (auto&& v : values) {
                ASSERT_EQ(v, static_cast<int32_t>(aNotification0.payload[1]));
                ASSERT_EQ(v, static_cast<int32_t>(aNotification1.payload[1]));
            }
        }

        // New primary client retires from the role
        ASSERT_TRUE(pSecondaryCam->unsetPrimaryClient().isOk());

        // Shutdown
        frameHandlerPrimary->shutdown();
        frameHandlerSecondary->shutdown();

        // Explicitly release the camera
        ASSERT_TRUE(mEnumerator->closeCamera(pPrimaryCam).isOk());
        ASSERT_TRUE(mEnumerator->closeCamera(pSecondaryCam).isOk());
        mActiveCameras.clear();
    }
}
1358
1359/*
1360 * HighPriorityCameraClient:
1361 * EVS client, which owns the display, is priortized and therefore can take over
1362 * a primary client role from other EVS clients without the display.
1363 */
TEST_P(EvsAidlTest, HighPriorityCameraClient) {
    LOG(INFO) << "Starting HighPriorityCameraClient test";

    if (mIsHwModule) {
        // This test exercises service-side client arbitration and is
        // not applicable to a HW module implementation.
        return;
    }

    // Get the camera list
    loadCameraList();

    // Request available display IDs
    uint8_t targetDisplayId = 0;
    std::vector<uint8_t> displayIds;
    ASSERT_TRUE(mEnumerator->getDisplayIdList(&displayIds).isOk());
    // NOTE(review): EXPECT_GT is non-fatal; if the list were empty the
    // displayIds[0] access below would be undefined behavior. Consider
    // ASSERT_FALSE(displayIds.empty()) instead.
    EXPECT_GT(displayIds.size(), 0);
    targetDisplayId = displayIds[0];

    // Request exclusive access to the EVS display
    std::shared_ptr<IEvsDisplay> pDisplay;
    ASSERT_TRUE(mEnumerator->openDisplay(targetDisplayId, &pDisplay).isOk());
    EXPECT_NE(pDisplay, nullptr);

    // Test each reported camera
    for (auto&& cam : mCameraInfo) {
        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        // Create two clients on the same underlying device.
        std::shared_ptr<IEvsCamera> pCam0;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam0).isOk());
        EXPECT_NE(pCam0, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pCam0);

        std::shared_ptr<IEvsCamera> pCam1;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());
        EXPECT_NE(pCam1, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pCam1);

        // Get the parameter list; this test will use the first command in both
        // lists.
        std::vector<CameraParam> cam0Cmds, cam1Cmds;
        ASSERT_TRUE(pCam0->getParameterList(&cam0Cmds).isOk());
        ASSERT_TRUE(pCam1->getParameterList(&cam1Cmds).isOk());
        if (cam0Cmds.size() < 1 || cam1Cmds.size() < 1) {
            // Cannot execute this test without any supported parameter.
            return;
        }

        // Set up a frame receiver object which will fire up its own thread.
        std::shared_ptr<FrameHandler> frameHandler0 =
                std::make_shared<FrameHandler>(pCam0, cam, nullptr, FrameHandler::eAutoReturn);
        std::shared_ptr<FrameHandler> frameHandler1 =
                std::make_shared<FrameHandler>(pCam1, cam, nullptr, FrameHandler::eAutoReturn);
        EXPECT_NE(frameHandler0, nullptr);
        EXPECT_NE(frameHandler1, nullptr);

        // Activate the display
        ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME).isOk());

        // Start the camera's video stream
        ASSERT_TRUE(frameHandler0->startStream());
        ASSERT_TRUE(frameHandler1->startStream());

        // Ensure the stream starts
        frameHandler0->waitForFrameCount(1);
        frameHandler1->waitForFrameCount(1);

        // Client 1 becomes a primary client and programs a parameter.

        // Get a valid parameter value range
        ParameterRange range;
        ASSERT_TRUE(pCam1->getIntParameterRange(cam1Cmds[0], &range).isOk());

        // Client1 becomes a primary client
        ASSERT_TRUE(pCam1->setPrimaryClient().isOk());

        // NOTE(review): the `listening` flag and eventCond are used without
        // holding eventLock in the listener threads below — tolerated only
        // because every waiter uses a 1s timed wait.
        std::vector<int32_t> values;
        EvsEventDesc aTargetEvent = {};
        EvsEventDesc aNotification = {};
        bool listening = false;
        std::mutex eventLock;
        std::condition_variable eventCond;
        if (cam1Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
            // Manual focus requires auto-focus to be off first; the other
            // client must observe the AUTO_FOCUS change notification.
            std::thread listener =
                    std::thread([&frameHandler0, &aNotification, &listening, &eventCond] {
                        listening = true;
                        eventCond.notify_all();

                        EvsEventDesc aTargetEvent;
                        aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
                        aTargetEvent.payload[0] = static_cast<uint32_t>(CameraParam::AUTO_FOCUS);
                        aTargetEvent.payload[1] = 0;
                        if (!frameHandler0->waitForEvent(aTargetEvent, aNotification)) {
                            LOG(WARNING) << "A timer is expired before a target event is fired.";
                        }
                    });

            // Wait until a listener starts.
            std::unique_lock<std::mutex> lock(eventLock);
            auto timer = std::chrono::system_clock::now();
            while (!listening) {
                eventCond.wait_until(lock, timer + 1s);
            }
            lock.unlock();

            // Try to turn off auto-focus
            ASSERT_TRUE(pCam1->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
            for (auto&& v : values) {
                EXPECT_EQ(v, 0);
            }

            // Join a listener
            if (listener.joinable()) {
                listener.join();
            }

            // Make sure AUTO_FOCUS is off.
            ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
                      EvsEventType::PARAMETER_CHANGED);
        }

        // Try to program a parameter with a random value [minVal, maxVal] after
        // rounding it down.
        int32_t val0 = range.min + (std::rand() % (range.max - range.min));
        val0 = val0 - (val0 % range.step);

        std::thread listener = std::thread(
                [&frameHandler1, &aNotification, &listening, &eventCond, &cam1Cmds, val0] {
                    listening = true;
                    eventCond.notify_all();

                    EvsEventDesc aTargetEvent;
                    aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
                    aTargetEvent.payload[0] = static_cast<uint32_t>(cam1Cmds[0]);
                    aTargetEvent.payload[1] = val0;
                    if (!frameHandler1->waitForEvent(aTargetEvent, aNotification)) {
                        LOG(WARNING) << "A timer is expired before a target event is fired.";
                    }
                });

        // Wait until a listener starts.
        listening = false;
        std::unique_lock<std::mutex> lock(eventLock);
        auto timer = std::chrono::system_clock::now();
        while (!listening) {
            eventCond.wait_until(lock, timer + 1s);
        }
        lock.unlock();

        values.clear();
        ASSERT_TRUE(pCam1->setIntParameter(cam1Cmds[0], val0, &values).isOk());
        for (auto&& v : values) {
            EXPECT_EQ(val0, v);
        }

        // Join a listener
        if (listener.joinable()) {
            listener.join();
        }

        // Verify a change notification
        ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType), EvsEventType::PARAMETER_CHANGED);
        ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]), cam1Cmds[0]);
        for (auto&& v : values) {
            ASSERT_EQ(v, static_cast<int32_t>(aNotification.payload[1]));
        }

        // Client 1 should be notified that its primary role was taken away
        // when the display owner forces the takeover below.
        listener = std::thread([&frameHandler1, &aNotification, &listening, &eventCond] {
            listening = true;
            eventCond.notify_all();

            EvsEventDesc aTargetEvent;
            aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
            if (!frameHandler1->waitForEvent(aTargetEvent, aNotification, true)) {
                LOG(WARNING) << "A timer is expired before a target event is fired.";
            }
        });

        // Wait until a listener starts.
        listening = false;
        lock.lock();
        timer = std::chrono::system_clock::now();
        while (!listening) {
            eventCond.wait_until(lock, timer + 1s);
        }
        lock.unlock();

        // Client 0 steals a primary client role; this is allowed because it
        // presents the display handle, which grants higher priority.
        ASSERT_TRUE(pCam0->forcePrimaryClient(pDisplay).isOk());

        // Join a listener
        if (listener.joinable()) {
            listener.join();
        }

        ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType), EvsEventType::MASTER_RELEASED);

        // Client 0 programs a parameter
        val0 = range.min + (std::rand() % (range.max - range.min));

        // Rounding down
        val0 = val0 - (val0 % range.step);

        if (cam0Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
            // Manual focus requires auto-focus to be off first; the other
            // client must observe the AUTO_FOCUS change notification.
            std::thread listener =
                    std::thread([&frameHandler1, &aNotification, &listening, &eventCond] {
                        listening = true;
                        eventCond.notify_all();

                        EvsEventDesc aTargetEvent;
                        aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
                        aTargetEvent.payload[0] = static_cast<uint32_t>(CameraParam::AUTO_FOCUS);
                        aTargetEvent.payload[1] = 0;
                        if (!frameHandler1->waitForEvent(aTargetEvent, aNotification)) {
                            LOG(WARNING) << "A timer is expired before a target event is fired.";
                        }
                    });

            // Wait until a listener starts.
            std::unique_lock<std::mutex> lock(eventLock);
            auto timer = std::chrono::system_clock::now();
            while (!listening) {
                eventCond.wait_until(lock, timer + 1s);
            }
            lock.unlock();

            // Try to turn off auto-focus
            values.clear();
            ASSERT_TRUE(pCam0->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
            for (auto&& v : values) {
                EXPECT_EQ(v, 0);
            }

            // Join a listener
            if (listener.joinable()) {
                listener.join();
            }

            // Make sure AUTO_FOCUS is off.
            ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
                      EvsEventType::PARAMETER_CHANGED);
        }

        listener = std::thread(
                [&frameHandler0, &aNotification, &listening, &eventCond, &cam0Cmds, val0] {
                    listening = true;
                    eventCond.notify_all();

                    EvsEventDesc aTargetEvent;
                    aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
                    aTargetEvent.payload[0] = static_cast<uint32_t>(cam0Cmds[0]);
                    aTargetEvent.payload[1] = val0;
                    if (!frameHandler0->waitForEvent(aTargetEvent, aNotification)) {
                        LOG(WARNING) << "A timer is expired before a target event is fired.";
                    }
                });

        // Wait until a listener starts.
        listening = false;
        timer = std::chrono::system_clock::now();
        lock.lock();
        while (!listening) {
            eventCond.wait_until(lock, timer + 1s);
        }
        lock.unlock();

        values.clear();
        ASSERT_TRUE(pCam0->setIntParameter(cam0Cmds[0], val0, &values).isOk());

        // Join a listener
        if (listener.joinable()) {
            listener.join();
        }
        // Verify a change notification
        ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType), EvsEventType::PARAMETER_CHANGED);
        ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]), cam0Cmds[0]);
        for (auto&& v : values) {
            ASSERT_EQ(v, static_cast<int32_t>(aNotification.payload[1]));
        }

        // Turn off the display (yes, before the stream stops -- it should be handled)
        ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::NOT_VISIBLE).isOk());

        // Shut down the streamer
        frameHandler0->shutdown();
        frameHandler1->shutdown();

        // Explicitly release the camera
        ASSERT_TRUE(mEnumerator->closeCamera(pCam0).isOk());
        ASSERT_TRUE(mEnumerator->closeCamera(pCam1).isOk());
        mActiveCameras.clear();
    }

    // Explicitly release the display
    ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
}
1668
1669/*
1670 * CameraUseStreamConfigToDisplay:
1671 * End to end test of data flowing from the camera to the display. Similar to
1672 * CameraToDisplayRoundTrip test case but this case retrieves available stream
1673 * configurations from EVS and uses one of them to start a video stream.
1674 */
1675TEST_P(EvsAidlTest, CameraUseStreamConfigToDisplay) {
1676 LOG(INFO) << "Starting CameraUseStreamConfigToDisplay test";
1677
1678 // Get the camera list
1679 loadCameraList();
1680
1681 // Request available display IDs
1682 uint8_t targetDisplayId = 0;
1683 std::vector<uint8_t> displayIds;
1684 ASSERT_TRUE(mEnumerator->getDisplayIdList(&displayIds).isOk());
1685 EXPECT_GT(displayIds.size(), 0);
1686 targetDisplayId = displayIds[0];
1687
1688 // Request exclusive access to the EVS display
1689 std::shared_ptr<IEvsDisplay> pDisplay;
1690 ASSERT_TRUE(mEnumerator->openDisplay(targetDisplayId, &pDisplay).isOk());
1691 EXPECT_NE(pDisplay, nullptr);
1692
1693 // Test each reported camera
1694 for (auto&& cam : mCameraInfo) {
1695 // choose a configuration that has a frame rate faster than minReqFps.
1696 Stream targetCfg = {};
1697 const int32_t minReqFps = 15;
1698 int32_t maxArea = 0;
1699 camera_metadata_entry_t streamCfgs;
1700 bool foundCfg = false;
1701 if (!find_camera_metadata_entry(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()),
1702 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
1703 &streamCfgs)) {
1704 // Stream configurations are found in metadata
1705 RawStreamConfig* ptr = reinterpret_cast<RawStreamConfig*>(streamCfgs.data.i32);
1706 for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
1707 if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
1708 ptr->format == HAL_PIXEL_FORMAT_RGBA_8888) {
1709 if (ptr->width * ptr->height > maxArea && ptr->framerate >= minReqFps) {
1710 targetCfg.width = ptr->width;
1711 targetCfg.height = ptr->height;
1712
1713 maxArea = ptr->width * ptr->height;
1714 foundCfg = true;
1715 }
1716 }
1717 ++ptr;
1718 }
1719 }
1720 targetCfg.format = static_cast<PixelFormat>(HAL_PIXEL_FORMAT_RGBA_8888);
1721
1722 if (!foundCfg) {
1723 // Current EVS camera does not provide stream configurations in the
1724 // metadata.
1725 continue;
1726 }
1727
1728 std::shared_ptr<IEvsCamera> pCam;
1729 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
1730 EXPECT_NE(pCam, nullptr);
1731
1732 // Store a camera handle for a clean-up
1733 mActiveCameras.push_back(pCam);
1734
1735 // Set up a frame receiver object which will fire up its own thread.
1736 std::shared_ptr<FrameHandler> frameHandler =
1737 std::make_shared<FrameHandler>(pCam, cam, pDisplay, FrameHandler::eAutoReturn);
1738 EXPECT_NE(frameHandler, nullptr);
1739
1740 // Activate the display
1741 ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME).isOk());
1742
1743 // Start the camera's video stream
1744 ASSERT_TRUE(frameHandler->startStream());
1745
1746 // Wait a while to let the data flow
1747 static const int kSecondsToWait = 5;
1748 const int streamTimeMs =
1749 kSecondsToWait * kSecondsToMilliseconds - kMaxStreamStartMilliseconds;
1750 const unsigned minimumFramesExpected =
1751 streamTimeMs * kMinimumFramesPerSecond / kSecondsToMilliseconds;
1752 sleep(kSecondsToWait);
1753 unsigned framesReceived = 0;
1754 unsigned framesDisplayed = 0;
1755 frameHandler->getFramesCounters(&framesReceived, &framesDisplayed);
1756 EXPECT_EQ(framesReceived, framesDisplayed);
1757 EXPECT_GE(framesDisplayed, minimumFramesExpected);
1758
1759 // Turn off the display (yes, before the stream stops -- it should be handled)
1760 ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::NOT_VISIBLE).isOk());
1761
1762 // Shut down the streamer
1763 frameHandler->shutdown();
1764
1765 // Explicitly release the camera
1766 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
1767 mActiveCameras.clear();
1768 }
1769
1770 // Explicitly release the display
1771 ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
1772}
1773
1774/*
1775 * MultiCameraStreamUseConfig:
1776 * Verify that each client can start and stop video streams on the same
 * underlying camera with the same configuration.
1778 */
TEST_P(EvsAidlTest, MultiCameraStreamUseConfig) {
    LOG(INFO) << "Starting MultiCameraStream test";

    if (mIsHwModule) {
        // This test exercises stream sharing between two clients, which only the
        // EVS manager provides; a raw HW module implementation is skipped.
        return;
    }

    // Get the camera list
    loadCameraList();

    // Test each reported camera
    for (auto&& cam : mCameraInfo) {
        // Choose a configuration that has a frame rate faster than minReqFps:
        // scan the advertised output configurations and keep the largest
        // RGBA_8888 resolution that meets the minimum frame rate.
        Stream targetCfg = {};
        const int32_t minReqFps = 15;
        int32_t maxArea = 0;
        camera_metadata_entry_t streamCfgs;
        bool foundCfg = false;
        if (!find_camera_metadata_entry(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()),
                                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
                                        &streamCfgs)) {
            // Stream configurations are found in metadata; entries are packed as
            // groups of kStreamCfgSz int32 values viewed as RawStreamConfig.
            RawStreamConfig* ptr = reinterpret_cast<RawStreamConfig*>(streamCfgs.data.i32);
            for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
                if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
                    ptr->format == HAL_PIXEL_FORMAT_RGBA_8888) {
                    if (ptr->width * ptr->height > maxArea && ptr->framerate >= minReqFps) {
                        targetCfg.width = ptr->width;
                        targetCfg.height = ptr->height;

                        maxArea = ptr->width * ptr->height;
                        foundCfg = true;
                    }
                }
                ++ptr;
            }
        }
        targetCfg.format = static_cast<PixelFormat>(HAL_PIXEL_FORMAT_RGBA_8888);

        if (!foundCfg) {
            LOG(INFO) << "Device " << cam.id
                      << " does not provide a list of supported stream configurations, skipped";
            continue;
        }

        // Create the first camera client with a selected stream configuration.
        std::shared_ptr<IEvsCamera> pCam0;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam0).isOk());
        EXPECT_NE(pCam0, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pCam0);

        // Try to create the second camera client with different stream
        // configuration; this must be rejected because the camera is already
        // opened with a different stream id.
        int32_t id = targetCfg.id;
        targetCfg.id += 1;  // EVS manager sees only the stream id.
        std::shared_ptr<IEvsCamera> pCam1;
        ASSERT_FALSE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());

        // Try again with same stream configuration; sharing the stream must succeed.
        targetCfg.id = id;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());
        EXPECT_NE(pCam1, nullptr);

        // Set up per-client frame receiver objects which will fire up its own thread
        std::shared_ptr<FrameHandler> frameHandler0 =
                std::make_shared<FrameHandler>(pCam0, cam, nullptr, FrameHandler::eAutoReturn);
        std::shared_ptr<FrameHandler> frameHandler1 =
                std::make_shared<FrameHandler>(pCam1, cam, nullptr, FrameHandler::eAutoReturn);
        EXPECT_NE(frameHandler0, nullptr);
        EXPECT_NE(frameHandler1, nullptr);

        // Start the camera's video stream via each client
        ASSERT_TRUE(frameHandler0->startStream());
        ASSERT_TRUE(frameHandler1->startStream());

        // Ensure the stream starts
        frameHandler0->waitForFrameCount(1);
        frameHandler1->waitForFrameCount(1);

        nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);

        // Wait a bit, then ensure both clients get at least the required minimum number of frames
        sleep(5);
        nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
        unsigned framesReceived0 = 0, framesReceived1 = 0;
        frameHandler0->getFramesCounters(&framesReceived0, nullptr);
        frameHandler1->getFramesCounters(&framesReceived1, nullptr);
        framesReceived0 = framesReceived0 - 1;  // Back out the first frame we already waited for
        framesReceived1 = framesReceived1 - 1;  // Back out the first frame we already waited for
        nsecs_t runTime = end - firstFrame;
        float framesPerSecond0 = framesReceived0 / (runTime * kNanoToSeconds);
        float framesPerSecond1 = framesReceived1 / (runTime * kNanoToSeconds);
        LOG(INFO) << "Measured camera rate " << std::scientific << framesPerSecond0 << " fps and "
                  << framesPerSecond1 << " fps";
        EXPECT_GE(framesPerSecond0, kMinimumFramesPerSecond);
        EXPECT_GE(framesPerSecond1, kMinimumFramesPerSecond);

        // Shutdown one client
        frameHandler0->shutdown();

        // Read frame counters again
        frameHandler0->getFramesCounters(&framesReceived0, nullptr);
        frameHandler1->getFramesCounters(&framesReceived1, nullptr);

        // Wait a bit again; the stopped client's counter must stay frozen while
        // the remaining client keeps receiving frames.
        sleep(5);
        unsigned framesReceivedAfterStop0 = 0, framesReceivedAfterStop1 = 0;
        frameHandler0->getFramesCounters(&framesReceivedAfterStop0, nullptr);
        frameHandler1->getFramesCounters(&framesReceivedAfterStop1, nullptr);
        EXPECT_EQ(framesReceived0, framesReceivedAfterStop0);
        EXPECT_LT(framesReceived1, framesReceivedAfterStop1);

        // Shutdown another
        frameHandler1->shutdown();

        // Explicitly release the camera
        ASSERT_TRUE(mEnumerator->closeCamera(pCam0).isOk());
        ASSERT_TRUE(mEnumerator->closeCamera(pCam1).isOk());
        mActiveCameras.clear();
    }
}
1903
1904/*
1905 * LogicalCameraMetadata:
 * Opens each logical camera reported by the enumerator and validates its metadata by
 * checking its capability and locating the supporting physical camera device
1908 * identifiers.
1909 */
1910TEST_P(EvsAidlTest, LogicalCameraMetadata) {
1911 LOG(INFO) << "Starting LogicalCameraMetadata test";
1912
1913 // Get the camera list
1914 loadCameraList();
1915
1916 // Open and close each camera twice
1917 for (auto&& cam : mCameraInfo) {
1918 bool isLogicalCam = false;
1919 auto devices = getPhysicalCameraIds(cam.id, isLogicalCam);
1920 if (isLogicalCam) {
1921 ASSERT_GE(devices.size(), 1) << "Logical camera device must have at least one physical "
1922 "camera device ID in its metadata.";
1923 }
1924 }
1925}
1926
1927/*
1928 * CameraStreamExternalBuffering:
 * This is the same as CameraStreamBuffering except that frame buffers are allocated
 * by the test client and then imported by the EVS framework.
1931 */
1932TEST_P(EvsAidlTest, CameraStreamExternalBuffering) {
1933 LOG(INFO) << "Starting CameraStreamExternalBuffering test";
1934
1935 // Arbitrary constant (should be > 1 and not too big)
1936 static const unsigned int kBuffersToHold = 3;
1937
1938 // Get the camera list
1939 loadCameraList();
1940
1941 // Acquire the graphics buffer allocator
1942 android::GraphicBufferAllocator& alloc(android::GraphicBufferAllocator::get());
1943 const auto usage =
1944 GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_SW_READ_RARELY | GRALLOC_USAGE_SW_WRITE_OFTEN;
1945
1946 // Test each reported camera
1947 for (auto&& cam : mCameraInfo) {
1948 // Read a target resolution from the metadata
1949 Stream targetCfg = getFirstStreamConfiguration(
1950 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
1951 ASSERT_GT(targetCfg.width, 0);
1952 ASSERT_GT(targetCfg.height, 0);
1953
1954 // Allocate buffers to use
1955 std::vector<BufferDesc> buffers;
1956 buffers.resize(kBuffersToHold);
1957 for (auto i = 0; i < kBuffersToHold; ++i) {
1958 unsigned pixelsPerLine;
1959 buffer_handle_t memHandle = nullptr;
1960 android::status_t result =
1961 alloc.allocate(targetCfg.width, targetCfg.height,
1962 static_cast<android::PixelFormat>(targetCfg.format),
1963 /* layerCount = */ 1, usage, &memHandle, &pixelsPerLine,
1964 /* graphicBufferId = */ 0,
1965 /* requestorName = */ "CameraStreamExternalBufferingTest");
1966 if (result != android::NO_ERROR) {
1967 LOG(ERROR) << __FUNCTION__ << " failed to allocate memory.";
1968 // Release previous allocated buffers
1969 for (auto j = 0; j < i; j++) {
1970 alloc.free(::android::dupFromAidl(buffers[i].buffer.handle));
1971 }
1972 return;
1973 } else {
1974 BufferDesc buf;
1975 HardwareBufferDescription* pDesc =
1976 reinterpret_cast<HardwareBufferDescription*>(&buf.buffer.description);
1977 pDesc->width = targetCfg.width;
1978 pDesc->height = targetCfg.height;
1979 pDesc->layers = 1;
1980 pDesc->format = targetCfg.format;
1981 pDesc->usage = static_cast<BufferUsage>(usage);
1982 pDesc->stride = pixelsPerLine;
1983 buf.buffer.handle = ::android::dupToAidl(memHandle);
1984 buf.bufferId = i; // Unique number to identify this buffer
1985 buffers[i] = std::move(buf);
1986 }
1987 }
1988
1989 bool isLogicalCam = false;
1990 getPhysicalCameraIds(cam.id, isLogicalCam);
1991
1992 std::shared_ptr<IEvsCamera> pCam;
1993 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
1994 EXPECT_NE(pCam, nullptr);
1995
1996 // Store a camera handle for a clean-up
1997 mActiveCameras.push_back(pCam);
1998
1999 // Request to import buffers
2000 int delta = 0;
2001 auto status = pCam->importExternalBuffers(buffers, &delta);
2002 if (isLogicalCam) {
2003 ASSERT_FALSE(status.isOk());
2004 continue;
2005 }
2006
2007 ASSERT_TRUE(status.isOk());
2008 EXPECT_GE(delta, kBuffersToHold);
2009
2010 // Set up a frame receiver object which will fire up its own thread.
2011 std::shared_ptr<FrameHandler> frameHandler =
2012 std::make_shared<FrameHandler>(pCam, cam, nullptr, FrameHandler::eNoAutoReturn);
2013 EXPECT_NE(frameHandler, nullptr);
2014
2015 // Start the camera's video stream
2016 ASSERT_TRUE(frameHandler->startStream());
2017
2018 // Check that the video stream stalls once we've gotten exactly the number of buffers
2019 // we requested since we told the frameHandler not to return them.
2020 sleep(1); // 1 second should be enough for at least 5 frames to be delivered worst case
2021 unsigned framesReceived = 0;
2022 frameHandler->getFramesCounters(&framesReceived, nullptr);
2023 ASSERT_LE(kBuffersToHold, framesReceived) << "Stream didn't stall at expected buffer limit";
2024
2025 // Give back one buffer
2026 EXPECT_TRUE(frameHandler->returnHeldBuffer());
2027
2028 // Once we return a buffer, it shouldn't take more than 1/10 second to get a new one
2029 // filled since we require 10fps minimum -- but give a 10% allowance just in case.
2030 unsigned framesReceivedAfter = 0;
2031 usleep(110 * kMillisecondsToMicroseconds);
2032 frameHandler->getFramesCounters(&framesReceivedAfter, nullptr);
2033 EXPECT_EQ(framesReceived + 1, framesReceivedAfter) << "Stream should've resumed";
2034
2035 // Even when the camera pointer goes out of scope, the FrameHandler object will
2036 // keep the stream alive unless we tell it to shutdown.
2037 // Also note that the FrameHandle and the Camera have a mutual circular reference, so
2038 // we have to break that cycle in order for either of them to get cleaned up.
2039 frameHandler->shutdown();
2040
2041 // Explicitly release the camera
2042 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
2043 mActiveCameras.clear();
2044 // Release buffers
2045 for (auto& b : buffers) {
2046 alloc.free(::android::dupFromAidl(b.buffer.handle));
2047 }
2048 buffers.resize(0);
2049 }
2050}
2051
2052/*
2053 * UltrasonicsArrayOpenClean:
2054 * Opens each ultrasonics arrays reported by the enumerator and then explicitly closes it via a
2055 * call to closeUltrasonicsArray. Then repeats the test to ensure all ultrasonics arrays
2056 * can be reopened.
2057 */
2058TEST_P(EvsAidlTest, UltrasonicsArrayOpenClean) {
2059 LOG(INFO) << "Starting UltrasonicsArrayOpenClean test";
2060
2061 // Get the ultrasonics array list
2062 loadUltrasonicsArrayList();
2063
2064 // Open and close each ultrasonics array twice
2065 for (auto&& ultraInfo : mUltrasonicsArraysInfo) {
2066 for (int pass = 0; pass < 2; pass++) {
2067 std::shared_ptr<IEvsUltrasonicsArray> pUltrasonicsArray;
2068 ASSERT_TRUE(
2069 mEnumerator
2070 ->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId, &pUltrasonicsArray)
2071 .isOk());
2072 EXPECT_NE(pUltrasonicsArray, nullptr);
2073
2074 // Verify that this ultrasonics array self-identifies correctly
2075 UltrasonicsArrayDesc desc;
2076 ASSERT_TRUE(pUltrasonicsArray->getUltrasonicArrayInfo(&desc).isOk());
2077 EXPECT_EQ(ultraInfo.ultrasonicsArrayId, desc.ultrasonicsArrayId);
2078 LOG(DEBUG) << "Found ultrasonics array " << ultraInfo.ultrasonicsArrayId;
2079
2080 // Explicitly close the ultrasonics array so resources are released right away
2081 ASSERT_TRUE(mEnumerator->closeUltrasonicsArray(pUltrasonicsArray).isOk());
2082 }
2083 }
2084}
2085
2086// Starts a stream and verifies all data received is valid.
2087TEST_P(EvsAidlTest, UltrasonicsVerifyStreamData) {
2088 LOG(INFO) << "Starting UltrasonicsVerifyStreamData";
2089
2090 // Get the ultrasonics array list
2091 loadUltrasonicsArrayList();
2092
2093 // For each ultrasonics array.
2094 for (auto&& ultraInfo : mUltrasonicsArraysInfo) {
2095 LOG(DEBUG) << "Testing ultrasonics array: " << ultraInfo.ultrasonicsArrayId;
2096
2097 std::shared_ptr<IEvsUltrasonicsArray> pUltrasonicsArray;
2098 ASSERT_TRUE(
2099 mEnumerator->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId, &pUltrasonicsArray)
2100 .isOk());
2101 EXPECT_NE(pUltrasonicsArray, nullptr);
2102
2103 std::shared_ptr<FrameHandlerUltrasonics> frameHandler =
2104 std::make_shared<FrameHandlerUltrasonics>(pUltrasonicsArray);
2105 EXPECT_NE(frameHandler, nullptr);
2106
2107 // Start stream.
2108 ASSERT_TRUE(pUltrasonicsArray->startStream(frameHandler).isOk());
2109
2110 // Wait 5 seconds to receive frames.
2111 sleep(5);
2112
2113 // Stop stream.
2114 ASSERT_TRUE(pUltrasonicsArray->stopStream().isOk());
2115
2116 EXPECT_GT(frameHandler->getReceiveFramesCount(), 0);
2117 EXPECT_TRUE(frameHandler->areAllFramesValid());
2118
2119 // Explicitly close the ultrasonics array so resources are released right away
2120 ASSERT_TRUE(mEnumerator->closeUltrasonicsArray(pUltrasonicsArray).isOk());
2121 }
2122}
2123
// Sets frames in flight before and after the start of the stream and verifies success.
2125TEST_P(EvsAidlTest, UltrasonicsSetFramesInFlight) {
2126 LOG(INFO) << "Starting UltrasonicsSetFramesInFlight";
2127
2128 // Get the ultrasonics array list
2129 loadUltrasonicsArrayList();
2130
2131 // For each ultrasonics array.
2132 for (auto&& ultraInfo : mUltrasonicsArraysInfo) {
2133 LOG(DEBUG) << "Testing ultrasonics array: " << ultraInfo.ultrasonicsArrayId;
2134
2135 std::shared_ptr<IEvsUltrasonicsArray> pUltrasonicsArray;
2136 ASSERT_TRUE(
2137 mEnumerator->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId, &pUltrasonicsArray)
2138 .isOk());
2139 EXPECT_NE(pUltrasonicsArray, nullptr);
2140
2141 ASSERT_TRUE(pUltrasonicsArray->setMaxFramesInFlight(10).isOk());
2142
2143 std::shared_ptr<FrameHandlerUltrasonics> frameHandler =
2144 std::make_shared<FrameHandlerUltrasonics>(pUltrasonicsArray);
2145 EXPECT_NE(frameHandler, nullptr);
2146
2147 // Start stream.
2148 ASSERT_TRUE(pUltrasonicsArray->startStream(frameHandler).isOk());
2149 ASSERT_TRUE(pUltrasonicsArray->setMaxFramesInFlight(5).isOk());
2150
2151 // Stop stream.
2152 ASSERT_TRUE(pUltrasonicsArray->stopStream().isOk());
2153
2154 // Explicitly close the ultrasonics array so resources are released right away
2155 ASSERT_TRUE(mEnumerator->closeUltrasonicsArray(pUltrasonicsArray).isOk());
2156 }
2157}
2158
// Allow this suite to compile and run even on devices where no EVS HAL
// instance is registered (the value list below would then be empty).
GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(EvsAidlTest);
// Instantiate the suite once per registered IEvsEnumerator AIDL instance,
// naming each instantiation after the service instance it targets.
INSTANTIATE_TEST_SUITE_P(
        PerInstance, EvsAidlTest,
        testing::ValuesIn(android::getAidlHalInstanceNames(IEvsEnumerator::descriptor)),
        android::PrintInstanceNameToString);
2164
2165int main(int argc, char** argv) {
2166 ::testing::InitGoogleTest(&argc, argv);
2167 ABinderProcess_setThreadPoolMaxThreadCount(1);
2168 ABinderProcess_startThreadPool();
2169 return RUN_ALL_TESTS();
2170}