blob: 7fcac38aeadc5385d1459562bf6a51c1c0c8138a [file] [log] [blame]
Changyeon Jo80189012021-10-10 16:34:21 -07001/*
2 * Copyright (C) 2022 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "FrameHandler.h"
18#include "FrameHandlerUltrasonics.h"
19
20#include <aidl/Gtest.h>
21#include <aidl/Vintf.h>
22#include <aidl/android/hardware/automotive/evs/BufferDesc.h>
23#include <aidl/android/hardware/automotive/evs/CameraDesc.h>
24#include <aidl/android/hardware/automotive/evs/CameraParam.h>
25#include <aidl/android/hardware/automotive/evs/DisplayDesc.h>
26#include <aidl/android/hardware/automotive/evs/DisplayState.h>
27#include <aidl/android/hardware/automotive/evs/EvsEventDesc.h>
28#include <aidl/android/hardware/automotive/evs/EvsEventType.h>
29#include <aidl/android/hardware/automotive/evs/EvsResult.h>
30#include <aidl/android/hardware/automotive/evs/IEvsCamera.h>
31#include <aidl/android/hardware/automotive/evs/IEvsDisplay.h>
32#include <aidl/android/hardware/automotive/evs/IEvsEnumerator.h>
33#include <aidl/android/hardware/automotive/evs/IEvsUltrasonicsArray.h>
34#include <aidl/android/hardware/automotive/evs/ParameterRange.h>
35#include <aidl/android/hardware/automotive/evs/Stream.h>
36#include <aidl/android/hardware/automotive/evs/UltrasonicsArrayDesc.h>
37#include <aidl/android/hardware/common/NativeHandle.h>
38#include <aidl/android/hardware/graphics/common/HardwareBufferDescription.h>
39#include <aidl/android/hardware/graphics/common/PixelFormat.h>
40#include <aidlcommonsupport/NativeHandle.h>
41#include <android-base/logging.h>
42#include <android/binder_ibinder.h>
43#include <android/binder_manager.h>
44#include <android/binder_process.h>
45#include <android/binder_status.h>
46#include <system/camera_metadata.h>
47#include <ui/GraphicBuffer.h>
48#include <ui/GraphicBufferAllocator.h>
49#include <utils/Timers.h>
50
51#include <deque>
52#include <thread>
53#include <unordered_set>
54
namespace {

// These values are called out in the EVS design doc (as of Mar 8, 2017)
constexpr int kMaxStreamStartMilliseconds = 500;  // Allowed stream-start latency per physical device
constexpr int kMinimumFramesPerSecond = 10;       // Minimum acceptable sustained frame rate
constexpr int kSecondsToMilliseconds = 1000;
constexpr int kMillisecondsToMicroseconds = 1000;
constexpr float kNanoToMilliseconds = 0.000001f;
constexpr float kNanoToSeconds = 0.000000001f;

/*
 * Layout of one entry in the ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS
 * metadata array as EVS extends it.
 * Please note that this is different from what is defined in
 * libhardware/modules/camera/3_4/metadata/types.h; this has one additional
 * field to store a framerate.
 */
typedef struct {
    int32_t id;         // Stream identifier
    int32_t width;      // Frame width in pixels
    int32_t height;     // Frame height in pixels
    int32_t format;     // HAL pixel format
    int32_t direction;  // Input vs. output (ANDROID_SCALER_..._OUTPUT)
    int32_t framerate;  // EVS-specific extra field
} RawStreamConfig;
// Number of int32 words per RawStreamConfig entry; used as the stride when
// walking the raw i32 metadata payload in getFirstStreamConfiguration().
constexpr size_t kStreamCfgSz = sizeof(RawStreamConfig) / sizeof(int32_t);

}  // namespace
81
82using ::aidl::android::hardware::automotive::evs::BufferDesc;
83using ::aidl::android::hardware::automotive::evs::CameraDesc;
84using ::aidl::android::hardware::automotive::evs::CameraParam;
85using ::aidl::android::hardware::automotive::evs::DisplayDesc;
86using ::aidl::android::hardware::automotive::evs::DisplayState;
87using ::aidl::android::hardware::automotive::evs::EvsEventDesc;
88using ::aidl::android::hardware::automotive::evs::EvsEventType;
89using ::aidl::android::hardware::automotive::evs::EvsResult;
90using ::aidl::android::hardware::automotive::evs::IEvsCamera;
91using ::aidl::android::hardware::automotive::evs::IEvsDisplay;
92using ::aidl::android::hardware::automotive::evs::IEvsEnumerator;
93using ::aidl::android::hardware::automotive::evs::IEvsUltrasonicsArray;
94using ::aidl::android::hardware::automotive::evs::ParameterRange;
95using ::aidl::android::hardware::automotive::evs::Stream;
96using ::aidl::android::hardware::automotive::evs::UltrasonicsArrayDesc;
97using ::aidl::android::hardware::graphics::common::BufferUsage;
98using ::aidl::android::hardware::graphics::common::HardwareBufferDescription;
99using ::aidl::android::hardware::graphics::common::PixelFormat;
100using std::chrono_literals::operator""s;
101
102// The main test class for EVS
103class EvsAidlTest : public ::testing::TestWithParam<std::string> {
104 public:
105 virtual void SetUp() override {
106 // Make sure we can connect to the enumerator
107 std::string service_name = GetParam();
108 AIBinder* binder = AServiceManager_waitForService(service_name.data());
109 ASSERT_NE(binder, nullptr);
110 mEnumerator = IEvsEnumerator::fromBinder(::ndk::SpAIBinder(binder));
111 LOG(INFO) << "Test target service: " << service_name;
112
113 ASSERT_TRUE(mEnumerator->isHardware(&mIsHwModule).isOk());
114 }
115
116 virtual void TearDown() override {
117 // Attempt to close any active camera
118 for (auto&& cam : mActiveCameras) {
119 if (cam != nullptr) {
120 mEnumerator->closeCamera(cam);
121 }
122 }
123 mActiveCameras.clear();
124 }
125
126 protected:
127 void loadCameraList() {
128 // SetUp() must run first!
129 ASSERT_NE(mEnumerator, nullptr);
130
131 // Get the camera list
132 ASSERT_TRUE(mEnumerator->getCameraList(&mCameraInfo).isOk())
133 << "Failed to get a list of available cameras";
134 LOG(INFO) << "We have " << mCameraInfo.size() << " cameras.";
135 }
136
137 void loadUltrasonicsArrayList() {
138 // SetUp() must run first!
139 ASSERT_NE(mEnumerator, nullptr);
140
141 // Get the ultrasonics array list
142 ASSERT_TRUE(mEnumerator->getUltrasonicsArrayList(&mUltrasonicsArraysInfo).isOk())
143 << "Failed to get a list of available ultrasonics arrays";
144 LOG(INFO) << "We have " << mCameraInfo.size() << " ultrasonics arrays.";
145 }
146
147 bool isLogicalCamera(const camera_metadata_t* metadata) {
148 if (metadata == nullptr) {
149 // A logical camera device must have a valid camera metadata.
150 return false;
151 }
152
153 // Looking for LOGICAL_MULTI_CAMERA capability from metadata.
154 camera_metadata_ro_entry_t entry;
155 int rc = find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
156 &entry);
157 if (rc != 0) {
158 // No capabilities are found.
159 return false;
160 }
161
162 for (size_t i = 0; i < entry.count; ++i) {
163 uint8_t cap = entry.data.u8[i];
164 if (cap == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA) {
165 return true;
166 }
167 }
168
169 return false;
170 }
171
172 std::unordered_set<std::string> getPhysicalCameraIds(const std::string& id, bool& flag) {
173 std::unordered_set<std::string> physicalCameras;
174 const auto it = std::find_if(mCameraInfo.begin(), mCameraInfo.end(),
175 [&id](const CameraDesc& desc) { return id == desc.id; });
176 if (it == mCameraInfo.end()) {
177 // Unknown camera is requested. Return an empty list.
178 return physicalCameras;
179 }
180
181 const camera_metadata_t* metadata = reinterpret_cast<camera_metadata_t*>(&it->metadata[0]);
182 flag = isLogicalCamera(metadata);
183 if (!flag) {
184 // EVS assumes that the device w/o a valid metadata is a physical
185 // device.
186 LOG(INFO) << id << " is not a logical camera device.";
187 physicalCameras.insert(id);
188 return physicalCameras;
189 }
190
191 // Look for physical camera identifiers
192 camera_metadata_ro_entry entry;
193 int rc = find_camera_metadata_ro_entry(metadata, ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS,
194 &entry);
195 if (rc != 0) {
196 LOG(ERROR) << "No physical camera ID is found for a logical camera device";
197 }
198
199 const uint8_t* ids = entry.data.u8;
200 size_t start = 0;
201 for (size_t i = 0; i < entry.count; ++i) {
202 if (ids[i] == '\0') {
203 if (start != i) {
204 std::string id(reinterpret_cast<const char*>(ids + start));
205 physicalCameras.insert(id);
206 }
207 start = i + 1;
208 }
209 }
210
211 LOG(INFO) << id << " consists of " << physicalCameras.size() << " physical camera devices";
212 return physicalCameras;
213 }
214
215 Stream getFirstStreamConfiguration(camera_metadata_t* metadata) {
216 Stream targetCfg = {};
217 camera_metadata_entry_t streamCfgs;
218 if (!find_camera_metadata_entry(metadata, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
219 &streamCfgs)) {
220 // Stream configurations are found in metadata
221 RawStreamConfig* ptr = reinterpret_cast<RawStreamConfig*>(streamCfgs.data.i32);
222 for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
223 if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
224 ptr->format == HAL_PIXEL_FORMAT_RGBA_8888) {
225 targetCfg.width = ptr->width;
226 targetCfg.height = ptr->height;
227 targetCfg.format = static_cast<PixelFormat>(ptr->format);
228 break;
229 }
230 ++ptr;
231 }
232 }
233
234 return targetCfg;
235 }
236
237 // Every test needs access to the service
238 std::shared_ptr<IEvsEnumerator> mEnumerator;
239 // Empty unless/util loadCameraList() is called
240 std::vector<CameraDesc> mCameraInfo;
241 // boolean to tell current module under testing is HW module implementation
242 // or not
243 bool mIsHwModule;
244 // A list of active camera handles that are need to be cleaned up
245 std::deque<std::shared_ptr<IEvsCamera>> mActiveCameras;
246 // Empty unless/util loadUltrasonicsArrayList() is called
247 std::vector<UltrasonicsArrayDesc> mUltrasonicsArraysInfo;
248 // A list of active ultrasonics array handles that are to be cleaned up
249 std::deque<std::weak_ptr<IEvsUltrasonicsArray>> mActiveUltrasonicsArrays;
250};
251
252// Test cases, their implementations, and corresponding requirements are
253// documented at go/aae-evs-public-api-test.
254
255/*
256 * CameraOpenClean:
257 * Opens each camera reported by the enumerator and then explicitly closes it via a
258 * call to closeCamera. Then repeats the test to ensure all cameras can be reopened.
259 */
260TEST_P(EvsAidlTest, CameraOpenClean) {
261 LOG(INFO) << "Starting CameraOpenClean test";
262
263 // Get the camera list
264 loadCameraList();
265
266 // Open and close each camera twice
267 for (auto&& cam : mCameraInfo) {
268 bool isLogicalCam = false;
269 auto devices = getPhysicalCameraIds(cam.id, isLogicalCam);
270 if (mIsHwModule && isLogicalCam) {
271 LOG(INFO) << "Skip a logical device, " << cam.id << " for HW target.";
272 continue;
273 }
274
275 // Read a target resolution from the metadata
276 Stream targetCfg = getFirstStreamConfiguration(
277 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
278 ASSERT_GT(targetCfg.width, 0);
279 ASSERT_GT(targetCfg.height, 0);
280
281 for (int pass = 0; pass < 2; pass++) {
282 std::shared_ptr<IEvsCamera> pCam;
283 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
284 ASSERT_NE(pCam, nullptr);
285
286 CameraDesc cameraInfo;
287 for (auto&& devName : devices) {
288 ASSERT_TRUE(pCam->getPhysicalCameraInfo(devName, &cameraInfo).isOk());
289 EXPECT_EQ(devName, cameraInfo.id);
290 }
291
292 // Store a camera handle for a clean-up
293 mActiveCameras.push_back(pCam);
294
295 // Verify that this camera self-identifies correctly
296 ASSERT_TRUE(pCam->getCameraInfo(&cameraInfo).isOk());
297 EXPECT_EQ(cam.id, cameraInfo.id);
298
299 // Verify methods for extended info
300 const auto id = 0xFFFFFFFF; // meaningless id
301 std::vector<uint8_t> values;
302 auto status = pCam->setExtendedInfo(id, values);
303 if (isLogicalCam) {
304 EXPECT_TRUE(!status.isOk() && status.getServiceSpecificError() ==
305 static_cast<int>(EvsResult::NOT_SUPPORTED));
306 } else {
307 EXPECT_TRUE(status.isOk());
308 }
309
310 status = pCam->getExtendedInfo(id, &values);
311 if (isLogicalCam) {
312 EXPECT_TRUE(!status.isOk() && status.getServiceSpecificError() ==
313 static_cast<int>(EvsResult::NOT_SUPPORTED));
314 } else {
315 EXPECT_TRUE(status.isOk());
316 }
317
318 // Explicitly close the camera so resources are released right away
319 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
320 mActiveCameras.clear();
321 }
322 }
323}
324
325/*
326 * CameraOpenAggressive:
327 * Opens each camera reported by the enumerator twice in a row without an intervening closeCamera
328 * call. This ensures that the intended "aggressive open" behavior works. This is necessary for
329 * the system to be tolerant of shutdown/restart race conditions.
330 */
331TEST_P(EvsAidlTest, CameraOpenAggressive) {
332 LOG(INFO) << "Starting CameraOpenAggressive test";
333
334 // Get the camera list
335 loadCameraList();
336
337 // Open and close each camera twice
338 for (auto&& cam : mCameraInfo) {
339 bool isLogicalCam = false;
340 getPhysicalCameraIds(cam.id, isLogicalCam);
341 if (mIsHwModule && isLogicalCam) {
342 LOG(INFO) << "Skip a logical device, " << cam.id << " for HW target.";
343 continue;
344 }
345
346 // Read a target resolution from the metadata
347 Stream targetCfg = getFirstStreamConfiguration(
348 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
349 ASSERT_GT(targetCfg.width, 0);
350 ASSERT_GT(targetCfg.height, 0);
351
352 mActiveCameras.clear();
353 std::shared_ptr<IEvsCamera> pCam;
354 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
355 EXPECT_NE(pCam, nullptr);
356
357 // Store a camera handle for a clean-up
358 mActiveCameras.push_back(pCam);
359
360 // Verify that this camera self-identifies correctly
361 CameraDesc cameraInfo;
362 ASSERT_TRUE(pCam->getCameraInfo(&cameraInfo).isOk());
363 EXPECT_EQ(cam.id, cameraInfo.id);
364
365 std::shared_ptr<IEvsCamera> pCam2;
366 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam2).isOk());
367 EXPECT_NE(pCam2, nullptr);
368 EXPECT_NE(pCam, pCam2);
369
370 // Store a camera handle for a clean-up
371 mActiveCameras.push_back(pCam2);
372
373 auto status = pCam->setMaxFramesInFlight(2);
374 if (mIsHwModule) {
375 // Verify that the old camera rejects calls via HW module.
376 EXPECT_TRUE(!status.isOk() && status.getServiceSpecificError() ==
377 static_cast<int>(EvsResult::OWNERSHIP_LOST));
378 } else {
379 // default implementation supports multiple clients.
380 EXPECT_TRUE(status.isOk());
381 }
382
383 // Close the superseded camera
384 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
385 mActiveCameras.pop_front();
386
387 // Verify that the second camera instance self-identifies correctly
388 ASSERT_TRUE(pCam2->getCameraInfo(&cameraInfo).isOk());
389 EXPECT_EQ(cam.id, cameraInfo.id);
390
391 // Close the second camera instance
392 ASSERT_TRUE(mEnumerator->closeCamera(pCam2).isOk());
393 mActiveCameras.pop_front();
394 }
395
396 // Sleep here to ensure the destructor cleanup has time to run so we don't break follow on tests
397 sleep(1); // I hate that this is an arbitrary time to wait. :( b/36122635
398}
399
400/*
401 * CameraStreamPerformance:
402 * Measure and qualify the stream start up time and streaming frame rate of each reported camera
403 */
404TEST_P(EvsAidlTest, CameraStreamPerformance) {
405 LOG(INFO) << "Starting CameraStreamPerformance test";
406
407 // Get the camera list
408 loadCameraList();
409
410 // Test each reported camera
411 for (auto&& cam : mCameraInfo) {
412 bool isLogicalCam = false;
413 auto devices = getPhysicalCameraIds(cam.id, isLogicalCam);
414 if (mIsHwModule && isLogicalCam) {
415 LOG(INFO) << "Skip a logical device " << cam.id;
416 continue;
417 }
418
419 // Read a target resolution from the metadata
420 Stream targetCfg = getFirstStreamConfiguration(
421 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
422 ASSERT_GT(targetCfg.width, 0);
423 ASSERT_GT(targetCfg.height, 0);
424
425 std::shared_ptr<IEvsCamera> pCam;
426 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
427 EXPECT_NE(pCam, nullptr);
428
429 // Store a camera handle for a clean-up
430 mActiveCameras.push_back(pCam);
431
432 // Set up a frame receiver object which will fire up its own thread
Frederick Mayle7056b242022-03-29 02:38:12 +0000433 std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
434 pCam, cam, nullptr, FrameHandler::eAutoReturn);
Changyeon Jo80189012021-10-10 16:34:21 -0700435 EXPECT_NE(frameHandler, nullptr);
436
437 // Start the camera's video stream
438 nsecs_t start = systemTime(SYSTEM_TIME_MONOTONIC);
439 ASSERT_TRUE(frameHandler->startStream());
440
441 // Ensure the first frame arrived within the expected time
442 frameHandler->waitForFrameCount(1);
443 nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
444 nsecs_t timeToFirstFrame = systemTime(SYSTEM_TIME_MONOTONIC) - start;
445
446 // Extra delays are expected when we attempt to start a video stream on
447 // the logical camera device. The amount of delay is expected the
448 // number of physical camera devices multiplied by
449 // kMaxStreamStartMilliseconds at most.
450 EXPECT_LE(nanoseconds_to_milliseconds(timeToFirstFrame),
451 kMaxStreamStartMilliseconds * devices.size());
452 printf("%s: Measured time to first frame %0.2f ms\n", cam.id.data(),
453 timeToFirstFrame * kNanoToMilliseconds);
454 LOG(INFO) << cam.id << ": Measured time to first frame " << std::scientific
455 << timeToFirstFrame * kNanoToMilliseconds << " ms.";
456
457 // Check aspect ratio
458 unsigned width = 0, height = 0;
459 frameHandler->getFrameDimension(&width, &height);
460 EXPECT_GE(width, height);
461
462 // Wait a bit, then ensure we get at least the required minimum number of frames
463 sleep(5);
464 nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
465
466 // Even when the camera pointer goes out of scope, the FrameHandler object will
467 // keep the stream alive unless we tell it to shutdown.
468 // Also note that the FrameHandle and the Camera have a mutual circular reference, so
469 // we have to break that cycle in order for either of them to get cleaned up.
470 frameHandler->shutdown();
471
472 unsigned framesReceived = 0;
473 frameHandler->getFramesCounters(&framesReceived, nullptr);
474 framesReceived = framesReceived - 1; // Back out the first frame we already waited for
475 nsecs_t runTime = end - firstFrame;
476 float framesPerSecond = framesReceived / (runTime * kNanoToSeconds);
477 printf("Measured camera rate %3.2f fps\n", framesPerSecond);
478 LOG(INFO) << "Measured camera rate " << std::scientific << framesPerSecond << " fps.";
479 EXPECT_GE(framesPerSecond, kMinimumFramesPerSecond);
480
481 // Explicitly release the camera
482 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
483 mActiveCameras.clear();
484 }
485}
486
487/*
488 * CameraStreamBuffering:
489 * Ensure the camera implementation behaves properly when the client holds onto buffers for more
490 * than one frame time. The camera must cleanly skip frames until the client is ready again.
491 */
492TEST_P(EvsAidlTest, CameraStreamBuffering) {
493 LOG(INFO) << "Starting CameraStreamBuffering test";
494
495 // Arbitrary constant (should be > 1 and not too big)
496 static const unsigned int kBuffersToHold = 6;
497
498 // Get the camera list
499 loadCameraList();
500
501 // Test each reported camera
502 for (auto&& cam : mCameraInfo) {
503 bool isLogicalCam = false;
504 getPhysicalCameraIds(cam.id, isLogicalCam);
505 if (mIsHwModule && isLogicalCam) {
506 LOG(INFO) << "Skip a logical device " << cam.id << " for HW target.";
507 continue;
508 }
509
510 // Read a target resolution from the metadata
511 Stream targetCfg = getFirstStreamConfiguration(
512 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
513 ASSERT_GT(targetCfg.width, 0);
514 ASSERT_GT(targetCfg.height, 0);
515
516 std::shared_ptr<IEvsCamera> pCam;
517 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
518 EXPECT_NE(pCam, nullptr);
519
520 // Store a camera handle for a clean-up
521 mActiveCameras.push_back(pCam);
522
523 // Ask for a very large number of buffers in flight to ensure it errors correctly
524 auto badResult = pCam->setMaxFramesInFlight(0xFFFFFFFF);
525 EXPECT_TRUE(!badResult.isOk() && badResult.getServiceSpecificError() ==
526 static_cast<int>(EvsResult::BUFFER_NOT_AVAILABLE));
527
528 // Now ask for exactly two buffers in flight as we'll test behavior in that case
529 ASSERT_TRUE(pCam->setMaxFramesInFlight(kBuffersToHold).isOk());
530
531 // Set up a frame receiver object which will fire up its own thread.
Frederick Mayle7056b242022-03-29 02:38:12 +0000532 std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
533 pCam, cam, nullptr, FrameHandler::eNoAutoReturn);
Changyeon Jo80189012021-10-10 16:34:21 -0700534 EXPECT_NE(frameHandler, nullptr);
535
536 // Start the camera's video stream
537 ASSERT_TRUE(frameHandler->startStream());
538
539 // Check that the video stream stalls once we've gotten exactly the number of buffers
540 // we requested since we told the frameHandler not to return them.
541 sleep(1); // 1 second should be enough for at least 5 frames to be delivered worst case
542 unsigned framesReceived = 0;
543 frameHandler->getFramesCounters(&framesReceived, nullptr);
544 ASSERT_EQ(kBuffersToHold, framesReceived) << "Stream didn't stall at expected buffer limit";
545
546 // Give back one buffer
547 ASSERT_TRUE(frameHandler->returnHeldBuffer());
548
549 // Once we return a buffer, it shouldn't take more than 1/10 second to get a new one
550 // filled since we require 10fps minimum -- but give a 10% allowance just in case.
551 usleep(110 * kMillisecondsToMicroseconds);
552 frameHandler->getFramesCounters(&framesReceived, nullptr);
553 EXPECT_EQ(kBuffersToHold + 1, framesReceived) << "Stream should've resumed";
554
555 // Even when the camera pointer goes out of scope, the FrameHandler object will
556 // keep the stream alive unless we tell it to shutdown.
557 // Also note that the FrameHandle and the Camera have a mutual circular reference, so
558 // we have to break that cycle in order for either of them to get cleaned up.
559 frameHandler->shutdown();
560
561 // Explicitly release the camera
562 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
563 mActiveCameras.clear();
564 }
565}
566
567/*
568 * CameraToDisplayRoundTrip:
569 * End to end test of data flowing from the camera to the display. Each delivered frame of camera
570 * imagery is simply copied to the display buffer and presented on screen. This is the one test
571 * which a human could observe to see the operation of the system on the physical display.
572 */
573TEST_P(EvsAidlTest, CameraToDisplayRoundTrip) {
574 LOG(INFO) << "Starting CameraToDisplayRoundTrip test";
575
576 // Get the camera list
577 loadCameraList();
578
579 // Request available display IDs
580 uint8_t targetDisplayId = 0;
581 std::vector<uint8_t> displayIds;
582 ASSERT_TRUE(mEnumerator->getDisplayIdList(&displayIds).isOk());
583 EXPECT_GT(displayIds.size(), 0);
584 targetDisplayId = displayIds[0];
585
586 // Request exclusive access to the first EVS display
587 std::shared_ptr<IEvsDisplay> pDisplay;
588 ASSERT_TRUE(mEnumerator->openDisplay(targetDisplayId, &pDisplay).isOk());
589 EXPECT_NE(pDisplay, nullptr);
590 LOG(INFO) << "Display " << targetDisplayId << " is in use.";
591
592 // Get the display descriptor
593 DisplayDesc displayDesc;
594 ASSERT_TRUE(pDisplay->getDisplayInfo(&displayDesc).isOk());
595 LOG(INFO) << " Resolution: " << displayDesc.width << "x" << displayDesc.height;
596 ASSERT_GT(displayDesc.width, 0);
597 ASSERT_GT(displayDesc.height, 0);
598
599 // Test each reported camera
600 for (auto&& cam : mCameraInfo) {
601 bool isLogicalCam = false;
602 getPhysicalCameraIds(cam.id, isLogicalCam);
603 if (mIsHwModule && isLogicalCam) {
604 LOG(INFO) << "Skip a logical device " << cam.id << " for HW target.";
605 continue;
606 }
607
608 // Read a target resolution from the metadata
609 Stream targetCfg = getFirstStreamConfiguration(
610 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
611 ASSERT_GT(targetCfg.width, 0);
612 ASSERT_GT(targetCfg.height, 0);
613
614 std::shared_ptr<IEvsCamera> pCam;
615 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
616 EXPECT_NE(pCam, nullptr);
617
618 // Store a camera handle for a clean-up
619 mActiveCameras.push_back(pCam);
620
621 // Set up a frame receiver object which will fire up its own thread.
Frederick Mayle7056b242022-03-29 02:38:12 +0000622 std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
623 pCam, cam, pDisplay, FrameHandler::eAutoReturn);
Changyeon Jo80189012021-10-10 16:34:21 -0700624 EXPECT_NE(frameHandler, nullptr);
625
626 // Activate the display
627 ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME).isOk());
628
629 // Start the camera's video stream
630 ASSERT_TRUE(frameHandler->startStream());
631
632 // Wait a while to let the data flow
633 static const int kSecondsToWait = 5;
634 const int streamTimeMs =
635 kSecondsToWait * kSecondsToMilliseconds - kMaxStreamStartMilliseconds;
636 const unsigned minimumFramesExpected =
637 streamTimeMs * kMinimumFramesPerSecond / kSecondsToMilliseconds;
638 sleep(kSecondsToWait);
639 unsigned framesReceived = 0;
640 unsigned framesDisplayed = 0;
641 frameHandler->getFramesCounters(&framesReceived, &framesDisplayed);
642 EXPECT_EQ(framesReceived, framesDisplayed);
643 EXPECT_GE(framesDisplayed, minimumFramesExpected);
644
645 // Turn off the display (yes, before the stream stops -- it should be handled)
646 ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::NOT_VISIBLE).isOk());
647
648 // Shut down the streamer
649 frameHandler->shutdown();
650
651 // Explicitly release the camera
652 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
653 mActiveCameras.clear();
654 }
655
656 // Explicitly release the display
657 ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
658}
659
660/*
661 * MultiCameraStream:
662 * Verify that each client can start and stop video streams on the same
663 * underlying camera.
664 */
665TEST_P(EvsAidlTest, MultiCameraStream) {
666 LOG(INFO) << "Starting MultiCameraStream test";
667
668 if (mIsHwModule) {
669 // This test is not for HW module implementation.
670 return;
671 }
672
673 // Get the camera list
674 loadCameraList();
675
676 // Test each reported camera
677 for (auto&& cam : mCameraInfo) {
678 // Read a target resolution from the metadata
679 Stream targetCfg = getFirstStreamConfiguration(
680 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
681 ASSERT_GT(targetCfg.width, 0);
682 ASSERT_GT(targetCfg.height, 0);
683
684 // Create two camera clients.
685 std::shared_ptr<IEvsCamera> pCam0;
686 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam0).isOk());
687 EXPECT_NE(pCam0, nullptr);
688
689 // Store a camera handle for a clean-up
690 mActiveCameras.push_back(pCam0);
691
692 std::shared_ptr<IEvsCamera> pCam1;
693 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());
694 EXPECT_NE(pCam1, nullptr);
695
696 // Store a camera handle for a clean-up
697 mActiveCameras.push_back(pCam1);
698
699 // Set up per-client frame receiver objects which will fire up its own thread
Frederick Mayle7056b242022-03-29 02:38:12 +0000700 std::shared_ptr<FrameHandler> frameHandler0 = ndk::SharedRefBase::make<FrameHandler>(
701 pCam0, cam, nullptr, FrameHandler::eAutoReturn);
702 std::shared_ptr<FrameHandler> frameHandler1 = ndk::SharedRefBase::make<FrameHandler>(
703 pCam1, cam, nullptr, FrameHandler::eAutoReturn);
Changyeon Jo80189012021-10-10 16:34:21 -0700704 EXPECT_NE(frameHandler0, nullptr);
705 EXPECT_NE(frameHandler1, nullptr);
706
707 // Start the camera's video stream via client 0
708 ASSERT_TRUE(frameHandler0->startStream());
709 ASSERT_TRUE(frameHandler1->startStream());
710
711 // Ensure the stream starts
712 frameHandler0->waitForFrameCount(1);
713 frameHandler1->waitForFrameCount(1);
714
715 nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
716
717 // Wait a bit, then ensure both clients get at least the required minimum number of frames
718 sleep(5);
719 nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
720 unsigned framesReceived0 = 0, framesReceived1 = 0;
721 frameHandler0->getFramesCounters(&framesReceived0, nullptr);
722 frameHandler1->getFramesCounters(&framesReceived1, nullptr);
723 framesReceived0 = framesReceived0 - 1; // Back out the first frame we already waited for
724 framesReceived1 = framesReceived1 - 1; // Back out the first frame we already waited for
725 nsecs_t runTime = end - firstFrame;
726 float framesPerSecond0 = framesReceived0 / (runTime * kNanoToSeconds);
727 float framesPerSecond1 = framesReceived1 / (runTime * kNanoToSeconds);
728 LOG(INFO) << "Measured camera rate " << std::scientific << framesPerSecond0 << " fps and "
729 << framesPerSecond1 << " fps";
730 EXPECT_GE(framesPerSecond0, kMinimumFramesPerSecond);
731 EXPECT_GE(framesPerSecond1, kMinimumFramesPerSecond);
732
733 // Shutdown one client
734 frameHandler0->shutdown();
735
736 // Read frame counters again
737 frameHandler0->getFramesCounters(&framesReceived0, nullptr);
738 frameHandler1->getFramesCounters(&framesReceived1, nullptr);
739
740 // Wait a bit again
741 sleep(5);
742 unsigned framesReceivedAfterStop0 = 0, framesReceivedAfterStop1 = 0;
743 frameHandler0->getFramesCounters(&framesReceivedAfterStop0, nullptr);
744 frameHandler1->getFramesCounters(&framesReceivedAfterStop1, nullptr);
745 EXPECT_EQ(framesReceived0, framesReceivedAfterStop0);
746 EXPECT_LT(framesReceived1, framesReceivedAfterStop1);
747
748 // Shutdown another
749 frameHandler1->shutdown();
750
751 // Explicitly release the camera
752 ASSERT_TRUE(mEnumerator->closeCamera(pCam0).isOk());
753 ASSERT_TRUE(mEnumerator->closeCamera(pCam1).isOk());
754 mActiveCameras.clear();
755
756 // TODO(b/145459970, b/145457727): below sleep() is added to ensure the
757 // destruction of active camera objects; this may be related with two
758 // issues.
759 sleep(1);
760 }
761}
762
763/*
764 * CameraParameter:
765 * Verify that a client can adjust a camera parameter.
766 */
767TEST_P(EvsAidlTest, CameraParameter) {
768 LOG(INFO) << "Starting CameraParameter test";
769
770 // Get the camera list
771 loadCameraList();
772
773 // Test each reported camera
774 for (auto&& cam : mCameraInfo) {
775 bool isLogicalCam = false;
776 getPhysicalCameraIds(cam.id, isLogicalCam);
777 if (isLogicalCam) {
778 // TODO(b/145465724): Support camera parameter programming on
779 // logical devices.
780 LOG(INFO) << "Skip a logical device " << cam.id;
781 continue;
782 }
783
784 // Read a target resolution from the metadata
785 Stream targetCfg = getFirstStreamConfiguration(
786 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
787 ASSERT_GT(targetCfg.width, 0);
788 ASSERT_GT(targetCfg.height, 0);
789
790 // Create a camera client
791 std::shared_ptr<IEvsCamera> pCam;
792 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
793 EXPECT_NE(pCam, nullptr);
794
795 // Store a camera
796 mActiveCameras.push_back(pCam);
797
798 // Get the parameter list
799 std::vector<CameraParam> cmds;
800 ASSERT_TRUE(pCam->getParameterList(&cmds).isOk());
801 if (cmds.size() < 1) {
802 continue;
803 }
804
805 // Set up per-client frame receiver objects which will fire up its own thread
Frederick Mayle7056b242022-03-29 02:38:12 +0000806 std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
807 pCam, cam, nullptr, FrameHandler::eAutoReturn);
Changyeon Jo80189012021-10-10 16:34:21 -0700808 EXPECT_NE(frameHandler, nullptr);
809
810 // Start the camera's video stream
811 ASSERT_TRUE(frameHandler->startStream());
812
813 // Ensure the stream starts
814 frameHandler->waitForFrameCount(1);
815
816 // Set current client is the primary client
817 ASSERT_TRUE(pCam->setPrimaryClient().isOk());
818 for (auto& cmd : cmds) {
819 // Get a valid parameter value range
820 ParameterRange range;
821 ASSERT_TRUE(pCam->getIntParameterRange(cmd, &range).isOk());
822
823 std::vector<int32_t> values;
824 if (cmd == CameraParam::ABSOLUTE_FOCUS) {
825 // Try to turn off auto-focus
826 ASSERT_TRUE(pCam->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
827 for (auto&& v : values) {
828 EXPECT_EQ(v, 0);
829 }
830 }
831
832 // Try to program a parameter with a random value [minVal, maxVal]
833 int32_t val0 = range.min + (std::rand() % (range.max - range.min));
834
835 // Rounding down
836 val0 = val0 - (val0 % range.step);
837 values.clear();
838 ASSERT_TRUE(pCam->setIntParameter(cmd, val0, &values).isOk());
839
840 values.clear();
841 ASSERT_TRUE(pCam->getIntParameter(cmd, &values).isOk());
842 for (auto&& v : values) {
843 EXPECT_EQ(val0, v) << "Values are not matched.";
844 }
845 }
846 ASSERT_TRUE(pCam->unsetPrimaryClient().isOk());
847
848 // Shutdown
849 frameHandler->shutdown();
850
851 // Explicitly release the camera
852 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
853 mActiveCameras.clear();
854 }
855}
856
857/*
858 * CameraPrimaryClientRelease
859 * Verify that non-primary client gets notified when the primary client either
860 * terminates or releases a role.
861 */
862TEST_P(EvsAidlTest, CameraPrimaryClientRelease) {
863 LOG(INFO) << "Starting CameraPrimaryClientRelease test";
864
865 if (mIsHwModule) {
866 // This test is not for HW module implementation.
867 return;
868 }
869
870 // Get the camera list
871 loadCameraList();
872
873 // Test each reported camera
874 for (auto&& cam : mCameraInfo) {
875 bool isLogicalCam = false;
876 getPhysicalCameraIds(cam.id, isLogicalCam);
877 if (isLogicalCam) {
878 // TODO(b/145465724): Support camera parameter programming on
879 // logical devices.
880 LOG(INFO) << "Skip a logical device " << cam.id;
881 continue;
882 }
883
884 // Read a target resolution from the metadata
885 Stream targetCfg = getFirstStreamConfiguration(
886 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
887 ASSERT_GT(targetCfg.width, 0);
888 ASSERT_GT(targetCfg.height, 0);
889
890 // Create two camera clients.
891 std::shared_ptr<IEvsCamera> pPrimaryCam;
892 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pPrimaryCam).isOk());
893 EXPECT_NE(pPrimaryCam, nullptr);
894
895 // Store a camera handle for a clean-up
896 mActiveCameras.push_back(pPrimaryCam);
897
898 std::shared_ptr<IEvsCamera> pSecondaryCam;
899 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pSecondaryCam).isOk());
900 EXPECT_NE(pSecondaryCam, nullptr);
901
902 // Store a camera handle for a clean-up
903 mActiveCameras.push_back(pSecondaryCam);
904
905 // Set up per-client frame receiver objects which will fire up its own thread
Frederick Mayle7056b242022-03-29 02:38:12 +0000906 std::shared_ptr<FrameHandler> frameHandlerPrimary = ndk::SharedRefBase::make<FrameHandler>(
Changyeon Jo80189012021-10-10 16:34:21 -0700907 pPrimaryCam, cam, nullptr, FrameHandler::eAutoReturn);
Frederick Mayle7056b242022-03-29 02:38:12 +0000908 std::shared_ptr<FrameHandler> frameHandlerSecondary =
909 ndk::SharedRefBase::make<FrameHandler>(pSecondaryCam, cam, nullptr,
910 FrameHandler::eAutoReturn);
Changyeon Jo80189012021-10-10 16:34:21 -0700911 EXPECT_NE(frameHandlerPrimary, nullptr);
912 EXPECT_NE(frameHandlerSecondary, nullptr);
913
914 // Set one client as the primary client
915 ASSERT_TRUE(pPrimaryCam->setPrimaryClient().isOk());
916
917 // Try to set another client as the primary client.
918 ASSERT_FALSE(pSecondaryCam->setPrimaryClient().isOk());
919
920 // Start the camera's video stream via a primary client client.
921 ASSERT_TRUE(frameHandlerPrimary->startStream());
922
923 // Ensure the stream starts
924 frameHandlerPrimary->waitForFrameCount(1);
925
926 // Start the camera's video stream via another client
927 ASSERT_TRUE(frameHandlerSecondary->startStream());
928
929 // Ensure the stream starts
930 frameHandlerSecondary->waitForFrameCount(1);
931
932 // Non-primary client expects to receive a primary client role relesed
933 // notification.
934 EvsEventDesc aTargetEvent = {};
935 EvsEventDesc aNotification = {};
936
937 bool listening = false;
938 std::mutex eventLock;
939 std::condition_variable eventCond;
940 std::thread listener =
941 std::thread([&aNotification, &frameHandlerSecondary, &listening, &eventCond]() {
942 // Notify that a listening thread is running.
943 listening = true;
944 eventCond.notify_all();
945
946 EvsEventDesc aTargetEvent;
947 aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
948 if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification, true)) {
949 LOG(WARNING) << "A timer is expired before a target event is fired.";
950 }
951 });
952
953 // Wait until a listening thread starts.
954 std::unique_lock<std::mutex> lock(eventLock);
955 auto timer = std::chrono::system_clock::now();
956 while (!listening) {
957 timer += 1s;
958 eventCond.wait_until(lock, timer);
959 }
960 lock.unlock();
961
962 // Release a primary client role.
963 ASSERT_TRUE(pPrimaryCam->unsetPrimaryClient().isOk());
964
965 // Join a listening thread.
966 if (listener.joinable()) {
967 listener.join();
968 }
969
970 // Verify change notifications.
971 ASSERT_EQ(EvsEventType::MASTER_RELEASED, static_cast<EvsEventType>(aNotification.aType));
972
973 // Non-primary becomes a primary client.
974 ASSERT_TRUE(pSecondaryCam->setPrimaryClient().isOk());
975
976 // Previous primary client fails to become a primary client.
977 ASSERT_FALSE(pPrimaryCam->setPrimaryClient().isOk());
978
979 listening = false;
980 listener = std::thread([&aNotification, &frameHandlerPrimary, &listening, &eventCond]() {
981 // Notify that a listening thread is running.
982 listening = true;
983 eventCond.notify_all();
984
985 EvsEventDesc aTargetEvent;
986 aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
987 if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification, true)) {
988 LOG(WARNING) << "A timer is expired before a target event is fired.";
989 }
990 });
991
992 // Wait until a listening thread starts.
993 timer = std::chrono::system_clock::now();
994 lock.lock();
995 while (!listening) {
996 eventCond.wait_until(lock, timer + 1s);
997 }
998 lock.unlock();
999
1000 // Closing current primary client.
1001 frameHandlerSecondary->shutdown();
1002
1003 // Join a listening thread.
1004 if (listener.joinable()) {
1005 listener.join();
1006 }
1007
1008 // Verify change notifications.
1009 ASSERT_EQ(EvsEventType::MASTER_RELEASED, static_cast<EvsEventType>(aNotification.aType));
1010
1011 // Closing streams.
1012 frameHandlerPrimary->shutdown();
1013
1014 // Explicitly release the camera
1015 ASSERT_TRUE(mEnumerator->closeCamera(pPrimaryCam).isOk());
1016 ASSERT_TRUE(mEnumerator->closeCamera(pSecondaryCam).isOk());
1017 mActiveCameras.clear();
1018 }
1019}
1020
1021/*
1022 * MultiCameraParameter:
1023 * Verify that primary and non-primary clients behave as expected when they try to adjust
1024 * camera parameters.
1025 */
TEST_P(EvsAidlTest, MultiCameraParameter) {
    LOG(INFO) << "Starting MultiCameraParameter test";

    if (mIsHwModule) {
        // This test is not for HW module implementation.
        return;
    }

    // Get the camera list
    loadCameraList();

    // Test each reported camera
    for (auto&& cam : mCameraInfo) {
        bool isLogicalCam = false;
        getPhysicalCameraIds(cam.id, isLogicalCam);
        if (isLogicalCam) {
            // TODO(b/145465724): Support camera parameter programming on
            // logical devices.
            LOG(INFO) << "Skip a logical device " << cam.id;
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        // Create two camera clients.
        std::shared_ptr<IEvsCamera> pPrimaryCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pPrimaryCam).isOk());
        EXPECT_NE(pPrimaryCam, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pPrimaryCam);

        std::shared_ptr<IEvsCamera> pSecondaryCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pSecondaryCam).isOk());
        EXPECT_NE(pSecondaryCam, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pSecondaryCam);

        // Get the parameter list
        std::vector<CameraParam> camPrimaryCmds, camSecondaryCmds;
        ASSERT_TRUE(pPrimaryCam->getParameterList(&camPrimaryCmds).isOk());
        ASSERT_TRUE(pSecondaryCam->getParameterList(&camSecondaryCmds).isOk());
        if (camPrimaryCmds.size() < 1 || camSecondaryCmds.size() < 1) {
            // Skip a camera device if it does not support any parameter.
            continue;
        }

        // Set up per-client frame receiver objects which will fire up its own thread
        std::shared_ptr<FrameHandler> frameHandlerPrimary = ndk::SharedRefBase::make<FrameHandler>(
                pPrimaryCam, cam, nullptr, FrameHandler::eAutoReturn);
        std::shared_ptr<FrameHandler> frameHandlerSecondary =
                ndk::SharedRefBase::make<FrameHandler>(pSecondaryCam, cam, nullptr,
                                                       FrameHandler::eAutoReturn);
        EXPECT_NE(frameHandlerPrimary, nullptr);
        EXPECT_NE(frameHandlerSecondary, nullptr);

        // Set one client as the primary client.
        ASSERT_TRUE(pPrimaryCam->setPrimaryClient().isOk());

        // Try to set another client as the primary client; this must fail
        // while the role is already held.
        ASSERT_FALSE(pSecondaryCam->setPrimaryClient().isOk());

        // Start the camera's video stream via a primary client.
        ASSERT_TRUE(frameHandlerPrimary->startStream());

        // Ensure the stream starts
        frameHandlerPrimary->waitForFrameCount(1);

        // Start the camera's video stream via another client
        ASSERT_TRUE(frameHandlerSecondary->startStream());

        // Ensure the stream starts
        frameHandlerSecondary->waitForFrameCount(1);

        int32_t val0 = 0;
        std::vector<int32_t> values;
        EvsEventDesc aNotification0 = {};
        EvsEventDesc aNotification1 = {};
        // The primary client programs every supported parameter; BOTH clients
        // must observe a PARAMETER_CHANGED notification for each write.
        for (auto& cmd : camPrimaryCmds) {
            // Get a valid parameter value range
            ParameterRange range;
            ASSERT_TRUE(pPrimaryCam->getIntParameterRange(cmd, &range).isOk());
            if (cmd == CameraParam::ABSOLUTE_FOCUS) {
                // Try to turn off auto-focus
                values.clear();
                ASSERT_TRUE(
                        pPrimaryCam->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
                for (auto&& v : values) {
                    EXPECT_EQ(v, 0);
                }
            }

            // Calculate a parameter value to program.
            // NOTE(review): relies on range.max > range.min and range.step != 0;
            // a degenerate range would make these modulos undefined behavior.
            val0 = range.min + (std::rand() % (range.max - range.min));
            val0 = val0 - (val0 % range.step);

            // Prepare and start event listeners. Each listener flags that it
            // is running, then blocks until the expected event (or a timeout)
            // is delivered to its frame handler.
            bool listening0 = false;
            bool listening1 = false;
            std::condition_variable eventCond;
            std::thread listener0 = std::thread([cmd, val0, &aNotification0, &frameHandlerPrimary,
                                                 &listening0, &listening1, &eventCond]() {
                listening0 = true;
                if (listening1) {
                    eventCond.notify_all();
                }

                EvsEventDesc aTargetEvent;
                aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
                aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
                aTargetEvent.payload[1] = val0;
                if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification0)) {
                    LOG(WARNING) << "A timer is expired before a target event is fired.";
                }
            });
            std::thread listener1 = std::thread([cmd, val0, &aNotification1, &frameHandlerSecondary,
                                                 &listening0, &listening1, &eventCond]() {
                listening1 = true;
                if (listening0) {
                    eventCond.notify_all();
                }

                EvsEventDesc aTargetEvent;
                aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
                aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
                aTargetEvent.payload[1] = val0;
                if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification1)) {
                    LOG(WARNING) << "A timer is expired before a target event is fired.";
                }
            });

            // Wait until both listening threads start; wait_until() also
            // covers the case where a notify was issued before we locked.
            std::mutex eventLock;
            std::unique_lock<std::mutex> lock(eventLock);
            auto timer = std::chrono::system_clock::now();
            while (!listening0 || !listening1) {
                eventCond.wait_until(lock, timer + 1s);
            }
            lock.unlock();

            // Try to program a parameter
            values.clear();
            ASSERT_TRUE(pPrimaryCam->setIntParameter(cmd, val0, &values).isOk());
            for (auto&& v : values) {
                EXPECT_EQ(val0, v) << "Values are not matched.";
            }

            // Join a listening thread.
            if (listener0.joinable()) {
                listener0.join();
            }
            if (listener1.joinable()) {
                listener1.join();
            }

            // Verify a change notification was seen by both clients and that
            // its payload carries the programmed command and value.
            ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
                      static_cast<EvsEventType>(aNotification0.aType));
            ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
                      static_cast<EvsEventType>(aNotification1.aType));
            ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification0.payload[0]));
            ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification1.payload[0]));
            for (auto&& v : values) {
                ASSERT_EQ(v, static_cast<int32_t>(aNotification0.payload[1]));
                ASSERT_EQ(v, static_cast<int32_t>(aNotification1.payload[1]));
            }

            // Clients expect to receive a parameter change notification
            // whenever a primary client adjusts it.
            values.clear();
            ASSERT_TRUE(pPrimaryCam->getIntParameter(cmd, &values).isOk());
            for (auto&& v : values) {
                EXPECT_EQ(val0, v) << "Values are not matched.";
            }
        }

        // Try to adjust a parameter via non-primary client; this must fail.
        values.clear();
        ASSERT_FALSE(pSecondaryCam->setIntParameter(camSecondaryCmds[0], val0, &values).isOk());

        // Non-primary client attempts to be a primary client
        ASSERT_FALSE(pSecondaryCam->setPrimaryClient().isOk());

        // Primary client retires from a primary client role
        bool listening = false;
        std::condition_variable eventCond;
        std::thread listener =
                std::thread([&aNotification0, &frameHandlerSecondary, &listening, &eventCond]() {
                    listening = true;
                    eventCond.notify_all();

                    EvsEventDesc aTargetEvent;
                    aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
                    if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification0, true)) {
                        LOG(WARNING) << "A timer is expired before a target event is fired.";
                    }
                });

        std::mutex eventLock;
        auto timer = std::chrono::system_clock::now();
        std::unique_lock<std::mutex> lock(eventLock);
        while (!listening) {
            eventCond.wait_until(lock, timer + 1s);
        }
        lock.unlock();

        ASSERT_TRUE(pPrimaryCam->unsetPrimaryClient().isOk());

        if (listener.joinable()) {
            listener.join();
        }
        ASSERT_EQ(EvsEventType::MASTER_RELEASED, static_cast<EvsEventType>(aNotification0.aType));

        // Try to adjust a parameter after being retired
        values.clear();
        ASSERT_FALSE(pPrimaryCam->setIntParameter(camPrimaryCmds[0], val0, &values).isOk());

        // Non-primary client becomes a primary client
        ASSERT_TRUE(pSecondaryCam->setPrimaryClient().isOk());

        // Try to adjust a parameter via new primary client; the same
        // write/notify/read-back cycle as above, with the roles swapped.
        for (auto& cmd : camSecondaryCmds) {
            // Get a valid parameter value range
            ParameterRange range;
            ASSERT_TRUE(pSecondaryCam->getIntParameterRange(cmd, &range).isOk());

            values.clear();
            if (cmd == CameraParam::ABSOLUTE_FOCUS) {
                // Try to turn off auto-focus
                values.clear();
                ASSERT_TRUE(
                        pSecondaryCam->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
                for (auto&& v : values) {
                    EXPECT_EQ(v, 0);
                }
            }

            // Calculate a parameter value to program, rounded down to the step.
            // NOTE(review): same modulo-by-zero hazard as the loop above.
            val0 = range.min + (std::rand() % (range.max - range.min));
            val0 = val0 - (val0 % range.step);

            // Prepare and start event listeners.
            bool listening0 = false;
            bool listening1 = false;
            std::condition_variable eventCond;
            std::thread listener0 = std::thread([&]() {
                listening0 = true;
                if (listening1) {
                    eventCond.notify_all();
                }

                EvsEventDesc aTargetEvent;
                aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
                aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
                aTargetEvent.payload[1] = val0;
                if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification0)) {
                    LOG(WARNING) << "A timer is expired before a target event is fired.";
                }
            });
            std::thread listener1 = std::thread([&]() {
                listening1 = true;
                if (listening0) {
                    eventCond.notify_all();
                }

                EvsEventDesc aTargetEvent;
                aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
                aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
                aTargetEvent.payload[1] = val0;
                if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification1)) {
                    LOG(WARNING) << "A timer is expired before a target event is fired.";
                }
            });

            // Wait until both listening threads start.
            std::mutex eventLock;
            std::unique_lock<std::mutex> lock(eventLock);
            auto timer = std::chrono::system_clock::now();
            while (!listening0 || !listening1) {
                eventCond.wait_until(lock, timer + 1s);
            }
            lock.unlock();

            // Try to program a parameter
            values.clear();
            ASSERT_TRUE(pSecondaryCam->setIntParameter(cmd, val0, &values).isOk());

            // Clients expect to receive a parameter change notification
            // whenever a primary client adjusts it.
            values.clear();
            ASSERT_TRUE(pSecondaryCam->getIntParameter(cmd, &values).isOk());
            for (auto&& v : values) {
                EXPECT_EQ(val0, v) << "Values are not matched.";
            }

            // Join a listening thread.
            if (listener0.joinable()) {
                listener0.join();
            }
            if (listener1.joinable()) {
                listener1.join();
            }

            // Verify a change notification
            ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
                      static_cast<EvsEventType>(aNotification0.aType));
            ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
                      static_cast<EvsEventType>(aNotification1.aType));
            ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification0.payload[0]));
            ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification1.payload[0]));
            for (auto&& v : values) {
                ASSERT_EQ(v, static_cast<int32_t>(aNotification0.payload[1]));
                ASSERT_EQ(v, static_cast<int32_t>(aNotification1.payload[1]));
            }
        }

        // New primary client retires from the role
        ASSERT_TRUE(pSecondaryCam->unsetPrimaryClient().isOk());

        // Shutdown
        frameHandlerPrimary->shutdown();
        frameHandlerSecondary->shutdown();

        // Explicitly release the camera
        ASSERT_TRUE(mEnumerator->closeCamera(pPrimaryCam).isOk());
        ASSERT_TRUE(mEnumerator->closeCamera(pSecondaryCam).isOk());
        mActiveCameras.clear();
    }
}
1360
1361/*
1362 * HighPriorityCameraClient:
 * EVS client, which owns the display, is prioritized and therefore can take over
1364 * a primary client role from other EVS clients without the display.
1365 */
TEST_P(EvsAidlTest, HighPriorityCameraClient) {
    LOG(INFO) << "Starting HighPriorityCameraClient test";

    if (mIsHwModule) {
        // This test is not for HW module implementation.
        return;
    }

    // Get the camera list
    loadCameraList();

    // Request available display IDs
    uint8_t targetDisplayId = 0;
    std::vector<uint8_t> displayIds;
    ASSERT_TRUE(mEnumerator->getDisplayIdList(&displayIds).isOk());
    EXPECT_GT(displayIds.size(), 0);
    targetDisplayId = displayIds[0];

    // Request exclusive access to the EVS display; owning the display is what
    // grants this client the right to force-take the primary role below.
    std::shared_ptr<IEvsDisplay> pDisplay;
    ASSERT_TRUE(mEnumerator->openDisplay(targetDisplayId, &pDisplay).isOk());
    EXPECT_NE(pDisplay, nullptr);

    // Test each reported camera
    for (auto&& cam : mCameraInfo) {
        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        // Create two clients
        std::shared_ptr<IEvsCamera> pCam0;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam0).isOk());
        EXPECT_NE(pCam0, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pCam0);

        std::shared_ptr<IEvsCamera> pCam1;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());
        EXPECT_NE(pCam1, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pCam1);

        // Get the parameter list; this test will use the first command in both
        // lists.
        std::vector<CameraParam> cam0Cmds, cam1Cmds;
        ASSERT_TRUE(pCam0->getParameterList(&cam0Cmds).isOk());
        ASSERT_TRUE(pCam1->getParameterList(&cam1Cmds).isOk());
        if (cam0Cmds.size() < 1 || cam1Cmds.size() < 1) {
            // Cannot execute this test.
            return;
        }

        // Set up a frame receiver object which will fire up its own thread.
        std::shared_ptr<FrameHandler> frameHandler0 = ndk::SharedRefBase::make<FrameHandler>(
                pCam0, cam, nullptr, FrameHandler::eAutoReturn);
        std::shared_ptr<FrameHandler> frameHandler1 = ndk::SharedRefBase::make<FrameHandler>(
                pCam1, cam, nullptr, FrameHandler::eAutoReturn);
        EXPECT_NE(frameHandler0, nullptr);
        EXPECT_NE(frameHandler1, nullptr);

        // Activate the display
        ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME).isOk());

        // Start the camera's video stream
        ASSERT_TRUE(frameHandler0->startStream());
        ASSERT_TRUE(frameHandler1->startStream());

        // Ensure the stream starts
        frameHandler0->waitForFrameCount(1);
        frameHandler1->waitForFrameCount(1);

        // Client 1 becomes a primary client and programs a parameter.

        // Get a valid parameter value range
        ParameterRange range;
        ASSERT_TRUE(pCam1->getIntParameterRange(cam1Cmds[0], &range).isOk());

        // Client1 becomes a primary client
        ASSERT_TRUE(pCam1->setPrimaryClient().isOk());

        std::vector<int32_t> values;
        // NOTE(review): this outer aTargetEvent is shadowed by the local copies
        // declared inside every listener lambda below and is otherwise unused.
        EvsEventDesc aTargetEvent = {};
        EvsEventDesc aNotification = {};
        bool listening = false;
        std::mutex eventLock;
        std::condition_variable eventCond;
        if (cam1Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
            // Manual focus requires auto-focus off first; the non-primary
            // client should also observe that change.
            std::thread listener =
                    std::thread([&frameHandler0, &aNotification, &listening, &eventCond] {
                        listening = true;
                        eventCond.notify_all();

                        EvsEventDesc aTargetEvent;
                        aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
                        aTargetEvent.payload[0] = static_cast<uint32_t>(CameraParam::AUTO_FOCUS);
                        aTargetEvent.payload[1] = 0;
                        if (!frameHandler0->waitForEvent(aTargetEvent, aNotification)) {
                            LOG(WARNING) << "A timer is expired before a target event is fired.";
                        }
                    });

            // Wait until a listener starts.
            std::unique_lock<std::mutex> lock(eventLock);
            auto timer = std::chrono::system_clock::now();
            while (!listening) {
                eventCond.wait_until(lock, timer + 1s);
            }
            lock.unlock();

            // Try to turn off auto-focus
            ASSERT_TRUE(pCam1->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
            for (auto&& v : values) {
                EXPECT_EQ(v, 0);
            }

            // Join a listener
            if (listener.joinable()) {
                listener.join();
            }

            // Make sure AUTO_FOCUS is off.
            ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
                      EvsEventType::PARAMETER_CHANGED);
        }

        // Try to program a parameter with a random value [minVal, maxVal] after
        // rounding it down.
        // NOTE(review): assumes range.max > range.min and range.step != 0;
        // otherwise these modulos are undefined behavior.
        int32_t val0 = range.min + (std::rand() % (range.max - range.min));
        val0 = val0 - (val0 % range.step);

        std::thread listener = std::thread(
                [&frameHandler1, &aNotification, &listening, &eventCond, &cam1Cmds, val0] {
                    listening = true;
                    eventCond.notify_all();

                    EvsEventDesc aTargetEvent;
                    aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
                    aTargetEvent.payload[0] = static_cast<uint32_t>(cam1Cmds[0]);
                    aTargetEvent.payload[1] = val0;
                    if (!frameHandler1->waitForEvent(aTargetEvent, aNotification)) {
                        LOG(WARNING) << "A timer is expired before a target event is fired.";
                    }
                });

        // Wait until a listener starts.
        listening = false;
        std::unique_lock<std::mutex> lock(eventLock);
        auto timer = std::chrono::system_clock::now();
        while (!listening) {
            eventCond.wait_until(lock, timer + 1s);
        }
        lock.unlock();

        values.clear();
        ASSERT_TRUE(pCam1->setIntParameter(cam1Cmds[0], val0, &values).isOk());
        for (auto&& v : values) {
            EXPECT_EQ(val0, v);
        }

        // Join a listener
        if (listener.joinable()) {
            listener.join();
        }

        // Verify a change notification
        ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType), EvsEventType::PARAMETER_CHANGED);
        ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]), cam1Cmds[0]);
        for (auto&& v : values) {
            ASSERT_EQ(v, static_cast<int32_t>(aNotification.payload[1]));
        }

        // Client 1 should be notified that its primary role is released when
        // the display owner takes it over below.
        listener = std::thread([&frameHandler1, &aNotification, &listening, &eventCond] {
            listening = true;
            eventCond.notify_all();

            EvsEventDesc aTargetEvent;
            aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
            if (!frameHandler1->waitForEvent(aTargetEvent, aNotification, true)) {
                LOG(WARNING) << "A timer is expired before a target event is fired.";
            }
        });

        // Wait until a listener starts.
        listening = false;
        lock.lock();
        timer = std::chrono::system_clock::now();
        while (!listening) {
            eventCond.wait_until(lock, timer + 1s);
        }
        lock.unlock();

        // Client 0 steals a primary client role
        ASSERT_TRUE(pCam0->forcePrimaryClient(pDisplay).isOk());

        // Join a listener
        if (listener.joinable()) {
            listener.join();
        }

        ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType), EvsEventType::MASTER_RELEASED);

        // Client 0 programs a parameter
        val0 = range.min + (std::rand() % (range.max - range.min));

        // Rounding down
        val0 = val0 - (val0 % range.step);

        if (cam0Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
            std::thread listener =
                    std::thread([&frameHandler1, &aNotification, &listening, &eventCond] {
                        listening = true;
                        eventCond.notify_all();

                        EvsEventDesc aTargetEvent;
                        aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
                        aTargetEvent.payload[0] = static_cast<uint32_t>(CameraParam::AUTO_FOCUS);
                        aTargetEvent.payload[1] = 0;
                        if (!frameHandler1->waitForEvent(aTargetEvent, aNotification)) {
                            LOG(WARNING) << "A timer is expired before a target event is fired.";
                        }
                    });

            // Wait until a listener starts.
            std::unique_lock<std::mutex> lock(eventLock);
            auto timer = std::chrono::system_clock::now();
            while (!listening) {
                eventCond.wait_until(lock, timer + 1s);
            }
            lock.unlock();

            // Try to turn off auto-focus
            values.clear();
            ASSERT_TRUE(pCam0->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
            for (auto&& v : values) {
                EXPECT_EQ(v, 0);
            }

            // Join a listener
            if (listener.joinable()) {
                listener.join();
            }

            // Make sure AUTO_FOCUS is off.
            ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
                      EvsEventType::PARAMETER_CHANGED);
        }

        listener = std::thread(
                [&frameHandler0, &aNotification, &listening, &eventCond, &cam0Cmds, val0] {
                    listening = true;
                    eventCond.notify_all();

                    EvsEventDesc aTargetEvent;
                    aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
                    aTargetEvent.payload[0] = static_cast<uint32_t>(cam0Cmds[0]);
                    aTargetEvent.payload[1] = val0;
                    if (!frameHandler0->waitForEvent(aTargetEvent, aNotification)) {
                        LOG(WARNING) << "A timer is expired before a target event is fired.";
                    }
                });

        // Wait until a listener starts.
        listening = false;
        timer = std::chrono::system_clock::now();
        lock.lock();
        while (!listening) {
            eventCond.wait_until(lock, timer + 1s);
        }
        lock.unlock();

        values.clear();
        ASSERT_TRUE(pCam0->setIntParameter(cam0Cmds[0], val0, &values).isOk());

        // Join a listener
        if (listener.joinable()) {
            listener.join();
        }
        // Verify a change notification
        ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType), EvsEventType::PARAMETER_CHANGED);
        ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]), cam0Cmds[0]);
        for (auto&& v : values) {
            ASSERT_EQ(v, static_cast<int32_t>(aNotification.payload[1]));
        }

        // Turn off the display (yes, before the stream stops -- it should be handled)
        ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::NOT_VISIBLE).isOk());

        // Shut down the streamer
        frameHandler0->shutdown();
        frameHandler1->shutdown();

        // Explicitly release the camera
        ASSERT_TRUE(mEnumerator->closeCamera(pCam0).isOk());
        ASSERT_TRUE(mEnumerator->closeCamera(pCam1).isOk());
        mActiveCameras.clear();
    }

    // Explicitly release the display
    ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
}
1670
1671/*
1672 * CameraUseStreamConfigToDisplay:
1673 * End to end test of data flowing from the camera to the display. Similar to
1674 * CameraToDisplayRoundTrip test case but this case retrieves available stream
1675 * configurations from EVS and uses one of them to start a video stream.
1676 */
TEST_P(EvsAidlTest, CameraUseStreamConfigToDisplay) {
    LOG(INFO) << "Starting CameraUseStreamConfigToDisplay test";

    // Get the camera list
    loadCameraList();

    // Request available display IDs
    uint8_t targetDisplayId = 0;
    std::vector<uint8_t> displayIds;
    ASSERT_TRUE(mEnumerator->getDisplayIdList(&displayIds).isOk());
    EXPECT_GT(displayIds.size(), 0);
    targetDisplayId = displayIds[0];

    // Request exclusive access to the EVS display
    std::shared_ptr<IEvsDisplay> pDisplay;
    ASSERT_TRUE(mEnumerator->openDisplay(targetDisplayId, &pDisplay).isOk());
    EXPECT_NE(pDisplay, nullptr);

    // Test each reported camera
    for (auto&& cam : mCameraInfo) {
        // Choose the largest-area RGBA8888 output configuration that has a
        // frame rate faster than minReqFps.
        Stream targetCfg = {};
        const int32_t minReqFps = 15;
        int32_t maxArea = 0;
        camera_metadata_entry_t streamCfgs;
        bool foundCfg = false;
        // find_camera_metadata_entry() returns 0 on success.
        if (!find_camera_metadata_entry(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()),
                                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
                                        &streamCfgs)) {
            // Stream configurations are found in metadata
            RawStreamConfig* ptr = reinterpret_cast<RawStreamConfig*>(streamCfgs.data.i32);
            // streamCfgs.count counts int32 entries, so offset advances by
            // kStreamCfgSz per record while ptr advances by one RawStreamConfig
            // (assumes sizeof(RawStreamConfig) == kStreamCfgSz int32s -- TODO confirm).
            for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
                if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
                    ptr->format == HAL_PIXEL_FORMAT_RGBA_8888) {
                    if (ptr->width * ptr->height > maxArea && ptr->framerate >= minReqFps) {
                        targetCfg.width = ptr->width;
                        targetCfg.height = ptr->height;

                        maxArea = ptr->width * ptr->height;
                        foundCfg = true;
                    }
                }
                ++ptr;
            }
        }
        targetCfg.format = static_cast<PixelFormat>(HAL_PIXEL_FORMAT_RGBA_8888);

        if (!foundCfg) {
            // Current EVS camera does not provide stream configurations in the
            // metadata.
            continue;
        }

        std::shared_ptr<IEvsCamera> pCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
        EXPECT_NE(pCam, nullptr);

        // Store a camera handle for a clean-up
        mActiveCameras.push_back(pCam);

        // Set up a frame receiver object which will fire up its own thread.
        // Passing pDisplay makes the handler forward each frame to the display.
        std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
                pCam, cam, pDisplay, FrameHandler::eAutoReturn);
        EXPECT_NE(frameHandler, nullptr);

        // Activate the display
        ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME).isOk());

        // Start the camera's video stream
        ASSERT_TRUE(frameHandler->startStream());

        // Wait a while to let the data flow, then require a minimum frame
        // rate over the window (minus the allowed stream start-up latency).
        static const int kSecondsToWait = 5;
        const int streamTimeMs =
                kSecondsToWait * kSecondsToMilliseconds - kMaxStreamStartMilliseconds;
        const unsigned minimumFramesExpected =
                streamTimeMs * kMinimumFramesPerSecond / kSecondsToMilliseconds;
        sleep(kSecondsToWait);
        unsigned framesReceived = 0;
        unsigned framesDisplayed = 0;
        frameHandler->getFramesCounters(&framesReceived, &framesDisplayed);
        // Every received frame must have been forwarded to the display.
        EXPECT_EQ(framesReceived, framesDisplayed);
        EXPECT_GE(framesDisplayed, minimumFramesExpected);

        // Turn off the display (yes, before the stream stops -- it should be handled)
        ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::NOT_VISIBLE).isOk());

        // Shut down the streamer
        frameHandler->shutdown();

        // Explicitly release the camera
        ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
        mActiveCameras.clear();
    }

    // Explicitly release the display
    ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
}
1775
1776/*
1777 * MultiCameraStreamUseConfig:
 * Verify that each client can start and stop video streams on the same
 * underlying camera with the same configuration.
1780 */
1781TEST_P(EvsAidlTest, MultiCameraStreamUseConfig) {
1782 LOG(INFO) << "Starting MultiCameraStream test";
1783
1784 if (mIsHwModule) {
1785 // This test is not for HW module implementation.
1786 return;
1787 }
1788
1789 // Get the camera list
1790 loadCameraList();
1791
1792 // Test each reported camera
1793 for (auto&& cam : mCameraInfo) {
1794 // choose a configuration that has a frame rate faster than minReqFps.
1795 Stream targetCfg = {};
1796 const int32_t minReqFps = 15;
1797 int32_t maxArea = 0;
1798 camera_metadata_entry_t streamCfgs;
1799 bool foundCfg = false;
1800 if (!find_camera_metadata_entry(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()),
1801 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
1802 &streamCfgs)) {
1803 // Stream configurations are found in metadata
1804 RawStreamConfig* ptr = reinterpret_cast<RawStreamConfig*>(streamCfgs.data.i32);
1805 for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
1806 if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
1807 ptr->format == HAL_PIXEL_FORMAT_RGBA_8888) {
1808 if (ptr->width * ptr->height > maxArea && ptr->framerate >= minReqFps) {
1809 targetCfg.width = ptr->width;
1810 targetCfg.height = ptr->height;
1811
1812 maxArea = ptr->width * ptr->height;
1813 foundCfg = true;
1814 }
1815 }
1816 ++ptr;
1817 }
1818 }
1819 targetCfg.format = static_cast<PixelFormat>(HAL_PIXEL_FORMAT_RGBA_8888);
1820
1821 if (!foundCfg) {
1822 LOG(INFO) << "Device " << cam.id
1823 << " does not provide a list of supported stream configurations, skipped";
1824 continue;
1825 }
1826
1827 // Create the first camera client with a selected stream configuration.
1828 std::shared_ptr<IEvsCamera> pCam0;
1829 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam0).isOk());
1830 EXPECT_NE(pCam0, nullptr);
1831
1832 // Store a camera handle for a clean-up
1833 mActiveCameras.push_back(pCam0);
1834
1835 // Try to create the second camera client with different stream
1836 // configuration.
1837 int32_t id = targetCfg.id;
1838 targetCfg.id += 1; // EVS manager sees only the stream id.
1839 std::shared_ptr<IEvsCamera> pCam1;
1840 ASSERT_FALSE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());
1841
1842 // Try again with same stream configuration.
1843 targetCfg.id = id;
1844 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());
1845 EXPECT_NE(pCam1, nullptr);
1846
1847 // Set up per-client frame receiver objects which will fire up its own thread
Frederick Mayle7056b242022-03-29 02:38:12 +00001848 std::shared_ptr<FrameHandler> frameHandler0 = ndk::SharedRefBase::make<FrameHandler>(
1849 pCam0, cam, nullptr, FrameHandler::eAutoReturn);
1850 std::shared_ptr<FrameHandler> frameHandler1 = ndk::SharedRefBase::make<FrameHandler>(
1851 pCam1, cam, nullptr, FrameHandler::eAutoReturn);
Changyeon Jo80189012021-10-10 16:34:21 -07001852 EXPECT_NE(frameHandler0, nullptr);
1853 EXPECT_NE(frameHandler1, nullptr);
1854
1855 // Start the camera's video stream via client 0
1856 ASSERT_TRUE(frameHandler0->startStream());
1857 ASSERT_TRUE(frameHandler1->startStream());
1858
1859 // Ensure the stream starts
1860 frameHandler0->waitForFrameCount(1);
1861 frameHandler1->waitForFrameCount(1);
1862
1863 nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
1864
1865 // Wait a bit, then ensure both clients get at least the required minimum number of frames
1866 sleep(5);
1867 nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
1868 unsigned framesReceived0 = 0, framesReceived1 = 0;
1869 frameHandler0->getFramesCounters(&framesReceived0, nullptr);
1870 frameHandler1->getFramesCounters(&framesReceived1, nullptr);
1871 framesReceived0 = framesReceived0 - 1; // Back out the first frame we already waited for
1872 framesReceived1 = framesReceived1 - 1; // Back out the first frame we already waited for
1873 nsecs_t runTime = end - firstFrame;
1874 float framesPerSecond0 = framesReceived0 / (runTime * kNanoToSeconds);
1875 float framesPerSecond1 = framesReceived1 / (runTime * kNanoToSeconds);
1876 LOG(INFO) << "Measured camera rate " << std::scientific << framesPerSecond0 << " fps and "
1877 << framesPerSecond1 << " fps";
1878 EXPECT_GE(framesPerSecond0, kMinimumFramesPerSecond);
1879 EXPECT_GE(framesPerSecond1, kMinimumFramesPerSecond);
1880
1881 // Shutdown one client
1882 frameHandler0->shutdown();
1883
1884 // Read frame counters again
1885 frameHandler0->getFramesCounters(&framesReceived0, nullptr);
1886 frameHandler1->getFramesCounters(&framesReceived1, nullptr);
1887
1888 // Wait a bit again
1889 sleep(5);
1890 unsigned framesReceivedAfterStop0 = 0, framesReceivedAfterStop1 = 0;
1891 frameHandler0->getFramesCounters(&framesReceivedAfterStop0, nullptr);
1892 frameHandler1->getFramesCounters(&framesReceivedAfterStop1, nullptr);
1893 EXPECT_EQ(framesReceived0, framesReceivedAfterStop0);
1894 EXPECT_LT(framesReceived1, framesReceivedAfterStop1);
1895
1896 // Shutdown another
1897 frameHandler1->shutdown();
1898
1899 // Explicitly release the camera
1900 ASSERT_TRUE(mEnumerator->closeCamera(pCam0).isOk());
1901 ASSERT_TRUE(mEnumerator->closeCamera(pCam1).isOk());
1902 mActiveCameras.clear();
1903 }
1904}
1905
1906/*
1907 * LogicalCameraMetadata:
1908 * Opens logical camera reported by the enumerator and validate its metadata by
1909 * checking its capability and locating supporting physical camera device
1910 * identifiers.
1911 */
1912TEST_P(EvsAidlTest, LogicalCameraMetadata) {
1913 LOG(INFO) << "Starting LogicalCameraMetadata test";
1914
1915 // Get the camera list
1916 loadCameraList();
1917
1918 // Open and close each camera twice
1919 for (auto&& cam : mCameraInfo) {
1920 bool isLogicalCam = false;
1921 auto devices = getPhysicalCameraIds(cam.id, isLogicalCam);
1922 if (isLogicalCam) {
1923 ASSERT_GE(devices.size(), 1) << "Logical camera device must have at least one physical "
1924 "camera device ID in its metadata.";
1925 }
1926 }
1927}
1928
1929/*
1930 * CameraStreamExternalBuffering:
 * This is the same as CameraStreamBuffering, except that the frame buffers are
 * allocated by the test client and then imported by the EVS framework.
1933 */
1934TEST_P(EvsAidlTest, CameraStreamExternalBuffering) {
1935 LOG(INFO) << "Starting CameraStreamExternalBuffering test";
1936
1937 // Arbitrary constant (should be > 1 and not too big)
1938 static const unsigned int kBuffersToHold = 3;
1939
1940 // Get the camera list
1941 loadCameraList();
1942
1943 // Acquire the graphics buffer allocator
1944 android::GraphicBufferAllocator& alloc(android::GraphicBufferAllocator::get());
1945 const auto usage =
1946 GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_SW_READ_RARELY | GRALLOC_USAGE_SW_WRITE_OFTEN;
1947
1948 // Test each reported camera
1949 for (auto&& cam : mCameraInfo) {
1950 // Read a target resolution from the metadata
1951 Stream targetCfg = getFirstStreamConfiguration(
1952 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
1953 ASSERT_GT(targetCfg.width, 0);
1954 ASSERT_GT(targetCfg.height, 0);
1955
1956 // Allocate buffers to use
1957 std::vector<BufferDesc> buffers;
1958 buffers.resize(kBuffersToHold);
1959 for (auto i = 0; i < kBuffersToHold; ++i) {
1960 unsigned pixelsPerLine;
1961 buffer_handle_t memHandle = nullptr;
1962 android::status_t result =
1963 alloc.allocate(targetCfg.width, targetCfg.height,
1964 static_cast<android::PixelFormat>(targetCfg.format),
1965 /* layerCount = */ 1, usage, &memHandle, &pixelsPerLine,
1966 /* graphicBufferId = */ 0,
1967 /* requestorName = */ "CameraStreamExternalBufferingTest");
1968 if (result != android::NO_ERROR) {
1969 LOG(ERROR) << __FUNCTION__ << " failed to allocate memory.";
1970 // Release previous allocated buffers
1971 for (auto j = 0; j < i; j++) {
1972 alloc.free(::android::dupFromAidl(buffers[i].buffer.handle));
1973 }
1974 return;
1975 } else {
1976 BufferDesc buf;
1977 HardwareBufferDescription* pDesc =
1978 reinterpret_cast<HardwareBufferDescription*>(&buf.buffer.description);
1979 pDesc->width = targetCfg.width;
1980 pDesc->height = targetCfg.height;
1981 pDesc->layers = 1;
1982 pDesc->format = targetCfg.format;
1983 pDesc->usage = static_cast<BufferUsage>(usage);
1984 pDesc->stride = pixelsPerLine;
1985 buf.buffer.handle = ::android::dupToAidl(memHandle);
1986 buf.bufferId = i; // Unique number to identify this buffer
1987 buffers[i] = std::move(buf);
1988 }
1989 }
1990
1991 bool isLogicalCam = false;
1992 getPhysicalCameraIds(cam.id, isLogicalCam);
1993
1994 std::shared_ptr<IEvsCamera> pCam;
1995 ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
1996 EXPECT_NE(pCam, nullptr);
1997
1998 // Store a camera handle for a clean-up
1999 mActiveCameras.push_back(pCam);
2000
2001 // Request to import buffers
2002 int delta = 0;
2003 auto status = pCam->importExternalBuffers(buffers, &delta);
2004 if (isLogicalCam) {
2005 ASSERT_FALSE(status.isOk());
2006 continue;
2007 }
2008
2009 ASSERT_TRUE(status.isOk());
2010 EXPECT_GE(delta, kBuffersToHold);
2011
2012 // Set up a frame receiver object which will fire up its own thread.
Frederick Mayle7056b242022-03-29 02:38:12 +00002013 std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
2014 pCam, cam, nullptr, FrameHandler::eNoAutoReturn);
Changyeon Jo80189012021-10-10 16:34:21 -07002015 EXPECT_NE(frameHandler, nullptr);
2016
2017 // Start the camera's video stream
2018 ASSERT_TRUE(frameHandler->startStream());
2019
2020 // Check that the video stream stalls once we've gotten exactly the number of buffers
2021 // we requested since we told the frameHandler not to return them.
2022 sleep(1); // 1 second should be enough for at least 5 frames to be delivered worst case
2023 unsigned framesReceived = 0;
2024 frameHandler->getFramesCounters(&framesReceived, nullptr);
2025 ASSERT_LE(kBuffersToHold, framesReceived) << "Stream didn't stall at expected buffer limit";
2026
2027 // Give back one buffer
2028 EXPECT_TRUE(frameHandler->returnHeldBuffer());
2029
2030 // Once we return a buffer, it shouldn't take more than 1/10 second to get a new one
2031 // filled since we require 10fps minimum -- but give a 10% allowance just in case.
2032 unsigned framesReceivedAfter = 0;
2033 usleep(110 * kMillisecondsToMicroseconds);
2034 frameHandler->getFramesCounters(&framesReceivedAfter, nullptr);
2035 EXPECT_EQ(framesReceived + 1, framesReceivedAfter) << "Stream should've resumed";
2036
2037 // Even when the camera pointer goes out of scope, the FrameHandler object will
2038 // keep the stream alive unless we tell it to shutdown.
2039 // Also note that the FrameHandle and the Camera have a mutual circular reference, so
2040 // we have to break that cycle in order for either of them to get cleaned up.
2041 frameHandler->shutdown();
2042
2043 // Explicitly release the camera
2044 ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
2045 mActiveCameras.clear();
2046 // Release buffers
2047 for (auto& b : buffers) {
2048 alloc.free(::android::dupFromAidl(b.buffer.handle));
2049 }
2050 buffers.resize(0);
2051 }
2052}
2053
2054/*
2055 * UltrasonicsArrayOpenClean:
2056 * Opens each ultrasonics arrays reported by the enumerator and then explicitly closes it via a
2057 * call to closeUltrasonicsArray. Then repeats the test to ensure all ultrasonics arrays
2058 * can be reopened.
2059 */
2060TEST_P(EvsAidlTest, UltrasonicsArrayOpenClean) {
2061 LOG(INFO) << "Starting UltrasonicsArrayOpenClean test";
2062
2063 // Get the ultrasonics array list
2064 loadUltrasonicsArrayList();
2065
2066 // Open and close each ultrasonics array twice
2067 for (auto&& ultraInfo : mUltrasonicsArraysInfo) {
2068 for (int pass = 0; pass < 2; pass++) {
2069 std::shared_ptr<IEvsUltrasonicsArray> pUltrasonicsArray;
2070 ASSERT_TRUE(
2071 mEnumerator
2072 ->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId, &pUltrasonicsArray)
2073 .isOk());
2074 EXPECT_NE(pUltrasonicsArray, nullptr);
2075
2076 // Verify that this ultrasonics array self-identifies correctly
2077 UltrasonicsArrayDesc desc;
2078 ASSERT_TRUE(pUltrasonicsArray->getUltrasonicArrayInfo(&desc).isOk());
2079 EXPECT_EQ(ultraInfo.ultrasonicsArrayId, desc.ultrasonicsArrayId);
2080 LOG(DEBUG) << "Found ultrasonics array " << ultraInfo.ultrasonicsArrayId;
2081
2082 // Explicitly close the ultrasonics array so resources are released right away
2083 ASSERT_TRUE(mEnumerator->closeUltrasonicsArray(pUltrasonicsArray).isOk());
2084 }
2085 }
2086}
2087
2088// Starts a stream and verifies all data received is valid.
2089TEST_P(EvsAidlTest, UltrasonicsVerifyStreamData) {
2090 LOG(INFO) << "Starting UltrasonicsVerifyStreamData";
2091
2092 // Get the ultrasonics array list
2093 loadUltrasonicsArrayList();
2094
2095 // For each ultrasonics array.
2096 for (auto&& ultraInfo : mUltrasonicsArraysInfo) {
2097 LOG(DEBUG) << "Testing ultrasonics array: " << ultraInfo.ultrasonicsArrayId;
2098
2099 std::shared_ptr<IEvsUltrasonicsArray> pUltrasonicsArray;
2100 ASSERT_TRUE(
2101 mEnumerator->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId, &pUltrasonicsArray)
2102 .isOk());
2103 EXPECT_NE(pUltrasonicsArray, nullptr);
2104
2105 std::shared_ptr<FrameHandlerUltrasonics> frameHandler =
Frederick Mayle7056b242022-03-29 02:38:12 +00002106 ndk::SharedRefBase::make<FrameHandlerUltrasonics>(pUltrasonicsArray);
Changyeon Jo80189012021-10-10 16:34:21 -07002107 EXPECT_NE(frameHandler, nullptr);
2108
2109 // Start stream.
2110 ASSERT_TRUE(pUltrasonicsArray->startStream(frameHandler).isOk());
2111
2112 // Wait 5 seconds to receive frames.
2113 sleep(5);
2114
2115 // Stop stream.
2116 ASSERT_TRUE(pUltrasonicsArray->stopStream().isOk());
2117
2118 EXPECT_GT(frameHandler->getReceiveFramesCount(), 0);
2119 EXPECT_TRUE(frameHandler->areAllFramesValid());
2120
2121 // Explicitly close the ultrasonics array so resources are released right away
2122 ASSERT_TRUE(mEnumerator->closeUltrasonicsArray(pUltrasonicsArray).isOk());
2123 }
2124}
2125
2126// Sets frames in flight before and after start of stream and verfies success.
2127TEST_P(EvsAidlTest, UltrasonicsSetFramesInFlight) {
2128 LOG(INFO) << "Starting UltrasonicsSetFramesInFlight";
2129
2130 // Get the ultrasonics array list
2131 loadUltrasonicsArrayList();
2132
2133 // For each ultrasonics array.
2134 for (auto&& ultraInfo : mUltrasonicsArraysInfo) {
2135 LOG(DEBUG) << "Testing ultrasonics array: " << ultraInfo.ultrasonicsArrayId;
2136
2137 std::shared_ptr<IEvsUltrasonicsArray> pUltrasonicsArray;
2138 ASSERT_TRUE(
2139 mEnumerator->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId, &pUltrasonicsArray)
2140 .isOk());
2141 EXPECT_NE(pUltrasonicsArray, nullptr);
2142
2143 ASSERT_TRUE(pUltrasonicsArray->setMaxFramesInFlight(10).isOk());
2144
2145 std::shared_ptr<FrameHandlerUltrasonics> frameHandler =
Frederick Mayle7056b242022-03-29 02:38:12 +00002146 ndk::SharedRefBase::make<FrameHandlerUltrasonics>(pUltrasonicsArray);
Changyeon Jo80189012021-10-10 16:34:21 -07002147 EXPECT_NE(frameHandler, nullptr);
2148
2149 // Start stream.
2150 ASSERT_TRUE(pUltrasonicsArray->startStream(frameHandler).isOk());
2151 ASSERT_TRUE(pUltrasonicsArray->setMaxFramesInFlight(5).isOk());
2152
2153 // Stop stream.
2154 ASSERT_TRUE(pUltrasonicsArray->stopStream().isOk());
2155
2156 // Explicitly close the ultrasonics array so resources are released right away
2157 ASSERT_TRUE(mEnumerator->closeUltrasonicsArray(pUltrasonicsArray).isOk());
2158 }
2159}
2160
// Allow this parameterized suite to compile and run even when no
// IEvsEnumerator instance is registered on the device; in that case the
// ValuesIn() list below is empty and no test is instantiated.
GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(EvsAidlTest);
// Instantiate the suite once per registered IEvsEnumerator AIDL HAL instance,
// naming each instantiation after the service instance it targets.
INSTANTIATE_TEST_SUITE_P(
        PerInstance, EvsAidlTest,
        testing::ValuesIn(android::getAidlHalInstanceNames(IEvsEnumerator::descriptor)),
        android::PrintInstanceNameToString);
2166
int main(int argc, char** argv) {
    ::testing::InitGoogleTest(&argc, argv);
    // The tests receive asynchronous callbacks (e.g. frame delivery) over
    // binder, so a binder threadpool must be running before RUN_ALL_TESTS().
    ABinderProcess_setThreadPoolMaxThreadCount(1);
    ABinderProcess_startThreadPool();
    return RUN_ALL_TESTS();
}