/*
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include <gtest/gtest.h>

#include <aidl/Vintf.h>
#include <aidl/android/hardware/camera/common/VendorTagSection.h>
#include <aidl/android/hardware/camera/device/ICameraDevice.h>
#include <aidlcommonsupport/NativeHandle.h>
#include <camera_aidl_test.h>
#include <cutils/properties.h>
#include <device_cb.h>
#include <empty_device_cb.h>
#include <grallocusage/GrallocUsageConversion.h>
#include <hardware/gralloc.h>
#include <hardware/gralloc1.h>
#include <hidl/GtestPrinter.h>
#include <hidl/HidlSupport.h>
#include <torch_provider_cb.h>
#include <com_android_internal_camera_flags.h>
#include <list>
#include <nativebase/nativebase.h>

using ::aidl::android::hardware::camera::common::CameraDeviceStatus;
using ::aidl::android::hardware::camera::common::CameraResourceCost;
using ::aidl::android::hardware::camera::common::TorchModeStatus;
using ::aidl::android::hardware::camera::common::VendorTagSection;
using ::aidl::android::hardware::camera::device::ICameraDevice;
using ::aidl::android::hardware::camera::metadata::RequestAvailableColorSpaceProfilesMap;
using ::aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap;
using ::aidl::android::hardware::camera::metadata::SensorPixelMode;
using ::aidl::android::hardware::camera::provider::CameraIdAndStreamCombination;
using ::aidl::android::hardware::camera::provider::BnCameraProviderCallback;

using ::ndk::ScopedAStatus;

namespace {
const int32_t kBurstFrameCount = 10;
const uint32_t kMaxStillWidth = 2048;
const uint32_t kMaxStillHeight = 1536;

const int64_t kEmptyFlushTimeoutMSec = 200;
namespace flags = com::android::internal::camera::flags;

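// Stream use cases that devices advertising stream use case support are expected to list
// (assumption based on the camera metadata documentation for
// ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES).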
const static std::vector<int64_t> kMandatoryUseCases = {
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL};
}  // namespace

TEST_P(CameraAidlTest, getCameraIdList) {
    std::vector<std::string> idList;
    ScopedAStatus ret = mProvider->getCameraIdList(&idList);
    ASSERT_TRUE(ret.isOk());

    for (size_t i = 0; i < idList.size(); i++) {
        ALOGI("Camera Id[%zu] is %s", i, idList[i].c_str());
    }
}

// Test if ICameraProvider::getVendorTags returns Status::OK
TEST_P(CameraAidlTest, getVendorTags) {
    std::vector<VendorTagSection> vendorTags;
    ScopedAStatus ret = mProvider->getVendorTags(&vendorTags);

    ASSERT_TRUE(ret.isOk());
    for (size_t i = 0; i < vendorTags.size(); i++) {
        ALOGI("Vendor tag section %zu name %s", i, vendorTags[i].sectionName.c_str());
        for (auto& tag : vendorTags[i].tags) {
            ALOGI("Vendor tag id %u name %s type %d", tag.tagId, tag.tagName.c_str(),
                  (int)tag.tagType);
        }
    }
}

// Test if ICameraProvider::setCallback returns Status::OK
TEST_P(CameraAidlTest, setCallback) {
    struct ProviderCb : public BnCameraProviderCallback {
        ScopedAStatus cameraDeviceStatusChange(const std::string& cameraDeviceName,
                                               CameraDeviceStatus newStatus) override {
            ALOGI("camera device status callback name %s, status %d", cameraDeviceName.c_str(),
                  (int)newStatus);
            return ScopedAStatus::ok();
        }
        ScopedAStatus torchModeStatusChange(const std::string& cameraDeviceName,
                                            TorchModeStatus newStatus) override {
            ALOGI("Torch mode status callback name %s, status %d", cameraDeviceName.c_str(),
                  (int)newStatus);
            return ScopedAStatus::ok();
        }
        ScopedAStatus physicalCameraDeviceStatusChange(const std::string& cameraDeviceName,
                                                       const std::string& physicalCameraDeviceName,
                                                       CameraDeviceStatus newStatus) override {
            ALOGI("physical camera device status callback name %s, physical camera name %s,"
                  " status %d",
                  cameraDeviceName.c_str(), physicalCameraDeviceName.c_str(), (int)newStatus);
            return ScopedAStatus::ok();
        }
    };

    std::shared_ptr<ProviderCb> cb = ndk::SharedRefBase::make<ProviderCb>();
    ScopedAStatus ret = mProvider->setCallback(cb);
    ASSERT_TRUE(ret.isOk());
    ret = mProvider->setCallback(nullptr);
    ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
}

// Test if ICameraProvider::getCameraDeviceInterface returns Status::OK and non-null device
TEST_P(CameraAidlTest, getCameraDeviceInterface) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> cameraDevice;
        ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &cameraDevice);
        ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(cameraDevice, nullptr);
    }
}

// Verify that the device resource cost can be retrieved and the values are
// correct.
TEST_P(CameraAidlTest, getResourceCost) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& deviceName : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> cameraDevice;
        ScopedAStatus ret = mProvider->getCameraDeviceInterface(deviceName, &cameraDevice);
        ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(cameraDevice, nullptr);

        CameraResourceCost resourceCost;
        ret = cameraDevice->getResourceCost(&resourceCost);
        ALOGI("getResourceCost returns: %d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());

        ALOGI(" Resource cost is %d", resourceCost.resourceCost);
        ASSERT_LE(resourceCost.resourceCost, 100u);

        for (const auto& name : resourceCost.conflictingDevices) {
            ALOGI(" Conflicting device: %s", name.c_str());
        }
    }
}

// Validate the integrity of manual flash strength control metadata
TEST_P(CameraAidlTest, validateManualFlashStrengthControlKeys) {
    if (flags::camera_manual_flash_strength_control()) {
        std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
        for (const auto& name : cameraDeviceNames) {
            ALOGI("validateManualFlashStrengthControlKeys: Testing camera device %s",
                  name.c_str());
            CameraMetadata meta;
            std::shared_ptr<ICameraDevice> cameraDevice;
            openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                                   &cameraDevice /*out*/);
            ndk::ScopedAStatus ret = cameraDevice->getCameraCharacteristics(&meta);
            ASSERT_TRUE(ret.isOk());
            const camera_metadata_t* staticMeta =
                    reinterpret_cast<const camera_metadata_t*>(meta.metadata.data());
            verifyManualFlashStrengthControlCharacteristics(staticMeta);
        }
    } else {
        ALOGI("validateManualFlashStrengthControlKeys: Test skipped.\n");
        GTEST_SKIP();
    }
}

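// Verify that each hidden physical camera is only referenced by logical cameras that share a
// single, consistent system camera kind.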
TEST_P(CameraAidlTest, systemCameraTest) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::map<std::string, std::vector<SystemCameraKind>> hiddenPhysicalIdToLogicalMap;
    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("systemCameraTest: Testing camera device %s", name.c_str());
        ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        CameraMetadata cameraCharacteristics;
        ret = device->getCameraCharacteristics(&cameraCharacteristics);
        ASSERT_TRUE(ret.isOk());

        const camera_metadata_t* staticMeta =
                reinterpret_cast<const camera_metadata_t*>(cameraCharacteristics.metadata.data());
        Status rc = isLogicalMultiCamera(staticMeta);
        if (rc == Status::OPERATION_NOT_SUPPORTED) {
            return;
        }

        ASSERT_EQ(rc, Status::OK);
        std::unordered_set<std::string> physicalIds;
        ASSERT_EQ(getPhysicalCameraIds(staticMeta, &physicalIds), Status::OK);
        SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
        Status retStatus = getSystemCameraKind(staticMeta, &systemCameraKind);
        ASSERT_EQ(retStatus, Status::OK);

        for (auto physicalId : physicalIds) {
            bool isPublicId = false;
            for (auto& deviceName : cameraDeviceNames) {
                std::string publicVersion, publicId;
                ASSERT_TRUE(matchDeviceName(deviceName, mProviderType, &publicVersion, &publicId));
                if (physicalId == publicId) {
                    isPublicId = true;
                    break;
                }
            }

            // For hidden physical cameras, collect their associated logical cameras
            // and store the system camera kind.
            if (!isPublicId) {
                auto it = hiddenPhysicalIdToLogicalMap.find(physicalId);
                if (it == hiddenPhysicalIdToLogicalMap.end()) {
                    hiddenPhysicalIdToLogicalMap.insert(std::make_pair(
                            physicalId, std::vector<SystemCameraKind>({systemCameraKind})));
                } else {
                    it->second.push_back(systemCameraKind);
                }
            }
        }
    }

    // Check that the system camera kind of the logical cameras associated with
    // each hidden physical camera is the same.
    for (const auto& it : hiddenPhysicalIdToLogicalMap) {
        SystemCameraKind neededSystemCameraKind = it.second.front();
        for (auto foundSystemCamera : it.second) {
            ASSERT_EQ(neededSystemCameraKind, foundSystemCamera);
        }
    }
}

// Verify that the static camera characteristics can be retrieved
// successfully.
TEST_P(CameraAidlTest, getCameraCharacteristics) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("getCameraCharacteristics: Testing camera device %s", name.c_str());
        ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        CameraMetadata chars;
        ret = device->getCameraCharacteristics(&chars);
        ASSERT_TRUE(ret.isOk());
        verifyCameraCharacteristics(chars);
        verifyMonochromeCharacteristics(chars);
        verifyRecommendedConfigs(chars);
        verifyHighSpeedRecordingCharacteristics(name, chars);
        verifyLogicalOrUltraHighResCameraMetadata(name, device, chars, cameraDeviceNames);

        ASSERT_TRUE(ret.isOk());

        // getPhysicalCameraCharacteristics will fail for publicly
        // advertised camera IDs.
        std::string version, cameraId;
        ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &cameraId));
        CameraMetadata devChars;
        ret = device->getPhysicalCameraCharacteristics(cameraId, &devChars);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
        ASSERT_EQ(0, devChars.metadata.size());
    }
}

// Verify that the torch strength level can be set and retrieved successfully.
TEST_P(CameraAidlTest, turnOnTorchWithStrengthLevel) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this);
    ndk::ScopedAStatus ret = mProvider->setCallback(cb);
    ASSERT_TRUE(ret.isOk());

    for (const auto& name : cameraDeviceNames) {
        int32_t defaultLevel;
        std::shared_ptr<ICameraDevice> device;
        ALOGI("%s: Testing camera device %s", __FUNCTION__, name.c_str());

        ret = mProvider->getCameraDeviceInterface(name, &device);
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        CameraMetadata chars;
        ret = device->getCameraCharacteristics(&chars);
        ASSERT_TRUE(ret.isOk());

        const camera_metadata_t* staticMeta =
                reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());
        bool torchStrengthControlSupported = isTorchStrengthControlSupported(staticMeta);
        camera_metadata_ro_entry entry;
        int rc = find_camera_metadata_ro_entry(staticMeta,
                                               ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL, &entry);
        if (torchStrengthControlSupported) {
            ASSERT_EQ(rc, 0);
            ASSERT_GT(entry.count, 0);
            defaultLevel = *entry.data.i32;
            ALOGI("Default level is:%d", defaultLevel);
        }

        mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
        ret = device->turnOnTorchWithStrengthLevel(2);
        ALOGI("turnOnTorchWithStrengthLevel returns status: %d", ret.getServiceSpecificError());
        // OPERATION_NOT_SUPPORTED check
        if (!torchStrengthControlSupported) {
            ALOGI("Torch strength control not supported.");
            ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED),
                      ret.getServiceSpecificError());
        } else {
            {
                ASSERT_TRUE(ret.isOk());
                std::unique_lock<std::mutex> l(mTorchLock);
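                // Wait (up to kTorchTimeoutSec) for TorchProviderCb to report that the torch
                // turned on before querying the strength level.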
                while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kTorchTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
                }
                ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
                mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
            }
            ALOGI("getTorchStrengthLevel: Testing");
            int32_t strengthLevel;
            ret = device->getTorchStrengthLevel(&strengthLevel);
            ASSERT_TRUE(ret.isOk());
            ALOGI("Torch strength level is : %d", strengthLevel);
            ASSERT_EQ(strengthLevel, 2);

            // Turn OFF the torch and verify torch strength level is reset to default level.
            ALOGI("Testing torch strength level reset after turning the torch OFF.");
            ret = device->setTorchMode(false);
            ASSERT_TRUE(ret.isOk());
            {
                std::unique_lock<std::mutex> l(mTorchLock);
                while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kTorchTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
                }
                ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
            }

            ret = device->getTorchStrengthLevel(&strengthLevel);
            ASSERT_TRUE(ret.isOk());
            ALOGI("Torch strength level after turning OFF torch is : %d", strengthLevel);
            ASSERT_EQ(strengthLevel, defaultLevel);
        }
    }
}

// In case it is supported verify that torch can be enabled.
// Check for corresponding torch callbacks as well.
TEST_P(CameraAidlTest, setTorchMode) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this);
    ndk::ScopedAStatus ret = mProvider->setCallback(cb);
    ALOGI("setCallback returns status: %d", ret.getServiceSpecificError());
    ASSERT_TRUE(ret.isOk());
    ASSERT_NE(cb, nullptr);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("setTorchMode: Testing camera device %s", name.c_str());
        ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        CameraMetadata metadata;
        ret = device->getCameraCharacteristics(&metadata);
        ALOGI("getCameraCharacteristics returns status:%d", ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        camera_metadata_t* staticMeta =
                reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
        bool torchSupported = isTorchSupported(staticMeta);

        mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
        ret = device->setTorchMode(true);
        ALOGI("setTorchMode returns status: %d", ret.getServiceSpecificError());
        if (!torchSupported) {
            ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED),
                      ret.getServiceSpecificError());
        } else {
            ASSERT_TRUE(ret.isOk());
            {
                std::unique_lock<std::mutex> l(mTorchLock);
                while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kTorchTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
                }
                ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
                mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
            }

            ret = device->setTorchMode(false);
            ASSERT_TRUE(ret.isOk());
            {
                std::unique_lock<std::mutex> l(mTorchLock);
                while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kTorchTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
                }
                ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
            }
        }
    }
}

// Check dump functionality.
TEST_P(CameraAidlTest, dump) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("dump: Testing camera device %s", name.c_str());

        ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        int raw_handle = open(kDumpOutput, O_RDWR);
        ASSERT_GE(raw_handle, 0);

        auto retStatus = device->dump(raw_handle, nullptr, 0);
        ASSERT_EQ(retStatus, ::android::OK);
        close(raw_handle);
    }
}

// Open, dump, then close
TEST_P(CameraAidlTest, openClose) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("openClose: Testing camera device %s", name.c_str());
        ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();

        ret = device->open(cb, &mSession);
        ASSERT_TRUE(ret.isOk());
        ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_NE(mSession, nullptr);
        int raw_handle = open(kDumpOutput, O_RDWR);
        ASSERT_GE(raw_handle, 0);

        auto retStatus = device->dump(raw_handle, nullptr, 0);
        ASSERT_EQ(retStatus, ::android::OK);
        close(raw_handle);

        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
        // TODO: test all session API calls return INTERNAL_ERROR after close
        // TODO: keep a wp copy here and verify session cannot be promoted out of this scope
    }
}

// Check whether all common default request settings can be successfully
// constructed.
TEST_P(CameraAidlTest, constructDefaultRequestSettings) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("constructDefaultRequestSettings: Testing camera device %s", name.c_str());
        ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        int32_t interfaceVersion;
        ret = device->getInterfaceVersion(&interfaceVersion);
        ASSERT_TRUE(ret.isOk());
        bool supportFeatureCombinationQuery =
                (interfaceVersion >= CAMERA_DEVICE_API_MINOR_VERSION_3);

        std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
        ret = device->open(cb, &mSession);
        ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(mSession, nullptr);

        for (int32_t t = (int32_t)RequestTemplate::PREVIEW; t <= (int32_t)RequestTemplate::MANUAL;
             t++) {
            RequestTemplate reqTemplate = (RequestTemplate)t;
            CameraMetadata rawMetadata;
            ret = mSession->constructDefaultRequestSettings(reqTemplate, &rawMetadata);
            ALOGI("constructDefaultRequestSettings returns status:%d:%d", ret.getExceptionCode(),
                  ret.getServiceSpecificError());

            if (reqTemplate == RequestTemplate::ZERO_SHUTTER_LAG ||
                reqTemplate == RequestTemplate::MANUAL) {
                // optional templates
                ASSERT_TRUE(ret.isOk() || static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
                                                  ret.getServiceSpecificError());
            } else {
                ASSERT_TRUE(ret.isOk());
            }

            if (ret.isOk()) {
                const camera_metadata_t* metadata = (camera_metadata_t*)rawMetadata.metadata.data();
                size_t expectedSize = rawMetadata.metadata.size();
                int result = validate_camera_metadata_structure(metadata, &expectedSize);
                ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
                verifyRequestTemplate(metadata, reqTemplate);
            } else {
                ASSERT_EQ(0u, rawMetadata.metadata.size());
            }

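            // For HALs at interface version 3 or newer, the same default settings should also
            // be retrievable directly from ICameraDevice, without an open session (guarded by
            // the feature_combination_query flag below).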
            if (flags::feature_combination_query()) {
                if (supportFeatureCombinationQuery) {
                    CameraMetadata rawMetadata2;
                    ndk::ScopedAStatus ret2 =
                            device->constructDefaultRequestSettings(reqTemplate, &rawMetadata2);

                    // TODO: Do not allow OPERATION_NOT_SUPPORTED once HAL
                    // implementation is in place.
                    if (static_cast<Status>(ret2.getServiceSpecificError()) !=
                        Status::OPERATION_NOT_SUPPORTED) {
                        ASSERT_EQ(ret.isOk(), ret2.isOk());
                        ASSERT_EQ(ret.getStatus(), ret2.getStatus());

                        ASSERT_EQ(rawMetadata.metadata.size(), rawMetadata2.metadata.size());
                        if (ret2.isOk()) {
                            const camera_metadata_t* metadata =
                                    (camera_metadata_t*)rawMetadata2.metadata.data();
                            size_t expectedSize = rawMetadata2.metadata.size();
                            int result =
                                    validate_camera_metadata_structure(metadata, &expectedSize);
                            ASSERT_TRUE((result == 0) ||
                                        (result == CAMERA_METADATA_VALIDATION_SHIFTED));
                            verifyRequestTemplate(metadata, reqTemplate);
                        }
                    }
                }
            }
        }
        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// Verify that all supported stream formats and sizes can be configured
// successfully.
TEST_P(CameraAidlTest, configureStreamsAvailableOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputStreams;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> device;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/, &device /*out*/);

        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
        outputStreams.clear();
        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
        ASSERT_NE(0u, outputStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        int32_t streamConfigCounter = 0;
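        // Configure each advertised output size/format as a single-stream session and expect
        // exactly one matching HalStream back from the HAL.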
        for (auto& it : outputStreams) {
            Stream stream;
            Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format));
            stream.id = streamId;
            stream.streamType = StreamType::OUTPUT;
            stream.width = it.width;
            stream.height = it.height;
            stream.format = static_cast<PixelFormat>(it.format);
            stream.dataSpace = dataspace;
            stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                    GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
            stream.rotation = StreamRotation::ROTATION_0;
            stream.dynamicRangeProfile = RequestAvailableDynamicRangeProfilesMap::
                    ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
            stream.useCase = ScalerAvailableStreamUseCases::
                    ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
            stream.colorSpace = static_cast<int>(
                    RequestAvailableColorSpaceProfilesMap::
                            ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);

            std::vector<Stream> streams = {stream};
            StreamConfiguration config;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);

            verifyStreamCombination(device, config, /*expectedStatus*/ true);

            config.streamConfigCounter = streamConfigCounter++;
            std::vector<HalStream> halConfigs;
            ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
            ASSERT_TRUE(ret.isOk());
            ASSERT_EQ(halConfigs.size(), 1);
            ASSERT_EQ(halConfigs[0].id, streamId);

            streamId++;
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// Verify that mandatory concurrent streams and outputs are supported.
TEST_P(CameraAidlTest, configureConcurrentStreamsAvailableOutputs) {
    struct CameraTestInfo {
        CameraMetadata staticMeta;
        std::shared_ptr<ICameraDeviceSession> session;
        std::shared_ptr<ICameraDevice> cameraDevice;
        StreamConfiguration config;
    };

    std::map<std::string, std::string> idToNameMap = getCameraDeviceIdToNameMap(mProvider);
    std::vector<ConcurrentCameraIdCombination> concurrentDeviceCombinations =
            getConcurrentDeviceCombinations(mProvider);
    std::vector<AvailableStream> outputStreams;
    for (const auto& cameraDeviceIds : concurrentDeviceCombinations) {
        std::vector<CameraIdAndStreamCombination> cameraIdsAndStreamCombinations;
        std::vector<CameraTestInfo> cameraTestInfos;
        size_t i = 0;
        for (const auto& id : cameraDeviceIds.combination) {
            CameraTestInfo cti;
            auto it = idToNameMap.find(id);
            ASSERT_TRUE(idToNameMap.end() != it);
            std::string name = it->second;

            openEmptyDeviceSession(name, mProvider, &cti.session /*out*/, &cti.staticMeta /*out*/,
                                   &cti.cameraDevice /*out*/);

            outputStreams.clear();
            camera_metadata_t* staticMeta =
                    reinterpret_cast<camera_metadata_t*>(cti.staticMeta.metadata.data());
            ASSERT_EQ(Status::OK, getMandatoryConcurrentStreams(staticMeta, &outputStreams));
            ASSERT_NE(0u, outputStreams.size());

            int32_t jpegBufferSize = 0;
            ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
            ASSERT_NE(0u, jpegBufferSize);

            int32_t streamId = 0;
            std::vector<Stream> streams(outputStreams.size());
            size_t j = 0;
            for (const auto& s : outputStreams) {
                Stream stream;
                Dataspace dataspace = getDataspace(static_cast<PixelFormat>(s.format));
                stream.id = streamId++;
                stream.streamType = StreamType::OUTPUT;
                stream.width = s.width;
                stream.height = s.height;
                stream.format = static_cast<PixelFormat>(s.format);
                stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                        GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
                stream.dataSpace = dataspace;
                stream.rotation = StreamRotation::ROTATION_0;
                stream.sensorPixelModesUsed = {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT};
                stream.dynamicRangeProfile = RequestAvailableDynamicRangeProfilesMap::
                        ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
                streams[j] = stream;
                j++;
            }

            // Add the created stream configs to cameraIdsAndStreamCombinations
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &cti.config,
                                      jpegBufferSize);

            cti.config.streamConfigCounter = outputStreams.size();
            CameraIdAndStreamCombination cameraIdAndStreamCombination;
            cameraIdAndStreamCombination.cameraId = id;
            cameraIdAndStreamCombination.streamConfiguration = cti.config;
            cameraIdsAndStreamCombinations.push_back(cameraIdAndStreamCombination);
            i++;
            cameraTestInfos.push_back(cti);
        }
        // Now verify that concurrent streams are supported
        bool combinationSupported;
        ndk::ScopedAStatus ret = mProvider->isConcurrentStreamCombinationSupported(
                cameraIdsAndStreamCombinations, &combinationSupported);
        ASSERT_TRUE(ret.isOk());
        ASSERT_EQ(combinationSupported, true);

        // Test the stream can actually be configured
        for (auto& cti : cameraTestInfos) {
            if (cti.session != nullptr) {
                verifyStreamCombination(cti.cameraDevice, cti.config, /*expectedStatus*/ true);
            }

            if (cti.session != nullptr) {
                std::vector<HalStream> streamConfigs;
                ret = cti.session->configureStreams(cti.config, &streamConfigs);
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(cti.config.streams.size(), streamConfigs.size());
            }
        }

        for (auto& cti : cameraTestInfos) {
            ret = cti.session->close();
            ASSERT_TRUE(ret.isOk());
        }
    }
}

// Check for correct handling of invalid/incorrect configuration parameters.
TEST_P(CameraAidlTest, configureStreamsInvalidOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputStreams;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
        outputStreams.clear();

        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
        ASSERT_NE(0u, outputStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

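        // Try several intentionally invalid single-stream configurations in turn: a zero-sized
        // output, an oversized output, an undefined pixel format, and an undefined rotation.
        // Each must be rejected (ILLEGAL_ARGUMENT, or INTERNAL_ERROR for the zero-sized case).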
        int32_t streamId = 0;
        Stream stream = {streamId++,
                         StreamType::OUTPUT,
                         static_cast<uint32_t>(0),
                         static_cast<uint32_t>(0),
                         static_cast<PixelFormat>(outputStreams[0].format),
                         static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                         Dataspace::UNKNOWN,
                         StreamRotation::ROTATION_0,
                         std::string(),
                         jpegBufferSize,
                         -1,
                         {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                         RequestAvailableDynamicRangeProfilesMap::
                                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        int32_t streamConfigCounter = 0;
        std::vector<Stream> streams = {stream};
        StreamConfiguration config;
        createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                  jpegBufferSize);

        verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ false);

        config.streamConfigCounter = streamConfigCounter++;
        std::vector<HalStream> halConfigs;
        ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
                            ret.getServiceSpecificError() ||
                    static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError());

        stream = {streamId++,
                  StreamType::OUTPUT,
                  /*width*/ INT32_MAX,
                  /*height*/ INT32_MAX,
                  static_cast<PixelFormat>(outputStreams[0].format),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  jpegBufferSize,
                  -1,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                  jpegBufferSize);

        config.streamConfigCounter = streamConfigCounter++;
        halConfigs.clear();
        ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());

        for (auto& it : outputStreams) {
            stream = {streamId++,
                      StreamType::OUTPUT,
                      it.width,
                      it.height,
                      static_cast<PixelFormat>(UINT32_MAX),
                      static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                              GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                      Dataspace::UNKNOWN,
                      StreamRotation::ROTATION_0,
                      std::string(),
                      jpegBufferSize,
                      -1,
                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                      RequestAvailableDynamicRangeProfilesMap::
                              ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

            streams[0] = stream;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);
            config.streamConfigCounter = streamConfigCounter++;
            halConfigs.clear();
            ret = mSession->configureStreams(config, &halConfigs);
            ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
                      ret.getServiceSpecificError());

            stream = {streamId++,
                      StreamType::OUTPUT,
                      it.width,
                      it.height,
                      static_cast<PixelFormat>(it.format),
                      static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                              GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                      Dataspace::UNKNOWN,
                      static_cast<StreamRotation>(UINT32_MAX),
                      std::string(),
                      jpegBufferSize,
                      -1,
                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                      RequestAvailableDynamicRangeProfilesMap::
                              ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

            streams[0] = stream;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);

            config.streamConfigCounter = streamConfigCounter++;
            halConfigs.clear();
            ret = mSession->configureStreams(config, &halConfigs);
            ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
                      ret.getServiceSpecificError());
        }

        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// Check whether all supported ZSL output stream combinations can be
// configured successfully.
TEST_P(CameraAidlTest, configureStreamsZSLInputOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> inputStreams;
    std::vector<AvailableZSLInputOutput> inputOutputMap;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        Status rc = isZSLModeAvailable(staticMeta);
        if (Status::OPERATION_NOT_SUPPORTED == rc) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }
        ASSERT_EQ(Status::OK, rc);

        inputStreams.clear();
        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, inputStreams));
        ASSERT_NE(0u, inputStreams.size());

        inputOutputMap.clear();
        ASSERT_EQ(Status::OK, getZSLInputOutputMap(staticMeta, inputOutputMap));
        ASSERT_NE(0u, inputOutputMap.size());

        bool supportMonoY8 = false;
        if (Status::OK == isMonochromeCamera(staticMeta)) {
            for (auto& it : inputStreams) {
                if (it.format == static_cast<uint32_t>(PixelFormat::Y8)) {
                    supportMonoY8 = true;
                    break;
                }
            }
        }

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
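        // Track which monochrome (Y8) reprocessing paths the HAL advertises so they can be
        // cross-checked against the reported ZSL reprocessing capabilities below.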
        bool hasPrivToY8 = false, hasY8ToY8 = false, hasY8ToBlob = false;
        uint32_t streamConfigCounter = 0;
        for (auto& inputIter : inputOutputMap) {
            AvailableStream input;
            ASSERT_EQ(Status::OK, findLargestSize(inputStreams, inputIter.inputFormat, input));
            ASSERT_NE(0u, inputStreams.size());

            if (inputIter.inputFormat ==
                        static_cast<uint32_t>(PixelFormat::IMPLEMENTATION_DEFINED) &&
                inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
                hasPrivToY8 = true;
            } else if (inputIter.inputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
                if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::BLOB)) {
                    hasY8ToBlob = true;
                } else if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
                    hasY8ToY8 = true;
                }
            }
            AvailableStream outputThreshold = {INT32_MAX, INT32_MAX, inputIter.outputFormat};
            std::vector<AvailableStream> outputStreams;
            ASSERT_EQ(Status::OK,
                      getAvailableOutputStreams(staticMeta, outputStreams, &outputThreshold));
            for (auto& outputIter : outputStreams) {
                Dataspace outputDataSpace =
                        getDataspace(static_cast<PixelFormat>(outputIter.format));
                Stream zslStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        input.width,
                        input.height,
                        static_cast<PixelFormat>(input.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC_USAGE_HW_CAMERA_ZSL),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                Stream inputStream = {
                        streamId++,
                        StreamType::INPUT,
                        input.width,
                        input.height,
                        static_cast<PixelFormat>(input.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(0),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                Stream outputStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        outputIter.width,
                        outputIter.height,
                        static_cast<PixelFormat>(outputIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                        outputDataSpace,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

                std::vector<Stream> streams = {inputStream, zslStream, outputStream};

                StreamConfiguration config;
                createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                          jpegBufferSize);

                verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true);

                config.streamConfigCounter = streamConfigCounter++;
                std::vector<HalStream> halConfigs;
                ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(3u, halConfigs.size());
            }
        }

        if (supportMonoY8) {
            if (Status::OK == isZSLModeAvailable(staticMeta, PRIV_REPROCESS)) {
                ASSERT_TRUE(hasPrivToY8);
            }
            if (Status::OK == isZSLModeAvailable(staticMeta, YUV_REPROCESS)) {
                ASSERT_TRUE(hasY8ToY8);
                ASSERT_TRUE(hasY8ToBlob);
            }
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// Check whether session parameters are supported. If Hal support for them
// exist, then try to configure a preview stream using them.
TEST_P(CameraAidlTest, configureStreamsWithSessionParameters) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputPreviewStreams;
    AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
                                        static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;

        std::shared_ptr<ICameraDevice> unusedCameraDevice;
        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &unusedCameraDevice /*out*/);
        camera_metadata_t* staticMetaBuffer =
                reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        std::unordered_set<int32_t> availableSessionKeys;
        auto rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
                                   &availableSessionKeys);
        ASSERT_TRUE(Status::OK == rc);
        if (availableSessionKeys.empty()) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings;
        android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams,
                modifiedSessionParams;
        constructFilteredSettings(mSession, availableSessionKeys, RequestTemplate::PREVIEW,
                                  &previewRequestSettings, &sessionParams);
        if (sessionParams.isEmpty()) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        outputPreviewStreams.clear();

        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams,
                                                        &previewThreshold));
        ASSERT_NE(0u, outputPreviewStreams.size());

        Stream previewStream = {
                0,
                StreamType::OUTPUT,
                outputPreviewStreams[0].width,
                outputPreviewStreams[0].height,
                static_cast<PixelFormat>(outputPreviewStreams[0].format),
                static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                        GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                Dataspace::UNKNOWN,
                StreamRotation::ROTATION_0,
                std::string(),
                /*bufferSize*/ 0,
                /*groupId*/ -1,
                {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                RequestAvailableDynamicRangeProfilesMap::
                        ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

        std::vector<Stream> streams = {previewStream};
        StreamConfiguration config;

        config.streams = streams;
        config.operationMode = StreamConfigurationMode::NORMAL_MODE;
        modifiedSessionParams = sessionParams;
        auto sessionParamsBuffer = sessionParams.release();
        std::vector<uint8_t> rawSessionParam =
                std::vector(reinterpret_cast<uint8_t*>(sessionParamsBuffer),
                            reinterpret_cast<uint8_t*>(sessionParamsBuffer) +
                                    get_camera_metadata_size(sessionParamsBuffer));

        config.sessionParams.metadata = rawSessionParam;
        config.streamConfigCounter = 0;
        config.streams = {previewStream};
        config.streamConfigCounter = 0;
        config.multiResolutionInputImage = false;

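        // Remove one available session key to build a modified parameter set, then query
        // whether switching to it would require a full stream reconfiguration.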
        bool newSessionParamsAvailable = false;
        for (const auto& it : availableSessionKeys) {
            if (modifiedSessionParams.exists(it)) {
                modifiedSessionParams.erase(it);
                newSessionParamsAvailable = true;
                break;
            }
        }
        if (newSessionParamsAvailable) {
            auto modifiedSessionParamsBuffer = modifiedSessionParams.release();
            verifySessionReconfigurationQuery(mSession, sessionParamsBuffer,
                                              modifiedSessionParamsBuffer);
            modifiedSessionParams.acquire(modifiedSessionParamsBuffer);
        }

        std::vector<HalStream> halConfigs;
        ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_TRUE(ret.isOk());
        ASSERT_EQ(1u, halConfigs.size());

        sessionParams.acquire(sessionParamsBuffer);
        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// Verify that all supported preview + still capture stream combinations
// can be configured successfully.
TEST_P(CameraAidlTest, configureStreamsPreviewStillOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputBlobStreams;
    std::vector<AvailableStream> outputPreviewStreams;
    AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
                                        static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
    AvailableStream blobThreshold = {INT32_MAX, INT32_MAX, static_cast<int32_t>(PixelFormat::BLOB)};

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;

        std::shared_ptr<ICameraDevice> cameraDevice;
        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);

        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        // Check if camera support depth only
        if (isDepthOnly(staticMeta)) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        outputBlobStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold));
        ASSERT_NE(0u, outputBlobStreams.size());

        outputPreviewStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputPreviewStreams, &previewThreshold));
        ASSERT_NE(0u, outputPreviewStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        uint32_t streamConfigCounter = 0;

        for (auto& blobIter : outputBlobStreams) {
            for (auto& previewIter : outputPreviewStreams) {
                Stream previewStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        previewIter.width,
                        previewIter.height,
                        static_cast<PixelFormat>(previewIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        /*bufferSize*/ 0,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                Stream blobStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        blobIter.width,
                        blobIter.height,
                        static_cast<PixelFormat>(blobIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_CPU_READ),
                        Dataspace::JFIF,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        /*bufferSize*/ 0,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                std::vector<Stream> streams = {previewStream, blobStream};
                StreamConfiguration config;

                createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                          jpegBufferSize);
                config.streamConfigCounter = streamConfigCounter++;
                verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true);

                std::vector<HalStream> halConfigs;
                ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(2u, halConfigs.size());
            }
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// In case constrained mode is supported, test whether it can be
// configured. Additionally check for common invalid inputs when
// using this mode.
TEST_P(CameraAidlTest, configureStreamsConstrainedOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        Status rc = isConstrainedModeAvailable(staticMeta);
        if (Status::OPERATION_NOT_SUPPORTED == rc) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }
        ASSERT_EQ(Status::OK, rc);

        AvailableStream hfrStream;
        rc = pickConstrainedModeSize(staticMeta, hfrStream);
        ASSERT_EQ(Status::OK, rc);

        int32_t streamId = 0;
        uint32_t streamConfigCounter = 0;
        Stream stream = {streamId,
                         StreamType::OUTPUT,
                         hfrStream.width,
                         hfrStream.height,
                         static_cast<PixelFormat>(hfrStream.format),
                         static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                         Dataspace::UNKNOWN,
                         StreamRotation::ROTATION_0,
                         std::string(),
                         /*bufferSize*/ 0,
                         /*groupId*/ -1,
                         {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                         RequestAvailableDynamicRangeProfilesMap::
                                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        std::vector<Stream> streams = {stream};
        StreamConfiguration config;
        createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
                                  &config);

        verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true);

        config.streamConfigCounter = streamConfigCounter++;
        std::vector<HalStream> halConfigs;
        ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_TRUE(ret.isOk());
        ASSERT_EQ(1u, halConfigs.size());
        ASSERT_EQ(halConfigs[0].id, streamId);

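        // Repeat with intentionally invalid variants (zero-sized, oversized, and undefined
        // pixel format); all of them must be rejected.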
1295 stream = {streamId++,
1296 StreamType::OUTPUT,
1297 static_cast<uint32_t>(0),
1298 static_cast<uint32_t>(0),
1299 static_cast<PixelFormat>(hfrStream.format),
1300 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1301 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1302 Dataspace::UNKNOWN,
1303 StreamRotation::ROTATION_0,
1304 std::string(),
1305 /*bufferSize*/ 0,
1306 /*groupId*/ -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001307 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1308 RequestAvailableDynamicRangeProfilesMap::
1309 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08001310 streams[0] = stream;
1311 createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
1312 &config);
1313
1314 config.streamConfigCounter = streamConfigCounter++;
1315 std::vector<HalStream> halConfig;
1316 ret = mSession->configureStreams(config, &halConfig);
1317 ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
1318 ret.getServiceSpecificError() ||
1319 static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError());
1320
1321 stream = {streamId++,
1322 StreamType::OUTPUT,
1323 INT32_MAX,
1324 INT32_MAX,
1325 static_cast<PixelFormat>(hfrStream.format),
1326 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1327 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1328 Dataspace::UNKNOWN,
1329 StreamRotation::ROTATION_0,
1330 std::string(),
1331 /*bufferSize*/ 0,
1332 /*groupId*/ -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001333 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1334 RequestAvailableDynamicRangeProfilesMap::
1335 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08001336 streams[0] = stream;
1337 createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
1338 &config);
1339
1340 config.streamConfigCounter = streamConfigCounter++;
1341 halConfigs.clear();
1342 ret = mSession->configureStreams(config, &halConfigs);
1343 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
1344
1345 stream = {streamId++,
1346 StreamType::OUTPUT,
1347 hfrStream.width,
1348 hfrStream.height,
1349 static_cast<PixelFormat>(UINT32_MAX),
1350 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1351 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1352 Dataspace::UNKNOWN,
1353 StreamRotation::ROTATION_0,
1354 std::string(),
1355 /*bufferSize*/ 0,
1356 /*groupId*/ -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001357 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1358 RequestAvailableDynamicRangeProfilesMap::
1359 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08001360 streams[0] = stream;
1361 createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
1362 &config);
1363
1364 config.streamConfigCounter = streamConfigCounter++;
1365 halConfigs.clear();
1366 ret = mSession->configureStreams(config, &halConfigs);
1367 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
1368
1369 ret = mSession->close();
1370 mSession = nullptr;
1371 ASSERT_TRUE(ret.isOk());
1372 }
1373}
1374
1375// Verify that all supported video + snapshot stream combinations can
1376// be configured successfully.
1377TEST_P(CameraAidlTest, configureStreamsVideoStillOutputs) {
1378 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1379 std::vector<AvailableStream> outputBlobStreams;
1380 std::vector<AvailableStream> outputVideoStreams;
1381 AvailableStream videoThreshold = {kMaxVideoWidth, kMaxVideoHeight,
1382 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
1383 AvailableStream blobThreshold = {kMaxVideoWidth, kMaxVideoHeight,
1384 static_cast<int32_t>(PixelFormat::BLOB)};
1385
1386 for (const auto& name : cameraDeviceNames) {
1387 CameraMetadata meta;
1388 std::shared_ptr<ICameraDevice> cameraDevice;
1389
1390 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
1391 &cameraDevice /*out*/);
1392
1393 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1394
1395        // Check whether the camera supports only depth output
1396 if (isDepthOnly(staticMeta)) {
1397 ndk::ScopedAStatus ret = mSession->close();
1398 mSession = nullptr;
1399 ASSERT_TRUE(ret.isOk());
1400 continue;
1401 }
1402
1403 outputBlobStreams.clear();
1404 ASSERT_EQ(Status::OK,
1405 getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold));
1406 ASSERT_NE(0u, outputBlobStreams.size());
1407
1408 outputVideoStreams.clear();
1409 ASSERT_EQ(Status::OK,
1410 getAvailableOutputStreams(staticMeta, outputVideoStreams, &videoThreshold));
1411 ASSERT_NE(0u, outputVideoStreams.size());
1412
1413 int32_t jpegBufferSize = 0;
1414 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
1415 ASSERT_NE(0u, jpegBufferSize);
1416
1417 int32_t streamId = 0;
1418 uint32_t streamConfigCounter = 0;
1419 for (auto& blobIter : outputBlobStreams) {
1420 for (auto& videoIter : outputVideoStreams) {
1421 Stream videoStream = {
1422 streamId++,
1423 StreamType::OUTPUT,
1424 videoIter.width,
1425 videoIter.height,
1426 static_cast<PixelFormat>(videoIter.format),
1427 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1428 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1429 Dataspace::UNKNOWN,
1430 StreamRotation::ROTATION_0,
1431 std::string(),
1432 jpegBufferSize,
1433 /*groupId*/ -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001434 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1435 RequestAvailableDynamicRangeProfilesMap::
1436 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08001437 Stream blobStream = {
1438 streamId++,
1439 StreamType::OUTPUT,
1440 blobIter.width,
1441 blobIter.height,
1442 static_cast<PixelFormat>(blobIter.format),
1443 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1444 GRALLOC1_CONSUMER_USAGE_CPU_READ),
1445 Dataspace::JFIF,
1446 StreamRotation::ROTATION_0,
1447 std::string(),
1448 jpegBufferSize,
1449 /*groupId*/ -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001450 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1451 RequestAvailableDynamicRangeProfilesMap::
1452 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08001453 std::vector<Stream> streams = {videoStream, blobStream};
1454 StreamConfiguration config;
1455
1456 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
1457 jpegBufferSize);
Shuzhen Wangdf89cb92023-11-09 18:24:42 -08001458 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001459
1460 config.streamConfigCounter = streamConfigCounter++;
1461 std::vector<HalStream> halConfigs;
1462 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1463 ASSERT_TRUE(ret.isOk());
1464 ASSERT_EQ(2u, halConfigs.size());
1465 }
1466 }
1467
1468 ndk::ScopedAStatus ret = mSession->close();
1469 mSession = nullptr;
1470 ASSERT_TRUE(ret.isOk());
1471 }
1472}
1473
1474// Generate and verify a camera capture request
1475TEST_P(CameraAidlTest, processCaptureRequestPreview) {
1476 // TODO(b/220897574): Failing with BUFFER_ERROR
1477 processCaptureRequestInternal(GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, RequestTemplate::PREVIEW,
1478 false /*secureOnlyCameras*/);
1479}
1480
1481// Generate and verify a secure camera capture request
1482TEST_P(CameraAidlTest, processSecureCaptureRequest) {
1483 processCaptureRequestInternal(GRALLOC1_PRODUCER_USAGE_PROTECTED, RequestTemplate::STILL_CAPTURE,
1484 true /*secureOnlyCameras*/);
1485}
1486
1487TEST_P(CameraAidlTest, processCaptureRequestPreviewStabilization) {
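    // Run the same preview capture sequence with stabilization off and then on,
    // sharing the per-device time lag map between the two runs.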
1488 std::unordered_map<std::string, nsecs_t> cameraDeviceToTimeLag;
1489 processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ false,
1490 cameraDeviceToTimeLag);
1491 processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ true,
1492 cameraDeviceToTimeLag);
1493}
1494
1495// Generate and verify a multi-camera capture request
1496TEST_P(CameraAidlTest, processMultiCaptureRequestPreview) {
1497 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1498 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
1499 static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
1500 int64_t bufferId = 1;
1501 uint32_t frameNumber = 1;
1502 std::vector<uint8_t> settings;
1503 std::vector<uint8_t> emptySettings;
1504 std::string invalidPhysicalId = "-1";
1505
1506 for (const auto& name : cameraDeviceNames) {
1507 std::string version, deviceId;
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +00001508 ALOGI("processMultiCaptureRequestPreview: Test device %s", name.c_str());
Avichal Rakesh362242f2022-02-08 12:40:53 -08001509 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1510 CameraMetadata metadata;
1511
1512 std::shared_ptr<ICameraDevice> unusedDevice;
1513 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &metadata /*out*/,
1514 &unusedDevice /*out*/);
1515
1516 camera_metadata_t* staticMeta =
1517 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
1518 Status rc = isLogicalMultiCamera(staticMeta);
1519 if (Status::OPERATION_NOT_SUPPORTED == rc) {
1520 ndk::ScopedAStatus ret = mSession->close();
1521 mSession = nullptr;
1522 ASSERT_TRUE(ret.isOk());
1523 continue;
1524 }
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +00001525 ASSERT_EQ(Status::OK, rc);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001526
1527 std::unordered_set<std::string> physicalIds;
1528 rc = getPhysicalCameraIds(staticMeta, &physicalIds);
1529 ASSERT_TRUE(Status::OK == rc);
1530 ASSERT_TRUE(physicalIds.size() > 1);
1531
1532 std::unordered_set<int32_t> physicalRequestKeyIDs;
1533 rc = getSupportedKeys(staticMeta, ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS,
1534 &physicalRequestKeyIDs);
1535 ASSERT_TRUE(Status::OK == rc);
1536 if (physicalRequestKeyIDs.empty()) {
1537 ndk::ScopedAStatus ret = mSession->close();
1538 mSession = nullptr;
1539 ASSERT_TRUE(ret.isOk());
1540 // The logical camera doesn't support any individual physical requests.
1541 continue;
1542 }
1543
1544 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultPreviewSettings;
1545 android::hardware::camera::common::V1_0::helper::CameraMetadata filteredSettings;
1546 constructFilteredSettings(mSession, physicalRequestKeyIDs, RequestTemplate::PREVIEW,
1547 &defaultPreviewSettings, &filteredSettings);
1548 if (filteredSettings.isEmpty()) {
1549 // No physical device settings in default request.
1550 ndk::ScopedAStatus ret = mSession->close();
1551 mSession = nullptr;
1552 ASSERT_TRUE(ret.isOk());
1553 continue;
1554 }
1555
1556 const camera_metadata_t* settingsBuffer = defaultPreviewSettings.getAndLock();
1557 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1558 settings.assign(rawSettingsBuffer,
1559 rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1560 CameraMetadata settingsMetadata = {settings};
1561 overrideRotateAndCrop(&settingsMetadata);
1562
1563 ndk::ScopedAStatus ret = mSession->close();
1564 mSession = nullptr;
1565 ASSERT_TRUE(ret.isOk());
1566
1567 // Leave only 2 physical devices in the id set.
1568 auto it = physicalIds.begin();
1569 std::string physicalDeviceId = *it;
1570 it++;
1571 physicalIds.erase(++it, physicalIds.end());
1572 ASSERT_EQ(physicalIds.size(), 2u);
1573
1574 std::vector<HalStream> halStreams;
1575 bool supportsPartialResults = false;
1576 bool useHalBufManager = false;
1577 int32_t partialResultCount = 0;
1578 Stream previewStream;
1579 std::shared_ptr<DeviceCb> cb;
1580
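        // Configure a preview stream that targets both remaining physical camera ids.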
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +00001581 configurePreviewStreams(
1582 name, mProvider, &previewThreshold, physicalIds, &mSession, &previewStream,
1583 &halStreams /*out*/, &supportsPartialResults /*out*/, &partialResultCount /*out*/,
1584 &useHalBufManager /*out*/, &cb /*out*/, 0 /*streamConfigCounter*/, true);
1585 if (mSession == nullptr) {
1586 // stream combination not supported by HAL, skip test for device
1587 continue;
1588 }
Avichal Rakesh362242f2022-02-08 12:40:53 -08001589
1590 ::aidl::android::hardware::common::fmq::MQDescriptor<
1591 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1592 descriptor;
1593 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1594 ASSERT_TRUE(resultQueueRet.isOk());
1595 std::shared_ptr<ResultMetadataQueue> resultQueue =
1596 std::make_shared<ResultMetadataQueue>(descriptor);
1597 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1598            ALOGE("%s: HAL returns an empty result metadata fmq, not using it", __func__);
1599 resultQueue = nullptr;
1600 // Don't use the queue onwards.
1601 }
1602
1603 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1604 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1605 partialResultCount, physicalIds, resultQueue);
1606
1607 std::vector<CaptureRequest> requests(1);
1608 CaptureRequest& request = requests[0];
1609 request.frameNumber = frameNumber;
1610 request.fmqSettingsSize = 0;
Emilian Peev3d919f92022-04-20 13:50:59 -07001611 request.settings = settingsMetadata;
Avichal Rakesh362242f2022-02-08 12:40:53 -08001612
1613 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1614
1615 std::vector<buffer_handle_t> graphicBuffers;
1616 graphicBuffers.reserve(halStreams.size());
1617 outputBuffers.resize(halStreams.size());
1618 size_t k = 0;
1619 for (const auto& halStream : halStreams) {
1620 buffer_handle_t buffer_handle;
1621 if (useHalBufManager) {
1622 outputBuffers[k] = {halStream.id, /*bufferId*/ 0, NativeHandle(),
1623 BufferStatus::OK, NativeHandle(), NativeHandle()};
1624 } else {
1625 allocateGraphicBuffer(previewStream.width, previewStream.height,
Fang Huif097c4d2024-03-19 19:23:36 +08001626 ANDROID_NATIVE_UNSIGNED_CAST(android_convertGralloc1To0Usage(
Avichal Rakesh362242f2022-02-08 12:40:53 -08001627 static_cast<uint64_t>(halStream.producerUsage),
Fang Huif097c4d2024-03-19 19:23:36 +08001628 static_cast<uint64_t>(halStream.consumerUsage))),
Avichal Rakesh362242f2022-02-08 12:40:53 -08001629 halStream.overrideFormat, &buffer_handle);
1630 graphicBuffers.push_back(buffer_handle);
1631 outputBuffers[k] = {
1632 halStream.id, bufferId, ::android::makeToAidl(buffer_handle),
1633 BufferStatus::OK, NativeHandle(), NativeHandle()};
1634 bufferId++;
1635 }
1636 k++;
1637 }
1638
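        // Attach the filtered per-physical-camera settings to one of the two physical devices.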
1639 std::vector<PhysicalCameraSetting> camSettings(1);
1640 const camera_metadata_t* filteredSettingsBuffer = filteredSettings.getAndLock();
1641 uint8_t* rawFilteredSettingsBuffer = (uint8_t*)filteredSettingsBuffer;
1642 camSettings[0].settings = {std::vector(
1643 rawFilteredSettingsBuffer,
1644 rawFilteredSettingsBuffer + get_camera_metadata_size(filteredSettingsBuffer))};
1645 overrideRotateAndCrop(&camSettings[0].settings);
1646 camSettings[0].fmqSettingsSize = 0;
1647 camSettings[0].physicalCameraId = physicalDeviceId;
1648
1649 request.inputBuffer = {
1650 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
1651 request.physicalCameraSettings = camSettings;
1652
1653 {
1654 std::unique_lock<std::mutex> l(mLock);
1655 mInflightMap.clear();
1656 mInflightMap[frameNumber] = inflightReq;
1657 }
1658
1659 int32_t numRequestProcessed = 0;
1660 std::vector<BufferCache> cachesToRemove;
1661 ndk::ScopedAStatus returnStatus =
1662 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1663 ASSERT_TRUE(returnStatus.isOk());
1664 ASSERT_EQ(numRequestProcessed, 1u);
1665
1666 {
1667 std::unique_lock<std::mutex> l(mLock);
1668 while (!inflightReq->errorCodeValid &&
1669 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1670 auto timeout = std::chrono::system_clock::now() +
1671 std::chrono::seconds(kStreamBufferTimeoutSec);
1672 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1673 }
1674
1675 ASSERT_FALSE(inflightReq->errorCodeValid);
1676 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1677
1678 request.frameNumber++;
1679 // Empty settings should be supported after the first call
1680 // for repeating requests.
1681 request.settings.metadata.clear();
1682 request.physicalCameraSettings[0].settings.metadata.clear();
1683 // The buffer has been registered to HAL by bufferId, so per
1684 // API contract we should send a null handle for this buffer
1685 request.outputBuffers[0].buffer = NativeHandle();
1686 mInflightMap.clear();
1687 inflightReq = std::make_shared<InFlightRequest>(
1688 static_cast<ssize_t>(physicalIds.size()), false, supportsPartialResults,
1689 partialResultCount, physicalIds, resultQueue);
1690 mInflightMap[request.frameNumber] = inflightReq;
1691 }
1692
1693 returnStatus =
1694 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1695 ASSERT_TRUE(returnStatus.isOk());
1696 ASSERT_EQ(numRequestProcessed, 1u);
1697
1698 {
1699 std::unique_lock<std::mutex> l(mLock);
1700 while (!inflightReq->errorCodeValid &&
1701 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1702 auto timeout = std::chrono::system_clock::now() +
1703 std::chrono::seconds(kStreamBufferTimeoutSec);
1704 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1705 }
1706
1707 ASSERT_FALSE(inflightReq->errorCodeValid);
1708 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1709 }
1710
1711 // Invalid physical camera id should fail process requests
1712 frameNumber++;
1713 camSettings[0].physicalCameraId = invalidPhysicalId;
1714 camSettings[0].settings.metadata = settings;
1715
1716 request.physicalCameraSettings = camSettings; // Invalid camera settings
1717 returnStatus =
1718 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1719 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
1720 returnStatus.getServiceSpecificError());
1721
1722 defaultPreviewSettings.unlock(settingsBuffer);
1723 filteredSettings.unlock(filteredSettingsBuffer);
1724
1725 if (useHalBufManager) {
1726 std::vector<int32_t> streamIds(halStreams.size());
1727 for (size_t i = 0; i < streamIds.size(); i++) {
1728 streamIds[i] = halStreams[i].id;
1729 }
1730 verifyBuffersReturned(mSession, streamIds, cb);
1731 }
1732
1733 ret = mSession->close();
1734 mSession = nullptr;
1735 ASSERT_TRUE(ret.isOk());
1736 }
1737}
1738
1739// Generate and verify an ultra high resolution capture request
1740TEST_P(CameraAidlTest, processUltraHighResolutionRequest) {
1741 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1742 int64_t bufferId = 1;
1743 int32_t frameNumber = 1;
1744 CameraMetadata settings;
1745
1746 for (const auto& name : cameraDeviceNames) {
1747 std::string version, deviceId;
1748 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1749 CameraMetadata meta;
1750
1751 std::shared_ptr<ICameraDevice> unusedDevice;
1752 openEmptyDeviceSession(name, mProvider, &mSession, &meta, &unusedDevice);
1753 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1754 if (!isUltraHighResolution(staticMeta)) {
1755 ndk::ScopedAStatus ret = mSession->close();
1756 mSession = nullptr;
1757 ASSERT_TRUE(ret.isOk());
1758 continue;
1759 }
1760 CameraMetadata req;
1761 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
1762 ndk::ScopedAStatus ret =
1763 mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE, &req);
1764 ASSERT_TRUE(ret.isOk());
1765
1766 const camera_metadata_t* metadata =
1767 reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
1768 size_t expectedSize = req.metadata.size();
1769 int result = validate_camera_metadata_structure(metadata, &expectedSize);
1770 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
1771
1772 size_t entryCount = get_camera_metadata_entry_count(metadata);
1773 ASSERT_GT(entryCount, 0u);
1774 defaultSettings = metadata;
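        // Force maximum-resolution sensor pixel mode so the capture uses the full sensor output.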
1775 uint8_t sensorPixelMode =
1776 static_cast<uint8_t>(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
1777 ASSERT_EQ(::android::OK,
1778 defaultSettings.update(ANDROID_SENSOR_PIXEL_MODE, &sensorPixelMode, 1));
1779
1780 const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
1781 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1782 settings.metadata = std::vector(
1783 rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1784 overrideRotateAndCrop(&settings);
1785
1786 ret = mSession->close();
1787 mSession = nullptr;
1788 ASSERT_TRUE(ret.isOk());
1789
1790 std::vector<HalStream> halStreams;
1791 bool supportsPartialResults = false;
1792 bool useHalBufManager = false;
1793 int32_t partialResultCount = 0;
1794 Stream previewStream;
1795 std::shared_ptr<DeviceCb> cb;
1796
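        // Exercise both YUV and RAW16 outputs at maximum sensor resolution.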
1797 std::list<PixelFormat> pixelFormats = {PixelFormat::YCBCR_420_888, PixelFormat::RAW16};
1798 for (PixelFormat format : pixelFormats) {
Emilian Peevdda1eb72022-07-28 16:37:40 -07001799 previewStream.usage =
1800 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1801 GRALLOC1_CONSUMER_USAGE_CPU_READ);
1802 previewStream.dataSpace = Dataspace::UNKNOWN;
Avichal Rakesh362242f2022-02-08 12:40:53 -08001803 configureStreams(name, mProvider, format, &mSession, &previewStream, &halStreams,
1804 &supportsPartialResults, &partialResultCount, &useHalBufManager, &cb,
1805 0, /*maxResolution*/ true);
1806 ASSERT_NE(mSession, nullptr);
1807
1808 ::aidl::android::hardware::common::fmq::MQDescriptor<
1809 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1810 descriptor;
1811 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1812 ASSERT_TRUE(resultQueueRet.isOk());
1813
1814 std::shared_ptr<ResultMetadataQueue> resultQueue =
1815 std::make_shared<ResultMetadataQueue>(descriptor);
1816 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1817                ALOGE("%s: HAL returns an empty result metadata fmq, not using it", __func__);
1818 resultQueue = nullptr;
1819 // Don't use the queue onwards.
1820 }
1821
1822 std::vector<buffer_handle_t> graphicBuffers;
1823 graphicBuffers.reserve(halStreams.size());
1824 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1825 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1826 partialResultCount, std::unordered_set<std::string>(), resultQueue);
1827
1828 std::vector<CaptureRequest> requests(1);
1829 CaptureRequest& request = requests[0];
1830 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1831 outputBuffers.resize(halStreams.size());
1832
1833 size_t k = 0;
1834 for (const auto& halStream : halStreams) {
1835 buffer_handle_t buffer_handle;
1836 if (useHalBufManager) {
1837 outputBuffers[k] = {halStream.id, 0,
1838 NativeHandle(), BufferStatus::OK,
1839 NativeHandle(), NativeHandle()};
1840 } else {
1841 allocateGraphicBuffer(previewStream.width, previewStream.height,
Fang Huif097c4d2024-03-19 19:23:36 +08001842 ANDROID_NATIVE_UNSIGNED_CAST(android_convertGralloc1To0Usage(
Avichal Rakesh362242f2022-02-08 12:40:53 -08001843 static_cast<uint64_t>(halStream.producerUsage),
Fang Huif097c4d2024-03-19 19:23:36 +08001844 static_cast<uint64_t>(halStream.consumerUsage))),
Avichal Rakesh362242f2022-02-08 12:40:53 -08001845 halStream.overrideFormat, &buffer_handle);
1846 graphicBuffers.push_back(buffer_handle);
1847 outputBuffers[k] = {
1848 halStream.id, bufferId, ::android::makeToAidl(buffer_handle),
1849 BufferStatus::OK, NativeHandle(), NativeHandle()};
1850 bufferId++;
1851 }
1852 k++;
1853 }
1854
1855 request.inputBuffer = {
1856 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
1857 request.frameNumber = frameNumber;
1858 request.fmqSettingsSize = 0;
1859 request.settings = settings;
1860 request.inputWidth = 0;
1861 request.inputHeight = 0;
1862
1863 {
1864 std::unique_lock<std::mutex> l(mLock);
1865 mInflightMap.clear();
1866 mInflightMap[frameNumber] = inflightReq;
1867 }
1868
1869 int32_t numRequestProcessed = 0;
1870 std::vector<BufferCache> cachesToRemove;
1871 ndk::ScopedAStatus returnStatus =
1872 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1873 ASSERT_TRUE(returnStatus.isOk());
1874 ASSERT_EQ(numRequestProcessed, 1u);
1875
1876 {
1877 std::unique_lock<std::mutex> l(mLock);
1878 while (!inflightReq->errorCodeValid &&
1879 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1880 auto timeout = std::chrono::system_clock::now() +
1881 std::chrono::seconds(kStreamBufferTimeoutSec);
1882 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1883 }
1884
1885 ASSERT_FALSE(inflightReq->errorCodeValid);
1886 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1887 }
1888 if (useHalBufManager) {
1889 std::vector<int32_t> streamIds(halStreams.size());
1890 for (size_t i = 0; i < streamIds.size(); i++) {
1891 streamIds[i] = halStreams[i].id;
1892 }
1893 verifyBuffersReturned(mSession, streamIds, cb);
1894 }
1895
1896 ret = mSession->close();
1897 mSession = nullptr;
1898 ASSERT_TRUE(ret.isOk());
1899 }
1900 }
1901}
1902
1903// Generate and verify 10-bit dynamic range request
1904TEST_P(CameraAidlTest, process10BitDynamicRangeRequest) {
1905 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001906 CameraMetadata settings;
1907
1908 for (const auto& name : cameraDeviceNames) {
1909 std::string version, deviceId;
1910 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1911 CameraMetadata meta;
1912 std::shared_ptr<ICameraDevice> device;
1913 openEmptyDeviceSession(name, mProvider, &mSession, &meta, &device);
1914 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1915 if (!is10BitDynamicRangeCapable(staticMeta)) {
1916 ndk::ScopedAStatus ret = mSession->close();
1917 mSession = nullptr;
1918 ASSERT_TRUE(ret.isOk());
1919 continue;
1920 }
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001921 std::vector<RequestAvailableDynamicRangeProfilesMap> profileList;
Avichal Rakesh362242f2022-02-08 12:40:53 -08001922 get10BitDynamicRangeProfiles(staticMeta, &profileList);
1923 ASSERT_FALSE(profileList.empty());
1924
1925 CameraMetadata req;
1926 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
1927 ndk::ScopedAStatus ret =
Emilian Peevdda1eb72022-07-28 16:37:40 -07001928 mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &req);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001929 ASSERT_TRUE(ret.isOk());
1930
1931 const camera_metadata_t* metadata =
1932 reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
1933 size_t expectedSize = req.metadata.size();
1934 int result = validate_camera_metadata_structure(metadata, &expectedSize);
1935 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
1936
1937 size_t entryCount = get_camera_metadata_entry_count(metadata);
1938 ASSERT_GT(entryCount, 0u);
1939 defaultSettings = metadata;
1940
1941 const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
1942 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1943 settings.metadata = std::vector(
1944 rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1945 overrideRotateAndCrop(&settings);
1946
1947 ret = mSession->close();
1948 mSession = nullptr;
1949 ASSERT_TRUE(ret.isOk());
1950
1951 std::vector<HalStream> halStreams;
1952 bool supportsPartialResults = false;
1953 bool useHalBufManager = false;
1954 int32_t partialResultCount = 0;
1955 Stream previewStream;
1956 std::shared_ptr<DeviceCb> cb;
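        // Configure streams and run the capture sequence once for each supported
        // 10-bit dynamic range profile.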
1957 for (const auto& profile : profileList) {
Emilian Peevdda1eb72022-07-28 16:37:40 -07001958 previewStream.usage =
1959 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1960 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
1961 previewStream.dataSpace = getDataspace(PixelFormat::IMPLEMENTATION_DEFINED);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001962 configureStreams(name, mProvider, PixelFormat::IMPLEMENTATION_DEFINED, &mSession,
1963 &previewStream, &halStreams, &supportsPartialResults,
1964 &partialResultCount, &useHalBufManager, &cb, 0,
1965 /*maxResolution*/ false, profile);
1966 ASSERT_NE(mSession, nullptr);
1967
1968 ::aidl::android::hardware::common::fmq::MQDescriptor<
1969 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1970 descriptor;
1971 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1972 ASSERT_TRUE(resultQueueRet.isOk());
1973
1974 std::shared_ptr<ResultMetadataQueue> resultQueue =
1975 std::make_shared<ResultMetadataQueue>(descriptor);
1976 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1977                ALOGE("%s: HAL returns an empty result metadata fmq, not using it", __func__);
1978 resultQueue = nullptr;
1979 // Don't use the queue onwards.
1980 }
1981
Emilian Peevdda1eb72022-07-28 16:37:40 -07001982 mInflightMap.clear();
1983            // Submit enough requests to fill the HAL's in-flight queue
1984 std::vector<CaptureRequest> requests(halStreams[0].maxBuffers);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001985
Emilian Peev470d1382023-01-18 11:09:09 -08001986 for (int32_t requestId = 0; requestId < requests.size(); requestId++) {
Emilian Peevdda1eb72022-07-28 16:37:40 -07001987 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1988 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1989 partialResultCount, std::unordered_set<std::string>(), resultQueue);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001990
Emilian Peev470d1382023-01-18 11:09:09 -08001991 CaptureRequest& request = requests[requestId];
Emilian Peevdda1eb72022-07-28 16:37:40 -07001992 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1993 outputBuffers.resize(halStreams.size());
Avichal Rakesh362242f2022-02-08 12:40:53 -08001994
Emilian Peevdda1eb72022-07-28 16:37:40 -07001995 size_t k = 0;
1996 inflightReq->mOutstandingBufferIds.resize(halStreams.size());
1997 std::vector<buffer_handle_t> graphicBuffers;
1998 graphicBuffers.reserve(halStreams.size());
Avichal Rakesh362242f2022-02-08 12:40:53 -08001999
Emilian Peev470d1382023-01-18 11:09:09 -08002000 auto bufferId = requestId + 1; // Buffer id value 0 is not valid
Emilian Peevdda1eb72022-07-28 16:37:40 -07002001 for (const auto& halStream : halStreams) {
2002 buffer_handle_t buffer_handle;
2003 if (useHalBufManager) {
2004 outputBuffers[k] = {halStream.id, 0,
2005 NativeHandle(), BufferStatus::OK,
2006 NativeHandle(), NativeHandle()};
2007 } else {
Fang Huif097c4d2024-03-19 19:23:36 +08002008 auto usage = ANDROID_NATIVE_UNSIGNED_CAST(android_convertGralloc1To0Usage(
Emilian Peevdda1eb72022-07-28 16:37:40 -07002009 static_cast<uint64_t>(halStream.producerUsage),
Fang Huif097c4d2024-03-19 19:23:36 +08002010 static_cast<uint64_t>(halStream.consumerUsage)));
Emilian Peevdda1eb72022-07-28 16:37:40 -07002011 allocateGraphicBuffer(previewStream.width, previewStream.height, usage,
2012 halStream.overrideFormat, &buffer_handle);
2013
2014 inflightReq->mOutstandingBufferIds[halStream.id][bufferId] = buffer_handle;
2015 graphicBuffers.push_back(buffer_handle);
2016 outputBuffers[k] = {halStream.id, bufferId,
2017 android::makeToAidl(buffer_handle), BufferStatus::OK, NativeHandle(),
2018 NativeHandle()};
Emilian Peevdda1eb72022-07-28 16:37:40 -07002019 }
2020 k++;
Avichal Rakesh362242f2022-02-08 12:40:53 -08002021 }
Avichal Rakesh362242f2022-02-08 12:40:53 -08002022
Emilian Peevdda1eb72022-07-28 16:37:40 -07002023 request.inputBuffer = {
2024 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
Emilian Peev470d1382023-01-18 11:09:09 -08002025 request.frameNumber = bufferId;
Emilian Peevdda1eb72022-07-28 16:37:40 -07002026 request.fmqSettingsSize = 0;
2027 request.settings = settings;
2028 request.inputWidth = 0;
2029 request.inputHeight = 0;
Avichal Rakesh362242f2022-02-08 12:40:53 -08002030
Emilian Peevdda1eb72022-07-28 16:37:40 -07002031 {
2032 std::unique_lock<std::mutex> l(mLock);
Emilian Peev470d1382023-01-18 11:09:09 -08002033 mInflightMap[bufferId] = inflightReq;
Emilian Peevdda1eb72022-07-28 16:37:40 -07002034 }
2035
Avichal Rakesh362242f2022-02-08 12:40:53 -08002036 }
2037
2038 int32_t numRequestProcessed = 0;
2039 std::vector<BufferCache> cachesToRemove;
2040 ndk::ScopedAStatus returnStatus =
Emilian Peevdda1eb72022-07-28 16:37:40 -07002041 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
Avichal Rakesh362242f2022-02-08 12:40:53 -08002042 ASSERT_TRUE(returnStatus.isOk());
Emilian Peevdda1eb72022-07-28 16:37:40 -07002043 ASSERT_EQ(numRequestProcessed, requests.size());
Avichal Rakesh362242f2022-02-08 12:40:53 -08002044
Emilian Peevdda1eb72022-07-28 16:37:40 -07002045 returnStatus = mSession->repeatingRequestEnd(requests.size() - 1,
2046 std::vector<int32_t> {halStreams[0].id});
2047 ASSERT_TRUE(returnStatus.isOk());
2048
Emilian Peev470d1382023-01-18 11:09:09 -08002049 // We are keeping frame numbers and buffer ids consistent. Buffer id value of 0
2050            // is used to indicate a buffer that is not present/available, so buffer ids as well
2051 // as frame numbers begin with 1.
2052 for (int32_t frameNumber = 1; frameNumber <= requests.size(); frameNumber++) {
Emilian Peevdda1eb72022-07-28 16:37:40 -07002053 const auto& inflightReq = mInflightMap[frameNumber];
Avichal Rakesh362242f2022-02-08 12:40:53 -08002054 std::unique_lock<std::mutex> l(mLock);
2055 while (!inflightReq->errorCodeValid &&
2056 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2057 auto timeout = std::chrono::system_clock::now() +
2058 std::chrono::seconds(kStreamBufferTimeoutSec);
2059 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2060 }
2061
Shuzhen Wang0f56c562023-04-03 16:58:59 -07002062 waitForReleaseFence(inflightReq->resultOutputBuffers);
2063
Avichal Rakesh362242f2022-02-08 12:40:53 -08002064 ASSERT_FALSE(inflightReq->errorCodeValid);
2065 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2066 verify10BitMetadata(mHandleImporter, *inflightReq, profile);
2067 }
Emilian Peevdda1eb72022-07-28 16:37:40 -07002068
Avichal Rakesh362242f2022-02-08 12:40:53 -08002069 if (useHalBufManager) {
2070 std::vector<int32_t> streamIds(halStreams.size());
2071 for (size_t i = 0; i < streamIds.size(); i++) {
2072 streamIds[i] = halStreams[i].id;
2073 }
2074 mSession->signalStreamFlush(streamIds, /*streamConfigCounter*/ 0);
2075 cb->waitForBuffersReturned();
2076 }
2077
2078 ret = mSession->close();
2079 mSession = nullptr;
2080 ASSERT_TRUE(ret.isOk());
2081 }
2082 }
2083}
2084
Austin Borger4728fc42022-07-15 11:27:53 -07002085TEST_P(CameraAidlTest, process8BitColorSpaceRequests) {
Austin Borger54b22362023-03-22 11:25:06 -07002086 static int profiles[] = {ColorSpaceNamed::DISPLAY_P3, ColorSpaceNamed::SRGB};
Austin Borger4728fc42022-07-15 11:27:53 -07002087
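    // Request the DISPLAY_P3 and SRGB color spaces with the STANDARD dynamic range profile.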
2088 for (int32_t i = 0; i < sizeof(profiles) / sizeof(profiles[0]); i++) {
2089 processColorSpaceRequest(static_cast<RequestAvailableColorSpaceProfilesMap>(profiles[i]),
2090 static_cast<RequestAvailableDynamicRangeProfilesMap>(
2091 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD));
2092 }
2093}
2094
2095TEST_P(CameraAidlTest, process10BitColorSpaceRequests) {
2096 static const camera_metadata_enum_android_request_available_dynamic_range_profiles_map
2097 dynamicRangeProfiles[] = {
2098 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10,
2099 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10,
2100 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS,
2101 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF,
2102 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO,
2103 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM,
2104 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO,
2105 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF,
2106 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF_PO,
2107 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM,
2108 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM_PO
2109 };
2110
Austin Borger54b22362023-03-22 11:25:06 -07002111 // Process all dynamic range profiles with BT2020_HLG
Austin Borger4728fc42022-07-15 11:27:53 -07002112 for (int32_t i = 0; i < sizeof(dynamicRangeProfiles) / sizeof(dynamicRangeProfiles[0]); i++) {
2113 processColorSpaceRequest(
Austin Borger54b22362023-03-22 11:25:06 -07002114 static_cast<RequestAvailableColorSpaceProfilesMap>(ColorSpaceNamed::BT2020_HLG),
Austin Borger4728fc42022-07-15 11:27:53 -07002115 static_cast<RequestAvailableDynamicRangeProfilesMap>(dynamicRangeProfiles[i]));
2116 }
2117}
2118
Shuzhen Wang4dd6a512022-11-08 20:47:20 +00002119TEST_P(CameraAidlTest, processZoomSettingsOverrideRequests) {
2120 const int32_t kFrameCount = 5;
2121 const int32_t kTestCases = 2;
Shuzhen Wang38ddb272023-05-22 09:40:28 -07002122 const bool kOverrideSequence[kTestCases][kFrameCount] = {// ZOOM, ZOOM, ZOOM, ZOOM, ZOOM;
2123 {true, true, true, true, true},
2124 // OFF, ZOOM, ZOOM, ZOOM, OFF;
2125 {false, true, true, true, false}};
Shuzhen Wang4dd6a512022-11-08 20:47:20 +00002126 const bool kExpectedOverrideResults[kTestCases][kFrameCount] = {
Shuzhen Wang38ddb272023-05-22 09:40:28 -07002127            // All results should be overridden except the last one. The last result's
2128 // zoom doesn't have speed-up.
2129 {true, true, true, true, false},
2130 // Because we require at least 1 frame speed-up, request #1, #2 and #3
2131 // will be overridden.
2132 {true, true, true, false, false}};
Shuzhen Wang4dd6a512022-11-08 20:47:20 +00002133
2134 for (int i = 0; i < kTestCases; i++) {
2135 processZoomSettingsOverrideRequests(kFrameCount, kOverrideSequence[i],
2136 kExpectedOverrideResults[i]);
2137 }
2138}
2139
Avichal Rakesh362242f2022-02-08 12:40:53 -08002140// Generate and verify a burst containing alternating sensor sensitivity values
2141TEST_P(CameraAidlTest, processCaptureRequestBurstISO) {
2142 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2143 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2144 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2145 int64_t bufferId = 1;
2146 int32_t frameNumber = 1;
2147 float isoTol = .03f;
2148 CameraMetadata settings;
2149
2150 for (const auto& name : cameraDeviceNames) {
2151 CameraMetadata meta;
2152 settings.metadata.clear();
2153 std::shared_ptr<ICameraDevice> unusedDevice;
2154 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2155 &unusedDevice /*out*/);
2156 camera_metadata_t* staticMetaBuffer =
2157 clone_camera_metadata(reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
2158 ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta(
2159 staticMetaBuffer);
2160
2161 camera_metadata_entry_t hwLevel = staticMeta.find(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL);
2162 ASSERT_TRUE(0 < hwLevel.count);
2163 if (ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED == hwLevel.data.u8[0] ||
2164 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL == hwLevel.data.u8[0]) {
2165 // Limited/External devices can skip this test
2166 ndk::ScopedAStatus ret = mSession->close();
2167 mSession = nullptr;
2168 ASSERT_TRUE(ret.isOk());
2169 continue;
2170 }
2171
2172 camera_metadata_entry_t isoRange = staticMeta.find(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE);
2173 ASSERT_EQ(isoRange.count, 2u);
2174
2175 ndk::ScopedAStatus ret = mSession->close();
2176 mSession = nullptr;
2177 ASSERT_TRUE(ret.isOk());
2178
2179 bool supportsPartialResults = false;
2180 bool useHalBufManager = false;
2181 int32_t partialResultCount = 0;
2182 Stream previewStream;
2183 std::vector<HalStream> halStreams;
2184 std::shared_ptr<DeviceCb> cb;
2185 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2186 &previewStream /*out*/, &halStreams /*out*/,
2187 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2188 &useHalBufManager /*out*/, &cb /*out*/);
2189
2190 ::aidl::android::hardware::common::fmq::MQDescriptor<
2191 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2192 descriptor;
2193 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2194 std::shared_ptr<ResultMetadataQueue> resultQueue =
2195 std::make_shared<ResultMetadataQueue>(descriptor);
2196 ASSERT_TRUE(resultQueueRet.isOk());
2197 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2198            ALOGE("%s: HAL returns an empty result metadata fmq, not using it", __func__);
2199 resultQueue = nullptr;
2200 // Don't use the queue onwards.
2201 }
2202
2203 ret = mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &settings);
2204 ASSERT_TRUE(ret.isOk());
2205
2206 ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
2207 std::vector<CaptureRequest> requests(kBurstFrameCount);
2208 std::vector<buffer_handle_t> buffers(kBurstFrameCount);
2209 std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount);
2210 std::vector<int32_t> isoValues(kBurstFrameCount);
2211 std::vector<CameraMetadata> requestSettings(kBurstFrameCount);
2212
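        // Build a burst that alternates between the minimum and maximum supported ISO
        // values, with 3A disabled so the requested sensitivity is applied directly.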
2213 for (int32_t i = 0; i < kBurstFrameCount; i++) {
2214 std::unique_lock<std::mutex> l(mLock);
2215 CaptureRequest& request = requests[i];
2216 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2217 outputBuffers.resize(1);
2218 StreamBuffer& outputBuffer = outputBuffers[0];
2219
2220 isoValues[i] = ((i % 2) == 0) ? isoRange.data.i32[0] : isoRange.data.i32[1];
2221 if (useHalBufManager) {
2222 outputBuffer = {halStreams[0].id, 0,
2223 NativeHandle(), BufferStatus::OK,
2224 NativeHandle(), NativeHandle()};
2225 } else {
2226 allocateGraphicBuffer(previewStream.width, previewStream.height,
Fang Huif097c4d2024-03-19 19:23:36 +08002227 ANDROID_NATIVE_UNSIGNED_CAST(android_convertGralloc1To0Usage(
Avichal Rakesh362242f2022-02-08 12:40:53 -08002228 static_cast<uint64_t>(halStreams[0].producerUsage),
Fang Huif097c4d2024-03-19 19:23:36 +08002229 static_cast<uint64_t>(halStreams[0].consumerUsage))),
Avichal Rakesh362242f2022-02-08 12:40:53 -08002230 halStreams[0].overrideFormat, &buffers[i]);
2231 outputBuffer = {halStreams[0].id, bufferId + i, ::android::makeToAidl(buffers[i]),
2232 BufferStatus::OK, NativeHandle(), NativeHandle()};
2233 }
2234
2235 requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));
2236
2237 // Disable all 3A routines
2238 uint8_t mode = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF);
2239 ASSERT_EQ(::android::OK, requestMeta.update(ANDROID_CONTROL_MODE, &mode, 1));
2240 ASSERT_EQ(::android::OK,
2241 requestMeta.update(ANDROID_SENSOR_SENSITIVITY, &isoValues[i], 1));
2242 camera_metadata_t* metaBuffer = requestMeta.release();
2243 uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2244 requestSettings[i].metadata = std::vector(
2245 rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2246 overrideRotateAndCrop(&(requestSettings[i]));
2247
2248 request.frameNumber = frameNumber + i;
2249 request.fmqSettingsSize = 0;
2250 request.settings = requestSettings[i];
2251 request.inputBuffer = {
2252 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2253
2254 inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2255 partialResultCount, resultQueue);
2256 mInflightMap[frameNumber + i] = inflightReqs[i];
2257 }
2258
2259 int32_t numRequestProcessed = 0;
2260 std::vector<BufferCache> cachesToRemove;
2261
2262 ndk::ScopedAStatus returnStatus =
2263 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2264 ASSERT_TRUE(returnStatus.isOk());
2265 ASSERT_EQ(numRequestProcessed, kBurstFrameCount);
2266
2267 for (size_t i = 0; i < kBurstFrameCount; i++) {
2268 std::unique_lock<std::mutex> l(mLock);
2269 while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
2270 (!inflightReqs[i]->haveResultMetadata))) {
2271 auto timeout = std::chrono::system_clock::now() +
2272 std::chrono::seconds(kStreamBufferTimeoutSec);
2273 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2274 }
2275
2276 ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
2277 ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
2278 ASSERT_EQ(previewStream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
2279 ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
2280 ASSERT_TRUE(inflightReqs[i]->collectedResult.exists(ANDROID_SENSOR_SENSITIVITY));
2281 camera_metadata_entry_t isoResult =
2282 inflightReqs[i]->collectedResult.find(ANDROID_SENSOR_SENSITIVITY);
2283 ASSERT_TRUE(std::abs(isoResult.data.i32[0] - isoValues[i]) <=
2284 std::round(isoValues[i] * isoTol));
2285 }
2286
2287 if (useHalBufManager) {
2288 verifyBuffersReturned(mSession, previewStream.id, cb);
2289 }
2290 ret = mSession->close();
2291 mSession = nullptr;
2292 ASSERT_TRUE(ret.isOk());
2293 }
2294}
2295
2296// Test whether an incorrect capture request with missing settings will
2297// be reported correctly.
2298TEST_P(CameraAidlTest, processCaptureRequestInvalidSinglePreview) {
2299 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2300 std::vector<AvailableStream> outputPreviewStreams;
2301 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2302 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2303 int64_t bufferId = 1;
2304 int32_t frameNumber = 1;
2305 CameraMetadata settings;
2306
2307 for (const auto& name : cameraDeviceNames) {
2308 Stream previewStream;
2309 std::vector<HalStream> halStreams;
2310 std::shared_ptr<DeviceCb> cb;
2311 bool supportsPartialResults = false;
2312 bool useHalBufManager = false;
2313 int32_t partialResultCount = 0;
2314 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2315 &previewStream /*out*/, &halStreams /*out*/,
2316 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2317 &useHalBufManager /*out*/, &cb /*out*/);
2318 ASSERT_NE(mSession, nullptr);
2319 ASSERT_FALSE(halStreams.empty());
2320
2321 buffer_handle_t buffer_handle = nullptr;
2322
2323 if (useHalBufManager) {
2324 bufferId = 0;
2325 } else {
2326 allocateGraphicBuffer(previewStream.width, previewStream.height,
Fang Huif097c4d2024-03-19 19:23:36 +08002327 ANDROID_NATIVE_UNSIGNED_CAST(android_convertGralloc1To0Usage(
Avichal Rakesh362242f2022-02-08 12:40:53 -08002328 static_cast<uint64_t>(halStreams[0].producerUsage),
Fang Huif097c4d2024-03-19 19:23:36 +08002329 static_cast<uint64_t>(halStreams[0].consumerUsage))),
Avichal Rakesh362242f2022-02-08 12:40:53 -08002330 halStreams[0].overrideFormat, &buffer_handle);
2331 }
2332
2333 std::vector<CaptureRequest> requests(1);
2334 CaptureRequest& request = requests[0];
2335 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2336 outputBuffers.resize(1);
2337 StreamBuffer& outputBuffer = outputBuffers[0];
2338
2339 outputBuffer = {
2340 halStreams[0].id,
2341 bufferId,
2342 buffer_handle == nullptr ? NativeHandle() : ::android::makeToAidl(buffer_handle),
2343 BufferStatus::OK,
2344 NativeHandle(),
2345 NativeHandle()};
2346
2347 request.inputBuffer = {
2348 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2349 request.frameNumber = frameNumber;
2350 request.fmqSettingsSize = 0;
2351 request.settings = settings;
2352
2353        // Settings were not correctly initialized; the request should fail here
2354 int32_t numRequestProcessed = 0;
2355 std::vector<BufferCache> cachesToRemove;
2356 ndk::ScopedAStatus ret =
2357 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2358 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
2359 ASSERT_EQ(numRequestProcessed, 0u);
2360
2361 ret = mSession->close();
2362 mSession = nullptr;
2363 ASSERT_TRUE(ret.isOk());
2364 }
2365}
2366
2367// Verify camera offline session behavior
2368TEST_P(CameraAidlTest, switchToOffline) {
2369 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2370 AvailableStream threshold = {kMaxStillWidth, kMaxStillHeight,
2371 static_cast<int32_t>(PixelFormat::BLOB)};
2372 int64_t bufferId = 1;
2373 int32_t frameNumber = 1;
2374 CameraMetadata settings;
2375
2376 for (const auto& name : cameraDeviceNames) {
2377 CameraMetadata meta;
2378 {
2379 std::shared_ptr<ICameraDevice> unusedDevice;
2380 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2381 &unusedDevice);
2382 camera_metadata_t* staticMetaBuffer = clone_camera_metadata(
2383 reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
2384 ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta(
2385 staticMetaBuffer);
2386
2387 if (isOfflineSessionSupported(staticMetaBuffer) != Status::OK) {
2388 ndk::ScopedAStatus ret = mSession->close();
2389 mSession = nullptr;
2390 ASSERT_TRUE(ret.isOk());
2391 continue;
2392 }
2393 ndk::ScopedAStatus ret = mSession->close();
2394 mSession = nullptr;
2395 ASSERT_TRUE(ret.isOk());
2396 }
2397
2398 bool supportsPartialResults = false;
2399 int32_t partialResultCount = 0;
2400 Stream stream;
2401 std::vector<HalStream> halStreams;
2402 std::shared_ptr<DeviceCb> cb;
2403 int32_t jpegBufferSize;
2404 bool useHalBufManager;
2405 configureOfflineStillStream(name, mProvider, &threshold, &mSession /*out*/, &stream /*out*/,
2406 &halStreams /*out*/, &supportsPartialResults /*out*/,
2407 &partialResultCount /*out*/, &cb /*out*/,
2408 &jpegBufferSize /*out*/, &useHalBufManager /*out*/);
2409
2410 auto ret = mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE,
2411 &settings);
2412 ASSERT_TRUE(ret.isOk());
2413
2414 ::aidl::android::hardware::common::fmq::MQDescriptor<
2415 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2416 descriptor;
2417
2418 ndk::ScopedAStatus resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2419 ASSERT_TRUE(resultQueueRet.isOk());
2420 std::shared_ptr<ResultMetadataQueue> resultQueue =
2421 std::make_shared<ResultMetadataQueue>(descriptor);
2422 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2423            ALOGE("%s: HAL returns an empty result metadata fmq, not using it", __func__);
2424 resultQueue = nullptr;
2425 // Don't use the queue onwards.
2426 }
2427
2428 ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
2429
2430 std::vector<buffer_handle_t> buffers(kBurstFrameCount);
2431 std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount);
2432 std::vector<CameraMetadata> requestSettings(kBurstFrameCount);
2433
2434 std::vector<CaptureRequest> requests(kBurstFrameCount);
2435
2436 HalStream halStream = halStreams[0];
2437 for (uint32_t i = 0; i < kBurstFrameCount; i++) {
2438 CaptureRequest& request = requests[i];
2439 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2440 outputBuffers.resize(1);
2441 StreamBuffer& outputBuffer = outputBuffers[0];
2442
2443 std::unique_lock<std::mutex> l(mLock);
2444 if (useHalBufManager) {
2445 outputBuffer = {halStream.id, 0, NativeHandle(), BufferStatus::OK, NativeHandle(),
2446 NativeHandle()};
2447 } else {
2448 // jpeg buffer (w,h) = (blobLen, 1)
2449 allocateGraphicBuffer(jpegBufferSize, /*height*/ 1,
Fang Huif097c4d2024-03-19 19:23:36 +08002450 ANDROID_NATIVE_UNSIGNED_CAST(android_convertGralloc1To0Usage(
Avichal Rakesh362242f2022-02-08 12:40:53 -08002451 static_cast<uint64_t>(halStream.producerUsage),
Fang Huif097c4d2024-03-19 19:23:36 +08002452 static_cast<uint64_t>(halStream.consumerUsage))),
Avichal Rakesh362242f2022-02-08 12:40:53 -08002453 halStream.overrideFormat, &buffers[i]);
2454 outputBuffer = {halStream.id, bufferId + i, ::android::makeToAidl(buffers[i]),
2455 BufferStatus::OK, NativeHandle(), NativeHandle()};
2456 }
2457
2458 requestMeta.clear();
2459 requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));
2460
2461 camera_metadata_t* metaBuffer = requestMeta.release();
2462 uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2463 requestSettings[i].metadata = std::vector(
2464 rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2465 overrideRotateAndCrop(&requestSettings[i]);
2466
2467 request.frameNumber = frameNumber + i;
2468 request.fmqSettingsSize = 0;
2469 request.settings = requestSettings[i];
2470 request.inputBuffer = {/*streamId*/ -1,
2471 /*bufferId*/ 0, NativeHandle(),
2472 BufferStatus::ERROR, NativeHandle(),
2473 NativeHandle()};
2474
2475 inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2476 partialResultCount, resultQueue);
2477 mInflightMap[frameNumber + i] = inflightReqs[i];
2478 }
2479
2480 int32_t numRequestProcessed = 0;
2481 std::vector<BufferCache> cachesToRemove;
2482
2483 ndk::ScopedAStatus returnStatus =
2484 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2485 ASSERT_TRUE(returnStatus.isOk());
2486 ASSERT_EQ(numRequestProcessed, kBurstFrameCount);
2487
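        // Attempt to move the configured still capture stream into an offline session.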
2488 std::vector<int32_t> offlineStreamIds = {halStream.id};
2489 CameraOfflineSessionInfo offlineSessionInfo;
2490 std::shared_ptr<ICameraOfflineSession> offlineSession;
2491 returnStatus =
2492 mSession->switchToOffline(offlineStreamIds, &offlineSessionInfo, &offlineSession);
2493
2494 if (!halStreams[0].supportOffline) {
2495 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
2496 returnStatus.getServiceSpecificError());
2497 ret = mSession->close();
2498 mSession = nullptr;
2499 ASSERT_TRUE(ret.isOk());
2500 continue;
2501 }
2502
2503 ASSERT_TRUE(returnStatus.isOk());
2504        // The HAL might be unable to find any requests that qualify for offline mode.
2505 if (offlineSession == nullptr) {
2506 ret = mSession->close();
2507 mSession = nullptr;
2508 ASSERT_TRUE(ret.isOk());
2509 continue;
2510 }
2511
2512 ASSERT_EQ(offlineSessionInfo.offlineStreams.size(), 1u);
2513 ASSERT_EQ(offlineSessionInfo.offlineStreams[0].id, halStream.id);
2514 ASSERT_NE(offlineSessionInfo.offlineRequests.size(), 0u);
2515
2516 // close device session to make sure offline session does not rely on it
2517 ret = mSession->close();
2518 mSession = nullptr;
2519 ASSERT_TRUE(ret.isOk());
2520
2521 ::aidl::android::hardware::common::fmq::MQDescriptor<
2522 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2523 offlineResultDescriptor;
2524
2525 auto offlineResultQueueRet =
2526 offlineSession->getCaptureResultMetadataQueue(&offlineResultDescriptor);
2527 std::shared_ptr<ResultMetadataQueue> offlineResultQueue =
2528 std::make_shared<ResultMetadataQueue>(descriptor);
2529 if (!offlineResultQueue->isValid() || offlineResultQueue->availableToWrite() <= 0) {
2530            ALOGE("%s: offline session returns an empty result metadata fmq, not using it", __func__);
2531 offlineResultQueue = nullptr;
2532 // Don't use the queue onwards.
2533 }
2534 ASSERT_TRUE(offlineResultQueueRet.isOk());
2535
2536 updateInflightResultQueue(offlineResultQueue);
2537
2538 ret = offlineSession->setCallback(cb);
2539 ASSERT_TRUE(ret.isOk());
2540
2541 for (size_t i = 0; i < kBurstFrameCount; i++) {
2542 std::unique_lock<std::mutex> l(mLock);
2543 while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
2544 (!inflightReqs[i]->haveResultMetadata))) {
2545 auto timeout = std::chrono::system_clock::now() +
2546 std::chrono::seconds(kStreamBufferTimeoutSec);
2547 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2548 }
2549
2550 ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
2551 ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
2552 ASSERT_EQ(stream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
2553 ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
2554 }
2555
2556 ret = offlineSession->close();
2557 ASSERT_TRUE(ret.isOk());
2558 }
2559}
2560
2561// Check whether an invalid capture request with missing output buffers
2562// will be reported correctly.
2563TEST_P(CameraAidlTest, processCaptureRequestInvalidBuffer) {
2564 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2565 std::vector<AvailableStream> outputBlobStreams;
2566 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2567 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2568 int32_t frameNumber = 1;
2569 CameraMetadata settings;
2570
2571 for (const auto& name : cameraDeviceNames) {
2572 Stream previewStream;
2573 std::vector<HalStream> halStreams;
2574 std::shared_ptr<DeviceCb> cb;
2575 bool supportsPartialResults = false;
2576 bool useHalBufManager = false;
2577 int32_t partialResultCount = 0;
2578 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2579 &previewStream /*out*/, &halStreams /*out*/,
2580 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2581 &useHalBufManager /*out*/, &cb /*out*/);
2582
2583 RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
2584 ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings);
2585 ASSERT_TRUE(ret.isOk());
2586 overrideRotateAndCrop(&settings);
2587
2588 std::vector<CaptureRequest> requests(1);
2589 CaptureRequest& request = requests[0];
2590 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2591 outputBuffers.resize(1);
2592 // Empty output buffer
2593 outputBuffers[0] = {
2594 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2595
2596 request.inputBuffer = {
2597 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2598 request.frameNumber = frameNumber;
2599 request.fmqSettingsSize = 0;
2600 request.settings = settings;
2601
2602        // Output buffers are missing; the request should fail here
2603 int32_t numRequestProcessed = 0;
2604 std::vector<BufferCache> cachesToRemove;
2605 ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2606 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
2607 ASSERT_EQ(numRequestProcessed, 0u);
2608
2609 ret = mSession->close();
2610 mSession = nullptr;
2611 ASSERT_TRUE(ret.isOk());
2612 }
2613}
2614
2615// Generate, trigger and flush a preview request
2616TEST_P(CameraAidlTest, flushPreviewRequest) {
2617 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2618 std::vector<AvailableStream> outputPreviewStreams;
2619 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2620 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2621 int64_t bufferId = 1;
2622 int32_t frameNumber = 1;
2623 CameraMetadata settings;
2624
2625 for (const auto& name : cameraDeviceNames) {
2626 Stream previewStream;
2627 std::vector<HalStream> halStreams;
2628 std::shared_ptr<DeviceCb> cb;
2629 bool supportsPartialResults = false;
2630 bool useHalBufManager = false;
2631 int32_t partialResultCount = 0;
2632
2633 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2634 &previewStream /*out*/, &halStreams /*out*/,
2635 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2636 &useHalBufManager /*out*/, &cb /*out*/);
2637
2638 ASSERT_NE(mSession, nullptr);
2639 ASSERT_NE(cb, nullptr);
2640 ASSERT_FALSE(halStreams.empty());
2641
2642 ::aidl::android::hardware::common::fmq::MQDescriptor<
2643 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2644 descriptor;
2645
2646 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2647 std::shared_ptr<ResultMetadataQueue> resultQueue =
2648 std::make_shared<ResultMetadataQueue>(descriptor);
2649 ASSERT_TRUE(resultQueueRet.isOk());
2650 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2651 ALOGE("%s: HAL returned an empty result metadata fmq; not using it", __func__);
2652 resultQueue = nullptr;
2653 // Don't use the queue from here on.
2654 }
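// With resultQueue left null, result metadata is expected to arrive inline in the capture
// result callback rather than over the FMQ.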
2655
2656 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
2657 1, false, supportsPartialResults, partialResultCount, resultQueue);
2658 RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
2659
2660 ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings);
2661 ASSERT_TRUE(ret.isOk());
2662 overrideRotateAndCrop(&settings);
2663
2664 buffer_handle_t buffer_handle;
2665 std::vector<CaptureRequest> requests(1);
2666 CaptureRequest& request = requests[0];
2667 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2668 outputBuffers.resize(1);
2669 StreamBuffer& outputBuffer = outputBuffers[0];
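// With the HAL buffer manager enabled, an empty buffer handle (bufferId 0) lets the HAL
// request buffers via requestStreamBuffers(); otherwise a gralloc buffer is allocated up front.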
2670 if (useHalBufManager) {
2671 bufferId = 0;
2672 outputBuffer = {halStreams[0].id, bufferId, NativeHandle(),
2673 BufferStatus::OK, NativeHandle(), NativeHandle()};
2674 } else {
2675 allocateGraphicBuffer(previewStream.width, previewStream.height,
2676 ANDROID_NATIVE_UNSIGNED_CAST(android_convertGralloc1To0Usage(
2677 static_cast<uint64_t>(halStreams[0].producerUsage),
2678 static_cast<uint64_t>(halStreams[0].consumerUsage))),
2679 halStreams[0].overrideFormat, &buffer_handle);
2680 outputBuffer = {halStreams[0].id, bufferId, ::android::makeToAidl(buffer_handle),
2681 BufferStatus::OK, NativeHandle(), NativeHandle()};
2682 }
2683
2684 request.frameNumber = frameNumber;
2685 request.fmqSettingsSize = 0;
2686 request.settings = settings;
2687 request.inputBuffer = {
2688 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2689
2690 {
2691 std::unique_lock<std::mutex> l(mLock);
2692 mInflightMap.clear();
2693 mInflightMap[frameNumber] = inflightReq;
2694 }
2695
2696 int32_t numRequestProcessed = 0;
2697 std::vector<BufferCache> cachesToRemove;
2698 ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2699 ASSERT_TRUE(ret.isOk());
2700 ASSERT_EQ(numRequestProcessed, 1u);
2701
2702 // Flush before waiting for request to complete.
2703 ndk::ScopedAStatus returnStatus = mSession->flush();
2704 ASSERT_TRUE(returnStatus.isOk());
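// After flush() the in-flight request may still complete normally or be dropped with
// ERROR_REQUEST/ERROR_RESULT/ERROR_BUFFER; both outcomes are accepted below.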
2705
2706 {
2707 std::unique_lock<std::mutex> l(mLock);
2708 while (!inflightReq->errorCodeValid &&
2709 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2710 auto timeout = std::chrono::system_clock::now() +
2711 std::chrono::seconds(kStreamBufferTimeoutSec);
2712 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2713 }
2714
2715 if (!inflightReq->errorCodeValid) {
2716 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2717 ASSERT_EQ(previewStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId);
2718 } else {
2719 switch (inflightReq->errorCode) {
2720 case ErrorCode::ERROR_REQUEST:
2721 case ErrorCode::ERROR_RESULT:
2722 case ErrorCode::ERROR_BUFFER:
2723 // Expected
2724 break;
2725 case ErrorCode::ERROR_DEVICE:
2726 default:
2727 FAIL() << "Unexpected error: "
2728 << static_cast<uint32_t>(inflightReq->errorCode);
2729 }
2730 }
2731 }
2732
2733 if (useHalBufManager) {
2734 verifyBuffersReturned(mSession, previewStream.id, cb);
2735 }
2736
2737 ret = mSession->close();
2738 mSession = nullptr;
2739 ASSERT_TRUE(ret.isOk());
2740 }
2741}
2742
2743 // Verify that the camera flushes correctly without any pending requests.
2744TEST_P(CameraAidlTest, flushEmpty) {
2745 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2746 std::vector<AvailableStream> outputPreviewStreams;
2747 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2748 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2749
2750 for (const auto& name : cameraDeviceNames) {
2751 Stream previewStream;
2752 std::vector<HalStream> halStreams;
2753 std::shared_ptr<DeviceCb> cb;
2754 bool supportsPartialResults = false;
2755 bool useHalBufManager = false;
2756
2757 int32_t partialResultCount = 0;
2758 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2759 &previewStream /*out*/, &halStreams /*out*/,
2760 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2761 &useHalBufManager /*out*/, &cb /*out*/);
2762
2763 ndk::ScopedAStatus returnStatus = mSession->flush();
2764 ASSERT_TRUE(returnStatus.isOk());
2765
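// With no requests in flight, flush() should produce no result or notify callbacks,
// so the wait below is expected to time out.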
2766 {
2767 std::unique_lock<std::mutex> l(mLock);
2768 auto timeout = std::chrono::system_clock::now() +
2769 std::chrono::milliseconds(kEmptyFlushTimeoutMSec);
2770 ASSERT_EQ(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2771 }
2772
2773 ndk::ScopedAStatus ret = mSession->close();
2774 mSession = nullptr;
2775 ASSERT_TRUE(ret.isOk());
2776 }
2777}
2778
2779 // Test the camera provider device state notification method
2780TEST_P(CameraAidlTest, providerDeviceStateNotification) {
2781 notifyDeviceState(ICameraProvider::DEVICE_STATE_BACK_COVERED);
2782 notifyDeviceState(ICameraProvider::DEVICE_STATE_NORMAL);
2783}
2784
2785// Verify that all supported stream formats and sizes can be configured
2786 // successfully for an injection camera.
2787TEST_P(CameraAidlTest, configureInjectionStreamsAvailableOutputs) {
2788 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2789 std::vector<AvailableStream> outputStreams;
2790
2791 for (const auto& name : cameraDeviceNames) {
2792 CameraMetadata metadata;
2793
2794 std::shared_ptr<ICameraInjectionSession> injectionSession;
2795 std::shared_ptr<ICameraDevice> unusedDevice;
2796 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2797 &unusedDevice /*out*/);
2798 if (injectionSession == nullptr) {
2799 continue;
2800 }
2801
2802 camera_metadata_t* staticMetaBuffer =
2803 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2804 CameraMetadata chars;
2805 chars.metadata = metadata.metadata;
2806
2807 outputStreams.clear();
2808 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams));
2809 ASSERT_NE(0u, outputStreams.size());
2810
2811 int32_t jpegBufferSize = 0;
2812 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize));
2813 ASSERT_NE(0u, jpegBufferSize);
2814
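// Configure each advertised output size/format as a single-stream configuration on the
// injection session; every combination is expected to be accepted.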
2815 int32_t streamId = 0;
2816 int32_t streamConfigCounter = 0;
2817 for (auto& it : outputStreams) {
2818 Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format));
2819 Stream stream = {streamId,
2820 StreamType::OUTPUT,
2821 it.width,
2822 it.height,
2823 static_cast<PixelFormat>(it.format),
2824 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2825 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2826 dataspace,
2827 StreamRotation::ROTATION_0,
2828 std::string(),
2829 jpegBufferSize,
2830 0,
2831 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2832 RequestAvailableDynamicRangeProfilesMap::
2833 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2834
2835 std::vector<Stream> streams = {stream};
2836 StreamConfiguration config;
2837 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2838 jpegBufferSize);
2839
2840 config.streamConfigCounter = streamConfigCounter++;
2841 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
2842 ASSERT_TRUE(s.isOk());
2843 streamId++;
2844 }
2845
2846 std::shared_ptr<ICameraDeviceSession> session;
2847 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2848 ASSERT_TRUE(ret.isOk());
2849 ASSERT_NE(session, nullptr);
2850 ret = session->close();
2851 ASSERT_TRUE(ret.isOk());
2852 }
2853}
2854
2855 // Check for correct handling of invalid configuration parameters for an injection camera.
2856TEST_P(CameraAidlTest, configureInjectionStreamsInvalidOutputs) {
2857 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2858 std::vector<AvailableStream> outputStreams;
2859
2860 for (const auto& name : cameraDeviceNames) {
2861 CameraMetadata metadata;
2862 std::shared_ptr<ICameraInjectionSession> injectionSession;
2863 std::shared_ptr<ICameraDevice> unusedDevice;
2864 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2865 &unusedDevice);
2866 if (injectionSession == nullptr) {
2867 continue;
2868 }
2869
2870 camera_metadata_t* staticMetaBuffer =
2871 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2872 std::shared_ptr<ICameraDeviceSession> session;
2873 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2874 ASSERT_TRUE(ret.isOk());
2875 ASSERT_NE(session, nullptr);
2876
2877 CameraMetadata chars;
2878 chars.metadata = metadata.metadata;
2879
2880 outputStreams.clear();
2881 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams));
2882 ASSERT_NE(0u, outputStreams.size());
2883
2884 int32_t jpegBufferSize = 0;
2885 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize));
2886 ASSERT_NE(0u, jpegBufferSize);
2887
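// A stream with zero width and height is invalid and must be rejected.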
2888 int32_t streamId = 0;
2889 Stream stream = {streamId++,
2890 StreamType::OUTPUT,
2891 0,
2892 0,
2893 static_cast<PixelFormat>(outputStreams[0].format),
2894 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2895 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2896 Dataspace::UNKNOWN,
2897 StreamRotation::ROTATION_0,
2898 std::string(),
2899 jpegBufferSize,
2900 0,
2901 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2902 RequestAvailableDynamicRangeProfilesMap::
2903 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2904
2905 int32_t streamConfigCounter = 0;
2906 std::vector<Stream> streams = {stream};
2907 StreamConfiguration config;
2908 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2909 jpegBufferSize);
2910
2911 config.streamConfigCounter = streamConfigCounter++;
2912 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
2913 ASSERT_TRUE(
2914 (static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) == s.getServiceSpecificError()) ||
2915 (static_cast<int32_t>(Status::INTERNAL_ERROR) == s.getServiceSpecificError()));
2916
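// Dimensions of INT32_MAX exceed any supported stream size and must be rejected.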
2917 stream = {streamId++,
2918 StreamType::OUTPUT,
2919 INT32_MAX,
2920 INT32_MAX,
2921 static_cast<PixelFormat>(outputStreams[0].format),
2922 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2923 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2924 Dataspace::UNKNOWN,
2925 StreamRotation::ROTATION_0,
2926 std::string(),
2927 jpegBufferSize,
2928 0,
2929 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2930 RequestAvailableDynamicRangeProfilesMap::
2931 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2932
2933 streams[0] = stream;
2934 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2935 jpegBufferSize);
2936 config.streamConfigCounter = streamConfigCounter++;
2937 s = injectionSession->configureInjectionStreams(config, chars);
2938 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
2939
2940 for (auto& it : outputStreams) {
2941 stream = {streamId++,
2942 StreamType::OUTPUT,
2943 it.width,
2944 it.height,
2945 static_cast<PixelFormat>(INT32_MAX),
2946 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2947 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2948 Dataspace::UNKNOWN,
2949 StreamRotation::ROTATION_0,
2950 std::string(),
2951 jpegBufferSize,
2952 0,
2953 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2954 RequestAvailableDynamicRangeProfilesMap::
2955 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2956 streams[0] = stream;
2957 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2958 jpegBufferSize);
2959 config.streamConfigCounter = streamConfigCounter++;
2960 s = injectionSession->configureInjectionStreams(config, chars);
2961 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
2962
2963 stream = {streamId++,
2964 StreamType::OUTPUT,
2965 it.width,
2966 it.height,
2967 static_cast<PixelFormat>(it.format),
2968 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2969 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2970 Dataspace::UNKNOWN,
2971 static_cast<StreamRotation>(INT32_MAX),
2972 std::string(),
2973 jpegBufferSize,
2974 0,
2975 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2976 RequestAvailableDynamicRangeProfilesMap::
2977 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2978 streams[0] = stream;
2979 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2980 jpegBufferSize);
2981 config.streamConfigCounter = streamConfigCounter++;
2982 s = injectionSession->configureInjectionStreams(config, chars);
2983 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
2984 }
2985
2986 ret = session->close();
2987 ASSERT_TRUE(ret.isOk());
2988 }
2989}
2990
2991 // Check whether session parameters are supported for an injection camera. If the HAL
2992 // supports them, try to configure a preview stream using them.
2993TEST_P(CameraAidlTest, configureInjectionStreamsWithSessionParameters) {
2994 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2995 std::vector<AvailableStream> outputPreviewStreams;
2996 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2997 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2998
2999 for (const auto& name : cameraDeviceNames) {
3000 CameraMetadata metadata;
3001 std::shared_ptr<ICameraInjectionSession> injectionSession;
3002 std::shared_ptr<ICameraDevice> unusedDevice;
3003 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
3004 &unusedDevice /*out*/);
3005 if (injectionSession == nullptr) {
3006 continue;
3007 }
3008
3009 std::shared_ptr<ICameraDeviceSession> session;
3010 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
3011 ASSERT_TRUE(ret.isOk());
3012 ASSERT_NE(session, nullptr);
3013
3014 camera_metadata_t* staticMetaBuffer =
3015 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
3016 CameraMetadata chars;
3017 chars.metadata = metadata.metadata;
3018
3019 std::unordered_set<int32_t> availableSessionKeys;
3020 Status rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
3021 &availableSessionKeys);
3022 ASSERT_EQ(Status::OK, rc);
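// Devices that advertise no session keys have nothing to exercise here; skip them.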
3023 if (availableSessionKeys.empty()) {
3024 ret = session->close();
3025 ASSERT_TRUE(ret.isOk());
3026 continue;
3027 }
3028
3029 android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings;
3030 android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams,
3031 modifiedSessionParams;
3032 constructFilteredSettings(session, availableSessionKeys, RequestTemplate::PREVIEW,
3033 &previewRequestSettings, &sessionParams);
3034 if (sessionParams.isEmpty()) {
3035 ret = session->close();
3036 ASSERT_TRUE(ret.isOk());
3037 continue;
3038 }
3039
3040 outputPreviewStreams.clear();
3041
3042 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams,
3043 &previewThreshold));
3044 ASSERT_NE(0u, outputPreviewStreams.size());
3045
3046 Stream previewStream = {
3047 0,
3048 StreamType::OUTPUT,
3049 outputPreviewStreams[0].width,
3050 outputPreviewStreams[0].height,
3051 static_cast<PixelFormat>(outputPreviewStreams[0].format),
3052 static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
3053 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
3054 Dataspace::UNKNOWN,
3055 StreamRotation::ROTATION_0,
3056 std::string(),
3057 0,
3058 -1,
3059 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
3060 RequestAvailableDynamicRangeProfilesMap::
3061 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
3062 std::vector<Stream> streams = {previewStream};
3063 StreamConfiguration config;
3064 config.streams = streams;
3065 config.operationMode = StreamConfigurationMode::NORMAL_MODE;
3066
3067 modifiedSessionParams = sessionParams;
3068 camera_metadata_t* sessionParamsBuffer = sessionParams.release();
3069 uint8_t* rawSessionParamsBuffer = reinterpret_cast<uint8_t*>(sessionParamsBuffer);
3070 config.sessionParams.metadata =
3071 std::vector(rawSessionParamsBuffer,
3072 rawSessionParamsBuffer + get_camera_metadata_size(sessionParamsBuffer));
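// Hand the raw session parameter buffer to the AIDL CameraMetadata payload; the buffer is
// re-acquired below once configuration is done.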
3073
3074 config.streamConfigCounter = 0;
3076 config.multiResolutionInputImage = false;
3077
3078 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
3079 ASSERT_TRUE(s.isOk());
3080
3081 sessionParams.acquire(sessionParamsBuffer);
3082 free_camera_metadata(staticMetaBuffer);
3083 ret = session->close();
3084 ASSERT_TRUE(ret.isOk());
3085 }
3086}
3087
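// Exercise stream use cases with a RAW16 stream threshold; per the test name this is
// presumably aimed at the CROPPED_RAW use case. The shared helper performs the checks.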
3088 TEST_P(CameraAidlTest, configureStreamsUseCasesCroppedRaw) {
3089 AvailableStream rawStreamThreshold =
3090 {INT_MAX, INT_MAX, static_cast<int32_t>(PixelFormat::RAW16)};
3091 configureStreamUseCaseInternal(rawStreamThreshold);
3092}
3093
3094 // Verify that valid stream use cases can be configured successfully, and invalid use cases
3095// fail stream configuration.
3096TEST_P(CameraAidlTest, configureStreamsUseCases) {
3097 AvailableStream previewStreamThreshold =
3098 {kMaxPreviewWidth, kMaxPreviewHeight, static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
3099 configureStreamUseCaseInternal(previewStreamThreshold);
3100}
3101
3102 // Validate the integrity of stream configuration metadata
3103TEST_P(CameraAidlTest, validateStreamConfigurations) {
3104 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
3105 std::vector<AvailableStream> outputStreams;
3106
3107 const int32_t scalerSizesTag = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
3108 const int32_t scalerMinFrameDurationsTag = ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS;
3109 const int32_t scalerStallDurationsTag = ANDROID_SCALER_AVAILABLE_STALL_DURATIONS;
3110
3111 for (const auto& name : cameraDeviceNames) {
3112 CameraMetadata meta;
3113 std::shared_ptr<ICameraDevice> cameraDevice;
3114
3115 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
3116 &cameraDevice /*out*/);
3117 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
3118
3119 if (is10BitDynamicRangeCapable(staticMeta)) {
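// For 10-bit dynamic range capable devices, every supported P010 size is expected to
// have matching BLOB min-frame and stall duration entries.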
3120 std::vector<std::tuple<size_t, size_t>> supportedP010Sizes, supportedBlobSizes;
3121
3122 getSupportedSizes(staticMeta, scalerSizesTag, HAL_PIXEL_FORMAT_BLOB,
3123 &supportedBlobSizes);
3124 getSupportedSizes(staticMeta, scalerSizesTag, HAL_PIXEL_FORMAT_YCBCR_P010,
3125 &supportedP010Sizes);
3126 ASSERT_FALSE(supportedP010Sizes.empty());
3127
3128 std::vector<int64_t> blobMinDurations, blobStallDurations;
3129 getSupportedDurations(staticMeta, scalerMinFrameDurationsTag, HAL_PIXEL_FORMAT_BLOB,
3130 supportedP010Sizes, &blobMinDurations);
3131 getSupportedDurations(staticMeta, scalerStallDurationsTag, HAL_PIXEL_FORMAT_BLOB,
3132 supportedP010Sizes, &blobStallDurations);
3133 ASSERT_FALSE(blobStallDurations.empty());
3134 ASSERT_FALSE(blobMinDurations.empty());
3135 ASSERT_EQ(supportedP010Sizes.size(), blobMinDurations.size());
3136 ASSERT_EQ(blobMinDurations.size(), blobStallDurations.size());
3137 }
3138
3139 // TODO (b/280887191): Validate other aspects of stream configuration metadata...
3140
3141 ndk::ScopedAStatus ret = mSession->close();
3142 mSession = nullptr;
3143 ASSERT_TRUE(ret.isOk());
3144 }
3145}
3146
3147 GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(CameraAidlTest);
3148INSTANTIATE_TEST_SUITE_P(
3149 PerInstance, CameraAidlTest,
3150 testing::ValuesIn(android::getAidlHalInstanceNames(ICameraProvider::descriptor)),
3151 android::hardware::PrintInstanceNameToString);