1/*
2 * Copyright (C) 2022 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16#include <gtest/gtest.h>
17
18#include <aidl/Vintf.h>
19#include <aidl/android/hardware/camera/common/VendorTagSection.h>
20#include <aidl/android/hardware/camera/device/ICameraDevice.h>
21#include <aidlcommonsupport/NativeHandle.h>
22#include <camera_aidl_test.h>
23#include <cutils/properties.h>
24#include <device_cb.h>
25#include <empty_device_cb.h>
26#include <grallocusage/GrallocUsageConversion.h>
27#include <gtest/gtest.h>
28#include <hardware/gralloc.h>
29#include <hardware/gralloc1.h>
30#include <hidl/GtestPrinter.h>
31#include <hidl/HidlSupport.h>
32#include <torch_provider_cb.h>
33#include <com_android_internal_camera_flags.h>
34#include <list>
35
36using ::aidl::android::hardware::camera::common::CameraDeviceStatus;
37using ::aidl::android::hardware::camera::common::CameraResourceCost;
38using ::aidl::android::hardware::camera::common::TorchModeStatus;
39using ::aidl::android::hardware::camera::common::VendorTagSection;
40using ::aidl::android::hardware::camera::device::ICameraDevice;
41using ::aidl::android::hardware::camera::metadata::RequestAvailableColorSpaceProfilesMap;
42using ::aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap;
43using ::aidl::android::hardware::camera::metadata::SensorPixelMode;
44using ::aidl::android::hardware::camera::provider::CameraIdAndStreamCombination;
45using ::aidl::android::hardware::camera::provider::BnCameraProviderCallback;
46
47using ::ndk::ScopedAStatus;
48
49namespace {
50const int32_t kBurstFrameCount = 10;
51const uint32_t kMaxStillWidth = 2048;
52const uint32_t kMaxStillHeight = 1536;
53
54const int64_t kEmptyFlushTimeoutMSec = 200;
55namespace flags = com::android::internal::camera::flags;
56
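// Stream use cases that a device advertising stream use case support is expected to report.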
57const static std::vector<int64_t> kMandatoryUseCases = {
58 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
59 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW,
60 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE,
61 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD,
62 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL,
63 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL};
64} // namespace
65
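// Test if ICameraProvider::getCameraIdList returns Status::OK and log the returned camera ids.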
66TEST_P(CameraAidlTest, getCameraIdList) {
67 std::vector<std::string> idList;
68 ScopedAStatus ret = mProvider->getCameraIdList(&idList);
69 ASSERT_TRUE(ret.isOk());
70
71 for (size_t i = 0; i < idList.size(); i++) {
72 ALOGI("Camera Id[%zu] is %s", i, idList[i].c_str());
73 }
74}
75
76// Test if ICameraProvider::getVendorTags returns Status::OK
77TEST_P(CameraAidlTest, getVendorTags) {
78 std::vector<VendorTagSection> vendorTags;
79 ScopedAStatus ret = mProvider->getVendorTags(&vendorTags);
80
81 ASSERT_TRUE(ret.isOk());
82 for (size_t i = 0; i < vendorTags.size(); i++) {
83 ALOGI("Vendor tag section %zu name %s", i, vendorTags[i].sectionName.c_str());
84 for (auto& tag : vendorTags[i].tags) {
85 ALOGI("Vendor tag id %u name %s type %d", tag.tagId, tag.tagName.c_str(),
86 (int)tag.tagType);
87 }
88 }
89}
90
91// Test if ICameraProvider::setCallback returns Status::OK
92TEST_P(CameraAidlTest, setCallback) {
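// Minimal provider callback implementation that only logs the incoming status updates.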
93 struct ProviderCb : public BnCameraProviderCallback {
94 ScopedAStatus cameraDeviceStatusChange(const std::string& cameraDeviceName,
95 CameraDeviceStatus newStatus) override {
96 ALOGI("camera device status callback name %s, status %d", cameraDeviceName.c_str(),
97 (int)newStatus);
98 return ScopedAStatus::ok();
99 }
100 ScopedAStatus torchModeStatusChange(const std::string& cameraDeviceName,
101 TorchModeStatus newStatus) override {
102 ALOGI("Torch mode status callback name %s, status %d", cameraDeviceName.c_str(),
103 (int)newStatus);
104 return ScopedAStatus::ok();
105 }
106 ScopedAStatus physicalCameraDeviceStatusChange(const std::string& cameraDeviceName,
107 const std::string& physicalCameraDeviceName,
108 CameraDeviceStatus newStatus) override {
109 ALOGI("physical camera device status callback name %s, physical camera name %s,"
110 " status %d",
111 cameraDeviceName.c_str(), physicalCameraDeviceName.c_str(), (int)newStatus);
112 return ScopedAStatus::ok();
113 }
114 };
115
116 std::shared_ptr<ProviderCb> cb = ndk::SharedRefBase::make<ProviderCb>();
117 ScopedAStatus ret = mProvider->setCallback(cb);
118 ASSERT_TRUE(ret.isOk());
119 ret = mProvider->setCallback(nullptr);
120 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
121}
122
123// Test if ICameraProvider::getCameraDeviceInterface returns Status::OK and non-null device
124TEST_P(CameraAidlTest, getCameraDeviceInterface) {
125 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
126
127 for (const auto& name : cameraDeviceNames) {
128 std::shared_ptr<ICameraDevice> cameraDevice;
129 ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &cameraDevice);
130 ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
131 ret.getServiceSpecificError());
132 ASSERT_TRUE(ret.isOk());
133 ASSERT_NE(cameraDevice, nullptr);
134 }
135}
136
137// Verify that the device resource cost can be retrieved and the values are
138// correct.
139TEST_P(CameraAidlTest, getResourceCost) {
140 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
141
142 for (const auto& deviceName : cameraDeviceNames) {
143 std::shared_ptr<ICameraDevice> cameraDevice;
144 ScopedAStatus ret = mProvider->getCameraDeviceInterface(deviceName, &cameraDevice);
145 ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
146 ret.getServiceSpecificError());
147 ASSERT_TRUE(ret.isOk());
148 ASSERT_NE(cameraDevice, nullptr);
149
150 CameraResourceCost resourceCost;
151 ret = cameraDevice->getResourceCost(&resourceCost);
152 ALOGI("getResourceCost returns: %d:%d", ret.getExceptionCode(),
153 ret.getServiceSpecificError());
154 ASSERT_TRUE(ret.isOk());
155
156 ALOGI(" Resource cost is %d", resourceCost.resourceCost);
157 ASSERT_LE(resourceCost.resourceCost, 100u);
158
159 for (const auto& name : resourceCost.conflictingDevices) {
160 ALOGI(" Conflicting device: %s", name.c_str());
161 }
162 }
163}
164
165// Validate the integrity of manual flash strength control metadata
166TEST_P(CameraAidlTest, validateManualFlashStrengthControlKeys) {
167 if (flags::camera_manual_flash_strength_control()) {
168 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
169 for (const auto& name : cameraDeviceNames) {
170 ALOGI("validateManualFlashStrengthControlKeys: Testing camera device %s", name.c_str());
171 CameraMetadata meta;
172 std::shared_ptr<ICameraDevice> cameraDevice;
173 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
174 &cameraDevice /*out*/);
175 ndk::ScopedAStatus ret = cameraDevice->getCameraCharacteristics(&meta);
176 ASSERT_TRUE(ret.isOk());
177 const camera_metadata_t* staticMeta =
178 reinterpret_cast<const camera_metadata_t*>(meta.metadata.data());
179 verifyManualFlashStrengthControlCharacteristics(staticMeta);
180 }
181 } else {
182 ALOGI("validateManualFlashStrengthControlKeys: Test skipped.\n");
183 GTEST_SKIP();
184 }
185}
186
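// Verify that each hidden physical camera is only part of logical cameras that share the
// same system camera kind.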
187TEST_P(CameraAidlTest, systemCameraTest) {
188 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
189 std::map<std::string, std::vector<SystemCameraKind>> hiddenPhysicalIdToLogicalMap;
190 for (const auto& name : cameraDeviceNames) {
191 std::shared_ptr<ICameraDevice> device;
192 ALOGI("systemCameraTest: Testing camera device %s", name.c_str());
193 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
194 ASSERT_TRUE(ret.isOk());
195 ASSERT_NE(device, nullptr);
196
197 CameraMetadata cameraCharacteristics;
198 ret = device->getCameraCharacteristics(&cameraCharacteristics);
199 ASSERT_TRUE(ret.isOk());
200
201 const camera_metadata_t* staticMeta =
202 reinterpret_cast<const camera_metadata_t*>(cameraCharacteristics.metadata.data());
203 Status rc = isLogicalMultiCamera(staticMeta);
204 if (rc == Status::OPERATION_NOT_SUPPORTED) {
205 return;
206 }
207
208 ASSERT_EQ(rc, Status::OK);
209 std::unordered_set<std::string> physicalIds;
210 ASSERT_EQ(getPhysicalCameraIds(staticMeta, &physicalIds), Status::OK);
211 SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
212 Status retStatus = getSystemCameraKind(staticMeta, &systemCameraKind);
213 ASSERT_EQ(retStatus, Status::OK);
214
215 for (auto physicalId : physicalIds) {
216 bool isPublicId = false;
217 for (auto& deviceName : cameraDeviceNames) {
218 std::string publicVersion, publicId;
219 ASSERT_TRUE(matchDeviceName(deviceName, mProviderType, &publicVersion, &publicId));
220 if (physicalId == publicId) {
221 isPublicId = true;
222 break;
223 }
224 }
225
226 // For hidden physical cameras, collect their associated logical cameras
227 // and store the system camera kind.
228 if (!isPublicId) {
229 auto it = hiddenPhysicalIdToLogicalMap.find(physicalId);
230 if (it == hiddenPhysicalIdToLogicalMap.end()) {
231 hiddenPhysicalIdToLogicalMap.insert(std::make_pair(
232 physicalId, std::vector<SystemCameraKind>({systemCameraKind})));
233 } else {
234 it->second.push_back(systemCameraKind);
235 }
236 }
237 }
238 }
239
240 // Check that the system camera kind of the logical cameras associated with
241 // each hidden physical camera is the same.
242 for (const auto& it : hiddenPhysicalIdToLogicalMap) {
243 SystemCameraKind neededSystemCameraKind = it.second.front();
244 for (auto foundSystemCamera : it.second) {
245 ASSERT_EQ(neededSystemCameraKind, foundSystemCamera);
246 }
247 }
248}
249
250// Verify that the static camera characteristics can be retrieved
251// successfully.
252TEST_P(CameraAidlTest, getCameraCharacteristics) {
253 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
254
255 for (const auto& name : cameraDeviceNames) {
256 std::shared_ptr<ICameraDevice> device;
257 ALOGI("getCameraCharacteristics: Testing camera device %s", name.c_str());
258 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
259 ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
260 ret.getServiceSpecificError());
261 ASSERT_TRUE(ret.isOk());
262 ASSERT_NE(device, nullptr);
263
264 CameraMetadata chars;
265 ret = device->getCameraCharacteristics(&chars);
266 ASSERT_TRUE(ret.isOk());
267 verifyCameraCharacteristics(chars);
268 verifyMonochromeCharacteristics(chars);
269 verifyRecommendedConfigs(chars);
270 verifyHighSpeedRecordingCharacteristics(name, chars);
271 verifyLogicalOrUltraHighResCameraMetadata(name, device, chars, cameraDeviceNames);
272
273 ASSERT_TRUE(ret.isOk());
274
275 // getPhysicalCameraCharacteristics will fail for publicly
276 // advertised camera IDs.
277 std::string version, cameraId;
278 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &cameraId));
279 CameraMetadata devChars;
280 ret = device->getPhysicalCameraCharacteristics(cameraId, &devChars);
281 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
282 ASSERT_EQ(0, devChars.metadata.size());
283 }
284}
285
286// Verify that the torch strength level can be set and retrieved successfully.
287TEST_P(CameraAidlTest, turnOnTorchWithStrengthLevel) {
288 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
289
290 std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this);
291 ndk::ScopedAStatus ret = mProvider->setCallback(cb);
292 ASSERT_TRUE(ret.isOk());
293
294 for (const auto& name : cameraDeviceNames) {
295 int32_t defaultLevel;
296 std::shared_ptr<ICameraDevice> device;
297 ALOGI("%s: Testing camera device %s", __FUNCTION__, name.c_str());
298
299 ret = mProvider->getCameraDeviceInterface(name, &device);
300 ASSERT_TRUE(ret.isOk());
301 ASSERT_NE(device, nullptr);
302
303 CameraMetadata chars;
304 ret = device->getCameraCharacteristics(&chars);
305 ASSERT_TRUE(ret.isOk());
306
307 const camera_metadata_t* staticMeta =
308 reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());
309 bool torchStrengthControlSupported = isTorchStrengthControlSupported(staticMeta);
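// Read ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL so the strength reset after turning
// the torch off can be verified against it below.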
310 camera_metadata_ro_entry entry;
311 int rc = find_camera_metadata_ro_entry(staticMeta,
312 ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL, &entry);
313 if (torchStrengthControlSupported) {
314 ASSERT_EQ(rc, 0);
315 ASSERT_GT(entry.count, 0);
316 defaultLevel = *entry.data.i32;
317 ALOGI("Default level is:%d", defaultLevel);
318 }
319
320 mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
321 ret = device->turnOnTorchWithStrengthLevel(2);
322 ALOGI("turnOnTorchWithStrengthLevel returns status: %d", ret.getServiceSpecificError());
323 // OPERATION_NOT_SUPPORTED check
324 if (!torchStrengthControlSupported) {
325 ALOGI("Torch strength control not supported.");
326 ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED),
327 ret.getServiceSpecificError());
328 } else {
329 {
330 ASSERT_TRUE(ret.isOk());
331 std::unique_lock<std::mutex> l(mTorchLock);
332 while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
333 auto timeout = std::chrono::system_clock::now() +
334 std::chrono::seconds(kTorchTimeoutSec);
335 ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
336 }
337 ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
338 mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
339 }
340 ALOGI("getTorchStrengthLevel: Testing");
341 int32_t strengthLevel;
342 ret = device->getTorchStrengthLevel(&strengthLevel);
343 ASSERT_TRUE(ret.isOk());
344 ALOGI("Torch strength level is : %d", strengthLevel);
345 ASSERT_EQ(strengthLevel, 2);
346
347 // Turn OFF the torch and verify torch strength level is reset to default level.
348 ALOGI("Testing torch strength level reset after turning the torch OFF.");
349 ret = device->setTorchMode(false);
350 ASSERT_TRUE(ret.isOk());
351 {
352 std::unique_lock<std::mutex> l(mTorchLock);
353 while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
354 auto timeout = std::chrono::system_clock::now() +
355 std::chrono::seconds(kTorchTimeoutSec);
356 ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
357 }
358 ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
359 }
360
361 ret = device->getTorchStrengthLevel(&strengthLevel);
362 ASSERT_TRUE(ret.isOk());
363 ALOGI("Torch strength level after turning OFF torch is : %d", strengthLevel);
364 ASSERT_EQ(strengthLevel, defaultLevel);
365 }
366 }
367}
368
369// In case it is supported, verify that the torch can be enabled.
370// Check for corresponding torch callbacks as well.
371TEST_P(CameraAidlTest, setTorchMode) {
372 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
373
374 std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this);
375 ndk::ScopedAStatus ret = mProvider->setCallback(cb);
376 ALOGI("setCallback returns status: %d", ret.getServiceSpecificError());
377 ASSERT_TRUE(ret.isOk());
378 ASSERT_NE(cb, nullptr);
379
380 for (const auto& name : cameraDeviceNames) {
381 std::shared_ptr<ICameraDevice> device;
382 ALOGI("setTorchMode: Testing camera device %s", name.c_str());
383 ret = mProvider->getCameraDeviceInterface(name, &device);
384 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
385 ret.getServiceSpecificError());
386 ASSERT_TRUE(ret.isOk());
387 ASSERT_NE(device, nullptr);
388
389 CameraMetadata metadata;
390 ret = device->getCameraCharacteristics(&metadata);
391 ALOGI("getCameraCharacteristics returns status:%d", ret.getServiceSpecificError());
392 ASSERT_TRUE(ret.isOk());
393 camera_metadata_t* staticMeta =
394 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
395 bool torchSupported = isTorchSupported(staticMeta);
396
397 mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
398 ret = device->setTorchMode(true);
399 ALOGI("setTorchMode returns status: %d", ret.getServiceSpecificError());
400 if (!torchSupported) {
401 ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED),
402 ret.getServiceSpecificError());
403 } else {
404 ASSERT_TRUE(ret.isOk());
405 {
406 std::unique_lock<std::mutex> l(mTorchLock);
407 while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
408 auto timeout = std::chrono::system_clock::now() +
409 std::chrono::seconds(kTorchTimeoutSec);
410 ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
411 }
412 ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
413 mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
414 }
415
416 ret = device->setTorchMode(false);
417 ASSERT_TRUE(ret.isOk());
418 {
419 std::unique_lock<std::mutex> l(mTorchLock);
420 while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
421 auto timeout = std::chrono::system_clock::now() +
422 std::chrono::seconds(kTorchTimeoutSec);
423 ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
424 }
425 ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
426 }
427 }
428 }
429}
430
431// Check dump functionality.
432TEST_P(CameraAidlTest, dump) {
433 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
434
435 for (const auto& name : cameraDeviceNames) {
436 std::shared_ptr<ICameraDevice> device;
437 ALOGI("dump: Testing camera device %s", name.c_str());
438
439 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
440 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
441 ret.getServiceSpecificError());
442 ASSERT_TRUE(ret.isOk());
443 ASSERT_NE(device, nullptr);
444
445 int raw_handle = open(kDumpOutput, O_RDWR);
446 ASSERT_GE(raw_handle, 0);
447
448 auto retStatus = device->dump(raw_handle, nullptr, 0);
449 ASSERT_EQ(retStatus, ::android::OK);
450 close(raw_handle);
451 }
452}
453
454// Open, dump, then close
455TEST_P(CameraAidlTest, openClose) {
456 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
457
458 for (const auto& name : cameraDeviceNames) {
459 std::shared_ptr<ICameraDevice> device;
460 ALOGI("openClose: Testing camera device %s", name.c_str());
461 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
462 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
463 ret.getServiceSpecificError());
464 ASSERT_TRUE(ret.isOk());
465 ASSERT_NE(device, nullptr);
466
467 std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
468
469 ret = device->open(cb, &mSession);
470 ASSERT_TRUE(ret.isOk());
471 ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
472 ret.getServiceSpecificError());
473 ASSERT_NE(mSession, nullptr);
474 int raw_handle = open(kDumpOutput, O_RDWR);
475 ASSERT_GE(raw_handle, 0);
476
477 auto retStatus = device->dump(raw_handle, nullptr, 0);
478 ASSERT_EQ(retStatus, ::android::OK);
479 close(raw_handle);
480
481 ret = mSession->close();
482 mSession = nullptr;
483 ASSERT_TRUE(ret.isOk());
484 // TODO: test all session API calls return INTERNAL_ERROR after close
485 // TODO: keep a wp copy here and verify session cannot be promoted out of this scope
486 }
487}
488
489// Check whether all common default request settings can be successfully
490// constructed.
491TEST_P(CameraAidlTest, constructDefaultRequestSettings) {
492 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
493
494 for (const auto& name : cameraDeviceNames) {
495 std::shared_ptr<ICameraDevice> device;
496 ALOGI("constructDefaultRequestSettings: Testing camera device %s", name.c_str());
497 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
498 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
499 ret.getServiceSpecificError());
500 ASSERT_TRUE(ret.isOk());
501 ASSERT_NE(device, nullptr);
502
503 std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
504 ret = device->open(cb, &mSession);
505 ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
506 ret.getServiceSpecificError());
507 ASSERT_TRUE(ret.isOk());
508 ASSERT_NE(mSession, nullptr);
509
510 for (int32_t t = (int32_t)RequestTemplate::PREVIEW; t <= (int32_t)RequestTemplate::MANUAL;
511 t++) {
512 RequestTemplate reqTemplate = (RequestTemplate)t;
513 CameraMetadata rawMetadata;
514 ret = mSession->constructDefaultRequestSettings(reqTemplate, &rawMetadata);
515 ALOGI("constructDefaultRequestSettings returns status:%d:%d", ret.getExceptionCode(),
516 ret.getServiceSpecificError());
517
518 if (reqTemplate == RequestTemplate::ZERO_SHUTTER_LAG ||
519 reqTemplate == RequestTemplate::MANUAL) {
520 // optional templates
521 ASSERT_TRUE(ret.isOk() || static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
522 ret.getServiceSpecificError());
523 } else {
524 ASSERT_TRUE(ret.isOk());
525 }
526
527 if (ret.isOk()) {
528 const camera_metadata_t* metadata = (camera_metadata_t*)rawMetadata.metadata.data();
529 size_t expectedSize = rawMetadata.metadata.size();
530 int result = validate_camera_metadata_structure(metadata, &expectedSize);
531 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
532 verifyRequestTemplate(metadata, reqTemplate);
533 } else {
534 ASSERT_EQ(0u, rawMetadata.metadata.size());
535 }
536 }
537 ret = mSession->close();
538 mSession = nullptr;
539 ASSERT_TRUE(ret.isOk());
540 }
541}
542
543// Verify that all supported stream formats and sizes can be configured
544// successfully.
545TEST_P(CameraAidlTest, configureStreamsAvailableOutputs) {
546 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
547 std::vector<AvailableStream> outputStreams;
548
549 for (const auto& name : cameraDeviceNames) {
550 CameraMetadata meta;
551 std::shared_ptr<ICameraDevice> device;
552
553 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/, &device /*out*/);
554
555 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
556 outputStreams.clear();
557 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
558 ASSERT_NE(0u, outputStreams.size());
559
560 int32_t jpegBufferSize = 0;
561 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
562 ASSERT_NE(0u, jpegBufferSize);
563
564 int32_t streamId = 0;
565 int32_t streamConfigCounter = 0;
566 for (auto& it : outputStreams) {
567 Stream stream;
568 Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format));
569 stream.id = streamId;
570 stream.streamType = StreamType::OUTPUT;
571 stream.width = it.width;
572 stream.height = it.height;
573 stream.format = static_cast<PixelFormat>(it.format);
574 stream.dataSpace = dataspace;
575 stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
576 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
577 stream.rotation = StreamRotation::ROTATION_0;
578 stream.dynamicRangeProfile = RequestAvailableDynamicRangeProfilesMap::
579 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
580 stream.useCase = ScalerAvailableStreamUseCases::
581 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
582 stream.colorSpace = static_cast<int>(
583 RequestAvailableColorSpaceProfilesMap::
584 ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
585
586 std::vector<Stream> streams = {stream};
587 StreamConfiguration config;
588 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
589 jpegBufferSize);
590
591 bool expectStreamCombQuery = (isLogicalMultiCamera(staticMeta) == Status::OK);
592 verifyStreamCombination(device, config, /*expectedStatus*/ true, expectStreamCombQuery);
593
594 config.streamConfigCounter = streamConfigCounter++;
595 std::vector<HalStream> halConfigs;
596 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
597 ASSERT_TRUE(ret.isOk());
598 ASSERT_EQ(halConfigs.size(), 1);
599 ASSERT_EQ(halConfigs[0].id, streamId);
600
601 streamId++;
602 }
603
604 ndk::ScopedAStatus ret = mSession->close();
605 mSession = nullptr;
606 ASSERT_TRUE(ret.isOk());
607 }
608}
609
610// Verify that mandatory concurrent streams and outputs are supported.
611TEST_P(CameraAidlTest, configureConcurrentStreamsAvailableOutputs) {
612 struct CameraTestInfo {
613 CameraMetadata staticMeta;
614 std::shared_ptr<ICameraDeviceSession> session;
615 std::shared_ptr<ICameraDevice> cameraDevice;
616 StreamConfiguration config;
617 };
618
619 std::map<std::string, std::string> idToNameMap = getCameraDeviceIdToNameMap(mProvider);
620 std::vector<ConcurrentCameraIdCombination> concurrentDeviceCombinations =
621 getConcurrentDeviceCombinations(mProvider);
622 std::vector<AvailableStream> outputStreams;
623 for (const auto& cameraDeviceIds : concurrentDeviceCombinations) {
624 std::vector<CameraIdAndStreamCombination> cameraIdsAndStreamCombinations;
625 std::vector<CameraTestInfo> cameraTestInfos;
626 size_t i = 0;
627 for (const auto& id : cameraDeviceIds.combination) {
628 CameraTestInfo cti;
629 auto it = idToNameMap.find(id);
630 ASSERT_TRUE(idToNameMap.end() != it);
631 std::string name = it->second;
632
633 openEmptyDeviceSession(name, mProvider, &cti.session /*out*/, &cti.staticMeta /*out*/,
634 &cti.cameraDevice /*out*/);
635
636 outputStreams.clear();
637 camera_metadata_t* staticMeta =
638 reinterpret_cast<camera_metadata_t*>(cti.staticMeta.metadata.data());
639 ASSERT_EQ(Status::OK, getMandatoryConcurrentStreams(staticMeta, &outputStreams));
640 ASSERT_NE(0u, outputStreams.size());
641
642 int32_t jpegBufferSize = 0;
643 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
644 ASSERT_NE(0u, jpegBufferSize);
645
646 int32_t streamId = 0;
647 std::vector<Stream> streams(outputStreams.size());
648 size_t j = 0;
649 for (const auto& s : outputStreams) {
650 Stream stream;
651 Dataspace dataspace = getDataspace(static_cast<PixelFormat>(s.format));
652 stream.id = streamId++;
653 stream.streamType = StreamType::OUTPUT;
654 stream.width = s.width;
655 stream.height = s.height;
656 stream.format = static_cast<PixelFormat>(s.format);
657 stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
658 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
659 stream.dataSpace = dataspace;
660 stream.rotation = StreamRotation::ROTATION_0;
661 stream.sensorPixelModesUsed = {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT};
662 stream.dynamicRangeProfile = RequestAvailableDynamicRangeProfilesMap::
663 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
664 streams[j] = stream;
665 j++;
666 }
667
668 // Add the created stream configs to cameraIdsAndStreamCombinations
669 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &cti.config,
670 jpegBufferSize);
671
672 cti.config.streamConfigCounter = outputStreams.size();
673 CameraIdAndStreamCombination cameraIdAndStreamCombination;
674 cameraIdAndStreamCombination.cameraId = id;
675 cameraIdAndStreamCombination.streamConfiguration = cti.config;
676 cameraIdsAndStreamCombinations.push_back(cameraIdAndStreamCombination);
677 i++;
678 cameraTestInfos.push_back(cti);
679 }
680 // Now verify that concurrent streams are supported
681 bool combinationSupported;
682 ndk::ScopedAStatus ret = mProvider->isConcurrentStreamCombinationSupported(
683 cameraIdsAndStreamCombinations, &combinationSupported);
684 ASSERT_TRUE(ret.isOk());
685 ASSERT_EQ(combinationSupported, true);
686
687 // Test that the streams can actually be configured
688 for (auto& cti : cameraTestInfos) {
689 if (cti.session != nullptr) {
690 camera_metadata_t* staticMeta =
691 reinterpret_cast<camera_metadata_t*>(cti.staticMeta.metadata.data());
692 bool expectStreamCombQuery = (isLogicalMultiCamera(staticMeta) == Status::OK);
693 verifyStreamCombination(cti.cameraDevice, cti.config, /*expectedStatus*/ true,
694 expectStreamCombQuery);
695 }
696
697 if (cti.session != nullptr) {
698 std::vector<HalStream> streamConfigs;
699 ret = cti.session->configureStreams(cti.config, &streamConfigs);
700 ASSERT_TRUE(ret.isOk());
701 ASSERT_EQ(cti.config.streams.size(), streamConfigs.size());
702 }
703 }
704
705 for (auto& cti : cameraTestInfos) {
706 ret = cti.session->close();
707 ASSERT_TRUE(ret.isOk());
708 }
709 }
710}
711
712// Check for correct handling of invalid/incorrect configuration parameters.
713TEST_P(CameraAidlTest, configureStreamsInvalidOutputs) {
714 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
715 std::vector<AvailableStream> outputStreams;
716
717 for (const auto& name : cameraDeviceNames) {
718 CameraMetadata meta;
719 std::shared_ptr<ICameraDevice> cameraDevice;
720
721 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
722 &cameraDevice /*out*/);
723 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
724 outputStreams.clear();
725
726 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
727 ASSERT_NE(0u, outputStreams.size());
728
729 int32_t jpegBufferSize = 0;
730 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
731 ASSERT_NE(0u, jpegBufferSize);
732
733 int32_t streamId = 0;
734 Stream stream = {streamId++,
735 StreamType::OUTPUT,
736 static_cast<uint32_t>(0),
737 static_cast<uint32_t>(0),
738 static_cast<PixelFormat>(outputStreams[0].format),
739 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
740 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
741 Dataspace::UNKNOWN,
742 StreamRotation::ROTATION_0,
743 std::string(),
744 jpegBufferSize,
745 -1,
746 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
747 RequestAvailableDynamicRangeProfilesMap::
748 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
749 int32_t streamConfigCounter = 0;
750 std::vector<Stream> streams = {stream};
751 StreamConfiguration config;
752 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
753 jpegBufferSize);
754
755 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ false,
756 /*expectStreamCombQuery*/ false);
757
758 config.streamConfigCounter = streamConfigCounter++;
759 std::vector<HalStream> halConfigs;
760 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
761 ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
762 ret.getServiceSpecificError() ||
763 static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError());
764
765 stream = {streamId++,
766 StreamType::OUTPUT,
767 /*width*/ INT32_MAX,
768 /*height*/ INT32_MAX,
769 static_cast<PixelFormat>(outputStreams[0].format),
770 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
771 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
772 Dataspace::UNKNOWN,
773 StreamRotation::ROTATION_0,
774 std::string(),
775 jpegBufferSize,
776 -1,
777 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
778 RequestAvailableDynamicRangeProfilesMap::
779 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
780
781 streams[0] = stream;
782 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
783 jpegBufferSize);
784
785 config.streamConfigCounter = streamConfigCounter++;
786 halConfigs.clear();
787 ret = mSession->configureStreams(config, &halConfigs);
788 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
789
790 for (auto& it : outputStreams) {
791 stream = {streamId++,
792 StreamType::OUTPUT,
793 it.width,
794 it.height,
795 static_cast<PixelFormat>(UINT32_MAX),
796 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
797 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
798 Dataspace::UNKNOWN,
799 StreamRotation::ROTATION_0,
800 std::string(),
801 jpegBufferSize,
802 -1,
803 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
804 RequestAvailableDynamicRangeProfilesMap::
805 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
806
807 streams[0] = stream;
808 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
809 jpegBufferSize);
810 config.streamConfigCounter = streamConfigCounter++;
811 halConfigs.clear();
812 ret = mSession->configureStreams(config, &halConfigs);
813 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
814 ret.getServiceSpecificError());
815
816 stream = {streamId++,
817 StreamType::OUTPUT,
818 it.width,
819 it.height,
820 static_cast<PixelFormat>(it.format),
821 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
822 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
823 Dataspace::UNKNOWN,
824 static_cast<StreamRotation>(UINT32_MAX),
825 std::string(),
826 jpegBufferSize,
827 -1,
828 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
829 RequestAvailableDynamicRangeProfilesMap::
830 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
831
832 streams[0] = stream;
833 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
834 jpegBufferSize);
835
836 config.streamConfigCounter = streamConfigCounter++;
837 halConfigs.clear();
838 ret = mSession->configureStreams(config, &halConfigs);
839 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
840 ret.getServiceSpecificError());
841 }
842
843 ret = mSession->close();
844 mSession = nullptr;
845 ASSERT_TRUE(ret.isOk());
846 }
847}
848
849// Check whether all supported ZSL output stream combinations can be
850// configured successfully.
851TEST_P(CameraAidlTest, configureStreamsZSLInputOutputs) {
852 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
853 std::vector<AvailableStream> inputStreams;
854 std::vector<AvailableZSLInputOutput> inputOutputMap;
855
856 for (const auto& name : cameraDeviceNames) {
857 CameraMetadata meta;
858 std::shared_ptr<ICameraDevice> cameraDevice;
859
860 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
861 &cameraDevice /*out*/);
862 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
863
864 Status rc = isZSLModeAvailable(staticMeta);
865 if (Status::OPERATION_NOT_SUPPORTED == rc) {
866 ndk::ScopedAStatus ret = mSession->close();
867 mSession = nullptr;
868 ASSERT_TRUE(ret.isOk());
869 continue;
870 }
871 ASSERT_EQ(Status::OK, rc);
872
873 inputStreams.clear();
874 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, inputStreams));
875 ASSERT_NE(0u, inputStreams.size());
876
877 inputOutputMap.clear();
878 ASSERT_EQ(Status::OK, getZSLInputOutputMap(staticMeta, inputOutputMap));
879 ASSERT_NE(0u, inputOutputMap.size());
880
881 bool supportMonoY8 = false;
882 if (Status::OK == isMonochromeCamera(staticMeta)) {
883 for (auto& it : inputStreams) {
884 if (it.format == static_cast<uint32_t>(PixelFormat::Y8)) {
885 supportMonoY8 = true;
886 break;
887 }
888 }
889 }
890
891 int32_t jpegBufferSize = 0;
892 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
893 ASSERT_NE(0u, jpegBufferSize);
894
895 int32_t streamId = 0;
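// Track which ZSL reprocessing paths are advertised; verified below for monochrome (Y8) cameras.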
896 bool hasPrivToY8 = false, hasY8ToY8 = false, hasY8ToBlob = false;
897 uint32_t streamConfigCounter = 0;
898 for (auto& inputIter : inputOutputMap) {
899 AvailableStream input;
900 ASSERT_EQ(Status::OK, findLargestSize(inputStreams, inputIter.inputFormat, input));
901 ASSERT_NE(0u, inputStreams.size());
902
903 if (inputIter.inputFormat ==
904 static_cast<uint32_t>(PixelFormat::IMPLEMENTATION_DEFINED) &&
905 inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
906 hasPrivToY8 = true;
907 } else if (inputIter.inputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
908 if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::BLOB)) {
909 hasY8ToBlob = true;
910 } else if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
911 hasY8ToY8 = true;
912 }
913 }
914 AvailableStream outputThreshold = {INT32_MAX, INT32_MAX, inputIter.outputFormat};
915 std::vector<AvailableStream> outputStreams;
916 ASSERT_EQ(Status::OK,
917 getAvailableOutputStreams(staticMeta, outputStreams, &outputThreshold));
918 for (auto& outputIter : outputStreams) {
919 Dataspace outputDataSpace =
920 getDataspace(static_cast<PixelFormat>(outputIter.format));
921 Stream zslStream = {
922 streamId++,
923 StreamType::OUTPUT,
924 input.width,
925 input.height,
926 static_cast<PixelFormat>(input.format),
927 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
928 GRALLOC_USAGE_HW_CAMERA_ZSL),
929 Dataspace::UNKNOWN,
930 StreamRotation::ROTATION_0,
931 std::string(),
932 jpegBufferSize,
933 -1,
934 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
935 RequestAvailableDynamicRangeProfilesMap::
936 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
937 Stream inputStream = {
938 streamId++,
939 StreamType::INPUT,
940 input.width,
941 input.height,
942 static_cast<PixelFormat>(input.format),
943 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(0),
944 Dataspace::UNKNOWN,
945 StreamRotation::ROTATION_0,
946 std::string(),
947 jpegBufferSize,
948 -1,
949 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
950 RequestAvailableDynamicRangeProfilesMap::
951 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
952 Stream outputStream = {
953 streamId++,
954 StreamType::OUTPUT,
955 outputIter.width,
956 outputIter.height,
957 static_cast<PixelFormat>(outputIter.format),
958 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
959 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
960 outputDataSpace,
961 StreamRotation::ROTATION_0,
962 std::string(),
963 jpegBufferSize,
964 -1,
965 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
966 RequestAvailableDynamicRangeProfilesMap::
967 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
968
969 std::vector<Stream> streams = {inputStream, zslStream, outputStream};
970
971 StreamConfiguration config;
972 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
973 jpegBufferSize);
974
975 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
976 /*expectStreamCombQuery*/ false);
977
978 config.streamConfigCounter = streamConfigCounter++;
979 std::vector<HalStream> halConfigs;
980 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
981 ASSERT_TRUE(ret.isOk());
982 ASSERT_EQ(3u, halConfigs.size());
983 }
984 }
985
986 if (supportMonoY8) {
987 if (Status::OK == isZSLModeAvailable(staticMeta, PRIV_REPROCESS)) {
988 ASSERT_TRUE(hasPrivToY8);
989 }
990 if (Status::OK == isZSLModeAvailable(staticMeta, YUV_REPROCESS)) {
991 ASSERT_TRUE(hasY8ToY8);
992 ASSERT_TRUE(hasY8ToBlob);
993 }
994 }
995
996 ndk::ScopedAStatus ret = mSession->close();
997 mSession = nullptr;
998 ASSERT_TRUE(ret.isOk());
999 }
1000}
1001
1002// Check whether session parameters are supported. If HAL support for them
1003// exists, then try to configure a preview stream using them.
1004TEST_P(CameraAidlTest, configureStreamsWithSessionParameters) {
1005 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1006 std::vector<AvailableStream> outputPreviewStreams;
1007 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
1008 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
1009
1010 for (const auto& name : cameraDeviceNames) {
1011 CameraMetadata meta;
1012
1013 std::shared_ptr<ICameraDevice> unusedCameraDevice;
1014 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
1015 &unusedCameraDevice /*out*/);
1016 camera_metadata_t* staticMetaBuffer =
1017 reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1018
1019 std::unordered_set<int32_t> availableSessionKeys;
1020 auto rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
1021 &availableSessionKeys);
1022 ASSERT_TRUE(Status::OK == rc);
1023 if (availableSessionKeys.empty()) {
1024 ndk::ScopedAStatus ret = mSession->close();
1025 mSession = nullptr;
1026 ASSERT_TRUE(ret.isOk());
1027 continue;
1028 }
1029
1030 android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings;
1031 android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams,
1032 modifiedSessionParams;
1033 constructFilteredSettings(mSession, availableSessionKeys, RequestTemplate::PREVIEW,
1034 &previewRequestSettings, &sessionParams);
1035 if (sessionParams.isEmpty()) {
1036 ndk::ScopedAStatus ret = mSession->close();
1037 mSession = nullptr;
1038 ASSERT_TRUE(ret.isOk());
1039 continue;
1040 }
1041
1042 outputPreviewStreams.clear();
1043
1044 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams,
1045 &previewThreshold));
1046 ASSERT_NE(0u, outputPreviewStreams.size());
1047
1048 Stream previewStream = {
1049 0,
1050 StreamType::OUTPUT,
1051 outputPreviewStreams[0].width,
1052 outputPreviewStreams[0].height,
1053 static_cast<PixelFormat>(outputPreviewStreams[0].format),
1054 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1055 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
1056 Dataspace::UNKNOWN,
1057 StreamRotation::ROTATION_0,
1058 std::string(),
1059 /*bufferSize*/ 0,
1060 /*groupId*/ -1,
1061 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1062 RequestAvailableDynamicRangeProfilesMap::
1063 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1064
1065 std::vector<Stream> streams = {previewStream};
1066 StreamConfiguration config;
1067
1068 config.streams = streams;
1069 config.operationMode = StreamConfigurationMode::NORMAL_MODE;
1070 modifiedSessionParams = sessionParams;
1071 auto sessionParamsBuffer = sessionParams.release();
1072 std::vector<uint8_t> rawSessionParam =
1073 std::vector(reinterpret_cast<uint8_t*>(sessionParamsBuffer),
1074 reinterpret_cast<uint8_t*>(sessionParamsBuffer) +
1075 get_camera_metadata_size(sessionParamsBuffer));
1076
1077 config.sessionParams.metadata = rawSessionParam;
1078 config.streamConfigCounter = 0;
1079 config.streams = {previewStream};
1080 config.streamConfigCounter = 0;
1081 config.multiResolutionInputImage = false;
1082
1083 bool newSessionParamsAvailable = false;
1084 for (const auto& it : availableSessionKeys) {
1085 if (modifiedSessionParams.exists(it)) {
1086 modifiedSessionParams.erase(it);
1087 newSessionParamsAvailable = true;
1088 break;
1089 }
1090 }
1091 if (newSessionParamsAvailable) {
1092 auto modifiedSessionParamsBuffer = modifiedSessionParams.release();
1093 verifySessionReconfigurationQuery(mSession, sessionParamsBuffer,
1094 modifiedSessionParamsBuffer);
1095 modifiedSessionParams.acquire(modifiedSessionParamsBuffer);
1096 }
1097
1098 std::vector<HalStream> halConfigs;
1099 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1100 ASSERT_TRUE(ret.isOk());
1101 ASSERT_EQ(1u, halConfigs.size());
1102
1103 sessionParams.acquire(sessionParamsBuffer);
1104 ret = mSession->close();
1105 mSession = nullptr;
1106 ASSERT_TRUE(ret.isOk());
1107 }
1108}
1109
1110// Verify that all supported preview + still capture stream combinations
1111// can be configured successfully.
1112TEST_P(CameraAidlTest, configureStreamsPreviewStillOutputs) {
1113 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1114 std::vector<AvailableStream> outputBlobStreams;
1115 std::vector<AvailableStream> outputPreviewStreams;
1116 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
1117 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
1118 AvailableStream blobThreshold = {INT32_MAX, INT32_MAX, static_cast<int32_t>(PixelFormat::BLOB)};
1119
1120 for (const auto& name : cameraDeviceNames) {
1121 CameraMetadata meta;
1122
1123 std::shared_ptr<ICameraDevice> cameraDevice;
1124 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
1125 &cameraDevice /*out*/);
1126
1127 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1128
1129 // Check if the camera supports depth only
1130 if (isDepthOnly(staticMeta)) {
1131 ndk::ScopedAStatus ret = mSession->close();
1132 mSession = nullptr;
1133 ASSERT_TRUE(ret.isOk());
1134 continue;
1135 }
1136
1137 outputBlobStreams.clear();
1138 ASSERT_EQ(Status::OK,
1139 getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold));
1140 ASSERT_NE(0u, outputBlobStreams.size());
1141
1142 outputPreviewStreams.clear();
1143 ASSERT_EQ(Status::OK,
1144 getAvailableOutputStreams(staticMeta, outputPreviewStreams, &previewThreshold));
1145 ASSERT_NE(0u, outputPreviewStreams.size());
1146
1147 int32_t jpegBufferSize = 0;
1148 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
1149 ASSERT_NE(0u, jpegBufferSize);
1150
1151 int32_t streamId = 0;
1152 uint32_t streamConfigCounter = 0;
1153
1154 for (auto& blobIter : outputBlobStreams) {
1155 for (auto& previewIter : outputPreviewStreams) {
1156 Stream previewStream = {
1157 streamId++,
1158 StreamType::OUTPUT,
1159 previewIter.width,
1160 previewIter.height,
1161 static_cast<PixelFormat>(previewIter.format),
1162 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1163 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
1164 Dataspace::UNKNOWN,
1165 StreamRotation::ROTATION_0,
1166 std::string(),
1167 /*bufferSize*/ 0,
1168 /*groupId*/ -1,
1169 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1170 RequestAvailableDynamicRangeProfilesMap::
1171 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1172 Stream blobStream = {
1173 streamId++,
1174 StreamType::OUTPUT,
1175 blobIter.width,
1176 blobIter.height,
1177 static_cast<PixelFormat>(blobIter.format),
1178 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1179 GRALLOC1_CONSUMER_USAGE_CPU_READ),
1180 Dataspace::JFIF,
1181 StreamRotation::ROTATION_0,
1182 std::string(),
1183 /*bufferSize*/ 0,
1184 /*groupId*/ -1,
1185 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1186 RequestAvailableDynamicRangeProfilesMap::
1187 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1188 std::vector<Stream> streams = {previewStream, blobStream};
1189 StreamConfiguration config;
1190
1191 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
1192 jpegBufferSize);
1193 config.streamConfigCounter = streamConfigCounter++;
1194 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
1195 /*expectStreamCombQuery*/ false);
1196
1197 std::vector<HalStream> halConfigs;
1198 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1199 ASSERT_TRUE(ret.isOk());
1200 ASSERT_EQ(2u, halConfigs.size());
1201 }
1202 }
1203
1204 ndk::ScopedAStatus ret = mSession->close();
1205 mSession = nullptr;
1206 ASSERT_TRUE(ret.isOk());
1207 }
1208}
1209
1210// In case constrained mode is supported, test whether it can be
1211// configured. Additionally check for common invalid inputs when
1212// using this mode.
1213TEST_P(CameraAidlTest, configureStreamsConstrainedOutputs) {
1214 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1215
1216 for (const auto& name : cameraDeviceNames) {
1217 CameraMetadata meta;
1218 std::shared_ptr<ICameraDevice> cameraDevice;
1219
1220 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
1221 &cameraDevice /*out*/);
1222 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1223
1224 Status rc = isConstrainedModeAvailable(staticMeta);
1225 if (Status::OPERATION_NOT_SUPPORTED == rc) {
1226 ndk::ScopedAStatus ret = mSession->close();
1227 mSession = nullptr;
1228 ASSERT_TRUE(ret.isOk());
1229 continue;
1230 }
1231 ASSERT_EQ(Status::OK, rc);
1232
1233 AvailableStream hfrStream;
1234 rc = pickConstrainedModeSize(staticMeta, hfrStream);
1235 ASSERT_EQ(Status::OK, rc);
1236
1237 int32_t streamId = 0;
1238 uint32_t streamConfigCounter = 0;
1239 Stream stream = {streamId,
1240 StreamType::OUTPUT,
1241 hfrStream.width,
1242 hfrStream.height,
1243 static_cast<PixelFormat>(hfrStream.format),
1244 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1245 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1246 Dataspace::UNKNOWN,
1247 StreamRotation::ROTATION_0,
1248 std::string(),
1249 /*bufferSize*/ 0,
1250 /*groupId*/ -1,
1251 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1252 RequestAvailableDynamicRangeProfilesMap::
1253 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1254 std::vector<Stream> streams = {stream};
1255 StreamConfiguration config;
1256 createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
1257 &config);
1258
1259 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
1260 /*expectStreamCombQuery*/ false);
1261
1262 config.streamConfigCounter = streamConfigCounter++;
1263 std::vector<HalStream> halConfigs;
1264 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1265 ASSERT_TRUE(ret.isOk());
1266 ASSERT_EQ(1u, halConfigs.size());
1267 ASSERT_EQ(halConfigs[0].id, streamId);
1268
1269 stream = {streamId++,
1270 StreamType::OUTPUT,
1271 static_cast<uint32_t>(0),
1272 static_cast<uint32_t>(0),
1273 static_cast<PixelFormat>(hfrStream.format),
1274 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1275 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1276 Dataspace::UNKNOWN,
1277 StreamRotation::ROTATION_0,
1278 std::string(),
1279 /*bufferSize*/ 0,
1280 /*groupId*/ -1,
1281 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1282 RequestAvailableDynamicRangeProfilesMap::
1283 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1284 streams[0] = stream;
1285 createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
1286 &config);
1287
1288 config.streamConfigCounter = streamConfigCounter++;
1289 std::vector<HalStream> halConfig;
1290 ret = mSession->configureStreams(config, &halConfig);
1291 ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
1292 ret.getServiceSpecificError() ||
1293 static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError());
1294
1295 stream = {streamId++,
1296 StreamType::OUTPUT,
1297 INT32_MAX,
1298 INT32_MAX,
1299 static_cast<PixelFormat>(hfrStream.format),
1300 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1301 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1302 Dataspace::UNKNOWN,
1303 StreamRotation::ROTATION_0,
1304 std::string(),
1305 /*bufferSize*/ 0,
1306 /*groupId*/ -1,
1307 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1308 RequestAvailableDynamicRangeProfilesMap::
1309 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1310 streams[0] = stream;
1311 createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
1312 &config);
1313
1314 config.streamConfigCounter = streamConfigCounter++;
1315 halConfigs.clear();
1316 ret = mSession->configureStreams(config, &halConfigs);
1317 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
1318
1319 stream = {streamId++,
1320 StreamType::OUTPUT,
1321 hfrStream.width,
1322 hfrStream.height,
1323 static_cast<PixelFormat>(UINT32_MAX),
1324 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1325 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1326 Dataspace::UNKNOWN,
1327 StreamRotation::ROTATION_0,
1328 std::string(),
1329 /*bufferSize*/ 0,
1330 /*groupId*/ -1,
1331 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1332 RequestAvailableDynamicRangeProfilesMap::
1333 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1334 streams[0] = stream;
1335 createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
1336 &config);
1337
1338 config.streamConfigCounter = streamConfigCounter++;
1339 halConfigs.clear();
1340 ret = mSession->configureStreams(config, &halConfigs);
1341 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
1342
1343 ret = mSession->close();
1344 mSession = nullptr;
1345 ASSERT_TRUE(ret.isOk());
1346 }
1347}
1348
1349// Verify that all supported video + snapshot stream combinations can
1350// be configured successfully.
1351TEST_P(CameraAidlTest, configureStreamsVideoStillOutputs) {
1352 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1353 std::vector<AvailableStream> outputBlobStreams;
1354 std::vector<AvailableStream> outputVideoStreams;
1355 AvailableStream videoThreshold = {kMaxVideoWidth, kMaxVideoHeight,
1356 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
1357 AvailableStream blobThreshold = {kMaxVideoWidth, kMaxVideoHeight,
1358 static_cast<int32_t>(PixelFormat::BLOB)};
1359
1360 for (const auto& name : cameraDeviceNames) {
1361 CameraMetadata meta;
1362 std::shared_ptr<ICameraDevice> cameraDevice;
1363
1364 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
1365 &cameraDevice /*out*/);
1366
1367 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1368
1369 // Check if the camera supports depth only
1370 if (isDepthOnly(staticMeta)) {
1371 ndk::ScopedAStatus ret = mSession->close();
1372 mSession = nullptr;
1373 ASSERT_TRUE(ret.isOk());
1374 continue;
1375 }
1376
1377 outputBlobStreams.clear();
1378 ASSERT_EQ(Status::OK,
1379 getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold));
1380 ASSERT_NE(0u, outputBlobStreams.size());
1381
1382 outputVideoStreams.clear();
1383 ASSERT_EQ(Status::OK,
1384 getAvailableOutputStreams(staticMeta, outputVideoStreams, &videoThreshold));
1385 ASSERT_NE(0u, outputVideoStreams.size());
1386
1387 int32_t jpegBufferSize = 0;
1388 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
1389 ASSERT_NE(0u, jpegBufferSize);
1390
1391 int32_t streamId = 0;
1392 uint32_t streamConfigCounter = 0;
1393 for (auto& blobIter : outputBlobStreams) {
1394 for (auto& videoIter : outputVideoStreams) {
1395 Stream videoStream = {
1396 streamId++,
1397 StreamType::OUTPUT,
1398 videoIter.width,
1399 videoIter.height,
1400 static_cast<PixelFormat>(videoIter.format),
1401 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1402 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1403 Dataspace::UNKNOWN,
1404 StreamRotation::ROTATION_0,
1405 std::string(),
1406 jpegBufferSize,
1407 /*groupId*/ -1,
1408                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1409 RequestAvailableDynamicRangeProfilesMap::
1410 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1411                Stream blobStream = {
1412 streamId++,
1413 StreamType::OUTPUT,
1414 blobIter.width,
1415 blobIter.height,
1416 static_cast<PixelFormat>(blobIter.format),
1417 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1418 GRALLOC1_CONSUMER_USAGE_CPU_READ),
1419 Dataspace::JFIF,
1420 StreamRotation::ROTATION_0,
1421 std::string(),
1422 jpegBufferSize,
1423 /*groupId*/ -1,
1424                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1425 RequestAvailableDynamicRangeProfilesMap::
1426 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1427                std::vector<Stream> streams = {videoStream, blobStream};
1428 StreamConfiguration config;
1429
1430 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
1431 jpegBufferSize);
1432 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
1433 /*expectStreamCombQuery*/ false);
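                // Note: verifyStreamCombination presumably exercises
                // ICameraDevice::isStreamCombinationSupported for the same configuration,
                // so the query path and the actual configureStreams call below are
                // expected to agree; treat the exact mechanism as a helper-level detail.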
1434
1435 config.streamConfigCounter = streamConfigCounter++;
1436 std::vector<HalStream> halConfigs;
1437 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1438 ASSERT_TRUE(ret.isOk());
1439 ASSERT_EQ(2u, halConfigs.size());
1440 }
1441 }
1442
1443 ndk::ScopedAStatus ret = mSession->close();
1444 mSession = nullptr;
1445 ASSERT_TRUE(ret.isOk());
1446 }
1447}
1448
1449// Generate and verify a camera capture request
1450TEST_P(CameraAidlTest, processCaptureRequestPreview) {
1451 // TODO(b/220897574): Failing with BUFFER_ERROR
1452 processCaptureRequestInternal(GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, RequestTemplate::PREVIEW,
1453 false /*secureOnlyCameras*/);
1454}
1455
1456// Generate and verify a secure camera capture request
1457TEST_P(CameraAidlTest, processSecureCaptureRequest) {
1458 processCaptureRequestInternal(GRALLOC1_PRODUCER_USAGE_PROTECTED, RequestTemplate::STILL_CAPTURE,
1459 true /*secureOnlyCameras*/);
1460}
1461
1462TEST_P(CameraAidlTest, processCaptureRequestPreviewStabilization) {
1463 std::unordered_map<std::string, nsecs_t> cameraDeviceToTimeLag;
1464 processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ false,
1465 cameraDeviceToTimeLag);
1466 processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ true,
1467 cameraDeviceToTimeLag);
1468}
1469
1470// Generate and verify a multi-camera capture request
1471TEST_P(CameraAidlTest, processMultiCaptureRequestPreview) {
1472 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1473 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
1474 static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
1475 int64_t bufferId = 1;
1476 uint32_t frameNumber = 1;
1477 std::vector<uint8_t> settings;
1478 std::vector<uint8_t> emptySettings;
1479 std::string invalidPhysicalId = "-1";
1480
1481 for (const auto& name : cameraDeviceNames) {
1482 std::string version, deviceId;
1483        ALOGI("processMultiCaptureRequestPreview: Test device %s", name.c_str());
1484        ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1485 CameraMetadata metadata;
1486
1487 std::shared_ptr<ICameraDevice> unusedDevice;
1488 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &metadata /*out*/,
1489 &unusedDevice /*out*/);
1490
1491 camera_metadata_t* staticMeta =
1492 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
1493 Status rc = isLogicalMultiCamera(staticMeta);
1494 if (Status::OPERATION_NOT_SUPPORTED == rc) {
1495 ndk::ScopedAStatus ret = mSession->close();
1496 mSession = nullptr;
1497 ASSERT_TRUE(ret.isOk());
1498 continue;
1499 }
1500        ASSERT_EQ(Status::OK, rc);
1501
1502 std::unordered_set<std::string> physicalIds;
1503 rc = getPhysicalCameraIds(staticMeta, &physicalIds);
1504 ASSERT_TRUE(Status::OK == rc);
1505 ASSERT_TRUE(physicalIds.size() > 1);
1506
1507 std::unordered_set<int32_t> physicalRequestKeyIDs;
1508 rc = getSupportedKeys(staticMeta, ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS,
1509 &physicalRequestKeyIDs);
1510 ASSERT_TRUE(Status::OK == rc);
1511 if (physicalRequestKeyIDs.empty()) {
1512 ndk::ScopedAStatus ret = mSession->close();
1513 mSession = nullptr;
1514 ASSERT_TRUE(ret.isOk());
1515 // The logical camera doesn't support any individual physical requests.
1516 continue;
1517 }
1518
1519 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultPreviewSettings;
1520 android::hardware::camera::common::V1_0::helper::CameraMetadata filteredSettings;
1521 constructFilteredSettings(mSession, physicalRequestKeyIDs, RequestTemplate::PREVIEW,
1522 &defaultPreviewSettings, &filteredSettings);
1523 if (filteredSettings.isEmpty()) {
1524 // No physical device settings in default request.
1525 ndk::ScopedAStatus ret = mSession->close();
1526 mSession = nullptr;
1527 ASSERT_TRUE(ret.isOk());
1528 continue;
1529 }
1530
1531 const camera_metadata_t* settingsBuffer = defaultPreviewSettings.getAndLock();
1532 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1533 settings.assign(rawSettingsBuffer,
1534 rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1535 CameraMetadata settingsMetadata = {settings};
1536 overrideRotateAndCrop(&settingsMetadata);
1537
1538 ndk::ScopedAStatus ret = mSession->close();
1539 mSession = nullptr;
1540 ASSERT_TRUE(ret.isOk());
1541
1542 // Leave only 2 physical devices in the id set.
1543 auto it = physicalIds.begin();
1544 std::string physicalDeviceId = *it;
1545 it++;
1546 physicalIds.erase(++it, physicalIds.end());
1547 ASSERT_EQ(physicalIds.size(), 2u);
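        // The iterator dance above keeps begin() and the element after it and erases
        // from the third element onward, leaving exactly two physical ids; the first of
        // them (physicalDeviceId) is the one targeted by the per-physical settings below.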
1548
1549 std::vector<HalStream> halStreams;
1550 bool supportsPartialResults = false;
1551 bool useHalBufManager = false;
1552 int32_t partialResultCount = 0;
1553 Stream previewStream;
1554 std::shared_ptr<DeviceCb> cb;
1555
1556        configurePreviewStreams(
1557 name, mProvider, &previewThreshold, physicalIds, &mSession, &previewStream,
1558 &halStreams /*out*/, &supportsPartialResults /*out*/, &partialResultCount /*out*/,
1559 &useHalBufManager /*out*/, &cb /*out*/, 0 /*streamConfigCounter*/, true);
1560 if (mSession == nullptr) {
1561 // stream combination not supported by HAL, skip test for device
1562 continue;
1563 }
1564
1565 ::aidl::android::hardware::common::fmq::MQDescriptor<
1566 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1567 descriptor;
1568 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1569 ASSERT_TRUE(resultQueueRet.isOk());
1570 std::shared_ptr<ResultMetadataQueue> resultQueue =
1571 std::make_shared<ResultMetadataQueue>(descriptor);
1572 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1573 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
1574 resultQueue = nullptr;
1575 // Don't use the queue onwards.
1576 }
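        // When the FMQ is unusable the test assumes result metadata will instead arrive
        // inline in the capture results; InFlightRequest below is constructed with a null
        // queue in that case so the callback path is used for parsing.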
1577
1578 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1579 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1580 partialResultCount, physicalIds, resultQueue);
1581
1582 std::vector<CaptureRequest> requests(1);
1583 CaptureRequest& request = requests[0];
1584 request.frameNumber = frameNumber;
1585 request.fmqSettingsSize = 0;
1586        request.settings = settingsMetadata;
1587
1588 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1589
1590 std::vector<buffer_handle_t> graphicBuffers;
1591 graphicBuffers.reserve(halStreams.size());
1592 outputBuffers.resize(halStreams.size());
1593 size_t k = 0;
1594 for (const auto& halStream : halStreams) {
1595 buffer_handle_t buffer_handle;
1596 if (useHalBufManager) {
1597 outputBuffers[k] = {halStream.id, /*bufferId*/ 0, NativeHandle(),
1598 BufferStatus::OK, NativeHandle(), NativeHandle()};
1599 } else {
1600 allocateGraphicBuffer(previewStream.width, previewStream.height,
1601 android_convertGralloc1To0Usage(
1602 static_cast<uint64_t>(halStream.producerUsage),
1603 static_cast<uint64_t>(halStream.consumerUsage)),
1604 halStream.overrideFormat, &buffer_handle);
1605 graphicBuffers.push_back(buffer_handle);
1606 outputBuffers[k] = {
1607 halStream.id, bufferId, ::android::makeToAidl(buffer_handle),
1608 BufferStatus::OK, NativeHandle(), NativeHandle()};
1609 bufferId++;
1610 }
1611 k++;
1612 }
1613
1614 std::vector<PhysicalCameraSetting> camSettings(1);
1615 const camera_metadata_t* filteredSettingsBuffer = filteredSettings.getAndLock();
1616 uint8_t* rawFilteredSettingsBuffer = (uint8_t*)filteredSettingsBuffer;
1617 camSettings[0].settings = {std::vector(
1618 rawFilteredSettingsBuffer,
1619 rawFilteredSettingsBuffer + get_camera_metadata_size(filteredSettingsBuffer))};
1620 overrideRotateAndCrop(&camSettings[0].settings);
1621 camSettings[0].fmqSettingsSize = 0;
1622 camSettings[0].physicalCameraId = physicalDeviceId;
1623
1624 request.inputBuffer = {
1625 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
1626 request.physicalCameraSettings = camSettings;
1627
1628 {
1629 std::unique_lock<std::mutex> l(mLock);
1630 mInflightMap.clear();
1631 mInflightMap[frameNumber] = inflightReq;
1632 }
1633
1634 int32_t numRequestProcessed = 0;
1635 std::vector<BufferCache> cachesToRemove;
1636 ndk::ScopedAStatus returnStatus =
1637 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1638 ASSERT_TRUE(returnStatus.isOk());
1639 ASSERT_EQ(numRequestProcessed, 1u);
1640
1641 {
1642 std::unique_lock<std::mutex> l(mLock);
1643 while (!inflightReq->errorCodeValid &&
1644 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1645 auto timeout = std::chrono::system_clock::now() +
1646 std::chrono::seconds(kStreamBufferTimeoutSec);
1647 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1648 }
1649
1650 ASSERT_FALSE(inflightReq->errorCodeValid);
1651 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1652
1653 request.frameNumber++;
1654 // Empty settings should be supported after the first call
1655 // for repeating requests.
1656 request.settings.metadata.clear();
1657 request.physicalCameraSettings[0].settings.metadata.clear();
1658 // The buffer has been registered to HAL by bufferId, so per
1659 // API contract we should send a null handle for this buffer
1660 request.outputBuffers[0].buffer = NativeHandle();
1661 mInflightMap.clear();
1662 inflightReq = std::make_shared<InFlightRequest>(
1663 static_cast<ssize_t>(physicalIds.size()), false, supportsPartialResults,
1664 partialResultCount, physicalIds, resultQueue);
1665 mInflightMap[request.frameNumber] = inflightReq;
1666 }
1667
1668 returnStatus =
1669 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1670 ASSERT_TRUE(returnStatus.isOk());
1671 ASSERT_EQ(numRequestProcessed, 1u);
1672
1673 {
1674 std::unique_lock<std::mutex> l(mLock);
1675 while (!inflightReq->errorCodeValid &&
1676 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1677 auto timeout = std::chrono::system_clock::now() +
1678 std::chrono::seconds(kStreamBufferTimeoutSec);
1679 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1680 }
1681
1682 ASSERT_FALSE(inflightReq->errorCodeValid);
1683 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1684 }
1685
1686 // Invalid physical camera id should fail process requests
1687 frameNumber++;
1688 camSettings[0].physicalCameraId = invalidPhysicalId;
1689 camSettings[0].settings.metadata = settings;
1690
1691 request.physicalCameraSettings = camSettings; // Invalid camera settings
1692 returnStatus =
1693 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1694 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
1695 returnStatus.getServiceSpecificError());
1696
1697 defaultPreviewSettings.unlock(settingsBuffer);
1698 filteredSettings.unlock(filteredSettingsBuffer);
1699
1700 if (useHalBufManager) {
1701 std::vector<int32_t> streamIds(halStreams.size());
1702 for (size_t i = 0; i < streamIds.size(); i++) {
1703 streamIds[i] = halStreams[i].id;
1704 }
1705 verifyBuffersReturned(mSession, streamIds, cb);
1706 }
1707
1708 ret = mSession->close();
1709 mSession = nullptr;
1710 ASSERT_TRUE(ret.isOk());
1711 }
1712}
1713
1714// Generate and verify an ultra high resolution capture request
1715TEST_P(CameraAidlTest, processUltraHighResolutionRequest) {
1716 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1717 int64_t bufferId = 1;
1718 int32_t frameNumber = 1;
1719 CameraMetadata settings;
1720
1721 for (const auto& name : cameraDeviceNames) {
1722 std::string version, deviceId;
1723 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1724 CameraMetadata meta;
1725
1726 std::shared_ptr<ICameraDevice> unusedDevice;
1727 openEmptyDeviceSession(name, mProvider, &mSession, &meta, &unusedDevice);
1728 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1729 if (!isUltraHighResolution(staticMeta)) {
1730 ndk::ScopedAStatus ret = mSession->close();
1731 mSession = nullptr;
1732 ASSERT_TRUE(ret.isOk());
1733 continue;
1734 }
1735 CameraMetadata req;
1736 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
1737 ndk::ScopedAStatus ret =
1738 mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE, &req);
1739 ASSERT_TRUE(ret.isOk());
1740
1741 const camera_metadata_t* metadata =
1742 reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
1743 size_t expectedSize = req.metadata.size();
1744 int result = validate_camera_metadata_structure(metadata, &expectedSize);
1745 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
1746
1747 size_t entryCount = get_camera_metadata_entry_count(metadata);
1748 ASSERT_GT(entryCount, 0u);
1749 defaultSettings = metadata;
1750 uint8_t sensorPixelMode =
1751 static_cast<uint8_t>(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
1752 ASSERT_EQ(::android::OK,
1753 defaultSettings.update(ANDROID_SENSOR_PIXEL_MODE, &sensorPixelMode, 1));
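        // Setting ANDROID_SENSOR_PIXEL_MODE to MAXIMUM_RESOLUTION asks the HAL to run the
        // sensor in its full-resolution (typically non-binned) readout mode, which is what
        // the max-resolution stream configuration below is validated against.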
1754
1755 const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
1756 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1757 settings.metadata = std::vector(
1758 rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1759 overrideRotateAndCrop(&settings);
1760
1761 ret = mSession->close();
1762 mSession = nullptr;
1763 ASSERT_TRUE(ret.isOk());
1764
1765 std::vector<HalStream> halStreams;
1766 bool supportsPartialResults = false;
1767 bool useHalBufManager = false;
1768 int32_t partialResultCount = 0;
1769 Stream previewStream;
1770 std::shared_ptr<DeviceCb> cb;
1771
1772 std::list<PixelFormat> pixelFormats = {PixelFormat::YCBCR_420_888, PixelFormat::RAW16};
1773 for (PixelFormat format : pixelFormats) {
1774            previewStream.usage =
1775 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1776 GRALLOC1_CONSUMER_USAGE_CPU_READ);
1777 previewStream.dataSpace = Dataspace::UNKNOWN;
1778            configureStreams(name, mProvider, format, &mSession, &previewStream, &halStreams,
1779 &supportsPartialResults, &partialResultCount, &useHalBufManager, &cb,
1780 0, /*maxResolution*/ true);
1781 ASSERT_NE(mSession, nullptr);
1782
1783 ::aidl::android::hardware::common::fmq::MQDescriptor<
1784 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1785 descriptor;
1786 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1787 ASSERT_TRUE(resultQueueRet.isOk());
1788
1789 std::shared_ptr<ResultMetadataQueue> resultQueue =
1790 std::make_shared<ResultMetadataQueue>(descriptor);
1791 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1792 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
1793 resultQueue = nullptr;
1794 // Don't use the queue onwards.
1795 }
1796
1797 std::vector<buffer_handle_t> graphicBuffers;
1798 graphicBuffers.reserve(halStreams.size());
1799 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1800 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1801 partialResultCount, std::unordered_set<std::string>(), resultQueue);
1802
1803 std::vector<CaptureRequest> requests(1);
1804 CaptureRequest& request = requests[0];
1805 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1806 outputBuffers.resize(halStreams.size());
1807
1808 size_t k = 0;
1809 for (const auto& halStream : halStreams) {
1810 buffer_handle_t buffer_handle;
1811 if (useHalBufManager) {
1812 outputBuffers[k] = {halStream.id, 0,
1813 NativeHandle(), BufferStatus::OK,
1814 NativeHandle(), NativeHandle()};
1815 } else {
1816 allocateGraphicBuffer(previewStream.width, previewStream.height,
1817 android_convertGralloc1To0Usage(
1818 static_cast<uint64_t>(halStream.producerUsage),
1819 static_cast<uint64_t>(halStream.consumerUsage)),
1820 halStream.overrideFormat, &buffer_handle);
1821 graphicBuffers.push_back(buffer_handle);
1822 outputBuffers[k] = {
1823 halStream.id, bufferId, ::android::makeToAidl(buffer_handle),
1824 BufferStatus::OK, NativeHandle(), NativeHandle()};
1825 bufferId++;
1826 }
1827 k++;
1828 }
1829
1830 request.inputBuffer = {
1831 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
1832 request.frameNumber = frameNumber;
1833 request.fmqSettingsSize = 0;
1834 request.settings = settings;
1835 request.inputWidth = 0;
1836 request.inputHeight = 0;
1837
1838 {
1839 std::unique_lock<std::mutex> l(mLock);
1840 mInflightMap.clear();
1841 mInflightMap[frameNumber] = inflightReq;
1842 }
1843
1844 int32_t numRequestProcessed = 0;
1845 std::vector<BufferCache> cachesToRemove;
1846 ndk::ScopedAStatus returnStatus =
1847 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1848 ASSERT_TRUE(returnStatus.isOk());
1849 ASSERT_EQ(numRequestProcessed, 1u);
1850
1851 {
1852 std::unique_lock<std::mutex> l(mLock);
1853 while (!inflightReq->errorCodeValid &&
1854 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1855 auto timeout = std::chrono::system_clock::now() +
1856 std::chrono::seconds(kStreamBufferTimeoutSec);
1857 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1858 }
1859
1860 ASSERT_FALSE(inflightReq->errorCodeValid);
1861 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1862 }
1863 if (useHalBufManager) {
1864 std::vector<int32_t> streamIds(halStreams.size());
1865 for (size_t i = 0; i < streamIds.size(); i++) {
1866 streamIds[i] = halStreams[i].id;
1867 }
1868 verifyBuffersReturned(mSession, streamIds, cb);
1869 }
1870
1871 ret = mSession->close();
1872 mSession = nullptr;
1873 ASSERT_TRUE(ret.isOk());
1874 }
1875 }
1876}
1877
1878// Generate and verify 10-bit dynamic range request
1879TEST_P(CameraAidlTest, process10BitDynamicRangeRequest) {
1880 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1881    CameraMetadata settings;
1882
1883 for (const auto& name : cameraDeviceNames) {
1884 std::string version, deviceId;
1885 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1886 CameraMetadata meta;
1887 std::shared_ptr<ICameraDevice> device;
1888 openEmptyDeviceSession(name, mProvider, &mSession, &meta, &device);
1889 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1890 if (!is10BitDynamicRangeCapable(staticMeta)) {
1891 ndk::ScopedAStatus ret = mSession->close();
1892 mSession = nullptr;
1893 ASSERT_TRUE(ret.isOk());
1894 continue;
1895 }
1896        std::vector<RequestAvailableDynamicRangeProfilesMap> profileList;
1897        get10BitDynamicRangeProfiles(staticMeta, &profileList);
1898 ASSERT_FALSE(profileList.empty());
1899
1900 CameraMetadata req;
1901 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
1902 ndk::ScopedAStatus ret =
1903                mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &req);
1904        ASSERT_TRUE(ret.isOk());
1905
1906 const camera_metadata_t* metadata =
1907 reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
1908 size_t expectedSize = req.metadata.size();
1909 int result = validate_camera_metadata_structure(metadata, &expectedSize);
1910 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
1911
1912 size_t entryCount = get_camera_metadata_entry_count(metadata);
1913 ASSERT_GT(entryCount, 0u);
1914 defaultSettings = metadata;
1915
1916 const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
1917 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1918 settings.metadata = std::vector(
1919 rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1920 overrideRotateAndCrop(&settings);
1921
1922 ret = mSession->close();
1923 mSession = nullptr;
1924 ASSERT_TRUE(ret.isOk());
1925
1926 std::vector<HalStream> halStreams;
1927 bool supportsPartialResults = false;
1928 bool useHalBufManager = false;
1929 int32_t partialResultCount = 0;
1930 Stream previewStream;
1931 std::shared_ptr<DeviceCb> cb;
1932 for (const auto& profile : profileList) {
1933            previewStream.usage =
1934 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1935 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
1936 previewStream.dataSpace = getDataspace(PixelFormat::IMPLEMENTATION_DEFINED);
1937            configureStreams(name, mProvider, PixelFormat::IMPLEMENTATION_DEFINED, &mSession,
1938 &previewStream, &halStreams, &supportsPartialResults,
1939 &partialResultCount, &useHalBufManager, &cb, 0,
1940 /*maxResolution*/ false, profile);
1941 ASSERT_NE(mSession, nullptr);
1942
1943 ::aidl::android::hardware::common::fmq::MQDescriptor<
1944 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1945 descriptor;
1946 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1947 ASSERT_TRUE(resultQueueRet.isOk());
1948
1949 std::shared_ptr<ResultMetadataQueue> resultQueue =
1950 std::make_shared<ResultMetadataQueue>(descriptor);
1951 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1952 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
1953 resultQueue = nullptr;
1954 // Don't use the queue onwards.
1955 }
1956
1957            mInflightMap.clear();
1958 // Stream as long as needed to fill the Hal inflight queue
1959 std::vector<CaptureRequest> requests(halStreams[0].maxBuffers);
1960
1961            for (int32_t requestId = 0; requestId < requests.size(); requestId++) {
1962                std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1963 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1964 partialResultCount, std::unordered_set<std::string>(), resultQueue);
1965
1966                CaptureRequest& request = requests[requestId];
1967                std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1968 outputBuffers.resize(halStreams.size());
1969
1970                size_t k = 0;
1971 inflightReq->mOutstandingBufferIds.resize(halStreams.size());
1972 std::vector<buffer_handle_t> graphicBuffers;
1973 graphicBuffers.reserve(halStreams.size());
1974
1975                auto bufferId = requestId + 1; // Buffer id value 0 is not valid
1976                for (const auto& halStream : halStreams) {
1977 buffer_handle_t buffer_handle;
1978 if (useHalBufManager) {
1979 outputBuffers[k] = {halStream.id, 0,
1980 NativeHandle(), BufferStatus::OK,
1981 NativeHandle(), NativeHandle()};
1982 } else {
1983 auto usage = android_convertGralloc1To0Usage(
1984 static_cast<uint64_t>(halStream.producerUsage),
1985 static_cast<uint64_t>(halStream.consumerUsage));
1986 allocateGraphicBuffer(previewStream.width, previewStream.height, usage,
1987 halStream.overrideFormat, &buffer_handle);
1988
1989 inflightReq->mOutstandingBufferIds[halStream.id][bufferId] = buffer_handle;
1990 graphicBuffers.push_back(buffer_handle);
1991 outputBuffers[k] = {halStream.id, bufferId,
1992 android::makeToAidl(buffer_handle), BufferStatus::OK, NativeHandle(),
1993 NativeHandle()};
1994                    }
1995 k++;
1996                }
1997
1998                request.inputBuffer = {
1999 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2000                request.frameNumber = bufferId;
2001                request.fmqSettingsSize = 0;
2002 request.settings = settings;
2003 request.inputWidth = 0;
2004 request.inputHeight = 0;
2005
2006                {
2007 std::unique_lock<std::mutex> l(mLock);
2008                    mInflightMap[bufferId] = inflightReq;
2009                }
2010
2011            }
2012
2013 int32_t numRequestProcessed = 0;
2014 std::vector<BufferCache> cachesToRemove;
2015 ndk::ScopedAStatus returnStatus =
2016                    mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2017            ASSERT_TRUE(returnStatus.isOk());
2018            ASSERT_EQ(numRequestProcessed, requests.size());
2019
2020            returnStatus = mSession->repeatingRequestEnd(requests.size() - 1,
2021 std::vector<int32_t> {halStreams[0].id});
2022 ASSERT_TRUE(returnStatus.isOk());
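            // repeatingRequestEnd tells the HAL that the repeating burst submitted above
            // stops at the given last frame number for the listed stream, so it should not
            // expect further requests before the results are drained below.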
2023
2024            // We are keeping frame numbers and buffer ids consistent. Buffer id value of 0
2025 // is used to indicate a buffer that is not present/available so buffer ids as well
2026 // as frame numbers begin with 1.
2027 for (int32_t frameNumber = 1; frameNumber <= requests.size(); frameNumber++) {
2028                const auto& inflightReq = mInflightMap[frameNumber];
2029                std::unique_lock<std::mutex> l(mLock);
2030 while (!inflightReq->errorCodeValid &&
2031 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2032 auto timeout = std::chrono::system_clock::now() +
2033 std::chrono::seconds(kStreamBufferTimeoutSec);
2034 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2035 }
2036
2037                waitForReleaseFence(inflightReq->resultOutputBuffers);
2038
2039                ASSERT_FALSE(inflightReq->errorCodeValid);
2040 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2041 verify10BitMetadata(mHandleImporter, *inflightReq, profile);
2042 }
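            // verify10BitMetadata above presumably inspects the HDR-related buffer metadata
            // of the returned frames against the requested dynamic range profile; the exact
            // checks live in the shared test helper and are treated here as an assumption.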
2043
2044            if (useHalBufManager) {
2045 std::vector<int32_t> streamIds(halStreams.size());
2046 for (size_t i = 0; i < streamIds.size(); i++) {
2047 streamIds[i] = halStreams[i].id;
2048 }
2049 mSession->signalStreamFlush(streamIds, /*streamConfigCounter*/ 0);
2050 cb->waitForBuffersReturned();
2051 }
2052
2053 ret = mSession->close();
2054 mSession = nullptr;
2055 ASSERT_TRUE(ret.isOk());
2056 }
2057 }
2058}
2059
2060TEST_P(CameraAidlTest, process8BitColorSpaceRequests) {
2061    static int profiles[] = {ColorSpaceNamed::DISPLAY_P3, ColorSpaceNamed::SRGB};
2062
2063 for (int32_t i = 0; i < sizeof(profiles) / sizeof(profiles[0]); i++) {
2064 processColorSpaceRequest(static_cast<RequestAvailableColorSpaceProfilesMap>(profiles[i]),
2065 static_cast<RequestAvailableDynamicRangeProfilesMap>(
2066 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD));
2067 }
2068}
2069
2070TEST_P(CameraAidlTest, process10BitColorSpaceRequests) {
2071 static const camera_metadata_enum_android_request_available_dynamic_range_profiles_map
2072 dynamicRangeProfiles[] = {
2073 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10,
2074 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10,
2075 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS,
2076 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF,
2077 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO,
2078 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM,
2079 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO,
2080 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF,
2081 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF_PO,
2082 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM,
2083 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM_PO
2084 };
2085
2086    // Process all dynamic range profiles with BT2020_HLG
2087    for (int32_t i = 0; i < sizeof(dynamicRangeProfiles) / sizeof(dynamicRangeProfiles[0]); i++) {
2088 processColorSpaceRequest(
2089                static_cast<RequestAvailableColorSpaceProfilesMap>(ColorSpaceNamed::BT2020_HLG),
2090                static_cast<RequestAvailableDynamicRangeProfilesMap>(dynamicRangeProfiles[i]));
2091 }
2092}
2093
2094TEST_P(CameraAidlTest, processZoomSettingsOverrideRequests) {
2095 const int32_t kFrameCount = 5;
2096 const int32_t kTestCases = 2;
2097    const bool kOverrideSequence[kTestCases][kFrameCount] = {// ZOOM, ZOOM, ZOOM, ZOOM, ZOOM;
2098 {true, true, true, true, true},
2099 // OFF, ZOOM, ZOOM, ZOOM, OFF;
2100 {false, true, true, true, false}};
2101    const bool kExpectedOverrideResults[kTestCases][kFrameCount] = {
2102            // All results should be overridden except the last one. The last result's
2103 // zoom doesn't have speed-up.
2104 {true, true, true, true, false},
2105 // Because we require at least 1 frame speed-up, request #1, #2 and #3
2106 // will be overridden.
2107 {true, true, true, false, false}};
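    // Zoom settings override (ANDROID_CONTROL_SETTINGS_OVERRIDE set to ZOOM) is assumed
    // here to let the HAL apply the zoom value of a later pending request to an earlier
    // frame, which is why results can report the override before the request that
    // actually carried the new zoom value.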
2108
2109 for (int i = 0; i < kTestCases; i++) {
2110 processZoomSettingsOverrideRequests(kFrameCount, kOverrideSequence[i],
2111 kExpectedOverrideResults[i]);
2112 }
2113}
2114
2115// Generate and verify a burst containing alternating sensor sensitivity values
2116TEST_P(CameraAidlTest, processCaptureRequestBurstISO) {
2117 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2118 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2119 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2120 int64_t bufferId = 1;
2121 int32_t frameNumber = 1;
2122 float isoTol = .03f;
2123 CameraMetadata settings;
2124
2125 for (const auto& name : cameraDeviceNames) {
2126 CameraMetadata meta;
2127 settings.metadata.clear();
2128 std::shared_ptr<ICameraDevice> unusedDevice;
2129 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2130 &unusedDevice /*out*/);
2131 camera_metadata_t* staticMetaBuffer =
2132 clone_camera_metadata(reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
2133 ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta(
2134 staticMetaBuffer);
2135
2136 camera_metadata_entry_t hwLevel = staticMeta.find(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL);
2137 ASSERT_TRUE(0 < hwLevel.count);
2138 if (ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED == hwLevel.data.u8[0] ||
2139 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL == hwLevel.data.u8[0]) {
2140 // Limited/External devices can skip this test
2141 ndk::ScopedAStatus ret = mSession->close();
2142 mSession = nullptr;
2143 ASSERT_TRUE(ret.isOk());
2144 continue;
2145 }
2146
2147 camera_metadata_entry_t isoRange = staticMeta.find(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE);
2148 ASSERT_EQ(isoRange.count, 2u);
2149
2150 ndk::ScopedAStatus ret = mSession->close();
2151 mSession = nullptr;
2152 ASSERT_TRUE(ret.isOk());
2153
2154 bool supportsPartialResults = false;
2155 bool useHalBufManager = false;
2156 int32_t partialResultCount = 0;
2157 Stream previewStream;
2158 std::vector<HalStream> halStreams;
2159 std::shared_ptr<DeviceCb> cb;
2160 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2161 &previewStream /*out*/, &halStreams /*out*/,
2162 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2163 &useHalBufManager /*out*/, &cb /*out*/);
2164
2165 ::aidl::android::hardware::common::fmq::MQDescriptor<
2166 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2167 descriptor;
2168 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2169 std::shared_ptr<ResultMetadataQueue> resultQueue =
2170 std::make_shared<ResultMetadataQueue>(descriptor);
2171 ASSERT_TRUE(resultQueueRet.isOk());
2172 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2173 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
2174 resultQueue = nullptr;
2175 // Don't use the queue onwards.
2176 }
2177
2178 ret = mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &settings);
2179 ASSERT_TRUE(ret.isOk());
2180
2181 ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
2182 std::vector<CaptureRequest> requests(kBurstFrameCount);
2183 std::vector<buffer_handle_t> buffers(kBurstFrameCount);
2184 std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount);
2185 std::vector<int32_t> isoValues(kBurstFrameCount);
2186 std::vector<CameraMetadata> requestSettings(kBurstFrameCount);
2187
2188 for (int32_t i = 0; i < kBurstFrameCount; i++) {
2189 std::unique_lock<std::mutex> l(mLock);
2190 CaptureRequest& request = requests[i];
2191 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2192 outputBuffers.resize(1);
2193 StreamBuffer& outputBuffer = outputBuffers[0];
2194
2195 isoValues[i] = ((i % 2) == 0) ? isoRange.data.i32[0] : isoRange.data.i32[1];
2196 if (useHalBufManager) {
2197 outputBuffer = {halStreams[0].id, 0,
2198 NativeHandle(), BufferStatus::OK,
2199 NativeHandle(), NativeHandle()};
2200 } else {
2201 allocateGraphicBuffer(previewStream.width, previewStream.height,
2202 android_convertGralloc1To0Usage(
2203 static_cast<uint64_t>(halStreams[0].producerUsage),
2204 static_cast<uint64_t>(halStreams[0].consumerUsage)),
2205 halStreams[0].overrideFormat, &buffers[i]);
2206 outputBuffer = {halStreams[0].id, bufferId + i, ::android::makeToAidl(buffers[i]),
2207 BufferStatus::OK, NativeHandle(), NativeHandle()};
2208 }
2209
2210 requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));
2211
2212 // Disable all 3A routines
2213 uint8_t mode = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF);
2214 ASSERT_EQ(::android::OK, requestMeta.update(ANDROID_CONTROL_MODE, &mode, 1));
2215 ASSERT_EQ(::android::OK,
2216 requestMeta.update(ANDROID_SENSOR_SENSITIVITY, &isoValues[i], 1));
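            // With ANDROID_CONTROL_MODE_OFF the HAL must honor the manual sensitivity
            // value; the verification loop below checks the reported
            // ANDROID_SENSOR_SENSITIVITY against isoValues[i] within the isoTol (3%)
            // tolerance.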
2217 camera_metadata_t* metaBuffer = requestMeta.release();
2218 uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2219 requestSettings[i].metadata = std::vector(
2220 rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2221 overrideRotateAndCrop(&(requestSettings[i]));
2222
2223 request.frameNumber = frameNumber + i;
2224 request.fmqSettingsSize = 0;
2225 request.settings = requestSettings[i];
2226 request.inputBuffer = {
2227 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2228
2229 inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2230 partialResultCount, resultQueue);
2231 mInflightMap[frameNumber + i] = inflightReqs[i];
2232 }
2233
2234 int32_t numRequestProcessed = 0;
2235 std::vector<BufferCache> cachesToRemove;
2236
2237 ndk::ScopedAStatus returnStatus =
2238 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2239 ASSERT_TRUE(returnStatus.isOk());
2240 ASSERT_EQ(numRequestProcessed, kBurstFrameCount);
2241
2242 for (size_t i = 0; i < kBurstFrameCount; i++) {
2243 std::unique_lock<std::mutex> l(mLock);
2244 while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
2245 (!inflightReqs[i]->haveResultMetadata))) {
2246 auto timeout = std::chrono::system_clock::now() +
2247 std::chrono::seconds(kStreamBufferTimeoutSec);
2248 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2249 }
2250
2251 ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
2252 ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
2253 ASSERT_EQ(previewStream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
2254 ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
2255 ASSERT_TRUE(inflightReqs[i]->collectedResult.exists(ANDROID_SENSOR_SENSITIVITY));
2256 camera_metadata_entry_t isoResult =
2257 inflightReqs[i]->collectedResult.find(ANDROID_SENSOR_SENSITIVITY);
2258 ASSERT_TRUE(std::abs(isoResult.data.i32[0] - isoValues[i]) <=
2259 std::round(isoValues[i] * isoTol));
2260 }
2261
2262 if (useHalBufManager) {
2263 verifyBuffersReturned(mSession, previewStream.id, cb);
2264 }
2265 ret = mSession->close();
2266 mSession = nullptr;
2267 ASSERT_TRUE(ret.isOk());
2268 }
2269}
2270
2271// Test whether an incorrect capture request with missing settings will
2272// be reported correctly.
2273TEST_P(CameraAidlTest, processCaptureRequestInvalidSinglePreview) {
2274 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2275 std::vector<AvailableStream> outputPreviewStreams;
2276 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2277 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2278 int64_t bufferId = 1;
2279 int32_t frameNumber = 1;
2280 CameraMetadata settings;
2281
2282 for (const auto& name : cameraDeviceNames) {
2283 Stream previewStream;
2284 std::vector<HalStream> halStreams;
2285 std::shared_ptr<DeviceCb> cb;
2286 bool supportsPartialResults = false;
2287 bool useHalBufManager = false;
2288 int32_t partialResultCount = 0;
2289 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2290 &previewStream /*out*/, &halStreams /*out*/,
2291 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2292 &useHalBufManager /*out*/, &cb /*out*/);
2293 ASSERT_NE(mSession, nullptr);
2294 ASSERT_FALSE(halStreams.empty());
2295
2296 buffer_handle_t buffer_handle = nullptr;
2297
2298 if (useHalBufManager) {
2299 bufferId = 0;
2300 } else {
2301 allocateGraphicBuffer(previewStream.width, previewStream.height,
2302 android_convertGralloc1To0Usage(
2303 static_cast<uint64_t>(halStreams[0].producerUsage),
2304 static_cast<uint64_t>(halStreams[0].consumerUsage)),
2305 halStreams[0].overrideFormat, &buffer_handle);
2306 }
2307
2308 std::vector<CaptureRequest> requests(1);
2309 CaptureRequest& request = requests[0];
2310 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2311 outputBuffers.resize(1);
2312 StreamBuffer& outputBuffer = outputBuffers[0];
2313
2314 outputBuffer = {
2315 halStreams[0].id,
2316 bufferId,
2317 buffer_handle == nullptr ? NativeHandle() : ::android::makeToAidl(buffer_handle),
2318 BufferStatus::OK,
2319 NativeHandle(),
2320 NativeHandle()};
2321
2322 request.inputBuffer = {
2323 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2324 request.frameNumber = frameNumber;
2325 request.fmqSettingsSize = 0;
2326 request.settings = settings;
2327
2328 // Settings were not correctly initialized, we should fail here
2329 int32_t numRequestProcessed = 0;
2330 std::vector<BufferCache> cachesToRemove;
2331 ndk::ScopedAStatus ret =
2332 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2333 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
2334 ASSERT_EQ(numRequestProcessed, 0u);
2335
2336 ret = mSession->close();
2337 mSession = nullptr;
2338 ASSERT_TRUE(ret.isOk());
2339 }
2340}
2341
2342// Verify camera offline session behavior
2343TEST_P(CameraAidlTest, switchToOffline) {
2344 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2345 AvailableStream threshold = {kMaxStillWidth, kMaxStillHeight,
2346 static_cast<int32_t>(PixelFormat::BLOB)};
2347 int64_t bufferId = 1;
2348 int32_t frameNumber = 1;
2349 CameraMetadata settings;
2350
2351 for (const auto& name : cameraDeviceNames) {
2352 CameraMetadata meta;
2353 {
2354 std::shared_ptr<ICameraDevice> unusedDevice;
2355 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2356 &unusedDevice);
2357 camera_metadata_t* staticMetaBuffer = clone_camera_metadata(
2358 reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
2359 ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta(
2360 staticMetaBuffer);
2361
2362 if (isOfflineSessionSupported(staticMetaBuffer) != Status::OK) {
2363 ndk::ScopedAStatus ret = mSession->close();
2364 mSession = nullptr;
2365 ASSERT_TRUE(ret.isOk());
2366 continue;
2367 }
2368 ndk::ScopedAStatus ret = mSession->close();
2369 mSession = nullptr;
2370 ASSERT_TRUE(ret.isOk());
2371 }
2372
2373 bool supportsPartialResults = false;
2374 int32_t partialResultCount = 0;
2375 Stream stream;
2376 std::vector<HalStream> halStreams;
2377 std::shared_ptr<DeviceCb> cb;
2378 int32_t jpegBufferSize;
2379 bool useHalBufManager;
2380 configureOfflineStillStream(name, mProvider, &threshold, &mSession /*out*/, &stream /*out*/,
2381 &halStreams /*out*/, &supportsPartialResults /*out*/,
2382 &partialResultCount /*out*/, &cb /*out*/,
2383 &jpegBufferSize /*out*/, &useHalBufManager /*out*/);
2384
2385 auto ret = mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE,
2386 &settings);
2387 ASSERT_TRUE(ret.isOk());
2388
2389 ::aidl::android::hardware::common::fmq::MQDescriptor<
2390 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2391 descriptor;
2392
2393 ndk::ScopedAStatus resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2394 ASSERT_TRUE(resultQueueRet.isOk());
2395 std::shared_ptr<ResultMetadataQueue> resultQueue =
2396 std::make_shared<ResultMetadataQueue>(descriptor);
2397 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2398 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
2399 resultQueue = nullptr;
2400 // Don't use the queue onwards.
2401 }
2402
2403 ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
2404
2405 std::vector<buffer_handle_t> buffers(kBurstFrameCount);
2406 std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount);
2407 std::vector<CameraMetadata> requestSettings(kBurstFrameCount);
2408
2409 std::vector<CaptureRequest> requests(kBurstFrameCount);
2410
2411 HalStream halStream = halStreams[0];
2412 for (uint32_t i = 0; i < kBurstFrameCount; i++) {
2413 CaptureRequest& request = requests[i];
2414 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2415 outputBuffers.resize(1);
2416 StreamBuffer& outputBuffer = outputBuffers[0];
2417
2418 std::unique_lock<std::mutex> l(mLock);
2419 if (useHalBufManager) {
2420 outputBuffer = {halStream.id, 0, NativeHandle(), BufferStatus::OK, NativeHandle(),
2421 NativeHandle()};
2422 } else {
2423 // jpeg buffer (w,h) = (blobLen, 1)
2424 allocateGraphicBuffer(jpegBufferSize, /*height*/ 1,
2425 android_convertGralloc1To0Usage(
2426 static_cast<uint64_t>(halStream.producerUsage),
2427 static_cast<uint64_t>(halStream.consumerUsage)),
2428 halStream.overrideFormat, &buffers[i]);
2429 outputBuffer = {halStream.id, bufferId + i, ::android::makeToAidl(buffers[i]),
2430 BufferStatus::OK, NativeHandle(), NativeHandle()};
2431 }
2432
2433 requestMeta.clear();
2434 requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));
2435
2436 camera_metadata_t* metaBuffer = requestMeta.release();
2437 uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2438 requestSettings[i].metadata = std::vector(
2439 rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2440 overrideRotateAndCrop(&requestSettings[i]);
2441
2442 request.frameNumber = frameNumber + i;
2443 request.fmqSettingsSize = 0;
2444 request.settings = requestSettings[i];
2445 request.inputBuffer = {/*streamId*/ -1,
2446 /*bufferId*/ 0, NativeHandle(),
2447 BufferStatus::ERROR, NativeHandle(),
2448 NativeHandle()};
2449
2450 inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2451 partialResultCount, resultQueue);
2452 mInflightMap[frameNumber + i] = inflightReqs[i];
2453 }
2454
2455 int32_t numRequestProcessed = 0;
2456 std::vector<BufferCache> cachesToRemove;
2457
2458 ndk::ScopedAStatus returnStatus =
2459 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2460 ASSERT_TRUE(returnStatus.isOk());
2461 ASSERT_EQ(numRequestProcessed, kBurstFrameCount);
2462
2463 std::vector<int32_t> offlineStreamIds = {halStream.id};
2464 CameraOfflineSessionInfo offlineSessionInfo;
2465 std::shared_ptr<ICameraOfflineSession> offlineSession;
2466 returnStatus =
2467 mSession->switchToOffline(offlineStreamIds, &offlineSessionInfo, &offlineSession);
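        // switchToOffline is expected to hand any qualifying in-flight requests on the
        // listed stream over to an offline session so the main session can be closed;
        // a stream that does not support offline processing must fail the call with
        // ILLEGAL_ARGUMENT, which is what the branch below accepts.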
2468
2469 if (!halStreams[0].supportOffline) {
2470 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
2471 returnStatus.getServiceSpecificError());
2472 ret = mSession->close();
2473 mSession = nullptr;
2474 ASSERT_TRUE(ret.isOk());
2475 continue;
2476 }
2477
2478 ASSERT_TRUE(returnStatus.isOk());
2479        // The HAL might be unable to find any requests qualified for offline mode.
2480 if (offlineSession == nullptr) {
2481 ret = mSession->close();
2482 mSession = nullptr;
2483 ASSERT_TRUE(ret.isOk());
2484 continue;
2485 }
2486
2487 ASSERT_EQ(offlineSessionInfo.offlineStreams.size(), 1u);
2488 ASSERT_EQ(offlineSessionInfo.offlineStreams[0].id, halStream.id);
2489 ASSERT_NE(offlineSessionInfo.offlineRequests.size(), 0u);
2490
2491 // close device session to make sure offline session does not rely on it
2492 ret = mSession->close();
2493 mSession = nullptr;
2494 ASSERT_TRUE(ret.isOk());
2495
2496 ::aidl::android::hardware::common::fmq::MQDescriptor<
2497 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2498 offlineResultDescriptor;
2499
2500 auto offlineResultQueueRet =
2501 offlineSession->getCaptureResultMetadataQueue(&offlineResultDescriptor);
2502 std::shared_ptr<ResultMetadataQueue> offlineResultQueue =
2503                std::make_shared<ResultMetadataQueue>(offlineResultDescriptor);
2504 if (!offlineResultQueue->isValid() || offlineResultQueue->availableToWrite() <= 0) {
2505 ALOGE("%s: offline session returns empty result metadata fmq, not use it", __func__);
2506 offlineResultQueue = nullptr;
2507 // Don't use the queue onwards.
2508 }
2509 ASSERT_TRUE(offlineResultQueueRet.isOk());
2510
2511 updateInflightResultQueue(offlineResultQueue);
2512
2513 ret = offlineSession->setCallback(cb);
2514 ASSERT_TRUE(ret.isOk());
2515
2516 for (size_t i = 0; i < kBurstFrameCount; i++) {
2517 std::unique_lock<std::mutex> l(mLock);
2518 while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
2519 (!inflightReqs[i]->haveResultMetadata))) {
2520 auto timeout = std::chrono::system_clock::now() +
2521 std::chrono::seconds(kStreamBufferTimeoutSec);
2522 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2523 }
2524
2525 ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
2526 ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
2527 ASSERT_EQ(stream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
2528 ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
2529 }
2530
2531 ret = offlineSession->close();
2532 ASSERT_TRUE(ret.isOk());
2533 }
2534}
2535
2536// Check whether an invalid capture request with missing output buffers
2537// will be reported correctly.
2538TEST_P(CameraAidlTest, processCaptureRequestInvalidBuffer) {
2539 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2540 std::vector<AvailableStream> outputBlobStreams;
2541 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2542 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2543 int32_t frameNumber = 1;
2544 CameraMetadata settings;
2545
2546 for (const auto& name : cameraDeviceNames) {
2547 Stream previewStream;
2548 std::vector<HalStream> halStreams;
2549 std::shared_ptr<DeviceCb> cb;
2550 bool supportsPartialResults = false;
2551 bool useHalBufManager = false;
2552 int32_t partialResultCount = 0;
2553 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2554 &previewStream /*out*/, &halStreams /*out*/,
2555 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2556 &useHalBufManager /*out*/, &cb /*out*/);
2557
2558 RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
2559 ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings);
2560 ASSERT_TRUE(ret.isOk());
2561 overrideRotateAndCrop(&settings);
2562
2563 std::vector<CaptureRequest> requests(1);
2564 CaptureRequest& request = requests[0];
2565 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2566 outputBuffers.resize(1);
2567 // Empty output buffer
2568 outputBuffers[0] = {
2569 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2570
2571 request.inputBuffer = {
2572 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2573 request.frameNumber = frameNumber;
2574 request.fmqSettingsSize = 0;
2575 request.settings = settings;
2576
2577 // Output buffers are missing, we should fail here
2578 int32_t numRequestProcessed = 0;
2579 std::vector<BufferCache> cachesToRemove;
2580 ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2581 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
2582 ASSERT_EQ(numRequestProcessed, 0u);
2583
2584 ret = mSession->close();
2585 mSession = nullptr;
2586 ASSERT_TRUE(ret.isOk());
2587 }
2588}
2589
2590// Generate, trigger and flush a preview request
2591TEST_P(CameraAidlTest, flushPreviewRequest) {
2592 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2593 std::vector<AvailableStream> outputPreviewStreams;
2594 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2595 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2596 int64_t bufferId = 1;
2597 int32_t frameNumber = 1;
2598 CameraMetadata settings;
2599
2600 for (const auto& name : cameraDeviceNames) {
2601 Stream previewStream;
2602 std::vector<HalStream> halStreams;
2603 std::shared_ptr<DeviceCb> cb;
2604 bool supportsPartialResults = false;
2605 bool useHalBufManager = false;
2606 int32_t partialResultCount = 0;
2607
2608 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2609 &previewStream /*out*/, &halStreams /*out*/,
2610 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2611 &useHalBufManager /*out*/, &cb /*out*/);
2612
2613 ASSERT_NE(mSession, nullptr);
2614 ASSERT_NE(cb, nullptr);
2615 ASSERT_FALSE(halStreams.empty());
2616
2617 ::aidl::android::hardware::common::fmq::MQDescriptor<
2618 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2619 descriptor;
2620
2621 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2622 std::shared_ptr<ResultMetadataQueue> resultQueue =
2623 std::make_shared<ResultMetadataQueue>(descriptor);
2624 ASSERT_TRUE(resultQueueRet.isOk());
2625 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2626 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
2627 resultQueue = nullptr;
2628 // Don't use the queue onwards.
2629 }
2630
2631 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
2632 1, false, supportsPartialResults, partialResultCount, resultQueue);
2633 RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
2634
2635 ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings);
2636 ASSERT_TRUE(ret.isOk());
2637 overrideRotateAndCrop(&settings);
2638
2639 buffer_handle_t buffer_handle;
2640 std::vector<CaptureRequest> requests(1);
2641 CaptureRequest& request = requests[0];
2642 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2643 outputBuffers.resize(1);
2644 StreamBuffer& outputBuffer = outputBuffers[0];
2645 if (useHalBufManager) {
2646 bufferId = 0;
2647 outputBuffer = {halStreams[0].id, bufferId, NativeHandle(),
2648 BufferStatus::OK, NativeHandle(), NativeHandle()};
2649 } else {
2650 allocateGraphicBuffer(previewStream.width, previewStream.height,
2651 android_convertGralloc1To0Usage(
2652 static_cast<uint64_t>(halStreams[0].producerUsage),
2653 static_cast<uint64_t>(halStreams[0].consumerUsage)),
2654 halStreams[0].overrideFormat, &buffer_handle);
2655 outputBuffer = {halStreams[0].id, bufferId, ::android::makeToAidl(buffer_handle),
2656 BufferStatus::OK, NativeHandle(), NativeHandle()};
2657 }
2658
2659 request.frameNumber = frameNumber;
2660 request.fmqSettingsSize = 0;
2661 request.settings = settings;
2662 request.inputBuffer = {
2663 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2664
2665 {
2666 std::unique_lock<std::mutex> l(mLock);
2667 mInflightMap.clear();
2668 mInflightMap[frameNumber] = inflightReq;
2669 }
2670
2671 int32_t numRequestProcessed = 0;
2672 std::vector<BufferCache> cachesToRemove;
2673 ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2674 ASSERT_TRUE(ret.isOk());
2675 ASSERT_EQ(numRequestProcessed, 1u);
2676
2677 // Flush before waiting for request to complete.
2678 ndk::ScopedAStatus returnStatus = mSession->flush();
2679 ASSERT_TRUE(returnStatus.isOk());
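        // After flush() the in-flight request may still complete normally or be dropped
        // with ERROR_REQUEST/ERROR_RESULT/ERROR_BUFFER notifications; both outcomes are
        // accepted by the checks below, and only ERROR_DEVICE is treated as a failure.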
2680
2681 {
2682 std::unique_lock<std::mutex> l(mLock);
2683 while (!inflightReq->errorCodeValid &&
2684 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2685 auto timeout = std::chrono::system_clock::now() +
2686 std::chrono::seconds(kStreamBufferTimeoutSec);
2687 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2688 }
2689
2690 if (!inflightReq->errorCodeValid) {
2691 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2692 ASSERT_EQ(previewStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId);
2693 } else {
2694 switch (inflightReq->errorCode) {
2695 case ErrorCode::ERROR_REQUEST:
2696 case ErrorCode::ERROR_RESULT:
2697 case ErrorCode::ERROR_BUFFER:
2698 // Expected
2699 break;
2700 case ErrorCode::ERROR_DEVICE:
2701 default:
2702 FAIL() << "Unexpected error:"
2703 << static_cast<uint32_t>(inflightReq->errorCode);
2704 }
2705 }
2706 }
2707
2708 if (useHalBufManager) {
2709 verifyBuffersReturned(mSession, previewStream.id, cb);
2710 }
2711
2712 ret = mSession->close();
2713 mSession = nullptr;
2714 ASSERT_TRUE(ret.isOk());
2715 }
2716}
2717
2718// Verify that the camera flushes correctly when there are no pending requests.
2719TEST_P(CameraAidlTest, flushEmpty) {
2720 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2721 std::vector<AvailableStream> outputPreviewStreams;
2722 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2723 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2724
2725 for (const auto& name : cameraDeviceNames) {
2726 Stream previewStream;
2727 std::vector<HalStream> halStreams;
2728 std::shared_ptr<DeviceCb> cb;
2729 bool supportsPartialResults = false;
2730 bool useHalBufManager = false;
2731
2732 int32_t partialResultCount = 0;
2733 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2734 &previewStream /*out*/, &halStreams /*out*/,
2735 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2736 &useHalBufManager /*out*/, &cb /*out*/);
2737
2738 ndk::ScopedAStatus returnStatus = mSession->flush();
2739 ASSERT_TRUE(returnStatus.isOk());
2740
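        // With no requests in flight, flush() must not produce any result or error
        // callbacks, so the condition variable wait below is expected to time out.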
2741 {
2742 std::unique_lock<std::mutex> l(mLock);
2743 auto timeout = std::chrono::system_clock::now() +
2744 std::chrono::milliseconds(kEmptyFlushTimeoutMSec);
2745 ASSERT_EQ(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2746 }
2747
2748 ndk::ScopedAStatus ret = mSession->close();
2749 mSession = nullptr;
2750 ASSERT_TRUE(ret.isOk());
2751 }
2752}
2753
2754// Test the camera provider device state notification method.
2755TEST_P(CameraAidlTest, providerDeviceStateNotification) {
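    // Exercise the provider's device state notification with a covered state and then
    // restore the normal state; both calls are expected to succeed without errors.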
2756 notifyDeviceState(ICameraProvider::DEVICE_STATE_BACK_COVERED);
2757 notifyDeviceState(ICameraProvider::DEVICE_STATE_NORMAL);
2758}
2759
2760// Verify that all supported stream formats and sizes can be configured
2761// successfully for the injection camera.
2762TEST_P(CameraAidlTest, configureInjectionStreamsAvailableOutputs) {
2763 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2764 std::vector<AvailableStream> outputStreams;
2765
2766 for (const auto& name : cameraDeviceNames) {
2767 CameraMetadata metadata;
2768
2769 std::shared_ptr<ICameraInjectionSession> injectionSession;
2770 std::shared_ptr<ICameraDevice> unusedDevice;
2771 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2772 &unusedDevice /*out*/);
2773 if (injectionSession == nullptr) {
2774 continue;
2775 }
2776
2777 camera_metadata_t* staticMetaBuffer =
2778 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2779 CameraMetadata chars;
2780 chars.metadata = metadata.metadata;
2781
2782 outputStreams.clear();
2783 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams));
2784 ASSERT_NE(0u, outputStreams.size());
2785
2786 int32_t jpegBufferSize = 0;
2787 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize));
2788 ASSERT_NE(0u, jpegBufferSize);
2789
2790 int32_t streamId = 0;
2791 int32_t streamConfigCounter = 0;
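        // Configure each advertised output size/format as a single-stream configuration;
        // the injection session must accept every combination listed in the static metadata.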
2792 for (auto& it : outputStreams) {
2793 Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format));
2794 Stream stream = {streamId,
2795 StreamType::OUTPUT,
2796 it.width,
2797 it.height,
2798 static_cast<PixelFormat>(it.format),
2799 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2800 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2801 dataspace,
2802 StreamRotation::ROTATION_0,
2803 std::string(),
2804 jpegBufferSize,
2805 0,
2806                             {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2807 RequestAvailableDynamicRangeProfilesMap::
2808 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2809
2810 std::vector<Stream> streams = {stream};
2811 StreamConfiguration config;
2812 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2813 jpegBufferSize);
2814
2815 config.streamConfigCounter = streamConfigCounter++;
2816 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
2817 ASSERT_TRUE(s.isOk());
2818 streamId++;
2819 }
2820
2821 std::shared_ptr<ICameraDeviceSession> session;
2822 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2823 ASSERT_TRUE(ret.isOk());
2824 ASSERT_NE(session, nullptr);
2825 ret = session->close();
2826 ASSERT_TRUE(ret.isOk());
2827 }
2828}
2829
2830// Check for correct handling of invalid configuration parameters for the injection camera.
2831TEST_P(CameraAidlTest, configureInjectionStreamsInvalidOutputs) {
2832 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2833 std::vector<AvailableStream> outputStreams;
2834
2835 for (const auto& name : cameraDeviceNames) {
2836 CameraMetadata metadata;
2837 std::shared_ptr<ICameraInjectionSession> injectionSession;
2838 std::shared_ptr<ICameraDevice> unusedDevice;
2839 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2840 &unusedDevice);
2841 if (injectionSession == nullptr) {
2842 continue;
2843 }
2844
2845 camera_metadata_t* staticMetaBuffer =
2846 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2847 std::shared_ptr<ICameraDeviceSession> session;
2848 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2849 ASSERT_TRUE(ret.isOk());
2850 ASSERT_NE(session, nullptr);
2851
2852 CameraMetadata chars;
2853 chars.metadata = metadata.metadata;
2854
2855 outputStreams.clear();
2856 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams));
2857 ASSERT_NE(0u, outputStreams.size());
2858
2859 int32_t jpegBufferSize = 0;
2860 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize));
2861 ASSERT_NE(0u, jpegBufferSize);
2862
2863 int32_t streamId = 0;
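        // Invalid case: a 0x0 output stream must be rejected.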
2864 Stream stream = {streamId++,
2865 StreamType::OUTPUT,
2866 0,
2867 0,
2868 static_cast<PixelFormat>(outputStreams[0].format),
2869 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2870 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2871 Dataspace::UNKNOWN,
2872 StreamRotation::ROTATION_0,
2873 std::string(),
2874 jpegBufferSize,
2875 0,
2876                         {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2877 RequestAvailableDynamicRangeProfilesMap::
2878 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2879
2880 int32_t streamConfigCounter = 0;
2881 std::vector<Stream> streams = {stream};
2882 StreamConfiguration config;
2883 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2884 jpegBufferSize);
2885
2886 config.streamConfigCounter = streamConfigCounter++;
2887 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
2888 ASSERT_TRUE(
2889 (static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) == s.getServiceSpecificError()) ||
2890 (static_cast<int32_t>(Status::INTERNAL_ERROR) == s.getServiceSpecificError()));
2891
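        // Invalid case: INT32_MAX dimensions must be rejected with ILLEGAL_ARGUMENT.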
2892 stream = {streamId++,
2893 StreamType::OUTPUT,
2894 INT32_MAX,
2895 INT32_MAX,
2896 static_cast<PixelFormat>(outputStreams[0].format),
2897 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2898 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2899 Dataspace::UNKNOWN,
2900 StreamRotation::ROTATION_0,
2901 std::string(),
2902 jpegBufferSize,
2903 0,
2904                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2905 RequestAvailableDynamicRangeProfilesMap::
2906 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2907
2908        streams[0] = stream;
2909 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2910 jpegBufferSize);
2911 config.streamConfigCounter = streamConfigCounter++;
2912 s = injectionSession->configureInjectionStreams(config, chars);
2913 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
2914
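        // For every supported size, an out-of-range pixel format and an out-of-range
        // stream rotation must each be rejected with ILLEGAL_ARGUMENT.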
2915 for (auto& it : outputStreams) {
2916 stream = {streamId++,
2917 StreamType::OUTPUT,
2918 it.width,
2919 it.height,
2920 static_cast<PixelFormat>(INT32_MAX),
2921 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2922 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2923 Dataspace::UNKNOWN,
2924 StreamRotation::ROTATION_0,
2925 std::string(),
2926 jpegBufferSize,
2927 0,
2928                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2929 RequestAvailableDynamicRangeProfilesMap::
2930 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2931            streams[0] = stream;
2932 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2933 jpegBufferSize);
2934 config.streamConfigCounter = streamConfigCounter++;
2935 s = injectionSession->configureInjectionStreams(config, chars);
2936 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
2937
2938 stream = {streamId++,
2939 StreamType::OUTPUT,
2940 it.width,
2941 it.height,
2942 static_cast<PixelFormat>(it.format),
2943 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2944 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2945 Dataspace::UNKNOWN,
2946 static_cast<StreamRotation>(INT32_MAX),
2947 std::string(),
2948 jpegBufferSize,
2949 0,
2950                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2951 RequestAvailableDynamicRangeProfilesMap::
2952 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2953            streams[0] = stream;
2954 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2955 jpegBufferSize);
2956 config.streamConfigCounter = streamConfigCounter++;
2957 s = injectionSession->configureInjectionStreams(config, chars);
2958 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
2959 }
2960
2961 ret = session->close();
2962 ASSERT_TRUE(ret.isOk());
2963 }
2964}
2965
2966// Check whether session parameters are supported for the injection camera. If the HAL
2967// supports them, try to configure a preview stream using them.
2968TEST_P(CameraAidlTest, configureInjectionStreamsWithSessionParameters) {
2969 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2970 std::vector<AvailableStream> outputPreviewStreams;
2971 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2972 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2973
2974 for (const auto& name : cameraDeviceNames) {
2975 CameraMetadata metadata;
2976 std::shared_ptr<ICameraInjectionSession> injectionSession;
2977 std::shared_ptr<ICameraDevice> unusedDevice;
2978 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2979 &unusedDevice /*out*/);
2980 if (injectionSession == nullptr) {
2981 continue;
2982 }
2983
2984 std::shared_ptr<ICameraDeviceSession> session;
2985 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2986 ASSERT_TRUE(ret.isOk());
2987 ASSERT_NE(session, nullptr);
2988
2989 camera_metadata_t* staticMetaBuffer =
2990 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2991 CameraMetadata chars;
2992 chars.metadata = metadata.metadata;
2993
2994 std::unordered_set<int32_t> availableSessionKeys;
2995 Status rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
2996 &availableSessionKeys);
2997 ASSERT_EQ(Status::OK, rc);
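        // Devices that advertise no session keys have nothing to exercise here; skip them.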
2998 if (availableSessionKeys.empty()) {
2999 ret = session->close();
3000 ASSERT_TRUE(ret.isOk());
3001 continue;
3002 }
3003
3004 android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings;
3005 android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams,
3006 modifiedSessionParams;
3007 constructFilteredSettings(session, availableSessionKeys, RequestTemplate::PREVIEW,
3008 &previewRequestSettings, &sessionParams);
3009 if (sessionParams.isEmpty()) {
3010 ret = session->close();
3011 ASSERT_TRUE(ret.isOk());
3012 continue;
3013 }
3014
3015 outputPreviewStreams.clear();
3016
3017 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams,
3018 &previewThreshold));
3019 ASSERT_NE(0u, outputPreviewStreams.size());
3020
3021 Stream previewStream = {
3022 0,
3023 StreamType::OUTPUT,
3024 outputPreviewStreams[0].width,
3025 outputPreviewStreams[0].height,
3026 static_cast<PixelFormat>(outputPreviewStreams[0].format),
3027 static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
3028 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
3029 Dataspace::UNKNOWN,
3030 StreamRotation::ROTATION_0,
3031 std::string(),
3032 0,
3033 -1,
3034                {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
3035 RequestAvailableDynamicRangeProfilesMap::
3036 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
3037        std::vector<Stream> streams = {previewStream};
3038 StreamConfiguration config;
3039 config.streams = streams;
3040 config.operationMode = StreamConfigurationMode::NORMAL_MODE;
3041
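        // Hand the filtered session parameters to the HAL as a raw camera_metadata buffer;
        // ownership is reclaimed via acquire() after configuration so it is freed only once.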
3042 modifiedSessionParams = sessionParams;
3043 camera_metadata_t* sessionParamsBuffer = sessionParams.release();
3044 uint8_t* rawSessionParamsBuffer = reinterpret_cast<uint8_t*>(sessionParamsBuffer);
3045 config.sessionParams.metadata =
3046 std::vector(rawSessionParamsBuffer,
3047 rawSessionParamsBuffer + get_camera_metadata_size(sessionParamsBuffer));
3048
3049        config.streamConfigCounter = 0;
3051 config.multiResolutionInputImage = false;
3052
3053 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
3054 ASSERT_TRUE(s.isOk());
3055
3056 sessionParams.acquire(sessionParamsBuffer);
3057        // staticMetaBuffer aliases metadata.metadata and is owned by it, so it is not freed here.
3058 ret = session->close();
3059 ASSERT_TRUE(ret.isOk());
3060 }
3061}
3062
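// Verify stream use case handling for a RAW16 output stream (the cropped-RAW case).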
3063TEST_P(CameraAidlTest, configureStreamsUseCasesCroppedRaw) {
3064 AvailableStream rawStreamThreshold =
3065 {INT_MAX, INT_MAX, static_cast<int32_t>(PixelFormat::RAW16)};
3066 configureStreamUseCaseInternal(rawStreamThreshold);
3067}
3068
3069// Verify that valid stream use cases can be configured successfully, and invalid use cases
3070// fail stream configuration.
3071TEST_P(CameraAidlTest, configureStreamsUseCases) {
3072    AvailableStream previewStreamThreshold =
3073 {kMaxPreviewWidth, kMaxPreviewHeight, static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
3074 configureStreamUseCaseInternal(previewStreamThreshold);
3075}
3076
3077// Validate the integrity of stream configuration metadata
3078TEST_P(CameraAidlTest, validateStreamConfigurations) {
3079 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
3080 std::vector<AvailableStream> outputStreams;
3081
3082 const int32_t scalerSizesTag = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
3083 const int32_t scalerMinFrameDurationsTag = ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS;
3084 const int32_t scalerStallDurationsTag = ANDROID_SCALER_AVAILABLE_STALL_DURATIONS;
3085
3086 for (const auto& name : cameraDeviceNames) {
3087 CameraMetadata meta;
3088 std::shared_ptr<ICameraDevice> cameraDevice;
3089
3090 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
3091 &cameraDevice /*out*/);
3092 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
3093
3094 if (is10BitDynamicRangeCapable(staticMeta)) {
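        // For 10-bit dynamic range capable devices, the BLOB min-frame-duration and stall
        // duration tables must provide an entry for every supported P010 output size.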
3095 std::vector<std::tuple<size_t, size_t>> supportedP010Sizes, supportedBlobSizes;
3096
3097 getSupportedSizes(staticMeta, scalerSizesTag, HAL_PIXEL_FORMAT_BLOB,
3098 &supportedBlobSizes);
3099 getSupportedSizes(staticMeta, scalerSizesTag, HAL_PIXEL_FORMAT_YCBCR_P010,
3100 &supportedP010Sizes);
3101 ASSERT_FALSE(supportedP010Sizes.empty());
3102
3103 std::vector<int64_t> blobMinDurations, blobStallDurations;
3104 getSupportedDurations(staticMeta, scalerMinFrameDurationsTag, HAL_PIXEL_FORMAT_BLOB,
3105 supportedP010Sizes, &blobMinDurations);
3106 getSupportedDurations(staticMeta, scalerStallDurationsTag, HAL_PIXEL_FORMAT_BLOB,
3107 supportedP010Sizes, &blobStallDurations);
3108 ASSERT_FALSE(blobStallDurations.empty());
3109 ASSERT_FALSE(blobMinDurations.empty());
3110 ASSERT_EQ(supportedP010Sizes.size(), blobMinDurations.size());
3111 ASSERT_EQ(blobMinDurations.size(), blobStallDurations.size());
3112 }
3113
3114        // TODO (b/280887191): Validate other aspects of stream configuration metadata...
3115
3116 ndk::ScopedAStatus ret = mSession->close();
3117 mSession = nullptr;
3118 ASSERT_TRUE(ret.isOk());
3119    }
3120}
3121
3122GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(CameraAidlTest);
3123INSTANTIATE_TEST_SUITE_P(
3124 PerInstance, CameraAidlTest,
3125 testing::ValuesIn(android::getAidlHalInstanceNames(ICameraProvider::descriptor)),
3126        android::hardware::PrintInstanceNameToString);