/*
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <aidl/Vintf.h>
#include <aidl/android/hardware/camera/common/VendorTagSection.h>
#include <aidl/android/hardware/camera/device/ICameraDevice.h>
#include <aidlcommonsupport/NativeHandle.h>
#include <camera_aidl_test.h>
#include <cutils/properties.h>
#include <device_cb.h>
#include <empty_device_cb.h>
#include <grallocusage/GrallocUsageConversion.h>
#include <gtest/gtest.h>
#include <hardware/gralloc.h>
#include <hardware/gralloc1.h>
#include <hidl/GtestPrinter.h>
#include <hidl/HidlSupport.h>
#include <torch_provider_cb.h>
#include <com_android_internal_camera_flags.h>
#include <list>

using ::aidl::android::hardware::camera::common::CameraDeviceStatus;
using ::aidl::android::hardware::camera::common::CameraResourceCost;
using ::aidl::android::hardware::camera::common::TorchModeStatus;
using ::aidl::android::hardware::camera::common::VendorTagSection;
using ::aidl::android::hardware::camera::device::ICameraDevice;
using ::aidl::android::hardware::camera::metadata::RequestAvailableColorSpaceProfilesMap;
using ::aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap;
using ::aidl::android::hardware::camera::metadata::SensorPixelMode;
using ::aidl::android::hardware::camera::provider::CameraIdAndStreamCombination;
using ::aidl::android::hardware::camera::provider::BnCameraProviderCallback;

using ::ndk::ScopedAStatus;

namespace {
const int32_t kBurstFrameCount = 10;
const uint32_t kMaxStillWidth = 2048;
const uint32_t kMaxStillHeight = 1536;

const int64_t kEmptyFlushTimeoutMSec = 200;
namespace flags = com::android::internal::camera::flags;

const static std::vector<int64_t> kMandatoryUseCases = {
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL};
}  // namespace

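// Test if ICameraProvider::getCameraIdList returns Status::OK and log the enumerated ids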
TEST_P(CameraAidlTest, getCameraIdList) {
    std::vector<std::string> idList;
    ScopedAStatus ret = mProvider->getCameraIdList(&idList);
    ASSERT_TRUE(ret.isOk());

    for (size_t i = 0; i < idList.size(); i++) {
        ALOGI("Camera Id[%zu] is %s", i, idList[i].c_str());
    }
}

// Test if ICameraProvider::getVendorTags returns Status::OK
TEST_P(CameraAidlTest, getVendorTags) {
    std::vector<VendorTagSection> vendorTags;
    ScopedAStatus ret = mProvider->getVendorTags(&vendorTags);

    ASSERT_TRUE(ret.isOk());
    for (size_t i = 0; i < vendorTags.size(); i++) {
        ALOGI("Vendor tag section %zu name %s", i, vendorTags[i].sectionName.c_str());
        for (auto& tag : vendorTags[i].tags) {
            ALOGI("Vendor tag id %u name %s type %d", tag.tagId, tag.tagName.c_str(),
                  (int)tag.tagType);
        }
    }
}

// Test if ICameraProvider::setCallback returns Status::OK
TEST_P(CameraAidlTest, setCallback) {
    struct ProviderCb : public BnCameraProviderCallback {
        ScopedAStatus cameraDeviceStatusChange(const std::string& cameraDeviceName,
                                               CameraDeviceStatus newStatus) override {
            ALOGI("camera device status callback name %s, status %d", cameraDeviceName.c_str(),
                  (int)newStatus);
            return ScopedAStatus::ok();
        }
        ScopedAStatus torchModeStatusChange(const std::string& cameraDeviceName,
                                            TorchModeStatus newStatus) override {
            ALOGI("Torch mode status callback name %s, status %d", cameraDeviceName.c_str(),
                  (int)newStatus);
            return ScopedAStatus::ok();
        }
        ScopedAStatus physicalCameraDeviceStatusChange(const std::string& cameraDeviceName,
                                                       const std::string& physicalCameraDeviceName,
                                                       CameraDeviceStatus newStatus) override {
            ALOGI("physical camera device status callback name %s, physical camera name %s,"
                  " status %d",
                  cameraDeviceName.c_str(), physicalCameraDeviceName.c_str(), (int)newStatus);
            return ScopedAStatus::ok();
        }
    };

    std::shared_ptr<ProviderCb> cb = ndk::SharedRefBase::make<ProviderCb>();
    ScopedAStatus ret = mProvider->setCallback(cb);
    ASSERT_TRUE(ret.isOk());
    ret = mProvider->setCallback(nullptr);
    ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
}

// Test if ICameraProvider::getCameraDeviceInterface returns Status::OK and non-null device
TEST_P(CameraAidlTest, getCameraDeviceInterface) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> cameraDevice;
        ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &cameraDevice);
        ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(cameraDevice, nullptr);
    }
}

// Verify that the device resource cost can be retrieved and the values are
// correct.
TEST_P(CameraAidlTest, getResourceCost) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& deviceName : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> cameraDevice;
        ScopedAStatus ret = mProvider->getCameraDeviceInterface(deviceName, &cameraDevice);
        ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(cameraDevice, nullptr);

        CameraResourceCost resourceCost;
        ret = cameraDevice->getResourceCost(&resourceCost);
        ALOGI("getResourceCost returns: %d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());

        ALOGI("    Resource cost is %d", resourceCost.resourceCost);
        ASSERT_LE(resourceCost.resourceCost, 100u);

        for (const auto& name : resourceCost.conflictingDevices) {
            ALOGI("    Conflicting device: %s", name.c_str());
        }
    }
}

// Validate the integrity of manual flash strength control metadata
TEST_P(CameraAidlTest, validateManualFlashStrengthControlKeys) {
    if (flags::camera_manual_flash_strength_control()) {
        std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
        for (const auto& name : cameraDeviceNames) {
            ALOGI("validateManualFlashStrengthControlKeys: Testing camera device %s",
                  name.c_str());
            CameraMetadata meta;
            std::shared_ptr<ICameraDevice> cameraDevice;
            openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                                   &cameraDevice /*out*/);
            ndk::ScopedAStatus ret = cameraDevice->getCameraCharacteristics(&meta);
            ASSERT_TRUE(ret.isOk());
            const camera_metadata_t* staticMeta =
                    reinterpret_cast<const camera_metadata_t*>(meta.metadata.data());
            verifyManualFlashStrengthControlCharacteristics(staticMeta);
        }
    } else {
        ALOGI("validateManualFlashStrengthControlKeys: Test skipped.\n");
        GTEST_SKIP();
    }
}

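// Verify that all logical cameras sharing a given hidden physical camera report
// the same system camera kind for it.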
TEST_P(CameraAidlTest, systemCameraTest) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::map<std::string, std::vector<SystemCameraKind>> hiddenPhysicalIdToLogicalMap;
    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("systemCameraTest: Testing camera device %s", name.c_str());
        ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        CameraMetadata cameraCharacteristics;
        ret = device->getCameraCharacteristics(&cameraCharacteristics);
        ASSERT_TRUE(ret.isOk());

        const camera_metadata_t* staticMeta =
                reinterpret_cast<const camera_metadata_t*>(cameraCharacteristics.metadata.data());
        Status rc = isLogicalMultiCamera(staticMeta);
        if (rc == Status::OPERATION_NOT_SUPPORTED) {
            return;
        }

        ASSERT_EQ(rc, Status::OK);
        std::unordered_set<std::string> physicalIds;
        ASSERT_EQ(getPhysicalCameraIds(staticMeta, &physicalIds), Status::OK);
        SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
        Status retStatus = getSystemCameraKind(staticMeta, &systemCameraKind);
        ASSERT_EQ(retStatus, Status::OK);

        for (auto physicalId : physicalIds) {
            bool isPublicId = false;
            for (auto& deviceName : cameraDeviceNames) {
                std::string publicVersion, publicId;
                ASSERT_TRUE(matchDeviceName(deviceName, mProviderType, &publicVersion, &publicId));
                if (physicalId == publicId) {
                    isPublicId = true;
                    break;
                }
            }

            // For hidden physical cameras, collect their associated logical cameras
            // and store the system camera kind.
            if (!isPublicId) {
                auto it = hiddenPhysicalIdToLogicalMap.find(physicalId);
                if (it == hiddenPhysicalIdToLogicalMap.end()) {
                    hiddenPhysicalIdToLogicalMap.insert(std::make_pair(
                            physicalId, std::vector<SystemCameraKind>({systemCameraKind})));
                } else {
                    it->second.push_back(systemCameraKind);
                }
            }
        }
    }

    // Check that the system camera kind of the logical cameras associated with
    // each hidden physical camera is the same.
    for (const auto& it : hiddenPhysicalIdToLogicalMap) {
        SystemCameraKind neededSystemCameraKind = it.second.front();
        for (auto foundSystemCamera : it.second) {
            ASSERT_EQ(neededSystemCameraKind, foundSystemCamera);
        }
    }
}

// Verify that the static camera characteristics can be retrieved
// successfully.
TEST_P(CameraAidlTest, getCameraCharacteristics) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("getCameraCharacteristics: Testing camera device %s", name.c_str());
        ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        CameraMetadata chars;
        ret = device->getCameraCharacteristics(&chars);
        ASSERT_TRUE(ret.isOk());
        verifyCameraCharacteristics(chars);
        verifyMonochromeCharacteristics(chars);
        verifyRecommendedConfigs(chars);
        verifyHighSpeedRecordingCharacteristics(name, chars);
        verifyLogicalOrUltraHighResCameraMetadata(name, device, chars, cameraDeviceNames);

        ASSERT_TRUE(ret.isOk());

        // getPhysicalCameraCharacteristics will fail for publicly
        // advertised camera IDs.
        std::string version, cameraId;
        ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &cameraId));
        CameraMetadata devChars;
        ret = device->getPhysicalCameraCharacteristics(cameraId, &devChars);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
        ASSERT_EQ(0, devChars.metadata.size());
    }
}

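// Verify that ICameraDevice::getSessionCharacteristics returns valid metadata for a
// basic single-stream configuration when the feature combination query flag is enabled.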
TEST_P(CameraAidlTest, getSessionCharacteristics) {
    if (flags::feature_combination_query()) {
        std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

        for (const auto& name : cameraDeviceNames) {
            std::shared_ptr<ICameraDevice> device;
            ALOGI("getSessionCharacteristics: Testing camera device %s", name.c_str());
            ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
            ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
                  ret.getServiceSpecificError());
            ASSERT_TRUE(ret.isOk());
            ASSERT_NE(device, nullptr);

            CameraMetadata meta;
            openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                                   &device /*out*/);

            std::vector<AvailableStream> outputStreams;
            camera_metadata_t* staticMeta =
                    reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
            outputStreams.clear();
            ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
            ASSERT_NE(0u, outputStreams.size());

            AvailableStream sampleStream = outputStreams[0];

            int32_t streamId = 0;
            Stream stream = {streamId,
                             StreamType::OUTPUT,
                             sampleStream.width,
                             sampleStream.height,
                             static_cast<PixelFormat>(sampleStream.format),
                             static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                     GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                             Dataspace::UNKNOWN,
                             StreamRotation::ROTATION_0,
                             std::string(),
                             /*bufferSize*/ 0,
                             /*groupId*/ -1,
                             {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                             RequestAvailableDynamicRangeProfilesMap::
                                     ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

            std::vector<Stream> streams = {stream};
            StreamConfiguration config;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config);

            CameraMetadata chars;
            ret = device->getSessionCharacteristics(config, &chars);
            ASSERT_TRUE(ret.isOk());
            verifySessionCharacteristics(chars);
        }
    } else {
        ALOGI("getSessionCharacteristics: Test skipped.\n");
        GTEST_SKIP();
    }
}

// Verify that the torch strength level can be set and retrieved successfully.
TEST_P(CameraAidlTest, turnOnTorchWithStrengthLevel) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this);
    ndk::ScopedAStatus ret = mProvider->setCallback(cb);
    ASSERT_TRUE(ret.isOk());

    for (const auto& name : cameraDeviceNames) {
        int32_t defaultLevel;
        std::shared_ptr<ICameraDevice> device;
        ALOGI("%s: Testing camera device %s", __FUNCTION__, name.c_str());

        ret = mProvider->getCameraDeviceInterface(name, &device);
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        CameraMetadata chars;
        ret = device->getCameraCharacteristics(&chars);
        ASSERT_TRUE(ret.isOk());

        const camera_metadata_t* staticMeta =
                reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());
        bool torchStrengthControlSupported = isTorchStrengthControlSupported(staticMeta);
        camera_metadata_ro_entry entry;
        int rc = find_camera_metadata_ro_entry(staticMeta,
                                               ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL, &entry);
        if (torchStrengthControlSupported) {
            ASSERT_EQ(rc, 0);
            ASSERT_GT(entry.count, 0);
            defaultLevel = *entry.data.i32;
            ALOGI("Default level is:%d", defaultLevel);
        }

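        // Turn the torch on at strength level 2; HALs without strength control support
        // must reject the call with OPERATION_NOT_SUPPORTED.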
        mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
        ret = device->turnOnTorchWithStrengthLevel(2);
        ALOGI("turnOnTorchWithStrengthLevel returns status: %d", ret.getServiceSpecificError());
        // OPERATION_NOT_SUPPORTED check
        if (!torchStrengthControlSupported) {
            ALOGI("Torch strength control not supported.");
            ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED),
                      ret.getServiceSpecificError());
        } else {
            {
                ASSERT_TRUE(ret.isOk());
                std::unique_lock<std::mutex> l(mTorchLock);
                while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kTorchTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
                }
                ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
                mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
            }
            ALOGI("getTorchStrengthLevel: Testing");
            int32_t strengthLevel;
            ret = device->getTorchStrengthLevel(&strengthLevel);
            ASSERT_TRUE(ret.isOk());
            ALOGI("Torch strength level is : %d", strengthLevel);
            ASSERT_EQ(strengthLevel, 2);

            // Turn OFF the torch and verify torch strength level is reset to default level.
            ALOGI("Testing torch strength level reset after turning the torch OFF.");
            ret = device->setTorchMode(false);
            ASSERT_TRUE(ret.isOk());
            {
                std::unique_lock<std::mutex> l(mTorchLock);
                while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kTorchTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
                }
                ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
            }

            ret = device->getTorchStrengthLevel(&strengthLevel);
            ASSERT_TRUE(ret.isOk());
            ALOGI("Torch strength level after turning OFF torch is : %d", strengthLevel);
            ASSERT_EQ(strengthLevel, defaultLevel);
        }
    }
}

// In case it is supported, verify that the torch can be enabled.
// Check for corresponding torch callbacks as well.
TEST_P(CameraAidlTest, setTorchMode) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this);
    ndk::ScopedAStatus ret = mProvider->setCallback(cb);
    ALOGI("setCallback returns status: %d", ret.getServiceSpecificError());
    ASSERT_TRUE(ret.isOk());
    ASSERT_NE(cb, nullptr);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("setTorchMode: Testing camera device %s", name.c_str());
        ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        CameraMetadata metadata;
        ret = device->getCameraCharacteristics(&metadata);
        ALOGI("getCameraCharacteristics returns status:%d", ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        camera_metadata_t* staticMeta =
                reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
        bool torchSupported = isTorchSupported(staticMeta);

        mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
        ret = device->setTorchMode(true);
        ALOGI("setTorchMode returns status: %d", ret.getServiceSpecificError());
        if (!torchSupported) {
            ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED),
                      ret.getServiceSpecificError());
        } else {
            ASSERT_TRUE(ret.isOk());
            {
                std::unique_lock<std::mutex> l(mTorchLock);
                while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kTorchTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
                }
                ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
                mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
            }

            ret = device->setTorchMode(false);
            ASSERT_TRUE(ret.isOk());
            {
                std::unique_lock<std::mutex> l(mTorchLock);
                while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kTorchTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
                }
                ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
            }
        }
    }
}

// Check dump functionality.
TEST_P(CameraAidlTest, dump) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("dump: Testing camera device %s", name.c_str());

        ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        int raw_handle = open(kDumpOutput, O_RDWR);
        ASSERT_GE(raw_handle, 0);

        auto retStatus = device->dump(raw_handle, nullptr, 0);
        ASSERT_EQ(retStatus, ::android::OK);
        close(raw_handle);
    }
}

// Open, dump, then close
TEST_P(CameraAidlTest, openClose) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("openClose: Testing camera device %s", name.c_str());
        ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();

        ret = device->open(cb, &mSession);
        ASSERT_TRUE(ret.isOk());
        ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_NE(mSession, nullptr);
        int raw_handle = open(kDumpOutput, O_RDWR);
        ASSERT_GE(raw_handle, 0);

        auto retStatus = device->dump(raw_handle, nullptr, 0);
        ASSERT_EQ(retStatus, ::android::OK);
        close(raw_handle);

        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
        // TODO: test all session API calls return INTERNAL_ERROR after close
        // TODO: keep a wp copy here and verify session cannot be promoted out of this scope
    }
}

// Check whether all common default request settings can be successfully
// constructed.
TEST_P(CameraAidlTest, constructDefaultRequestSettings) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("constructDefaultRequestSettings: Testing camera device %s", name.c_str());
        ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        int32_t interfaceVersion;
        ret = device->getInterfaceVersion(&interfaceVersion);
        ASSERT_TRUE(ret.isOk());
        bool supportFeatureCombinationQuery =
                (interfaceVersion >= CAMERA_DEVICE_API_MINOR_VERSION_3);

        std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
        ret = device->open(cb, &mSession);
        ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(mSession, nullptr);

        for (int32_t t = (int32_t)RequestTemplate::PREVIEW; t <= (int32_t)RequestTemplate::MANUAL;
             t++) {
            RequestTemplate reqTemplate = (RequestTemplate)t;
            CameraMetadata rawMetadata;
            ret = mSession->constructDefaultRequestSettings(reqTemplate, &rawMetadata);
            ALOGI("constructDefaultRequestSettings returns status:%d:%d", ret.getExceptionCode(),
                  ret.getServiceSpecificError());

            if (reqTemplate == RequestTemplate::ZERO_SHUTTER_LAG ||
                reqTemplate == RequestTemplate::MANUAL) {
                // optional templates
                ASSERT_TRUE(ret.isOk() || static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
                                                  ret.getServiceSpecificError());
            } else {
                ASSERT_TRUE(ret.isOk());
            }

            if (ret.isOk()) {
                const camera_metadata_t* metadata =
                        (camera_metadata_t*)rawMetadata.metadata.data();
                size_t expectedSize = rawMetadata.metadata.size();
                int result = validate_camera_metadata_structure(metadata, &expectedSize);
                ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
                verifyRequestTemplate(metadata, reqTemplate);
            } else {
                ASSERT_EQ(0u, rawMetadata.metadata.size());
            }

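            // For devices that support feature combination queries, the device-level
            // constructDefaultRequestSettings must agree with the session-level result.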
            if (flags::feature_combination_query()) {
                if (supportFeatureCombinationQuery) {
                    CameraMetadata rawMetadata2;
                    ndk::ScopedAStatus ret2 =
                            device->constructDefaultRequestSettings(reqTemplate, &rawMetadata2);

                    // TODO: Do not allow OPERATION_NOT_SUPPORTED once HAL
                    // implementation is in place.
                    if (static_cast<Status>(ret2.getServiceSpecificError()) !=
                        Status::OPERATION_NOT_SUPPORTED) {
                        ASSERT_EQ(ret.isOk(), ret2.isOk());
                        ASSERT_EQ(ret.getStatus(), ret2.getStatus());

                        ASSERT_EQ(rawMetadata.metadata.size(), rawMetadata2.metadata.size());
                        if (ret2.isOk()) {
                            const camera_metadata_t* metadata =
                                    (camera_metadata_t*)rawMetadata2.metadata.data();
                            size_t expectedSize = rawMetadata2.metadata.size();
                            int result =
                                    validate_camera_metadata_structure(metadata, &expectedSize);
                            ASSERT_TRUE((result == 0) ||
                                        (result == CAMERA_METADATA_VALIDATION_SHIFTED));
                            verifyRequestTemplate(metadata, reqTemplate);
                        }
                    }
                }
            }
        }
        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// Verify that all supported stream formats and sizes can be configured
// successfully.
TEST_P(CameraAidlTest, configureStreamsAvailableOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputStreams;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> device;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/, &device /*out*/);

        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
        outputStreams.clear();
        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
        ASSERT_NE(0u, outputStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        int32_t streamConfigCounter = 0;
        for (auto& it : outputStreams) {
            Stream stream;
            Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format));
            stream.id = streamId;
            stream.streamType = StreamType::OUTPUT;
            stream.width = it.width;
            stream.height = it.height;
            stream.format = static_cast<PixelFormat>(it.format);
            stream.dataSpace = dataspace;
            stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                    GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
            stream.rotation = StreamRotation::ROTATION_0;
            stream.dynamicRangeProfile = RequestAvailableDynamicRangeProfilesMap::
                    ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
            stream.useCase = ScalerAvailableStreamUseCases::
                    ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
            stream.colorSpace = static_cast<int>(
                    RequestAvailableColorSpaceProfilesMap::
                            ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);

            std::vector<Stream> streams = {stream};
            StreamConfiguration config;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);

            verifyStreamCombination(device, config, /*expectedStatus*/ true);

            config.streamConfigCounter = streamConfigCounter++;
            std::vector<HalStream> halConfigs;
            ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
            ASSERT_TRUE(ret.isOk());
            ASSERT_EQ(halConfigs.size(), 1);
            ASSERT_EQ(halConfigs[0].id, streamId);

            streamId++;
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// Verify that mandatory concurrent streams and outputs are supported.
TEST_P(CameraAidlTest, configureConcurrentStreamsAvailableOutputs) {
    struct CameraTestInfo {
        CameraMetadata staticMeta;
        std::shared_ptr<ICameraDeviceSession> session;
        std::shared_ptr<ICameraDevice> cameraDevice;
        StreamConfiguration config;
    };

    std::map<std::string, std::string> idToNameMap = getCameraDeviceIdToNameMap(mProvider);
    std::vector<ConcurrentCameraIdCombination> concurrentDeviceCombinations =
            getConcurrentDeviceCombinations(mProvider);
    std::vector<AvailableStream> outputStreams;
    for (const auto& cameraDeviceIds : concurrentDeviceCombinations) {
        std::vector<CameraIdAndStreamCombination> cameraIdsAndStreamCombinations;
        std::vector<CameraTestInfo> cameraTestInfos;
        size_t i = 0;
        for (const auto& id : cameraDeviceIds.combination) {
            CameraTestInfo cti;
            auto it = idToNameMap.find(id);
            ASSERT_TRUE(idToNameMap.end() != it);
            std::string name = it->second;

            openEmptyDeviceSession(name, mProvider, &cti.session /*out*/, &cti.staticMeta /*out*/,
                                   &cti.cameraDevice /*out*/);

            outputStreams.clear();
            camera_metadata_t* staticMeta =
                    reinterpret_cast<camera_metadata_t*>(cti.staticMeta.metadata.data());
            ASSERT_EQ(Status::OK, getMandatoryConcurrentStreams(staticMeta, &outputStreams));
            ASSERT_NE(0u, outputStreams.size());

            int32_t jpegBufferSize = 0;
            ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
            ASSERT_NE(0u, jpegBufferSize);

            int32_t streamId = 0;
            std::vector<Stream> streams(outputStreams.size());
            size_t j = 0;
            for (const auto& s : outputStreams) {
                Stream stream;
                Dataspace dataspace = getDataspace(static_cast<PixelFormat>(s.format));
                stream.id = streamId++;
                stream.streamType = StreamType::OUTPUT;
                stream.width = s.width;
                stream.height = s.height;
                stream.format = static_cast<PixelFormat>(s.format);
                stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                        GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
                stream.dataSpace = dataspace;
                stream.rotation = StreamRotation::ROTATION_0;
                stream.sensorPixelModesUsed = {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT};
                stream.dynamicRangeProfile = RequestAvailableDynamicRangeProfilesMap::
                        ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
                streams[j] = stream;
                j++;
            }

            // Add the created stream configs to cameraIdsAndStreamCombinations
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &cti.config,
                                      jpegBufferSize);

            cti.config.streamConfigCounter = outputStreams.size();
            CameraIdAndStreamCombination cameraIdAndStreamCombination;
            cameraIdAndStreamCombination.cameraId = id;
            cameraIdAndStreamCombination.streamConfiguration = cti.config;
            cameraIdsAndStreamCombinations.push_back(cameraIdAndStreamCombination);
            i++;
            cameraTestInfos.push_back(cti);
        }
        // Now verify that concurrent streams are supported
        bool combinationSupported;
        ndk::ScopedAStatus ret = mProvider->isConcurrentStreamCombinationSupported(
                cameraIdsAndStreamCombinations, &combinationSupported);
        ASSERT_TRUE(ret.isOk());
        ASSERT_EQ(combinationSupported, true);

        // Test that the streams can actually be configured
        for (auto& cti : cameraTestInfos) {
            if (cti.session != nullptr) {
                verifyStreamCombination(cti.cameraDevice, cti.config, /*expectedStatus*/ true);
            }

            if (cti.session != nullptr) {
                std::vector<HalStream> streamConfigs;
                ret = cti.session->configureStreams(cti.config, &streamConfigs);
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(cti.config.streams.size(), streamConfigs.size());
            }
        }

        for (auto& cti : cameraTestInfos) {
            ret = cti.session->close();
            ASSERT_TRUE(ret.isOk());
        }
    }
}

// Check for correct handling of invalid/incorrect configuration parameters.
TEST_P(CameraAidlTest, configureStreamsInvalidOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputStreams;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
        outputStreams.clear();

        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
        ASSERT_NE(0u, outputStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        Stream stream = {streamId++,
                         StreamType::OUTPUT,
                         static_cast<uint32_t>(0),
                         static_cast<uint32_t>(0),
                         static_cast<PixelFormat>(outputStreams[0].format),
                         static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                         Dataspace::UNKNOWN,
                         StreamRotation::ROTATION_0,
                         std::string(),
                         jpegBufferSize,
                         -1,
                         {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                         RequestAvailableDynamicRangeProfilesMap::
                                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        int32_t streamConfigCounter = 0;
        std::vector<Stream> streams = {stream};
        StreamConfiguration config;
        createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                  jpegBufferSize);

        verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ false);

        config.streamConfigCounter = streamConfigCounter++;
        std::vector<HalStream> halConfigs;
        ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
                            ret.getServiceSpecificError() ||
                    static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError());

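        // A stream with INT32_MAX width and height must be rejected as well.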
        stream = {streamId++,
                  StreamType::OUTPUT,
                  /*width*/ INT32_MAX,
                  /*height*/ INT32_MAX,
                  static_cast<PixelFormat>(outputStreams[0].format),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  jpegBufferSize,
                  -1,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                  jpegBufferSize);

        config.streamConfigCounter = streamConfigCounter++;
        halConfigs.clear();
        ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());

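        // For every available output size, an undefined pixel format and an undefined
        // rotation must both be rejected with ILLEGAL_ARGUMENT.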
        for (auto& it : outputStreams) {
            stream = {streamId++,
                      StreamType::OUTPUT,
                      it.width,
                      it.height,
                      static_cast<PixelFormat>(UINT32_MAX),
                      static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                              GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                      Dataspace::UNKNOWN,
                      StreamRotation::ROTATION_0,
                      std::string(),
                      jpegBufferSize,
                      -1,
                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                      RequestAvailableDynamicRangeProfilesMap::
                              ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

            streams[0] = stream;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);
            config.streamConfigCounter = streamConfigCounter++;
            halConfigs.clear();
            ret = mSession->configureStreams(config, &halConfigs);
            ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
                      ret.getServiceSpecificError());

            stream = {streamId++,
                      StreamType::OUTPUT,
                      it.width,
                      it.height,
                      static_cast<PixelFormat>(it.format),
                      static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                              GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                      Dataspace::UNKNOWN,
                      static_cast<StreamRotation>(UINT32_MAX),
                      std::string(),
                      jpegBufferSize,
                      -1,
                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                      RequestAvailableDynamicRangeProfilesMap::
                              ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

            streams[0] = stream;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);

            config.streamConfigCounter = streamConfigCounter++;
            halConfigs.clear();
            ret = mSession->configureStreams(config, &halConfigs);
            ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
                      ret.getServiceSpecificError());
        }

        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// Check whether all supported ZSL output stream combinations can be
// configured successfully.
TEST_P(CameraAidlTest, configureStreamsZSLInputOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> inputStreams;
    std::vector<AvailableZSLInputOutput> inputOutputMap;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        Status rc = isZSLModeAvailable(staticMeta);
        if (Status::OPERATION_NOT_SUPPORTED == rc) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }
        ASSERT_EQ(Status::OK, rc);

        inputStreams.clear();
        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, inputStreams));
        ASSERT_NE(0u, inputStreams.size());

        inputOutputMap.clear();
        ASSERT_EQ(Status::OK, getZSLInputOutputMap(staticMeta, inputOutputMap));
        ASSERT_NE(0u, inputOutputMap.size());

        bool supportMonoY8 = false;
        if (Status::OK == isMonochromeCamera(staticMeta)) {
            for (auto& it : inputStreams) {
                if (it.format == static_cast<uint32_t>(PixelFormat::Y8)) {
                    supportMonoY8 = true;
                    break;
                }
            }
        }

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        bool hasPrivToY8 = false, hasY8ToY8 = false, hasY8ToBlob = false;
        uint32_t streamConfigCounter = 0;
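        // For each ZSL input/output format pair, pick the largest input size and configure
        // an input stream, a ZSL output stream, and a regular output stream together.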
        for (auto& inputIter : inputOutputMap) {
            AvailableStream input;
            ASSERT_EQ(Status::OK, findLargestSize(inputStreams, inputIter.inputFormat, input));
            ASSERT_NE(0u, inputStreams.size());

            if (inputIter.inputFormat ==
                        static_cast<uint32_t>(PixelFormat::IMPLEMENTATION_DEFINED) &&
                inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
                hasPrivToY8 = true;
            } else if (inputIter.inputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
                if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::BLOB)) {
                    hasY8ToBlob = true;
                } else if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
                    hasY8ToY8 = true;
                }
            }
            AvailableStream outputThreshold = {INT32_MAX, INT32_MAX, inputIter.outputFormat};
            std::vector<AvailableStream> outputStreams;
            ASSERT_EQ(Status::OK,
                      getAvailableOutputStreams(staticMeta, outputStreams, &outputThreshold));
            for (auto& outputIter : outputStreams) {
                Dataspace outputDataSpace =
                        getDataspace(static_cast<PixelFormat>(outputIter.format));
                Stream zslStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        input.width,
                        input.height,
                        static_cast<PixelFormat>(input.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC_USAGE_HW_CAMERA_ZSL),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                Stream inputStream = {
                        streamId++,
                        StreamType::INPUT,
                        input.width,
                        input.height,
                        static_cast<PixelFormat>(input.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(0),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                Stream outputStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        outputIter.width,
                        outputIter.height,
                        static_cast<PixelFormat>(outputIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                        outputDataSpace,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

                std::vector<Stream> streams = {inputStream, zslStream, outputStream};

                StreamConfiguration config;
                createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                          jpegBufferSize);

                verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true);

                config.streamConfigCounter = streamConfigCounter++;
                std::vector<HalStream> halConfigs;
                ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(3u, halConfigs.size());
            }
        }

        if (supportMonoY8) {
            if (Status::OK == isZSLModeAvailable(staticMeta, PRIV_REPROCESS)) {
                ASSERT_TRUE(hasPrivToY8);
            }
            if (Status::OK == isZSLModeAvailable(staticMeta, YUV_REPROCESS)) {
                ASSERT_TRUE(hasY8ToY8);
                ASSERT_TRUE(hasY8ToBlob);
            }
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// Check whether session parameters are supported. If HAL support for them
// exists, then try to configure a preview stream using them.
TEST_P(CameraAidlTest, configureStreamsWithSessionParameters) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputPreviewStreams;
    AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
                                        static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;

        std::shared_ptr<ICameraDevice> unusedCameraDevice;
        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &unusedCameraDevice /*out*/);
        camera_metadata_t* staticMetaBuffer =
                reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        std::unordered_set<int32_t> availableSessionKeys;
        auto rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
                                   &availableSessionKeys);
        ASSERT_TRUE(Status::OK == rc);
        if (availableSessionKeys.empty()) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings;
        android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams,
                modifiedSessionParams;
        constructFilteredSettings(mSession, availableSessionKeys, RequestTemplate::PREVIEW,
                                  &previewRequestSettings, &sessionParams);
        if (sessionParams.isEmpty()) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        outputPreviewStreams.clear();

        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams,
                                                        &previewThreshold));
        ASSERT_NE(0u, outputPreviewStreams.size());

        Stream previewStream = {
                0,
                StreamType::OUTPUT,
                outputPreviewStreams[0].width,
                outputPreviewStreams[0].height,
                static_cast<PixelFormat>(outputPreviewStreams[0].format),
                static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                        GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                Dataspace::UNKNOWN,
                StreamRotation::ROTATION_0,
                std::string(),
                /*bufferSize*/ 0,
                /*groupId*/ -1,
                {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                RequestAvailableDynamicRangeProfilesMap::
                        ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

        std::vector<Stream> streams = {previewStream};
        StreamConfiguration config;

        config.streams = streams;
        config.operationMode = StreamConfigurationMode::NORMAL_MODE;
        modifiedSessionParams = sessionParams;
        auto sessionParamsBuffer = sessionParams.release();
        std::vector<uint8_t> rawSessionParam =
                std::vector(reinterpret_cast<uint8_t*>(sessionParamsBuffer),
                            reinterpret_cast<uint8_t*>(sessionParamsBuffer) +
                                    get_camera_metadata_size(sessionParamsBuffer));

        config.sessionParams.metadata = rawSessionParam;
        config.streamConfigCounter = 0;
        config.streams = {previewStream};
        config.streamConfigCounter = 0;
        config.multiResolutionInputImage = false;

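        // Remove one of the reported session keys and query whether the modified session
        // parameters would require a stream reconfiguration.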
1169 for (const auto& it : availableSessionKeys) {
1170 if (modifiedSessionParams.exists(it)) {
1171 modifiedSessionParams.erase(it);
1172 newSessionParamsAvailable = true;
1173 break;
1174 }
1175 }
1176 if (newSessionParamsAvailable) {
1177 auto modifiedSessionParamsBuffer = modifiedSessionParams.release();
1178 verifySessionReconfigurationQuery(mSession, sessionParamsBuffer,
1179 modifiedSessionParamsBuffer);
1180 modifiedSessionParams.acquire(modifiedSessionParamsBuffer);
1181 }
1182
1183 std::vector<HalStream> halConfigs;
1184 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1185 ASSERT_TRUE(ret.isOk());
1186 ASSERT_EQ(1u, halConfigs.size());
1187
1188 sessionParams.acquire(sessionParamsBuffer);
1189 ret = mSession->close();
1190 mSession = nullptr;
1191 ASSERT_TRUE(ret.isOk());
1192 }
1193}
1194
1195// Verify that all supported preview + still capture stream combinations
1196// can be configured successfully.
1197TEST_P(CameraAidlTest, configureStreamsPreviewStillOutputs) {
1198 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1199 std::vector<AvailableStream> outputBlobStreams;
1200 std::vector<AvailableStream> outputPreviewStreams;
1201 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
1202 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
1203 AvailableStream blobThreshold = {INT32_MAX, INT32_MAX, static_cast<int32_t>(PixelFormat::BLOB)};
1204
1205 for (const auto& name : cameraDeviceNames) {
1206 CameraMetadata meta;
1207
1208 std::shared_ptr<ICameraDevice> cameraDevice;
1209 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
1210 &cameraDevice /*out*/);
1211
1212 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1213
1214 // Check if camera support depth only
1215 if (isDepthOnly(staticMeta)) {
1216 ndk::ScopedAStatus ret = mSession->close();
1217 mSession = nullptr;
1218 ASSERT_TRUE(ret.isOk());
1219 continue;
1220 }
1221
1222 outputBlobStreams.clear();
1223 ASSERT_EQ(Status::OK,
1224 getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold));
1225 ASSERT_NE(0u, outputBlobStreams.size());
1226
1227 outputPreviewStreams.clear();
1228 ASSERT_EQ(Status::OK,
1229 getAvailableOutputStreams(staticMeta, outputPreviewStreams, &previewThreshold));
1230 ASSERT_NE(0u, outputPreviewStreams.size());
1231
1232 int32_t jpegBufferSize = 0;
1233 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
1234 ASSERT_NE(0u, jpegBufferSize);
1235
1236 int32_t streamId = 0;
1237 uint32_t streamConfigCounter = 0;
1238
1239 for (auto& blobIter : outputBlobStreams) {
1240 for (auto& previewIter : outputPreviewStreams) {
1241 Stream previewStream = {
1242 streamId++,
1243 StreamType::OUTPUT,
1244 previewIter.width,
1245 previewIter.height,
1246 static_cast<PixelFormat>(previewIter.format),
1247 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1248 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
1249 Dataspace::UNKNOWN,
1250 StreamRotation::ROTATION_0,
1251 std::string(),
1252 /*bufferSize*/ 0,
1253 /*groupId*/ -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001254 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1255 RequestAvailableDynamicRangeProfilesMap::
1256 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08001257 Stream blobStream = {
1258 streamId++,
1259 StreamType::OUTPUT,
1260 blobIter.width,
1261 blobIter.height,
1262 static_cast<PixelFormat>(blobIter.format),
1263 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1264 GRALLOC1_CONSUMER_USAGE_CPU_READ),
1265 Dataspace::JFIF,
1266 StreamRotation::ROTATION_0,
1267 std::string(),
1268 /*bufferSize*/ 0,
1269 /*groupId*/ -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001270 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1271 RequestAvailableDynamicRangeProfilesMap::
1272 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08001273 std::vector<Stream> streams = {previewStream, blobStream};
1274 StreamConfiguration config;
1275
1276 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
1277 jpegBufferSize);
1278 config.streamConfigCounter = streamConfigCounter++;
Shuzhen Wangdf89cb92023-11-09 18:24:42 -08001279 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001280
1281 std::vector<HalStream> halConfigs;
1282 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1283 ASSERT_TRUE(ret.isOk());
1284 ASSERT_EQ(2u, halConfigs.size());
1285 }
1286 }
1287
1288 ndk::ScopedAStatus ret = mSession->close();
1289 mSession = nullptr;
1290 ASSERT_TRUE(ret.isOk());
1291 }
1292}
1293
1294// In case constrained mode is supported, test whether it can be
1295// configured. Additionally check for common invalid inputs when
1296// using this mode.
1297TEST_P(CameraAidlTest, configureStreamsConstrainedOutputs) {
1298 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1299
1300 for (const auto& name : cameraDeviceNames) {
1301 CameraMetadata meta;
1302 std::shared_ptr<ICameraDevice> cameraDevice;
1303
1304 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
1305 &cameraDevice /*out*/);
1306 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1307
1308 Status rc = isConstrainedModeAvailable(staticMeta);
1309 if (Status::OPERATION_NOT_SUPPORTED == rc) {
1310 ndk::ScopedAStatus ret = mSession->close();
1311 mSession = nullptr;
1312 ASSERT_TRUE(ret.isOk());
1313 continue;
1314 }
1315 ASSERT_EQ(Status::OK, rc);
1316
1317 AvailableStream hfrStream;
1318 rc = pickConstrainedModeSize(staticMeta, hfrStream);
1319 ASSERT_EQ(Status::OK, rc);
1320
1321 int32_t streamId = 0;
1322 uint32_t streamConfigCounter = 0;
1323 Stream stream = {streamId,
1324 StreamType::OUTPUT,
1325 hfrStream.width,
1326 hfrStream.height,
1327 static_cast<PixelFormat>(hfrStream.format),
1328 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1329 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1330 Dataspace::UNKNOWN,
1331 StreamRotation::ROTATION_0,
1332 std::string(),
1333 /*bufferSize*/ 0,
1334 /*groupId*/ -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001335 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1336 RequestAvailableDynamicRangeProfilesMap::
1337 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08001338 std::vector<Stream> streams = {stream};
1339 StreamConfiguration config;
1340 createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
1341 &config);
1342
Shuzhen Wangdf89cb92023-11-09 18:24:42 -08001343 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001344
1345 config.streamConfigCounter = streamConfigCounter++;
1346 std::vector<HalStream> halConfigs;
1347 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1348 ASSERT_TRUE(ret.isOk());
1349 ASSERT_EQ(1u, halConfigs.size());
1350 ASSERT_EQ(halConfigs[0].id, streamId);
1351
1352 stream = {streamId++,
1353 StreamType::OUTPUT,
1354 static_cast<uint32_t>(0),
1355 static_cast<uint32_t>(0),
1356 static_cast<PixelFormat>(hfrStream.format),
1357 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1358 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1359 Dataspace::UNKNOWN,
1360 StreamRotation::ROTATION_0,
1361 std::string(),
1362 /*bufferSize*/ 0,
1363 /*groupId*/ -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001364 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1365 RequestAvailableDynamicRangeProfilesMap::
1366 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08001367 streams[0] = stream;
1368 createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
1369 &config);
1370
1371 config.streamConfigCounter = streamConfigCounter++;
1372 std::vector<HalStream> halConfig;
1373 ret = mSession->configureStreams(config, &halConfig);
1374 ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
1375 ret.getServiceSpecificError() ||
1376 static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError());
1377
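        // A stream with out-of-range (INT32_MAX) dimensions should also be rejected.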
1378 stream = {streamId++,
1379 StreamType::OUTPUT,
1380 INT32_MAX,
1381 INT32_MAX,
1382 static_cast<PixelFormat>(hfrStream.format),
1383 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1384 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1385 Dataspace::UNKNOWN,
1386 StreamRotation::ROTATION_0,
1387 std::string(),
1388 /*bufferSize*/ 0,
1389 /*groupId*/ -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001390 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1391 RequestAvailableDynamicRangeProfilesMap::
1392 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08001393 streams[0] = stream;
1394 createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
1395 &config);
1396
1397 config.streamConfigCounter = streamConfigCounter++;
1398 halConfigs.clear();
1399 ret = mSession->configureStreams(config, &halConfigs);
1400 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
1401
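        // A stream with an invalid pixel format should be rejected as well.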
1402 stream = {streamId++,
1403 StreamType::OUTPUT,
1404 hfrStream.width,
1405 hfrStream.height,
1406 static_cast<PixelFormat>(UINT32_MAX),
1407 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1408 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1409 Dataspace::UNKNOWN,
1410 StreamRotation::ROTATION_0,
1411 std::string(),
1412 /*bufferSize*/ 0,
1413 /*groupId*/ -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001414 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1415 RequestAvailableDynamicRangeProfilesMap::
1416 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08001417 streams[0] = stream;
1418 createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
1419 &config);
1420
1421 config.streamConfigCounter = streamConfigCounter++;
1422 halConfigs.clear();
1423 ret = mSession->configureStreams(config, &halConfigs);
1424 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
1425
1426 ret = mSession->close();
1427 mSession = nullptr;
1428 ASSERT_TRUE(ret.isOk());
1429 }
1430}
1431
1432// Verify that all supported video + snapshot stream combinations can
1433// be configured successfully.
1434TEST_P(CameraAidlTest, configureStreamsVideoStillOutputs) {
1435 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1436 std::vector<AvailableStream> outputBlobStreams;
1437 std::vector<AvailableStream> outputVideoStreams;
1438 AvailableStream videoThreshold = {kMaxVideoWidth, kMaxVideoHeight,
1439 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
1440 AvailableStream blobThreshold = {kMaxVideoWidth, kMaxVideoHeight,
1441 static_cast<int32_t>(PixelFormat::BLOB)};
1442
1443 for (const auto& name : cameraDeviceNames) {
1444 CameraMetadata meta;
1445 std::shared_ptr<ICameraDevice> cameraDevice;
1446
1447 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
1448 &cameraDevice /*out*/);
1449
1450 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1451
1452        // Check if the camera supports depth only
1453 if (isDepthOnly(staticMeta)) {
1454 ndk::ScopedAStatus ret = mSession->close();
1455 mSession = nullptr;
1456 ASSERT_TRUE(ret.isOk());
1457 continue;
1458 }
1459
1460 outputBlobStreams.clear();
1461 ASSERT_EQ(Status::OK,
1462 getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold));
1463 ASSERT_NE(0u, outputBlobStreams.size());
1464
1465 outputVideoStreams.clear();
1466 ASSERT_EQ(Status::OK,
1467 getAvailableOutputStreams(staticMeta, outputVideoStreams, &videoThreshold));
1468 ASSERT_NE(0u, outputVideoStreams.size());
1469
1470 int32_t jpegBufferSize = 0;
1471 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
1472 ASSERT_NE(0u, jpegBufferSize);
1473
1474 int32_t streamId = 0;
1475 uint32_t streamConfigCounter = 0;
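        // Configure every supported video + blob (JPEG) stream size combination.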
1476 for (auto& blobIter : outputBlobStreams) {
1477 for (auto& videoIter : outputVideoStreams) {
1478 Stream videoStream = {
1479 streamId++,
1480 StreamType::OUTPUT,
1481 videoIter.width,
1482 videoIter.height,
1483 static_cast<PixelFormat>(videoIter.format),
1484 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1485 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1486 Dataspace::UNKNOWN,
1487 StreamRotation::ROTATION_0,
1488 std::string(),
1489 jpegBufferSize,
1490 /*groupId*/ -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001491 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1492 RequestAvailableDynamicRangeProfilesMap::
1493 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08001494 Stream blobStream = {
1495 streamId++,
1496 StreamType::OUTPUT,
1497 blobIter.width,
1498 blobIter.height,
1499 static_cast<PixelFormat>(blobIter.format),
1500 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1501 GRALLOC1_CONSUMER_USAGE_CPU_READ),
1502 Dataspace::JFIF,
1503 StreamRotation::ROTATION_0,
1504 std::string(),
1505 jpegBufferSize,
1506 /*groupId*/ -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001507 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1508 RequestAvailableDynamicRangeProfilesMap::
1509 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08001510 std::vector<Stream> streams = {videoStream, blobStream};
1511 StreamConfiguration config;
1512
1513 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
1514 jpegBufferSize);
Shuzhen Wangdf89cb92023-11-09 18:24:42 -08001515 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001516
1517 config.streamConfigCounter = streamConfigCounter++;
1518 std::vector<HalStream> halConfigs;
1519 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1520 ASSERT_TRUE(ret.isOk());
1521 ASSERT_EQ(2u, halConfigs.size());
1522 }
1523 }
1524
1525 ndk::ScopedAStatus ret = mSession->close();
1526 mSession = nullptr;
1527 ASSERT_TRUE(ret.isOk());
1528 }
1529}
1530
1531// Generate and verify a camera capture request
1532TEST_P(CameraAidlTest, processCaptureRequestPreview) {
1533 // TODO(b/220897574): Failing with BUFFER_ERROR
1534 processCaptureRequestInternal(GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, RequestTemplate::PREVIEW,
1535 false /*secureOnlyCameras*/);
1536}
1537
1538// Generate and verify a secure camera capture request
1539TEST_P(CameraAidlTest, processSecureCaptureRequest) {
1540 processCaptureRequestInternal(GRALLOC1_PRODUCER_USAGE_PROTECTED, RequestTemplate::STILL_CAPTURE,
1541 true /*secureOnlyCameras*/);
1542}
1543
1544TEST_P(CameraAidlTest, processCaptureRequestPreviewStabilization) {
1545 std::unordered_map<std::string, nsecs_t> cameraDeviceToTimeLag;
1546 processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ false,
1547 cameraDeviceToTimeLag);
1548 processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ true,
1549 cameraDeviceToTimeLag);
1550}
1551
1552// Generate and verify a multi-camera capture request
1553TEST_P(CameraAidlTest, processMultiCaptureRequestPreview) {
1554 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1555 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
1556 static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
1557 int64_t bufferId = 1;
1558 uint32_t frameNumber = 1;
1559 std::vector<uint8_t> settings;
1560 std::vector<uint8_t> emptySettings;
1561 std::string invalidPhysicalId = "-1";
1562
1563 for (const auto& name : cameraDeviceNames) {
1564 std::string version, deviceId;
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +00001565 ALOGI("processMultiCaptureRequestPreview: Test device %s", name.c_str());
Avichal Rakesh362242f2022-02-08 12:40:53 -08001566 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1567 CameraMetadata metadata;
1568
1569 std::shared_ptr<ICameraDevice> unusedDevice;
1570 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &metadata /*out*/,
1571 &unusedDevice /*out*/);
1572
1573 camera_metadata_t* staticMeta =
1574 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
1575 Status rc = isLogicalMultiCamera(staticMeta);
1576 if (Status::OPERATION_NOT_SUPPORTED == rc) {
1577 ndk::ScopedAStatus ret = mSession->close();
1578 mSession = nullptr;
1579 ASSERT_TRUE(ret.isOk());
1580 continue;
1581 }
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +00001582 ASSERT_EQ(Status::OK, rc);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001583
1584 std::unordered_set<std::string> physicalIds;
1585 rc = getPhysicalCameraIds(staticMeta, &physicalIds);
1586 ASSERT_TRUE(Status::OK == rc);
1587 ASSERT_TRUE(physicalIds.size() > 1);
1588
1589 std::unordered_set<int32_t> physicalRequestKeyIDs;
1590 rc = getSupportedKeys(staticMeta, ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS,
1591 &physicalRequestKeyIDs);
1592 ASSERT_TRUE(Status::OK == rc);
1593 if (physicalRequestKeyIDs.empty()) {
1594 ndk::ScopedAStatus ret = mSession->close();
1595 mSession = nullptr;
1596 ASSERT_TRUE(ret.isOk());
1597 // The logical camera doesn't support any individual physical requests.
1598 continue;
1599 }
1600
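        // Build per-physical-camera settings by filtering the default preview template
        // down to the supported physical camera request keys.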
1601 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultPreviewSettings;
1602 android::hardware::camera::common::V1_0::helper::CameraMetadata filteredSettings;
1603 constructFilteredSettings(mSession, physicalRequestKeyIDs, RequestTemplate::PREVIEW,
1604 &defaultPreviewSettings, &filteredSettings);
1605 if (filteredSettings.isEmpty()) {
1606 // No physical device settings in default request.
1607 ndk::ScopedAStatus ret = mSession->close();
1608 mSession = nullptr;
1609 ASSERT_TRUE(ret.isOk());
1610 continue;
1611 }
1612
1613 const camera_metadata_t* settingsBuffer = defaultPreviewSettings.getAndLock();
1614 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1615 settings.assign(rawSettingsBuffer,
1616 rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1617 CameraMetadata settingsMetadata = {settings};
1618 overrideRotateAndCrop(&settingsMetadata);
1619
1620 ndk::ScopedAStatus ret = mSession->close();
1621 mSession = nullptr;
1622 ASSERT_TRUE(ret.isOk());
1623
1624 // Leave only 2 physical devices in the id set.
1625 auto it = physicalIds.begin();
1626 std::string physicalDeviceId = *it;
1627 it++;
1628 physicalIds.erase(++it, physicalIds.end());
1629 ASSERT_EQ(physicalIds.size(), 2u);
1630
1631 std::vector<HalStream> halStreams;
1632 bool supportsPartialResults = false;
1633 bool useHalBufManager = false;
1634 int32_t partialResultCount = 0;
1635 Stream previewStream;
1636 std::shared_ptr<DeviceCb> cb;
1637
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +00001638 configurePreviewStreams(
1639 name, mProvider, &previewThreshold, physicalIds, &mSession, &previewStream,
1640 &halStreams /*out*/, &supportsPartialResults /*out*/, &partialResultCount /*out*/,
1641 &useHalBufManager /*out*/, &cb /*out*/, 0 /*streamConfigCounter*/, true);
1642 if (mSession == nullptr) {
1643 // stream combination not supported by HAL, skip test for device
1644 continue;
1645 }
Avichal Rakesh362242f2022-02-08 12:40:53 -08001646
1647 ::aidl::android::hardware::common::fmq::MQDescriptor<
1648 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1649 descriptor;
1650 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1651 ASSERT_TRUE(resultQueueRet.isOk());
1652 std::shared_ptr<ResultMetadataQueue> resultQueue =
1653 std::make_shared<ResultMetadataQueue>(descriptor);
1654 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1655 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
1656 resultQueue = nullptr;
1657 // Don't use the queue onwards.
1658 }
1659
1660 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1661 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1662 partialResultCount, physicalIds, resultQueue);
1663
1664 std::vector<CaptureRequest> requests(1);
1665 CaptureRequest& request = requests[0];
1666 request.frameNumber = frameNumber;
1667 request.fmqSettingsSize = 0;
Emilian Peev3d919f92022-04-20 13:50:59 -07001668 request.settings = settingsMetadata;
Avichal Rakesh362242f2022-02-08 12:40:53 -08001669
1670 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1671
1672 std::vector<buffer_handle_t> graphicBuffers;
1673 graphicBuffers.reserve(halStreams.size());
1674 outputBuffers.resize(halStreams.size());
1675 size_t k = 0;
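        // Allocate one output buffer per configured stream, unless the HAL buffer manager is in use.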
1676 for (const auto& halStream : halStreams) {
1677 buffer_handle_t buffer_handle;
1678 if (useHalBufManager) {
1679 outputBuffers[k] = {halStream.id, /*bufferId*/ 0, NativeHandle(),
1680 BufferStatus::OK, NativeHandle(), NativeHandle()};
1681 } else {
1682 allocateGraphicBuffer(previewStream.width, previewStream.height,
1683 android_convertGralloc1To0Usage(
1684 static_cast<uint64_t>(halStream.producerUsage),
1685 static_cast<uint64_t>(halStream.consumerUsage)),
1686 halStream.overrideFormat, &buffer_handle);
1687 graphicBuffers.push_back(buffer_handle);
1688 outputBuffers[k] = {
1689 halStream.id, bufferId, ::android::makeToAidl(buffer_handle),
1690 BufferStatus::OK, NativeHandle(), NativeHandle()};
1691 bufferId++;
1692 }
1693 k++;
1694 }
1695
1696 std::vector<PhysicalCameraSetting> camSettings(1);
1697 const camera_metadata_t* filteredSettingsBuffer = filteredSettings.getAndLock();
1698 uint8_t* rawFilteredSettingsBuffer = (uint8_t*)filteredSettingsBuffer;
1699 camSettings[0].settings = {std::vector(
1700 rawFilteredSettingsBuffer,
1701 rawFilteredSettingsBuffer + get_camera_metadata_size(filteredSettingsBuffer))};
1702 overrideRotateAndCrop(&camSettings[0].settings);
1703 camSettings[0].fmqSettingsSize = 0;
1704 camSettings[0].physicalCameraId = physicalDeviceId;
1705
1706 request.inputBuffer = {
1707 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
1708 request.physicalCameraSettings = camSettings;
1709
1710 {
1711 std::unique_lock<std::mutex> l(mLock);
1712 mInflightMap.clear();
1713 mInflightMap[frameNumber] = inflightReq;
1714 }
1715
1716 int32_t numRequestProcessed = 0;
1717 std::vector<BufferCache> cachesToRemove;
1718 ndk::ScopedAStatus returnStatus =
1719 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1720 ASSERT_TRUE(returnStatus.isOk());
1721 ASSERT_EQ(numRequestProcessed, 1u);
1722
1723 {
1724 std::unique_lock<std::mutex> l(mLock);
1725 while (!inflightReq->errorCodeValid &&
1726 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1727 auto timeout = std::chrono::system_clock::now() +
1728 std::chrono::seconds(kStreamBufferTimeoutSec);
1729 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1730 }
1731
1732 ASSERT_FALSE(inflightReq->errorCodeValid);
1733 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1734
1735 request.frameNumber++;
1736 // Empty settings should be supported after the first call
1737 // for repeating requests.
1738 request.settings.metadata.clear();
1739 request.physicalCameraSettings[0].settings.metadata.clear();
1740 // The buffer has been registered to HAL by bufferId, so per
1741 // API contract we should send a null handle for this buffer
1742 request.outputBuffers[0].buffer = NativeHandle();
1743 mInflightMap.clear();
1744 inflightReq = std::make_shared<InFlightRequest>(
1745 static_cast<ssize_t>(physicalIds.size()), false, supportsPartialResults,
1746 partialResultCount, physicalIds, resultQueue);
1747 mInflightMap[request.frameNumber] = inflightReq;
1748 }
1749
1750 returnStatus =
1751 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1752 ASSERT_TRUE(returnStatus.isOk());
1753 ASSERT_EQ(numRequestProcessed, 1u);
1754
1755 {
1756 std::unique_lock<std::mutex> l(mLock);
1757 while (!inflightReq->errorCodeValid &&
1758 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1759 auto timeout = std::chrono::system_clock::now() +
1760 std::chrono::seconds(kStreamBufferTimeoutSec);
1761 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1762 }
1763
1764 ASSERT_FALSE(inflightReq->errorCodeValid);
1765 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1766 }
1767
1768        // An invalid physical camera id should cause processCaptureRequest to fail
1769 frameNumber++;
1770 camSettings[0].physicalCameraId = invalidPhysicalId;
1771 camSettings[0].settings.metadata = settings;
1772
1773 request.physicalCameraSettings = camSettings; // Invalid camera settings
1774 returnStatus =
1775 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1776 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
1777 returnStatus.getServiceSpecificError());
1778
1779 defaultPreviewSettings.unlock(settingsBuffer);
1780 filteredSettings.unlock(filteredSettingsBuffer);
1781
1782 if (useHalBufManager) {
1783 std::vector<int32_t> streamIds(halStreams.size());
1784 for (size_t i = 0; i < streamIds.size(); i++) {
1785 streamIds[i] = halStreams[i].id;
1786 }
1787 verifyBuffersReturned(mSession, streamIds, cb);
1788 }
1789
1790 ret = mSession->close();
1791 mSession = nullptr;
1792 ASSERT_TRUE(ret.isOk());
1793 }
1794}
1795
1796// Generate and verify an ultra high resolution capture request
1797TEST_P(CameraAidlTest, processUltraHighResolutionRequest) {
1798 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1799 int64_t bufferId = 1;
1800 int32_t frameNumber = 1;
1801 CameraMetadata settings;
1802
1803 for (const auto& name : cameraDeviceNames) {
1804 std::string version, deviceId;
1805 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1806 CameraMetadata meta;
1807
1808 std::shared_ptr<ICameraDevice> unusedDevice;
1809 openEmptyDeviceSession(name, mProvider, &mSession, &meta, &unusedDevice);
1810 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1811 if (!isUltraHighResolution(staticMeta)) {
1812 ndk::ScopedAStatus ret = mSession->close();
1813 mSession = nullptr;
1814 ASSERT_TRUE(ret.isOk());
1815 continue;
1816 }
1817 CameraMetadata req;
1818 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
1819 ndk::ScopedAStatus ret =
1820 mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE, &req);
1821 ASSERT_TRUE(ret.isOk());
1822
1823 const camera_metadata_t* metadata =
1824 reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
1825 size_t expectedSize = req.metadata.size();
1826 int result = validate_camera_metadata_structure(metadata, &expectedSize);
1827 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
1828
1829 size_t entryCount = get_camera_metadata_entry_count(metadata);
1830 ASSERT_GT(entryCount, 0u);
1831 defaultSettings = metadata;
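        // Request maximum-resolution sensor pixel mode so the capture runs at full sensor resolution.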
1832 uint8_t sensorPixelMode =
1833 static_cast<uint8_t>(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
1834 ASSERT_EQ(::android::OK,
1835 defaultSettings.update(ANDROID_SENSOR_PIXEL_MODE, &sensorPixelMode, 1));
1836
1837 const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
1838 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1839 settings.metadata = std::vector(
1840 rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1841 overrideRotateAndCrop(&settings);
1842
1843 ret = mSession->close();
1844 mSession = nullptr;
1845 ASSERT_TRUE(ret.isOk());
1846
1847 std::vector<HalStream> halStreams;
1848 bool supportsPartialResults = false;
1849 bool useHalBufManager = false;
1850 int32_t partialResultCount = 0;
1851 Stream previewStream;
1852 std::shared_ptr<DeviceCb> cb;
1853
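        // Verify maximum-resolution captures for both YUV and RAW16 outputs.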
1854 std::list<PixelFormat> pixelFormats = {PixelFormat::YCBCR_420_888, PixelFormat::RAW16};
1855 for (PixelFormat format : pixelFormats) {
Emilian Peevdda1eb72022-07-28 16:37:40 -07001856 previewStream.usage =
1857 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1858 GRALLOC1_CONSUMER_USAGE_CPU_READ);
1859 previewStream.dataSpace = Dataspace::UNKNOWN;
Avichal Rakesh362242f2022-02-08 12:40:53 -08001860 configureStreams(name, mProvider, format, &mSession, &previewStream, &halStreams,
1861 &supportsPartialResults, &partialResultCount, &useHalBufManager, &cb,
1862 0, /*maxResolution*/ true);
1863 ASSERT_NE(mSession, nullptr);
1864
1865 ::aidl::android::hardware::common::fmq::MQDescriptor<
1866 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1867 descriptor;
1868 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1869 ASSERT_TRUE(resultQueueRet.isOk());
1870
1871 std::shared_ptr<ResultMetadataQueue> resultQueue =
1872 std::make_shared<ResultMetadataQueue>(descriptor);
1873 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1874 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
1875 resultQueue = nullptr;
1876 // Don't use the queue onwards.
1877 }
1878
1879 std::vector<buffer_handle_t> graphicBuffers;
1880 graphicBuffers.reserve(halStreams.size());
1881 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1882 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1883 partialResultCount, std::unordered_set<std::string>(), resultQueue);
1884
1885 std::vector<CaptureRequest> requests(1);
1886 CaptureRequest& request = requests[0];
1887 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1888 outputBuffers.resize(halStreams.size());
1889
1890 size_t k = 0;
1891 for (const auto& halStream : halStreams) {
1892 buffer_handle_t buffer_handle;
1893 if (useHalBufManager) {
1894 outputBuffers[k] = {halStream.id, 0,
1895 NativeHandle(), BufferStatus::OK,
1896 NativeHandle(), NativeHandle()};
1897 } else {
1898 allocateGraphicBuffer(previewStream.width, previewStream.height,
1899 android_convertGralloc1To0Usage(
1900 static_cast<uint64_t>(halStream.producerUsage),
1901 static_cast<uint64_t>(halStream.consumerUsage)),
1902 halStream.overrideFormat, &buffer_handle);
1903 graphicBuffers.push_back(buffer_handle);
1904 outputBuffers[k] = {
1905 halStream.id, bufferId, ::android::makeToAidl(buffer_handle),
1906 BufferStatus::OK, NativeHandle(), NativeHandle()};
1907 bufferId++;
1908 }
1909 k++;
1910 }
1911
1912 request.inputBuffer = {
1913 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
1914 request.frameNumber = frameNumber;
1915 request.fmqSettingsSize = 0;
1916 request.settings = settings;
1917 request.inputWidth = 0;
1918 request.inputHeight = 0;
1919
1920 {
1921 std::unique_lock<std::mutex> l(mLock);
1922 mInflightMap.clear();
1923 mInflightMap[frameNumber] = inflightReq;
1924 }
1925
1926 int32_t numRequestProcessed = 0;
1927 std::vector<BufferCache> cachesToRemove;
1928 ndk::ScopedAStatus returnStatus =
1929 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1930 ASSERT_TRUE(returnStatus.isOk());
1931 ASSERT_EQ(numRequestProcessed, 1u);
1932
1933 {
1934 std::unique_lock<std::mutex> l(mLock);
1935 while (!inflightReq->errorCodeValid &&
1936 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1937 auto timeout = std::chrono::system_clock::now() +
1938 std::chrono::seconds(kStreamBufferTimeoutSec);
1939 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1940 }
1941
1942 ASSERT_FALSE(inflightReq->errorCodeValid);
1943 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1944 }
1945 if (useHalBufManager) {
1946 std::vector<int32_t> streamIds(halStreams.size());
1947 for (size_t i = 0; i < streamIds.size(); i++) {
1948 streamIds[i] = halStreams[i].id;
1949 }
1950 verifyBuffersReturned(mSession, streamIds, cb);
1951 }
1952
1953 ret = mSession->close();
1954 mSession = nullptr;
1955 ASSERT_TRUE(ret.isOk());
1956 }
1957 }
1958}
1959
1960// Generate and verify a 10-bit dynamic range capture request
1961TEST_P(CameraAidlTest, process10BitDynamicRangeRequest) {
1962 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001963 CameraMetadata settings;
1964
1965 for (const auto& name : cameraDeviceNames) {
1966 std::string version, deviceId;
1967 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1968 CameraMetadata meta;
1969 std::shared_ptr<ICameraDevice> device;
1970 openEmptyDeviceSession(name, mProvider, &mSession, &meta, &device);
1971 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1972 if (!is10BitDynamicRangeCapable(staticMeta)) {
1973 ndk::ScopedAStatus ret = mSession->close();
1974 mSession = nullptr;
1975 ASSERT_TRUE(ret.isOk());
1976 continue;
1977 }
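        // Exercise every advertised 10-bit dynamic range profile.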
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001978 std::vector<RequestAvailableDynamicRangeProfilesMap> profileList;
Avichal Rakesh362242f2022-02-08 12:40:53 -08001979 get10BitDynamicRangeProfiles(staticMeta, &profileList);
1980 ASSERT_FALSE(profileList.empty());
1981
1982 CameraMetadata req;
1983 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
1984 ndk::ScopedAStatus ret =
Emilian Peevdda1eb72022-07-28 16:37:40 -07001985 mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &req);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001986 ASSERT_TRUE(ret.isOk());
1987
1988 const camera_metadata_t* metadata =
1989 reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
1990 size_t expectedSize = req.metadata.size();
1991 int result = validate_camera_metadata_structure(metadata, &expectedSize);
1992 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
1993
1994 size_t entryCount = get_camera_metadata_entry_count(metadata);
1995 ASSERT_GT(entryCount, 0u);
1996 defaultSettings = metadata;
1997
1998 const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
1999 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
2000 settings.metadata = std::vector(
2001 rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
2002 overrideRotateAndCrop(&settings);
2003
2004 ret = mSession->close();
2005 mSession = nullptr;
2006 ASSERT_TRUE(ret.isOk());
2007
2008 std::vector<HalStream> halStreams;
2009 bool supportsPartialResults = false;
2010 bool useHalBufManager = false;
2011 int32_t partialResultCount = 0;
2012 Stream previewStream;
2013 std::shared_ptr<DeviceCb> cb;
2014 for (const auto& profile : profileList) {
Emilian Peevdda1eb72022-07-28 16:37:40 -07002015 previewStream.usage =
2016 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2017 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
2018 previewStream.dataSpace = getDataspace(PixelFormat::IMPLEMENTATION_DEFINED);
Avichal Rakesh362242f2022-02-08 12:40:53 -08002019 configureStreams(name, mProvider, PixelFormat::IMPLEMENTATION_DEFINED, &mSession,
2020 &previewStream, &halStreams, &supportsPartialResults,
2021 &partialResultCount, &useHalBufManager, &cb, 0,
2022 /*maxResolution*/ false, profile);
2023 ASSERT_NE(mSession, nullptr);
2024
2025 ::aidl::android::hardware::common::fmq::MQDescriptor<
2026 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2027 descriptor;
2028 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2029 ASSERT_TRUE(resultQueueRet.isOk());
2030
2031 std::shared_ptr<ResultMetadataQueue> resultQueue =
2032 std::make_shared<ResultMetadataQueue>(descriptor);
2033 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2034 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
2035 resultQueue = nullptr;
2036 // Don't use the queue onwards.
2037 }
2038
Emilian Peevdda1eb72022-07-28 16:37:40 -07002039 mInflightMap.clear();
2040            // Stream for as long as needed to fill the HAL's in-flight queue
2041 std::vector<CaptureRequest> requests(halStreams[0].maxBuffers);
Avichal Rakesh362242f2022-02-08 12:40:53 -08002042
Emilian Peev470d1382023-01-18 11:09:09 -08002043 for (int32_t requestId = 0; requestId < requests.size(); requestId++) {
Emilian Peevdda1eb72022-07-28 16:37:40 -07002044 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
2045 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
2046 partialResultCount, std::unordered_set<std::string>(), resultQueue);
Avichal Rakesh362242f2022-02-08 12:40:53 -08002047
Emilian Peev470d1382023-01-18 11:09:09 -08002048 CaptureRequest& request = requests[requestId];
Emilian Peevdda1eb72022-07-28 16:37:40 -07002049 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2050 outputBuffers.resize(halStreams.size());
Avichal Rakesh362242f2022-02-08 12:40:53 -08002051
Emilian Peevdda1eb72022-07-28 16:37:40 -07002052 size_t k = 0;
2053 inflightReq->mOutstandingBufferIds.resize(halStreams.size());
2054 std::vector<buffer_handle_t> graphicBuffers;
2055 graphicBuffers.reserve(halStreams.size());
Avichal Rakesh362242f2022-02-08 12:40:53 -08002056
Emilian Peev470d1382023-01-18 11:09:09 -08002057 auto bufferId = requestId + 1; // Buffer id value 0 is not valid
Emilian Peevdda1eb72022-07-28 16:37:40 -07002058 for (const auto& halStream : halStreams) {
2059 buffer_handle_t buffer_handle;
2060 if (useHalBufManager) {
2061 outputBuffers[k] = {halStream.id, 0,
2062 NativeHandle(), BufferStatus::OK,
2063 NativeHandle(), NativeHandle()};
2064 } else {
2065 auto usage = android_convertGralloc1To0Usage(
2066 static_cast<uint64_t>(halStream.producerUsage),
2067 static_cast<uint64_t>(halStream.consumerUsage));
2068 allocateGraphicBuffer(previewStream.width, previewStream.height, usage,
2069 halStream.overrideFormat, &buffer_handle);
2070
2071 inflightReq->mOutstandingBufferIds[halStream.id][bufferId] = buffer_handle;
2072 graphicBuffers.push_back(buffer_handle);
2073 outputBuffers[k] = {halStream.id, bufferId,
2074 android::makeToAidl(buffer_handle), BufferStatus::OK, NativeHandle(),
2075 NativeHandle()};
Emilian Peevdda1eb72022-07-28 16:37:40 -07002076 }
2077 k++;
Avichal Rakesh362242f2022-02-08 12:40:53 -08002078 }
Avichal Rakesh362242f2022-02-08 12:40:53 -08002079
Emilian Peevdda1eb72022-07-28 16:37:40 -07002080 request.inputBuffer = {
2081 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
Emilian Peev470d1382023-01-18 11:09:09 -08002082 request.frameNumber = bufferId;
Emilian Peevdda1eb72022-07-28 16:37:40 -07002083 request.fmqSettingsSize = 0;
2084 request.settings = settings;
2085 request.inputWidth = 0;
2086 request.inputHeight = 0;
Avichal Rakesh362242f2022-02-08 12:40:53 -08002087
Emilian Peevdda1eb72022-07-28 16:37:40 -07002088 {
2089 std::unique_lock<std::mutex> l(mLock);
Emilian Peev470d1382023-01-18 11:09:09 -08002090 mInflightMap[bufferId] = inflightReq;
Emilian Peevdda1eb72022-07-28 16:37:40 -07002091 }
2092
Avichal Rakesh362242f2022-02-08 12:40:53 -08002093 }
2094
2095 int32_t numRequestProcessed = 0;
2096 std::vector<BufferCache> cachesToRemove;
2097 ndk::ScopedAStatus returnStatus =
Emilian Peevdda1eb72022-07-28 16:37:40 -07002098 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
Avichal Rakesh362242f2022-02-08 12:40:53 -08002099 ASSERT_TRUE(returnStatus.isOk());
Emilian Peevdda1eb72022-07-28 16:37:40 -07002100 ASSERT_EQ(numRequestProcessed, requests.size());
Avichal Rakesh362242f2022-02-08 12:40:53 -08002101
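            // Signal the end of the streaming burst before collecting results.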
Emilian Peevdda1eb72022-07-28 16:37:40 -07002102 returnStatus = mSession->repeatingRequestEnd(requests.size() - 1,
2103 std::vector<int32_t> {halStreams[0].id});
2104 ASSERT_TRUE(returnStatus.isOk());
2105
Emilian Peev470d1382023-01-18 11:09:09 -08002106 // We are keeping frame numbers and buffer ids consistent. Buffer id value of 0
2107 // is used to indicate a buffer that is not present/available so buffer ids as well
2108 // as frame numbers begin with 1.
2109 for (int32_t frameNumber = 1; frameNumber <= requests.size(); frameNumber++) {
Emilian Peevdda1eb72022-07-28 16:37:40 -07002110 const auto& inflightReq = mInflightMap[frameNumber];
Avichal Rakesh362242f2022-02-08 12:40:53 -08002111 std::unique_lock<std::mutex> l(mLock);
2112 while (!inflightReq->errorCodeValid &&
2113 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2114 auto timeout = std::chrono::system_clock::now() +
2115 std::chrono::seconds(kStreamBufferTimeoutSec);
2116 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2117 }
2118
Shuzhen Wang0f56c562023-04-03 16:58:59 -07002119 waitForReleaseFence(inflightReq->resultOutputBuffers);
2120
Avichal Rakesh362242f2022-02-08 12:40:53 -08002121 ASSERT_FALSE(inflightReq->errorCodeValid);
2122 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2123 verify10BitMetadata(mHandleImporter, *inflightReq, profile);
2124 }
Emilian Peevdda1eb72022-07-28 16:37:40 -07002125
Avichal Rakesh362242f2022-02-08 12:40:53 -08002126 if (useHalBufManager) {
2127 std::vector<int32_t> streamIds(halStreams.size());
2128 for (size_t i = 0; i < streamIds.size(); i++) {
2129 streamIds[i] = halStreams[i].id;
2130 }
2131 mSession->signalStreamFlush(streamIds, /*streamConfigCounter*/ 0);
2132 cb->waitForBuffersReturned();
2133 }
2134
2135 ret = mSession->close();
2136 mSession = nullptr;
2137 ASSERT_TRUE(ret.isOk());
2138 }
2139 }
2140}
2141
Austin Borger4728fc42022-07-15 11:27:53 -07002142TEST_P(CameraAidlTest, process8BitColorSpaceRequests) {
Austin Borger54b22362023-03-22 11:25:06 -07002143 static int profiles[] = {ColorSpaceNamed::DISPLAY_P3, ColorSpaceNamed::SRGB};
Austin Borger4728fc42022-07-15 11:27:53 -07002144
2145 for (int32_t i = 0; i < sizeof(profiles) / sizeof(profiles[0]); i++) {
2146 processColorSpaceRequest(static_cast<RequestAvailableColorSpaceProfilesMap>(profiles[i]),
2147 static_cast<RequestAvailableDynamicRangeProfilesMap>(
2148 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD));
2149 }
2150}
2151
2152TEST_P(CameraAidlTest, process10BitColorSpaceRequests) {
2153 static const camera_metadata_enum_android_request_available_dynamic_range_profiles_map
2154 dynamicRangeProfiles[] = {
2155 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10,
2156 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10,
2157 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS,
2158 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF,
2159 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO,
2160 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM,
2161 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO,
2162 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF,
2163 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF_PO,
2164 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM,
2165 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM_PO
2166 };
2167
Austin Borger54b22362023-03-22 11:25:06 -07002168 // Process all dynamic range profiles with BT2020_HLG
Austin Borger4728fc42022-07-15 11:27:53 -07002169 for (int32_t i = 0; i < sizeof(dynamicRangeProfiles) / sizeof(dynamicRangeProfiles[0]); i++) {
2170 processColorSpaceRequest(
Austin Borger54b22362023-03-22 11:25:06 -07002171 static_cast<RequestAvailableColorSpaceProfilesMap>(ColorSpaceNamed::BT2020_HLG),
Austin Borger4728fc42022-07-15 11:27:53 -07002172 static_cast<RequestAvailableDynamicRangeProfilesMap>(dynamicRangeProfiles[i]));
2173 }
2174}
2175
Shuzhen Wang4dd6a512022-11-08 20:47:20 +00002176TEST_P(CameraAidlTest, processZoomSettingsOverrideRequests) {
2177 const int32_t kFrameCount = 5;
2178 const int32_t kTestCases = 2;
Shuzhen Wang38ddb272023-05-22 09:40:28 -07002179 const bool kOverrideSequence[kTestCases][kFrameCount] = {// ZOOM, ZOOM, ZOOM, ZOOM, ZOOM;
2180 {true, true, true, true, true},
2181 // OFF, ZOOM, ZOOM, ZOOM, OFF;
2182 {false, true, true, true, false}};
Shuzhen Wang4dd6a512022-11-08 20:47:20 +00002183 const bool kExpectedOverrideResults[kTestCases][kFrameCount] = {
Shuzhen Wang38ddb272023-05-22 09:40:28 -07002184            // All results should be overridden except the last one. The last result's
2185 // zoom doesn't have speed-up.
2186 {true, true, true, true, false},
2187 // Because we require at least 1 frame speed-up, request #1, #2 and #3
2188 // will be overridden.
2189 {true, true, true, false, false}};
Shuzhen Wang4dd6a512022-11-08 20:47:20 +00002190
2191 for (int i = 0; i < kTestCases; i++) {
2192 processZoomSettingsOverrideRequests(kFrameCount, kOverrideSequence[i],
2193 kExpectedOverrideResults[i]);
2194 }
2195}
2196
Avichal Rakesh362242f2022-02-08 12:40:53 -08002197// Generate and verify a burst containing alternating sensor sensitivity values
2198TEST_P(CameraAidlTest, processCaptureRequestBurstISO) {
2199 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2200 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2201 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2202 int64_t bufferId = 1;
2203 int32_t frameNumber = 1;
2204 float isoTol = .03f;
2205 CameraMetadata settings;
2206
2207 for (const auto& name : cameraDeviceNames) {
2208 CameraMetadata meta;
2209 settings.metadata.clear();
2210 std::shared_ptr<ICameraDevice> unusedDevice;
2211 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2212 &unusedDevice /*out*/);
2213 camera_metadata_t* staticMetaBuffer =
2214 clone_camera_metadata(reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
2215 ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta(
2216 staticMetaBuffer);
2217
2218 camera_metadata_entry_t hwLevel = staticMeta.find(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL);
2219 ASSERT_TRUE(0 < hwLevel.count);
2220 if (ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED == hwLevel.data.u8[0] ||
2221 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL == hwLevel.data.u8[0]) {
2222 // Limited/External devices can skip this test
2223 ndk::ScopedAStatus ret = mSession->close();
2224 mSession = nullptr;
2225 ASSERT_TRUE(ret.isOk());
2226 continue;
2227 }
2228
2229 camera_metadata_entry_t isoRange = staticMeta.find(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE);
2230 ASSERT_EQ(isoRange.count, 2u);
2231
2232 ndk::ScopedAStatus ret = mSession->close();
2233 mSession = nullptr;
2234 ASSERT_TRUE(ret.isOk());
2235
2236 bool supportsPartialResults = false;
2237 bool useHalBufManager = false;
2238 int32_t partialResultCount = 0;
2239 Stream previewStream;
2240 std::vector<HalStream> halStreams;
2241 std::shared_ptr<DeviceCb> cb;
2242 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2243 &previewStream /*out*/, &halStreams /*out*/,
2244 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2245 &useHalBufManager /*out*/, &cb /*out*/);
2246
2247 ::aidl::android::hardware::common::fmq::MQDescriptor<
2248 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2249 descriptor;
2250 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2251 std::shared_ptr<ResultMetadataQueue> resultQueue =
2252 std::make_shared<ResultMetadataQueue>(descriptor);
2253 ASSERT_TRUE(resultQueueRet.isOk());
2254 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2255 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
2256 resultQueue = nullptr;
2257 // Don't use the queue onwards.
2258 }
2259
2260 ret = mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &settings);
2261 ASSERT_TRUE(ret.isOk());
2262
2263 ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
2264 std::vector<CaptureRequest> requests(kBurstFrameCount);
2265 std::vector<buffer_handle_t> buffers(kBurstFrameCount);
2266 std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount);
2267 std::vector<int32_t> isoValues(kBurstFrameCount);
2268 std::vector<CameraMetadata> requestSettings(kBurstFrameCount);
2269
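        // Build a burst that alternates between the minimum and maximum supported sensor sensitivity.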
2270 for (int32_t i = 0; i < kBurstFrameCount; i++) {
2271 std::unique_lock<std::mutex> l(mLock);
2272 CaptureRequest& request = requests[i];
2273 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2274 outputBuffers.resize(1);
2275 StreamBuffer& outputBuffer = outputBuffers[0];
2276
2277 isoValues[i] = ((i % 2) == 0) ? isoRange.data.i32[0] : isoRange.data.i32[1];
2278 if (useHalBufManager) {
2279 outputBuffer = {halStreams[0].id, 0,
2280 NativeHandle(), BufferStatus::OK,
2281 NativeHandle(), NativeHandle()};
2282 } else {
2283 allocateGraphicBuffer(previewStream.width, previewStream.height,
2284 android_convertGralloc1To0Usage(
2285 static_cast<uint64_t>(halStreams[0].producerUsage),
2286 static_cast<uint64_t>(halStreams[0].consumerUsage)),
2287 halStreams[0].overrideFormat, &buffers[i]);
2288 outputBuffer = {halStreams[0].id, bufferId + i, ::android::makeToAidl(buffers[i]),
2289 BufferStatus::OK, NativeHandle(), NativeHandle()};
2290 }
2291
2292 requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));
2293
2294 // Disable all 3A routines
2295 uint8_t mode = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF);
2296 ASSERT_EQ(::android::OK, requestMeta.update(ANDROID_CONTROL_MODE, &mode, 1));
2297 ASSERT_EQ(::android::OK,
2298 requestMeta.update(ANDROID_SENSOR_SENSITIVITY, &isoValues[i], 1));
2299 camera_metadata_t* metaBuffer = requestMeta.release();
2300 uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2301 requestSettings[i].metadata = std::vector(
2302 rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2303 overrideRotateAndCrop(&(requestSettings[i]));
2304
2305 request.frameNumber = frameNumber + i;
2306 request.fmqSettingsSize = 0;
2307 request.settings = requestSettings[i];
2308 request.inputBuffer = {
2309 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2310
2311 inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2312 partialResultCount, resultQueue);
2313 mInflightMap[frameNumber + i] = inflightReqs[i];
2314 }
2315
2316 int32_t numRequestProcessed = 0;
2317 std::vector<BufferCache> cachesToRemove;
2318
2319 ndk::ScopedAStatus returnStatus =
2320 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2321 ASSERT_TRUE(returnStatus.isOk());
2322 ASSERT_EQ(numRequestProcessed, kBurstFrameCount);
2323
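        // Wait for each result and verify that the reported sensitivity is within tolerance of the requested value.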
2324 for (size_t i = 0; i < kBurstFrameCount; i++) {
2325 std::unique_lock<std::mutex> l(mLock);
2326 while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
2327 (!inflightReqs[i]->haveResultMetadata))) {
2328 auto timeout = std::chrono::system_clock::now() +
2329 std::chrono::seconds(kStreamBufferTimeoutSec);
2330 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2331 }
2332
2333 ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
2334 ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
2335 ASSERT_EQ(previewStream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
2336 ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
2337 ASSERT_TRUE(inflightReqs[i]->collectedResult.exists(ANDROID_SENSOR_SENSITIVITY));
2338 camera_metadata_entry_t isoResult =
2339 inflightReqs[i]->collectedResult.find(ANDROID_SENSOR_SENSITIVITY);
2340 ASSERT_TRUE(std::abs(isoResult.data.i32[0] - isoValues[i]) <=
2341 std::round(isoValues[i] * isoTol));
2342 }
2343
2344 if (useHalBufManager) {
2345 verifyBuffersReturned(mSession, previewStream.id, cb);
2346 }
2347 ret = mSession->close();
2348 mSession = nullptr;
2349 ASSERT_TRUE(ret.isOk());
2350 }
2351}
2352
2353// Test whether an incorrect capture request with missing settings will
2354// be reported correctly.
2355TEST_P(CameraAidlTest, processCaptureRequestInvalidSinglePreview) {
2356 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2357 std::vector<AvailableStream> outputPreviewStreams;
2358 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2359 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2360 int64_t bufferId = 1;
2361 int32_t frameNumber = 1;
2362 CameraMetadata settings;
2363
2364 for (const auto& name : cameraDeviceNames) {
2365 Stream previewStream;
2366 std::vector<HalStream> halStreams;
2367 std::shared_ptr<DeviceCb> cb;
2368 bool supportsPartialResults = false;
2369 bool useHalBufManager = false;
2370 int32_t partialResultCount = 0;
2371 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2372 &previewStream /*out*/, &halStreams /*out*/,
2373 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2374 &useHalBufManager /*out*/, &cb /*out*/);
2375 ASSERT_NE(mSession, nullptr);
2376 ASSERT_FALSE(halStreams.empty());
2377
2378 buffer_handle_t buffer_handle = nullptr;
2379
2380 if (useHalBufManager) {
2381 bufferId = 0;
2382 } else {
2383 allocateGraphicBuffer(previewStream.width, previewStream.height,
2384 android_convertGralloc1To0Usage(
2385 static_cast<uint64_t>(halStreams[0].producerUsage),
2386 static_cast<uint64_t>(halStreams[0].consumerUsage)),
2387 halStreams[0].overrideFormat, &buffer_handle);
2388 }
2389
2390 std::vector<CaptureRequest> requests(1);
2391 CaptureRequest& request = requests[0];
2392 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2393 outputBuffers.resize(1);
2394 StreamBuffer& outputBuffer = outputBuffers[0];
2395
2396 outputBuffer = {
2397 halStreams[0].id,
2398 bufferId,
2399 buffer_handle == nullptr ? NativeHandle() : ::android::makeToAidl(buffer_handle),
2400 BufferStatus::OK,
2401 NativeHandle(),
2402 NativeHandle()};
2403
2404 request.inputBuffer = {
2405 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2406 request.frameNumber = frameNumber;
2407 request.fmqSettingsSize = 0;
2408 request.settings = settings;
2409
2410        // Settings were not correctly initialized, so this request should fail
2411 int32_t numRequestProcessed = 0;
2412 std::vector<BufferCache> cachesToRemove;
2413 ndk::ScopedAStatus ret =
2414 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2415 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
2416 ASSERT_EQ(numRequestProcessed, 0u);
2417
2418 ret = mSession->close();
2419 mSession = nullptr;
2420 ASSERT_TRUE(ret.isOk());
2421 }
2422}
2423
2424// Verify camera offline session behavior
2425TEST_P(CameraAidlTest, switchToOffline) {
2426 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2427 AvailableStream threshold = {kMaxStillWidth, kMaxStillHeight,
2428 static_cast<int32_t>(PixelFormat::BLOB)};
2429 int64_t bufferId = 1;
2430 int32_t frameNumber = 1;
2431 CameraMetadata settings;
2432
2433 for (const auto& name : cameraDeviceNames) {
2434 CameraMetadata meta;
2435 {
2436 std::shared_ptr<ICameraDevice> unusedDevice;
2437 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2438 &unusedDevice);
2439 camera_metadata_t* staticMetaBuffer = clone_camera_metadata(
2440 reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
2441 ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta(
2442 staticMetaBuffer);
2443
2444 if (isOfflineSessionSupported(staticMetaBuffer) != Status::OK) {
2445 ndk::ScopedAStatus ret = mSession->close();
2446 mSession = nullptr;
2447 ASSERT_TRUE(ret.isOk());
2448 continue;
2449 }
2450 ndk::ScopedAStatus ret = mSession->close();
2451 mSession = nullptr;
2452 ASSERT_TRUE(ret.isOk());
2453 }
2454
2455 bool supportsPartialResults = false;
2456 int32_t partialResultCount = 0;
2457 Stream stream;
2458 std::vector<HalStream> halStreams;
2459 std::shared_ptr<DeviceCb> cb;
2460 int32_t jpegBufferSize;
2461 bool useHalBufManager;
2462 configureOfflineStillStream(name, mProvider, &threshold, &mSession /*out*/, &stream /*out*/,
2463 &halStreams /*out*/, &supportsPartialResults /*out*/,
2464 &partialResultCount /*out*/, &cb /*out*/,
2465 &jpegBufferSize /*out*/, &useHalBufManager /*out*/);
2466
2467 auto ret = mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE,
2468 &settings);
2469 ASSERT_TRUE(ret.isOk());
2470
2471 ::aidl::android::hardware::common::fmq::MQDescriptor<
2472 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2473 descriptor;
2474
2475 ndk::ScopedAStatus resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2476 ASSERT_TRUE(resultQueueRet.isOk());
2477 std::shared_ptr<ResultMetadataQueue> resultQueue =
2478 std::make_shared<ResultMetadataQueue>(descriptor);
2479 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2480 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
2481 resultQueue = nullptr;
2482 // Don't use the queue onwards.
2483 }
2484
2485 ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
2486
2487 std::vector<buffer_handle_t> buffers(kBurstFrameCount);
2488 std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount);
2489 std::vector<CameraMetadata> requestSettings(kBurstFrameCount);
2490
2491 std::vector<CaptureRequest> requests(kBurstFrameCount);
2492
2493 HalStream halStream = halStreams[0];
2494 for (uint32_t i = 0; i < kBurstFrameCount; i++) {
2495 CaptureRequest& request = requests[i];
2496 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2497 outputBuffers.resize(1);
2498 StreamBuffer& outputBuffer = outputBuffers[0];
2499
2500 std::unique_lock<std::mutex> l(mLock);
2501 if (useHalBufManager) {
2502 outputBuffer = {halStream.id, 0, NativeHandle(), BufferStatus::OK, NativeHandle(),
2503 NativeHandle()};
2504 } else {
2505 // jpeg buffer (w,h) = (blobLen, 1)
2506 allocateGraphicBuffer(jpegBufferSize, /*height*/ 1,
2507 android_convertGralloc1To0Usage(
2508 static_cast<uint64_t>(halStream.producerUsage),
2509 static_cast<uint64_t>(halStream.consumerUsage)),
2510 halStream.overrideFormat, &buffers[i]);
2511 outputBuffer = {halStream.id, bufferId + i, ::android::makeToAidl(buffers[i]),
2512 BufferStatus::OK, NativeHandle(), NativeHandle()};
2513 }
2514
2515 requestMeta.clear();
2516 requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));
2517
2518 camera_metadata_t* metaBuffer = requestMeta.release();
2519 uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2520 requestSettings[i].metadata = std::vector(
2521 rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2522 overrideRotateAndCrop(&requestSettings[i]);
2523
2524 request.frameNumber = frameNumber + i;
2525 request.fmqSettingsSize = 0;
2526 request.settings = requestSettings[i];
2527 request.inputBuffer = {/*streamId*/ -1,
2528 /*bufferId*/ 0, NativeHandle(),
2529 BufferStatus::ERROR, NativeHandle(),
2530 NativeHandle()};
2531
2532 inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2533 partialResultCount, resultQueue);
2534 mInflightMap[frameNumber + i] = inflightReqs[i];
2535 }
2536
2537 int32_t numRequestProcessed = 0;
2538 std::vector<BufferCache> cachesToRemove;
2539
2540 ndk::ScopedAStatus returnStatus =
2541 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2542 ASSERT_TRUE(returnStatus.isOk());
2543 ASSERT_EQ(numRequestProcessed, kBurstFrameCount);
2544
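        // Attempt to switch the in-flight still capture requests to an offline session.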
2545 std::vector<int32_t> offlineStreamIds = {halStream.id};
2546 CameraOfflineSessionInfo offlineSessionInfo;
2547 std::shared_ptr<ICameraOfflineSession> offlineSession;
2548 returnStatus =
2549 mSession->switchToOffline(offlineStreamIds, &offlineSessionInfo, &offlineSession);
2550
2551 if (!halStreams[0].supportOffline) {
2552 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
2553 returnStatus.getServiceSpecificError());
2554 ret = mSession->close();
2555 mSession = nullptr;
2556 ASSERT_TRUE(ret.isOk());
2557 continue;
2558 }
2559
2560 ASSERT_TRUE(returnStatus.isOk());
2561        // The HAL might be unable to find any requests qualified for offline mode.
2562 if (offlineSession == nullptr) {
2563 ret = mSession->close();
2564 mSession = nullptr;
2565 ASSERT_TRUE(ret.isOk());
2566 continue;
2567 }
2568
2569 ASSERT_EQ(offlineSessionInfo.offlineStreams.size(), 1u);
2570 ASSERT_EQ(offlineSessionInfo.offlineStreams[0].id, halStream.id);
2571 ASSERT_NE(offlineSessionInfo.offlineRequests.size(), 0u);
2572
2573        // Close the device session to make sure the offline session does not rely on it
2574 ret = mSession->close();
2575 mSession = nullptr;
2576 ASSERT_TRUE(ret.isOk());
2577
2578 ::aidl::android::hardware::common::fmq::MQDescriptor<
2579 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2580 offlineResultDescriptor;
2581
2582 auto offlineResultQueueRet =
2583 offlineSession->getCaptureResultMetadataQueue(&offlineResultDescriptor);
2584 std::shared_ptr<ResultMetadataQueue> offlineResultQueue =
2585                std::make_shared<ResultMetadataQueue>(offlineResultDescriptor);
2586 if (!offlineResultQueue->isValid() || offlineResultQueue->availableToWrite() <= 0) {
2587 ALOGE("%s: offline session returns empty result metadata fmq, not use it", __func__);
2588 offlineResultQueue = nullptr;
2589 // Don't use the queue onwards.
2590 }
2591 ASSERT_TRUE(offlineResultQueueRet.isOk());
2592
2593 updateInflightResultQueue(offlineResultQueue);
2594
2595 ret = offlineSession->setCallback(cb);
2596 ASSERT_TRUE(ret.isOk());
2597
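        // Wait for the remaining results, which are now delivered through the offline session callback.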
2598 for (size_t i = 0; i < kBurstFrameCount; i++) {
2599 std::unique_lock<std::mutex> l(mLock);
2600 while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
2601 (!inflightReqs[i]->haveResultMetadata))) {
2602 auto timeout = std::chrono::system_clock::now() +
2603 std::chrono::seconds(kStreamBufferTimeoutSec);
2604 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2605 }
2606
2607 ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
2608 ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
2609 ASSERT_EQ(stream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
2610 ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
2611 }
2612
2613 ret = offlineSession->close();
2614 ASSERT_TRUE(ret.isOk());
2615 }
2616}
2617
2618// Check whether an invalid capture request with missing output buffers
2619// will be reported correctly.
2620TEST_P(CameraAidlTest, processCaptureRequestInvalidBuffer) {
2621 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2622 std::vector<AvailableStream> outputBlobStreams;
2623 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2624 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2625 int32_t frameNumber = 1;
2626 CameraMetadata settings;
2627
2628 for (const auto& name : cameraDeviceNames) {
2629 Stream previewStream;
2630 std::vector<HalStream> halStreams;
2631 std::shared_ptr<DeviceCb> cb;
2632 bool supportsPartialResults = false;
2633 bool useHalBufManager = false;
2634 int32_t partialResultCount = 0;
2635 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2636 &previewStream /*out*/, &halStreams /*out*/,
2637 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2638 &useHalBufManager /*out*/, &cb /*out*/);
2639
2640 RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
2641 ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings);
2642 ASSERT_TRUE(ret.isOk());
2643 overrideRotateAndCrop(&settings);
2644
2645 std::vector<CaptureRequest> requests(1);
2646 CaptureRequest& request = requests[0];
2647 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2648 outputBuffers.resize(1);
2649 // Empty output buffer
2650 outputBuffers[0] = {
2651 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2652
2653 request.inputBuffer = {
2654 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2655 request.frameNumber = frameNumber;
2656 request.fmqSettingsSize = 0;
2657 request.settings = settings;
2658
2659        // Output buffers are missing, so this request should fail
2660 int32_t numRequestProcessed = 0;
2661 std::vector<BufferCache> cachesToRemove;
2662 ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2663 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
2664 ASSERT_EQ(numRequestProcessed, 0u);
2665
2666 ret = mSession->close();
2667 mSession = nullptr;
2668 ASSERT_TRUE(ret.isOk());
2669 }
2670}
2671
2672// Generate, trigger and flush a preview request
2673TEST_P(CameraAidlTest, flushPreviewRequest) {
2674 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2675 std::vector<AvailableStream> outputPreviewStreams;
2676 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2677 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2678 int64_t bufferId = 1;
2679 int32_t frameNumber = 1;
2680 CameraMetadata settings;
2681
2682 for (const auto& name : cameraDeviceNames) {
2683 Stream previewStream;
2684 std::vector<HalStream> halStreams;
2685 std::shared_ptr<DeviceCb> cb;
2686 bool supportsPartialResults = false;
2687 bool useHalBufManager = false;
2688 int32_t partialResultCount = 0;
2689
2690 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2691 &previewStream /*out*/, &halStreams /*out*/,
2692 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2693 &useHalBufManager /*out*/, &cb /*out*/);
2694
2695 ASSERT_NE(mSession, nullptr);
2696 ASSERT_NE(cb, nullptr);
2697 ASSERT_FALSE(halStreams.empty());
2698
2699 ::aidl::android::hardware::common::fmq::MQDescriptor<
2700 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2701 descriptor;
2702
2703 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2704 std::shared_ptr<ResultMetadataQueue> resultQueue =
2705 std::make_shared<ResultMetadataQueue>(descriptor);
2706 ASSERT_TRUE(resultQueueRet.isOk());
2707 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2708 ALOGE("%s: HAL returned an empty result metadata fmq, not using it", __func__);
2709 resultQueue = nullptr;
2710 // Don't use the queue from here onwards.
2711 }
2712
2713 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
2714 1, false, supportsPartialResults, partialResultCount, resultQueue);
2715 RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
2716
2717 ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings);
2718 ASSERT_TRUE(ret.isOk());
2719 overrideRotateAndCrop(&settings);
2720
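// With the HAL buffer manager enabled the HAL requests buffers itself, so only a placeholder
// buffer id is submitted; otherwise a gralloc buffer is allocated up front for the preview
// stream.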
2721 buffer_handle_t buffer_handle;
2722 std::vector<CaptureRequest> requests(1);
2723 CaptureRequest& request = requests[0];
2724 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2725 outputBuffers.resize(1);
2726 StreamBuffer& outputBuffer = outputBuffers[0];
2727 if (useHalBufManager) {
2728 bufferId = 0;
2729 outputBuffer = {halStreams[0].id, bufferId, NativeHandle(),
2730 BufferStatus::OK, NativeHandle(), NativeHandle()};
2731 } else {
2732 allocateGraphicBuffer(previewStream.width, previewStream.height,
2733 android_convertGralloc1To0Usage(
2734 static_cast<uint64_t>(halStreams[0].producerUsage),
2735 static_cast<uint64_t>(halStreams[0].consumerUsage)),
2736 halStreams[0].overrideFormat, &buffer_handle);
2737 outputBuffer = {halStreams[0].id, bufferId, ::android::makeToAidl(buffer_handle),
2738 BufferStatus::OK, NativeHandle(), NativeHandle()};
2739 }
2740
2741 request.frameNumber = frameNumber;
2742 request.fmqSettingsSize = 0;
2743 request.settings = settings;
2744 request.inputBuffer = {
2745 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2746
2747 {
2748 std::unique_lock<std::mutex> l(mLock);
2749 mInflightMap.clear();
2750 mInflightMap[frameNumber] = inflightReq;
2751 }
2752
2753 int32_t numRequestProcessed = 0;
2754 std::vector<BufferCache> cachesToRemove;
2755 ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2756 ASSERT_TRUE(ret.isOk());
2757 ASSERT_EQ(numRequestProcessed, 1u);
2758
2759 // Flush before waiting for request to complete.
2760 ndk::ScopedAStatus returnStatus = mSession->flush();
2761 ASSERT_TRUE(returnStatus.isOk());
2762
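// A flushed request may either complete normally or be dropped with a request/result/buffer
// error. Both outcomes are acceptable; any other error fails the test.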
2763 {
2764 std::unique_lock<std::mutex> l(mLock);
2765 while (!inflightReq->errorCodeValid &&
2766 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2767 auto timeout = std::chrono::system_clock::now() +
2768 std::chrono::seconds(kStreamBufferTimeoutSec);
2769 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2770 }
2771
2772 if (!inflightReq->errorCodeValid) {
2773 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2774 ASSERT_EQ(previewStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId);
2775 } else {
2776 switch (inflightReq->errorCode) {
2777 case ErrorCode::ERROR_REQUEST:
2778 case ErrorCode::ERROR_RESULT:
2779 case ErrorCode::ERROR_BUFFER:
2780 // Expected
2781 break;
2782 case ErrorCode::ERROR_DEVICE:
2783 default:
2784 FAIL() << "Unexpected error: "
2785 << static_cast<uint32_t>(inflightReq->errorCode);
2786 }
2787 }
2788 }
2789
2790 if (useHalBufManager) {
2791 verifyBuffersReturned(mSession, previewStream.id, cb);
2792 }
2793
2794 ret = mSession->close();
2795 mSession = nullptr;
2796 ASSERT_TRUE(ret.isOk());
2797 }
2798}
2799
2800// Verify that camera flushes correctly without any pending requests.
2801TEST_P(CameraAidlTest, flushEmpty) {
2802 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2803 std::vector<AvailableStream> outputPreviewStreams;
2804 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2805 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2806
2807 for (const auto& name : cameraDeviceNames) {
2808 Stream previewStream;
2809 std::vector<HalStream> halStreams;
2810 std::shared_ptr<DeviceCb> cb;
2811 bool supportsPartialResults = false;
2812 bool useHalBufManager = false;
2813
2814 int32_t partialResultCount = 0;
2815 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2816 &previewStream /*out*/, &halStreams /*out*/,
2817 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2818 &useHalBufManager /*out*/, &cb /*out*/);
2819
2820 ndk::ScopedAStatus returnStatus = mSession->flush();
2821 ASSERT_TRUE(returnStatus.isOk());
2822
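// No requests are in flight, so no result or error notification is expected and the wait
// below must time out.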
2823 {
2824 std::unique_lock<std::mutex> l(mLock);
2825 auto timeout = std::chrono::system_clock::now() +
2826 std::chrono::milliseconds(kEmptyFlushTimeoutMSec);
2827 ASSERT_EQ(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2828 }
2829
2830 ndk::ScopedAStatus ret = mSession->close();
2831 mSession = nullptr;
2832 ASSERT_TRUE(ret.isOk());
2833 }
2834}
2835
2836// Test camera provider notify method
2837TEST_P(CameraAidlTest, providerDeviceStateNotification) {
2838 notifyDeviceState(ICameraProvider::DEVICE_STATE_BACK_COVERED);
2839 notifyDeviceState(ICameraProvider::DEVICE_STATE_NORMAL);
2840}
2841
2842// Verify that all supported stream formats and sizes can be configured
2843 // successfully for an injection camera.
2844TEST_P(CameraAidlTest, configureInjectionStreamsAvailableOutputs) {
2845 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2846 std::vector<AvailableStream> outputStreams;
2847
2848 for (const auto& name : cameraDeviceNames) {
2849 CameraMetadata metadata;
2850
2851 std::shared_ptr<ICameraInjectionSession> injectionSession;
2852 std::shared_ptr<ICameraDevice> unusedDevice;
2853 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2854 &unusedDevice /*out*/);
2855 if (injectionSession == nullptr) {
2856 continue;
2857 }
2858
2859 camera_metadata_t* staticMetaBuffer =
2860 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2861 CameraMetadata chars;
2862 chars.metadata = metadata.metadata;
2863
2864 outputStreams.clear();
2865 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams));
2866 ASSERT_NE(0u, outputStreams.size());
2867
2868 int32_t jpegBufferSize = 0;
2869 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize));
2870 ASSERT_NE(0u, jpegBufferSize);
2871
2872 int32_t streamId = 0;
2873 int32_t streamConfigCounter = 0;
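// Configure each advertised output size/format as its own single-stream injection
// configuration and expect every call to succeed.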
2874 for (auto& it : outputStreams) {
2875 Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format));
2876 Stream stream = {streamId,
2877 StreamType::OUTPUT,
2878 it.width,
2879 it.height,
2880 static_cast<PixelFormat>(it.format),
2881 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2882 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2883 dataspace,
2884 StreamRotation::ROTATION_0,
2885 std::string(),
2886 jpegBufferSize,
2887 0,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00002888 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2889 RequestAvailableDynamicRangeProfilesMap::
2890 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08002891
2892 std::vector<Stream> streams = {stream};
2893 StreamConfiguration config;
2894 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2895 jpegBufferSize);
2896
2897 config.streamConfigCounter = streamConfigCounter++;
2898 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
2899 ASSERT_TRUE(s.isOk());
2900 streamId++;
2901 }
2902
2903 std::shared_ptr<ICameraDeviceSession> session;
2904 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2905 ASSERT_TRUE(ret.isOk());
2906 ASSERT_NE(session, nullptr);
2907 ret = session->close();
2908 ASSERT_TRUE(ret.isOk());
2909 }
2910}
2911
2912 // Check that invalid configuration parameters for an injection camera are handled correctly.
2913TEST_P(CameraAidlTest, configureInjectionStreamsInvalidOutputs) {
2914 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2915 std::vector<AvailableStream> outputStreams;
2916
2917 for (const auto& name : cameraDeviceNames) {
2918 CameraMetadata metadata;
2919 std::shared_ptr<ICameraInjectionSession> injectionSession;
2920 std::shared_ptr<ICameraDevice> unusedDevice;
2921 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2922 &unusedDevice);
2923 if (injectionSession == nullptr) {
2924 continue;
2925 }
2926
2927 camera_metadata_t* staticMetaBuffer =
2928 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2929 std::shared_ptr<ICameraDeviceSession> session;
2930 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2931 ASSERT_TRUE(ret.isOk());
2932 ASSERT_NE(session, nullptr);
2933
2934 CameraMetadata chars;
2935 chars.metadata = metadata.metadata;
2936
2937 outputStreams.clear();
2938 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams));
2939 ASSERT_NE(0u, outputStreams.size());
2940
2941 int32_t jpegBufferSize = 0;
2942 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize));
2943 ASSERT_NE(0u, jpegBufferSize);
2944
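// Exercise a series of invalid single-stream configurations: a zero-sized output, an
// oversized (INT32_MAX) resolution, an undefined pixel format, and an undefined rotation.
// Each of them must be rejected.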
2945 int32_t streamId = 0;
2946 Stream stream = {streamId++,
2947 StreamType::OUTPUT,
2948 0,
2949 0,
2950 static_cast<PixelFormat>(outputStreams[0].format),
2951 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2952 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2953 Dataspace::UNKNOWN,
2954 StreamRotation::ROTATION_0,
2955 std::string(),
2956 jpegBufferSize,
2957 0,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00002958 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2959 RequestAvailableDynamicRangeProfilesMap::
2960 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08002961
2962 int32_t streamConfigCounter = 0;
2963 std::vector<Stream> streams = {stream};
2964 StreamConfiguration config;
2965 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2966 jpegBufferSize);
2967
2968 config.streamConfigCounter = streamConfigCounter++;
2969 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
2970 ASSERT_TRUE(
2971 (static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) == s.getServiceSpecificError()) ||
2972 (static_cast<int32_t>(Status::INTERNAL_ERROR) == s.getServiceSpecificError()));
2973
2974 stream = {streamId++,
2975 StreamType::OUTPUT,
2976 INT32_MAX,
2977 INT32_MAX,
2978 static_cast<PixelFormat>(outputStreams[0].format),
2979 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2980 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2981 Dataspace::UNKNOWN,
2982 StreamRotation::ROTATION_0,
2983 std::string(),
2984 jpegBufferSize,
2985 0,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00002986 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2987 RequestAvailableDynamicRangeProfilesMap::
2988 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2989
Avichal Rakesh362242f2022-02-08 12:40:53 -08002990 streams[0] = stream;
2991 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2992 jpegBufferSize);
2993 config.streamConfigCounter = streamConfigCounter++;
2994 s = injectionSession->configureInjectionStreams(config, chars);
2995 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
2996
2997 for (auto& it : outputStreams) {
2998 stream = {streamId++,
2999 StreamType::OUTPUT,
3000 it.width,
3001 it.height,
3002 static_cast<PixelFormat>(INT32_MAX),
3003 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
3004 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
3005 Dataspace::UNKNOWN,
3006 StreamRotation::ROTATION_0,
3007 std::string(),
3008 jpegBufferSize,
3009 0,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00003010 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
3011 RequestAvailableDynamicRangeProfilesMap::
3012 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08003013 streams[0] = stream;
3014 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
3015 jpegBufferSize);
3016 config.streamConfigCounter = streamConfigCounter++;
3017 s = injectionSession->configureInjectionStreams(config, chars);
3018 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
3019
3020 stream = {streamId++,
3021 StreamType::OUTPUT,
3022 it.width,
3023 it.height,
3024 static_cast<PixelFormat>(it.format),
3025 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
3026 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
3027 Dataspace::UNKNOWN,
3028 static_cast<StreamRotation>(INT32_MAX),
3029 std::string(),
3030 jpegBufferSize,
3031 0,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00003032 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
3033 RequestAvailableDynamicRangeProfilesMap::
3034 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08003035 streams[0] = stream;
3036 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
3037 jpegBufferSize);
3038 config.streamConfigCounter = streamConfigCounter++;
3039 s = injectionSession->configureInjectionStreams(config, chars);
3040 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
3041 }
3042
3043 ret = session->close();
3044 ASSERT_TRUE(ret.isOk());
3045 }
3046}
3047
3048 // Check whether session parameters are supported for an injection camera. If the HAL supports
3049 // them, try to configure a preview stream using them.
3050TEST_P(CameraAidlTest, configureInjectionStreamsWithSessionParameters) {
3051 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
3052 std::vector<AvailableStream> outputPreviewStreams;
3053 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
3054 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
3055
3056 for (const auto& name : cameraDeviceNames) {
3057 CameraMetadata metadata;
3058 std::shared_ptr<ICameraInjectionSession> injectionSession;
3059 std::shared_ptr<ICameraDevice> unusedDevice;
3060 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
3061 &unusedDevice /*out*/);
3062 if (injectionSession == nullptr) {
3063 continue;
3064 }
3065
3066 std::shared_ptr<ICameraDeviceSession> session;
3067 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
3068 ASSERT_TRUE(ret.isOk());
3069 ASSERT_NE(session, nullptr);
3070
3071 camera_metadata_t* staticMetaBuffer =
3072 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
3073 CameraMetadata chars;
3074 chars.metadata = metadata.metadata;
3075
3076 std::unordered_set<int32_t> availableSessionKeys;
3077 Status rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
3078 &availableSessionKeys);
3079 ASSERT_EQ(Status::OK, rc);
3080 if (availableSessionKeys.empty()) {
3081 ret = session->close();
3082 ASSERT_TRUE(ret.isOk());
3083 continue;
3084 }
3085
3086 android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings;
3087 android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams,
3088 modifiedSessionParams;
3089 constructFilteredSettings(session, availableSessionKeys, RequestTemplate::PREVIEW,
3090 &previewRequestSettings, &sessionParams);
3091 if (sessionParams.isEmpty()) {
3092 ret = session->close();
3093 ASSERT_TRUE(ret.isOk());
3094 continue;
3095 }
3096
3097 outputPreviewStreams.clear();
3098
3099 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams,
3100 &previewThreshold));
3101 ASSERT_NE(0u, outputPreviewStreams.size());
3102
3103 Stream previewStream = {
3104 0,
3105 StreamType::OUTPUT,
3106 outputPreviewStreams[0].width,
3107 outputPreviewStreams[0].height,
3108 static_cast<PixelFormat>(outputPreviewStreams[0].format),
3109 static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
3110 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
3111 Dataspace::UNKNOWN,
3112 StreamRotation::ROTATION_0,
3113 std::string(),
3114 0,
3115 -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00003116 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
3117 RequestAvailableDynamicRangeProfilesMap::
3118 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08003119 std::vector<Stream> streams = {previewStream};
3120 StreamConfiguration config;
3121 config.streams = streams;
3122 config.operationMode = StreamConfigurationMode::NORMAL_MODE;
3123
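// Serialize the filtered session parameters into the stream configuration before configuring
// the injection streams.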
3124 modifiedSessionParams = sessionParams;
3125 camera_metadata_t* sessionParamsBuffer = sessionParams.release();
3126 uint8_t* rawSessionParamsBuffer = reinterpret_cast<uint8_t*>(sessionParamsBuffer);
3127 config.sessionParams.metadata =
3128 std::vector(rawSessionParamsBuffer,
3129 rawSessionParamsBuffer + get_camera_metadata_size(sessionParamsBuffer));
3130
3131 config.streamConfigCounter = 0;
3133 config.multiResolutionInputImage = false;
3134
3135 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
3136 ASSERT_TRUE(s.isOk());
3137
3138 sessionParams.acquire(sessionParamsBuffer);
3139 // Note: staticMetaBuffer aliases metadata.metadata, which owns the storage, so it is not freed here.
3140 ret = session->close();
3141 ASSERT_TRUE(ret.isOk());
3142 }
3143}
3144
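// Run the shared stream use case checks against a RAW16 output threshold (cropped RAW use case).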
Jayant Chowdharyde1909e2022-11-23 17:18:38 +00003145TEST_P(CameraAidlTest, configureStreamsUseCasesCroppedRaw) {
3146 AvailableStream rawStreamThreshold =
3147 {INT_MAX, INT_MAX, static_cast<int32_t>(PixelFormat::RAW16)};
3148 configureStreamUseCaseInternal(rawStreamThreshold);
3149}
3150
Avichal Rakesh362242f2022-02-08 12:40:53 -08003151// Verify that valid stream use cases can be configured successfully, and invalid use cases
3152// fail stream configuration.
3153TEST_P(CameraAidlTest, configureStreamsUseCases) {
Jayant Chowdharyde1909e2022-11-23 17:18:38 +00003154 AvailableStream previewStreamThreshold =
3155 {kMaxPreviewWidth, kMaxPreviewHeight, static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
3156 configureStreamUseCaseInternal(previewStreamThreshold);
Avichal Rakesh362242f2022-02-08 12:40:53 -08003157}
3158
Austin Borger0918fc82023-03-21 18:48:18 -07003159// Validate the integrity of stream configuration metadata
3160TEST_P(CameraAidlTest, validateStreamConfigurations) {
3161 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
3162 std::vector<AvailableStream> outputStreams;
3163
3164 const int32_t scalerSizesTag = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
3165 const int32_t scalerMinFrameDurationsTag = ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS;
3166 const int32_t scalerStallDurationsTag = ANDROID_SCALER_AVAILABLE_STALL_DURATIONS;
3167
3168 for (const auto& name : cameraDeviceNames) {
3169 CameraMetadata meta;
3170 std::shared_ptr<ICameraDevice> cameraDevice;
3171
3172 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
3173 &cameraDevice /*out*/);
3174 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
3175
3176 if (is10BitDynamicRangeCapable(staticMeta)) {
3177 std::vector<std::tuple<size_t, size_t>> supportedP010Sizes, supportedBlobSizes;
3178
3179 getSupportedSizes(staticMeta, scalerSizesTag, HAL_PIXEL_FORMAT_BLOB,
3180 &supportedBlobSizes);
3181 getSupportedSizes(staticMeta, scalerSizesTag, HAL_PIXEL_FORMAT_YCBCR_P010,
3182 &supportedP010Sizes);
3183 ASSERT_FALSE(supportedP010Sizes.empty());
3184
3185 std::vector<int64_t> blobMinDurations, blobStallDurations;
3186 getSupportedDurations(staticMeta, scalerMinFrameDurationsTag, HAL_PIXEL_FORMAT_BLOB,
3187 supportedP010Sizes, &blobMinDurations);
3188 getSupportedDurations(staticMeta, scalerStallDurationsTag, HAL_PIXEL_FORMAT_BLOB,
3189 supportedP010Sizes, &blobStallDurations);
3190 ASSERT_FALSE(blobStallDurations.empty());
3191 ASSERT_FALSE(blobMinDurations.empty());
3192 ASSERT_EQ(supportedP010Sizes.size(), blobMinDurations.size());
3193 ASSERT_EQ(blobMinDurations.size(), blobStallDurations.size());
3194 }
3195
Austin Borger8e9ac022023-05-04 11:17:26 -07003196 // TODO (b/280887191): Validate other aspects of stream configuration metadata...
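// One possible direction for the TODO above, sketched here as a comment only and assuming the
// same getSupportedSizes()/getSupportedDurations() helpers used earlier in this test: verify
// that every advertised YUV_420_888 output size also reports a minimum frame duration.
//
//   std::vector<std::tuple<size_t, size_t>> yuvSizes;
//   getSupportedSizes(staticMeta, scalerSizesTag, HAL_PIXEL_FORMAT_YCBCR_420_888, &yuvSizes);
//   std::vector<int64_t> yuvMinDurations;
//   getSupportedDurations(staticMeta, scalerMinFrameDurationsTag,
//                         HAL_PIXEL_FORMAT_YCBCR_420_888, yuvSizes, &yuvMinDurations);
//   ASSERT_EQ(yuvSizes.size(), yuvMinDurations.size());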
3197
3198 ndk::ScopedAStatus ret = mSession->close();
3199 mSession = nullptr;
3200 ASSERT_TRUE(ret.isOk());
Austin Borger0918fc82023-03-21 18:48:18 -07003201 }
3202}
3203
Avichal Rakesh362242f2022-02-08 12:40:53 -08003204GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(CameraAidlTest);
3205INSTANTIATE_TEST_SUITE_P(
3206 PerInstance, CameraAidlTest,
3207 testing::ValuesIn(android::getAidlHalInstanceNames(ICameraProvider::descriptor)),
Jayant Chowdharyde1909e2022-11-23 17:18:38 +00003208 android::hardware::PrintInstanceNameToString);