1/*
2 * Copyright (C) 2022 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include <aidl/Vintf.h>
18#include <aidl/android/hardware/camera/common/VendorTagSection.h>
19#include <aidl/android/hardware/camera/device/ICameraDevice.h>
20#include <aidlcommonsupport/NativeHandle.h>
21#include <camera_aidl_test.h>
22#include <cutils/properties.h>
23#include <device_cb.h>
24#include <empty_device_cb.h>
25#include <grallocusage/GrallocUsageConversion.h>
26#include <gtest/gtest.h>
27#include <hardware/gralloc.h>
28#include <hardware/gralloc1.h>
29#include <hidl/GtestPrinter.h>
30#include <hidl/HidlSupport.h>
31#include <torch_provider_cb.h>
32#include <list>
33
34using ::aidl::android::hardware::camera::common::CameraDeviceStatus;
35using ::aidl::android::hardware::camera::common::CameraResourceCost;
36using ::aidl::android::hardware::camera::common::TorchModeStatus;
37using ::aidl::android::hardware::camera::common::VendorTagSection;
38using ::aidl::android::hardware::camera::device::ICameraDevice;
39using ::aidl::android::hardware::camera::metadata::RequestAvailableColorSpaceProfilesMap;
40using ::aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap;
41using ::aidl::android::hardware::camera::metadata::SensorPixelMode;
42using ::aidl::android::hardware::camera::provider::CameraIdAndStreamCombination;
43using ::aidl::android::hardware::camera::provider::BnCameraProviderCallback;
44
45using ::ndk::ScopedAStatus;
46
47namespace {
48const int32_t kBurstFrameCount = 10;
49const uint32_t kMaxStillWidth = 2048;
50const uint32_t kMaxStillHeight = 1536;
51
52const int64_t kEmptyFlushTimeoutMSec = 200;
53
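// Stream use cases that a device advertising stream use case support is expected
// to declare in ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES.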
54const static std::vector<int64_t> kMandatoryUseCases = {
55 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
56 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW,
57 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE,
58 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD,
59 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL,
60 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL};
61} // namespace
62
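// Test if ICameraProvider::getCameraIdList returns Status::OK and log the advertised camera ids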
63TEST_P(CameraAidlTest, getCameraIdList) {
64 std::vector<std::string> idList;
65 ScopedAStatus ret = mProvider->getCameraIdList(&idList);
66 ASSERT_TRUE(ret.isOk());
67
68 for (size_t i = 0; i < idList.size(); i++) {
69 ALOGI("Camera Id[%zu] is %s", i, idList[i].c_str());
70 }
71}
72
73// Test if ICameraProvider::getVendorTags returns Status::OK
74TEST_P(CameraAidlTest, getVendorTags) {
75 std::vector<VendorTagSection> vendorTags;
76 ScopedAStatus ret = mProvider->getVendorTags(&vendorTags);
77
78 ASSERT_TRUE(ret.isOk());
79 for (size_t i = 0; i < vendorTags.size(); i++) {
80 ALOGI("Vendor tag section %zu name %s", i, vendorTags[i].sectionName.c_str());
81 for (auto& tag : vendorTags[i].tags) {
82 ALOGI("Vendor tag id %u name %s type %d", tag.tagId, tag.tagName.c_str(),
83 (int)tag.tagType);
84 }
85 }
86}
87
88// Test if ICameraProvider::setCallback returns Status::OK
89TEST_P(CameraAidlTest, setCallback) {
90 struct ProviderCb : public BnCameraProviderCallback {
91 ScopedAStatus cameraDeviceStatusChange(const std::string& cameraDeviceName,
92 CameraDeviceStatus newStatus) override {
93 ALOGI("camera device status callback name %s, status %d", cameraDeviceName.c_str(),
94 (int)newStatus);
95 return ScopedAStatus::ok();
96 }
97 ScopedAStatus torchModeStatusChange(const std::string& cameraDeviceName,
98 TorchModeStatus newStatus) override {
99 ALOGI("Torch mode status callback name %s, status %d", cameraDeviceName.c_str(),
100 (int)newStatus);
101 return ScopedAStatus::ok();
102 }
103 ScopedAStatus physicalCameraDeviceStatusChange(const std::string& cameraDeviceName,
104 const std::string& physicalCameraDeviceName,
105 CameraDeviceStatus newStatus) override {
106 ALOGI("physical camera device status callback name %s, physical camera name %s,"
107 " status %d",
108 cameraDeviceName.c_str(), physicalCameraDeviceName.c_str(), (int)newStatus);
109 return ScopedAStatus::ok();
110 }
111 };
112
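    // A no-op provider callback is sufficient here: the test only verifies that
    // setCallback() accepts a valid callback and rejects a null one.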
113 std::shared_ptr<ProviderCb> cb = ndk::SharedRefBase::make<ProviderCb>();
114 ScopedAStatus ret = mProvider->setCallback(cb);
115 ASSERT_TRUE(ret.isOk());
116 ret = mProvider->setCallback(nullptr);
117 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
118}
119
120// Test if ICameraProvider::getCameraDeviceInterface returns Status::OK and non-null device
121TEST_P(CameraAidlTest, getCameraDeviceInterface) {
122 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
123
124 for (const auto& name : cameraDeviceNames) {
125 std::shared_ptr<ICameraDevice> cameraDevice;
126 ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &cameraDevice);
127 ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
128 ret.getServiceSpecificError());
129 ASSERT_TRUE(ret.isOk());
130 ASSERT_NE(cameraDevice, nullptr);
131 }
132}
133
134// Verify that the device resource cost can be retrieved and the values are
135// correct.
136TEST_P(CameraAidlTest, getResourceCost) {
137 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
138
139 for (const auto& deviceName : cameraDeviceNames) {
140 std::shared_ptr<ICameraDevice> cameraDevice;
141 ScopedAStatus ret = mProvider->getCameraDeviceInterface(deviceName, &cameraDevice);
142 ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
143 ret.getServiceSpecificError());
144 ASSERT_TRUE(ret.isOk());
145 ASSERT_NE(cameraDevice, nullptr);
146
147 CameraResourceCost resourceCost;
148 ret = cameraDevice->getResourceCost(&resourceCost);
149 ALOGI("getResourceCost returns: %d:%d", ret.getExceptionCode(),
150 ret.getServiceSpecificError());
151 ASSERT_TRUE(ret.isOk());
152
153 ALOGI(" Resource cost is %d", resourceCost.resourceCost);
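        // The AIDL contract defines resourceCost as a value in the range [0, 100].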
154 ASSERT_LE(resourceCost.resourceCost, 100u);
155
156 for (const auto& name : resourceCost.conflictingDevices) {
157 ALOGI(" Conflicting device: %s", name.c_str());
158 }
159 }
160}
161
162TEST_P(CameraAidlTest, systemCameraTest) {
163 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
164 std::map<std::string, std::vector<SystemCameraKind>> hiddenPhysicalIdToLogicalMap;
165 for (const auto& name : cameraDeviceNames) {
166 std::shared_ptr<ICameraDevice> device;
167 ALOGI("systemCameraTest: Testing camera device %s", name.c_str());
168 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
169 ASSERT_TRUE(ret.isOk());
170 ASSERT_NE(device, nullptr);
171
172 CameraMetadata cameraCharacteristics;
173 ret = device->getCameraCharacteristics(&cameraCharacteristics);
174 ASSERT_TRUE(ret.isOk());
175
176 const camera_metadata_t* staticMeta =
177 reinterpret_cast<const camera_metadata_t*>(cameraCharacteristics.metadata.data());
178 Status rc = isLogicalMultiCamera(staticMeta);
179 if (rc == Status::OPERATION_NOT_SUPPORTED) {
180 return;
181 }
182
183 ASSERT_EQ(rc, Status::OK);
184 std::unordered_set<std::string> physicalIds;
185 ASSERT_EQ(getPhysicalCameraIds(staticMeta, &physicalIds), Status::OK);
186 SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
187 Status retStatus = getSystemCameraKind(staticMeta, &systemCameraKind);
188 ASSERT_EQ(retStatus, Status::OK);
189
190 for (auto physicalId : physicalIds) {
191 bool isPublicId = false;
192 for (auto& deviceName : cameraDeviceNames) {
193 std::string publicVersion, publicId;
194 ASSERT_TRUE(matchDeviceName(deviceName, mProviderType, &publicVersion, &publicId));
195 if (physicalId == publicId) {
196 isPublicId = true;
197 break;
198 }
199 }
200
201 // For hidden physical cameras, collect their associated logical cameras
202 // and store the system camera kind.
203 if (!isPublicId) {
204 auto it = hiddenPhysicalIdToLogicalMap.find(physicalId);
205 if (it == hiddenPhysicalIdToLogicalMap.end()) {
206 hiddenPhysicalIdToLogicalMap.insert(std::make_pair(
207 physicalId, std::vector<SystemCameraKind>({systemCameraKind})));
208 } else {
209 it->second.push_back(systemCameraKind);
210 }
211 }
212 }
213 }
214
215 // Check that the system camera kind of the logical cameras associated with
216 // each hidden physical camera is the same.
217 for (const auto& it : hiddenPhysicalIdToLogicalMap) {
218 SystemCameraKind neededSystemCameraKind = it.second.front();
219 for (auto foundSystemCamera : it.second) {
220 ASSERT_EQ(neededSystemCameraKind, foundSystemCamera);
221 }
222 }
223}
224
225// Verify that the static camera characteristics can be retrieved
226// successfully.
227TEST_P(CameraAidlTest, getCameraCharacteristics) {
228 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
229
230 for (const auto& name : cameraDeviceNames) {
231 std::shared_ptr<ICameraDevice> device;
232 ALOGI("getCameraCharacteristics: Testing camera device %s", name.c_str());
233 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
234 ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
235 ret.getServiceSpecificError());
236 ASSERT_TRUE(ret.isOk());
237 ASSERT_NE(device, nullptr);
238
239 CameraMetadata chars;
240 ret = device->getCameraCharacteristics(&chars);
241 ASSERT_TRUE(ret.isOk());
242 verifyCameraCharacteristics(chars);
243 verifyMonochromeCharacteristics(chars);
244 verifyRecommendedConfigs(chars);
245 verifyHighSpeedRecordingCharacteristics(name, chars);
246 verifyLogicalOrUltraHighResCameraMetadata(name, device, chars, cameraDeviceNames);
247
248 ASSERT_TRUE(ret.isOk());
249
250 // getPhysicalCameraCharacteristics will fail for publicly
251 // advertised camera IDs.
252 std::string version, cameraId;
253 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &cameraId));
254 CameraMetadata devChars;
255 ret = device->getPhysicalCameraCharacteristics(cameraId, &devChars);
256 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
257 ASSERT_EQ(0, devChars.metadata.size());
258 }
259}
260
261// Verify that the torch strength level can be set and retrieved successfully.
262TEST_P(CameraAidlTest, turnOnTorchWithStrengthLevel) {
263 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
264
265 std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this);
266 ndk::ScopedAStatus ret = mProvider->setCallback(cb);
267 ASSERT_TRUE(ret.isOk());
268
269 for (const auto& name : cameraDeviceNames) {
270 int32_t defaultLevel;
271 std::shared_ptr<ICameraDevice> device;
272 ALOGI("%s: Testing camera device %s", __FUNCTION__, name.c_str());
273
274 ret = mProvider->getCameraDeviceInterface(name, &device);
275 ASSERT_TRUE(ret.isOk());
276 ASSERT_NE(device, nullptr);
277
278 CameraMetadata chars;
279 ret = device->getCameraCharacteristics(&chars);
280 ASSERT_TRUE(ret.isOk());
281
282 const camera_metadata_t* staticMeta =
283 reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());
284 bool torchStrengthControlSupported = isTorchStrengthControlSupported(staticMeta);
285 camera_metadata_ro_entry entry;
286 int rc = find_camera_metadata_ro_entry(staticMeta,
287 ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL, &entry);
288 if (torchStrengthControlSupported) {
289 ASSERT_EQ(rc, 0);
290 ASSERT_GT(entry.count, 0);
291 defaultLevel = *entry.data.i32;
292 ALOGI("Default level is:%d", defaultLevel);
293 }
294
295 mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
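        // Torch strength control implies a maximum strength level greater than 1,
        // so requesting level 2 is a valid way to exercise the API.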
296 ret = device->turnOnTorchWithStrengthLevel(2);
297 ALOGI("turnOnTorchWithStrengthLevel returns status: %d", ret.getServiceSpecificError());
298 // OPERATION_NOT_SUPPORTED check
299 if (!torchStrengthControlSupported) {
300 ALOGI("Torch strength control not supported.");
301 ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED),
302 ret.getServiceSpecificError());
303 } else {
304 {
305 ASSERT_TRUE(ret.isOk());
306 std::unique_lock<std::mutex> l(mTorchLock);
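                // Block until the provider callback reports a torch status change,
                // failing the test if no callback arrives within kTorchTimeoutSec.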
307 while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
308 auto timeout = std::chrono::system_clock::now() +
309 std::chrono::seconds(kTorchTimeoutSec);
310 ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
311 }
312 ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
313 mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
314 }
315 ALOGI("getTorchStrengthLevel: Testing");
316 int32_t strengthLevel;
317 ret = device->getTorchStrengthLevel(&strengthLevel);
318 ASSERT_TRUE(ret.isOk());
319 ALOGI("Torch strength level is : %d", strengthLevel);
320 ASSERT_EQ(strengthLevel, 2);
321
322 // Turn OFF the torch and verify torch strength level is reset to default level.
323 ALOGI("Testing torch strength level reset after turning the torch OFF.");
324 ret = device->setTorchMode(false);
325 ASSERT_TRUE(ret.isOk());
326 {
327 std::unique_lock<std::mutex> l(mTorchLock);
328 while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
329 auto timeout = std::chrono::system_clock::now() +
330 std::chrono::seconds(kTorchTimeoutSec);
331 ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
332 }
333 ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
334 }
335
336 ret = device->getTorchStrengthLevel(&strengthLevel);
337 ASSERT_TRUE(ret.isOk());
338 ALOGI("Torch strength level after turning OFF torch is : %d", strengthLevel);
339 ASSERT_EQ(strengthLevel, defaultLevel);
340 }
341 }
342}
343
344// In case it is supported, verify that the torch can be enabled.
345// Check for corresponding torch callbacks as well.
346TEST_P(CameraAidlTest, setTorchMode) {
347 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
348
349 std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this);
350 ndk::ScopedAStatus ret = mProvider->setCallback(cb);
351 ALOGI("setCallback returns status: %d", ret.getServiceSpecificError());
352 ASSERT_TRUE(ret.isOk());
353 ASSERT_NE(cb, nullptr);
354
355 for (const auto& name : cameraDeviceNames) {
356 std::shared_ptr<ICameraDevice> device;
357 ALOGI("setTorchMode: Testing camera device %s", name.c_str());
358 ret = mProvider->getCameraDeviceInterface(name, &device);
359 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
360 ret.getServiceSpecificError());
361 ASSERT_TRUE(ret.isOk());
362 ASSERT_NE(device, nullptr);
363
364 CameraMetadata metadata;
365 ret = device->getCameraCharacteristics(&metadata);
366 ALOGI("getCameraCharacteristics returns status:%d", ret.getServiceSpecificError());
367 ASSERT_TRUE(ret.isOk());
368 camera_metadata_t* staticMeta =
369 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
370 bool torchSupported = isTorchSupported(staticMeta);
371
372 mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
373 ret = device->setTorchMode(true);
374 ALOGI("setTorchMode returns status: %d", ret.getServiceSpecificError());
375 if (!torchSupported) {
376 ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED),
377 ret.getServiceSpecificError());
378 } else {
379 ASSERT_TRUE(ret.isOk());
380 {
381 std::unique_lock<std::mutex> l(mTorchLock);
382 while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
383 auto timeout = std::chrono::system_clock::now() +
384 std::chrono::seconds(kTorchTimeoutSec);
385 ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
386 }
387 ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
388 mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
389 }
390
391 ret = device->setTorchMode(false);
392 ASSERT_TRUE(ret.isOk());
393 {
394 std::unique_lock<std::mutex> l(mTorchLock);
395 while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
396 auto timeout = std::chrono::system_clock::now() +
397 std::chrono::seconds(kTorchTimeoutSec);
398 ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
399 }
400 ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
401 }
402 }
403 }
404}
405
406// Check dump functionality.
407TEST_P(CameraAidlTest, dump) {
408 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
409
410 for (const auto& name : cameraDeviceNames) {
411 std::shared_ptr<ICameraDevice> device;
412 ALOGI("dump: Testing camera device %s", name.c_str());
413
414 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
415 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
416 ret.getServiceSpecificError());
417 ASSERT_TRUE(ret.isOk());
418 ASSERT_NE(device, nullptr);
419
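        // Route the dump() output to the test's dump target; only the return status
        // is validated here, not the dumped contents.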
420 int raw_handle = open(kDumpOutput, O_RDWR);
421 ASSERT_GE(raw_handle, 0);
422
423 auto retStatus = device->dump(raw_handle, nullptr, 0);
424 ASSERT_EQ(retStatus, ::android::OK);
425 close(raw_handle);
426 }
427}
428
429// Open, dump, then close
430TEST_P(CameraAidlTest, openClose) {
431 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
432
433 for (const auto& name : cameraDeviceNames) {
434 std::shared_ptr<ICameraDevice> device;
435 ALOGI("openClose: Testing camera device %s", name.c_str());
436 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
437 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
438 ret.getServiceSpecificError());
439 ASSERT_TRUE(ret.isOk());
440 ASSERT_NE(device, nullptr);
441
442 std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
443
444 ret = device->open(cb, &mSession);
445 ASSERT_TRUE(ret.isOk());
446 ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
447 ret.getServiceSpecificError());
448 ASSERT_NE(mSession, nullptr);
449 int raw_handle = open(kDumpOutput, O_RDWR);
450 ASSERT_GE(raw_handle, 0);
451
452 auto retStatus = device->dump(raw_handle, nullptr, 0);
453 ASSERT_EQ(retStatus, ::android::OK);
454 close(raw_handle);
455
456 ret = mSession->close();
457 mSession = nullptr;
458 ASSERT_TRUE(ret.isOk());
459 // TODO: test all session API calls return INTERNAL_ERROR after close
460 // TODO: keep a wp copy here and verify session cannot be promoted out of this scope
461 }
462}
463
464// Check whether all common default request settings can be successfully
465// constructed.
466TEST_P(CameraAidlTest, constructDefaultRequestSettings) {
467 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
468
469 for (const auto& name : cameraDeviceNames) {
470 std::shared_ptr<ICameraDevice> device;
471 ALOGI("constructDefaultRequestSettings: Testing camera device %s", name.c_str());
472 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
473 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
474 ret.getServiceSpecificError());
475 ASSERT_TRUE(ret.isOk());
476 ASSERT_NE(device, nullptr);
477
478 std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
479 ret = device->open(cb, &mSession);
480 ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
481 ret.getServiceSpecificError());
482 ASSERT_TRUE(ret.isOk());
483 ASSERT_NE(mSession, nullptr);
484
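        // Walk through every defined request template, from PREVIEW up to MANUAL.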
485 for (int32_t t = (int32_t)RequestTemplate::PREVIEW; t <= (int32_t)RequestTemplate::MANUAL;
486 t++) {
487 RequestTemplate reqTemplate = (RequestTemplate)t;
488 CameraMetadata rawMetadata;
489 ret = mSession->constructDefaultRequestSettings(reqTemplate, &rawMetadata);
490 ALOGI("constructDefaultRequestSettings returns status:%d:%d", ret.getExceptionCode(),
491 ret.getServiceSpecificError());
492
493 if (reqTemplate == RequestTemplate::ZERO_SHUTTER_LAG ||
494 reqTemplate == RequestTemplate::MANUAL) {
495 // optional templates
496 ASSERT_TRUE(ret.isOk() || static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
497 ret.getServiceSpecificError());
498 } else {
499 ASSERT_TRUE(ret.isOk());
500 }
501
502 if (ret.isOk()) {
503 const camera_metadata_t* metadata = (camera_metadata_t*)rawMetadata.metadata.data();
504 size_t expectedSize = rawMetadata.metadata.size();
505 int result = validate_camera_metadata_structure(metadata, &expectedSize);
506 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
507 verifyRequestTemplate(metadata, reqTemplate);
508 } else {
509 ASSERT_EQ(0u, rawMetadata.metadata.size());
510 }
511 }
512 ret = mSession->close();
513 mSession = nullptr;
514 ASSERT_TRUE(ret.isOk());
515 }
516}
517
518// Verify that all supported stream formats and sizes can be configured
519// successfully.
520TEST_P(CameraAidlTest, configureStreamsAvailableOutputs) {
521 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
522 std::vector<AvailableStream> outputStreams;
523
524 for (const auto& name : cameraDeviceNames) {
525 CameraMetadata meta;
526 std::shared_ptr<ICameraDevice> device;
527
528 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/, &device /*out*/);
529
530 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
531 outputStreams.clear();
532 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
533 ASSERT_NE(0u, outputStreams.size());
534
535 int32_t jpegBufferSize = 0;
536 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
537 ASSERT_NE(0u, jpegBufferSize);
538
539 int32_t streamId = 0;
540 int32_t streamConfigCounter = 0;
541 for (auto& it : outputStreams) {
542 Stream stream;
543 Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format));
544 stream.id = streamId;
545 stream.streamType = StreamType::OUTPUT;
546 stream.width = it.width;
547 stream.height = it.height;
548 stream.format = static_cast<PixelFormat>(it.format);
549 stream.dataSpace = dataspace;
550 stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
551 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
552 stream.rotation = StreamRotation::ROTATION_0;
553 stream.dynamicRangeProfile = RequestAvailableDynamicRangeProfilesMap::
554 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
555 stream.useCase = ScalerAvailableStreamUseCases::
556 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
557 stream.colorSpace = static_cast<int>(
558 RequestAvailableColorSpaceProfilesMap::
559 ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
560
561 std::vector<Stream> streams = {stream};
562 StreamConfiguration config;
563 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
564 jpegBufferSize);
565
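            // Logical multi-camera devices are expected to answer stream combination
            // queries, so the combination is verified against the device first.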
566 bool expectStreamCombQuery = (isLogicalMultiCamera(staticMeta) == Status::OK);
567 verifyStreamCombination(device, config, /*expectedStatus*/ true, expectStreamCombQuery);
568
569 config.streamConfigCounter = streamConfigCounter++;
570 std::vector<HalStream> halConfigs;
571 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
572 ASSERT_TRUE(ret.isOk());
573 ASSERT_EQ(halConfigs.size(), 1);
574 ASSERT_EQ(halConfigs[0].id, streamId);
575
576 streamId++;
577 }
578
579 ndk::ScopedAStatus ret = mSession->close();
580 mSession = nullptr;
581 ASSERT_TRUE(ret.isOk());
582 }
583}
584
585// Verify that mandatory concurrent streams and outputs are supported.
586TEST_P(CameraAidlTest, configureConcurrentStreamsAvailableOutputs) {
587 struct CameraTestInfo {
588 CameraMetadata staticMeta;
589 std::shared_ptr<ICameraDeviceSession> session;
590 std::shared_ptr<ICameraDevice> cameraDevice;
591 StreamConfiguration config;
592 };
593
594 std::map<std::string, std::string> idToNameMap = getCameraDeviceIdToNameMap(mProvider);
595 std::vector<ConcurrentCameraIdCombination> concurrentDeviceCombinations =
596 getConcurrentDeviceCombinations(mProvider);
597 std::vector<AvailableStream> outputStreams;
598 for (const auto& cameraDeviceIds : concurrentDeviceCombinations) {
599 std::vector<CameraIdAndStreamCombination> cameraIdsAndStreamCombinations;
600 std::vector<CameraTestInfo> cameraTestInfos;
601 size_t i = 0;
602 for (const auto& id : cameraDeviceIds.combination) {
603 CameraTestInfo cti;
604 auto it = idToNameMap.find(id);
605 ASSERT_TRUE(idToNameMap.end() != it);
606 std::string name = it->second;
607
608 openEmptyDeviceSession(name, mProvider, &cti.session /*out*/, &cti.staticMeta /*out*/,
609 &cti.cameraDevice /*out*/);
610
611 outputStreams.clear();
612 camera_metadata_t* staticMeta =
613 reinterpret_cast<camera_metadata_t*>(cti.staticMeta.metadata.data());
614 ASSERT_EQ(Status::OK, getMandatoryConcurrentStreams(staticMeta, &outputStreams));
615 ASSERT_NE(0u, outputStreams.size());
616
617 int32_t jpegBufferSize = 0;
618 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
619 ASSERT_NE(0u, jpegBufferSize);
620
621 int32_t streamId = 0;
622 std::vector<Stream> streams(outputStreams.size());
623 size_t j = 0;
624 for (const auto& s : outputStreams) {
625 Stream stream;
626 Dataspace dataspace = getDataspace(static_cast<PixelFormat>(s.format));
627 stream.id = streamId++;
628 stream.streamType = StreamType::OUTPUT;
629 stream.width = s.width;
630 stream.height = s.height;
631 stream.format = static_cast<PixelFormat>(s.format);
632 stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
633 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
634 stream.dataSpace = dataspace;
635 stream.rotation = StreamRotation::ROTATION_0;
636 stream.sensorPixelModesUsed = {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT};
637 stream.dynamicRangeProfile = RequestAvailableDynamicRangeProfilesMap::
638 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
639 streams[j] = stream;
640 j++;
641 }
642
643 // Add the created stream configs to cameraIdsAndStreamCombinations
644 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &cti.config,
645 jpegBufferSize);
646
647 cti.config.streamConfigCounter = outputStreams.size();
648 CameraIdAndStreamCombination cameraIdAndStreamCombination;
649 cameraIdAndStreamCombination.cameraId = id;
650 cameraIdAndStreamCombination.streamConfiguration = cti.config;
651 cameraIdsAndStreamCombinations.push_back(cameraIdAndStreamCombination);
652 i++;
653 cameraTestInfos.push_back(cti);
654 }
655 // Now verify that concurrent streams are supported
656 bool combinationSupported;
657 ndk::ScopedAStatus ret = mProvider->isConcurrentStreamCombinationSupported(
658 cameraIdsAndStreamCombinations, &combinationSupported);
659 ASSERT_TRUE(ret.isOk());
660 ASSERT_EQ(combinationSupported, true);
661
662 // Test the stream can actually be configured
663 for (auto& cti : cameraTestInfos) {
664 if (cti.session != nullptr) {
665 camera_metadata_t* staticMeta =
666 reinterpret_cast<camera_metadata_t*>(cti.staticMeta.metadata.data());
667 bool expectStreamCombQuery = (isLogicalMultiCamera(staticMeta) == Status::OK);
668 verifyStreamCombination(cti.cameraDevice, cti.config, /*expectedStatus*/ true,
669 expectStreamCombQuery);
670 }
671
672 if (cti.session != nullptr) {
673 std::vector<HalStream> streamConfigs;
674 ret = cti.session->configureStreams(cti.config, &streamConfigs);
675 ASSERT_TRUE(ret.isOk());
676 ASSERT_EQ(cti.config.streams.size(), streamConfigs.size());
677 }
678 }
679
680 for (auto& cti : cameraTestInfos) {
681 ret = cti.session->close();
682 ASSERT_TRUE(ret.isOk());
683 }
684 }
685}
686
687// Check for correct handling of invalid/incorrect configuration parameters.
688TEST_P(CameraAidlTest, configureStreamsInvalidOutputs) {
689 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
690 std::vector<AvailableStream> outputStreams;
691
692 for (const auto& name : cameraDeviceNames) {
693 CameraMetadata meta;
694 std::shared_ptr<ICameraDevice> cameraDevice;
695
696 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
697 &cameraDevice /*out*/);
698 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
699 outputStreams.clear();
700
701 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
702 ASSERT_NE(0u, outputStreams.size());
703
704 int32_t jpegBufferSize = 0;
705 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
706 ASSERT_NE(0u, jpegBufferSize);
707
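        // Exercise several invalid configurations in turn: a 0x0 stream, an
        // INT32_MAX-sized stream, an invalid pixel format and an invalid rotation.
        // The first case may report ILLEGAL_ARGUMENT or INTERNAL_ERROR; the rest
        // must report ILLEGAL_ARGUMENT.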
708 int32_t streamId = 0;
709 Stream stream = {streamId++,
710 StreamType::OUTPUT,
711 static_cast<uint32_t>(0),
712 static_cast<uint32_t>(0),
713 static_cast<PixelFormat>(outputStreams[0].format),
714 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
715 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
716 Dataspace::UNKNOWN,
717 StreamRotation::ROTATION_0,
718 std::string(),
719 jpegBufferSize,
720 -1,
721 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
722 RequestAvailableDynamicRangeProfilesMap::
723 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
724 int32_t streamConfigCounter = 0;
725 std::vector<Stream> streams = {stream};
726 StreamConfiguration config;
727 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
728 jpegBufferSize);
729
730 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ false,
731 /*expectStreamCombQuery*/ false);
732
733 config.streamConfigCounter = streamConfigCounter++;
734 std::vector<HalStream> halConfigs;
735 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
736 ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
737 ret.getServiceSpecificError() ||
738 static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError());
739
740 stream = {streamId++,
741 StreamType::OUTPUT,
742 /*width*/ INT32_MAX,
743 /*height*/ INT32_MAX,
744 static_cast<PixelFormat>(outputStreams[0].format),
745 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
746 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
747 Dataspace::UNKNOWN,
748 StreamRotation::ROTATION_0,
749 std::string(),
750 jpegBufferSize,
751 -1,
752 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
753 RequestAvailableDynamicRangeProfilesMap::
754 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
755
756 streams[0] = stream;
757 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
758 jpegBufferSize);
759
760 config.streamConfigCounter = streamConfigCounter++;
761 halConfigs.clear();
762 ret = mSession->configureStreams(config, &halConfigs);
763 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
764
765 for (auto& it : outputStreams) {
766 stream = {streamId++,
767 StreamType::OUTPUT,
768 it.width,
769 it.height,
770 static_cast<PixelFormat>(UINT32_MAX),
771 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
772 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
773 Dataspace::UNKNOWN,
774 StreamRotation::ROTATION_0,
775 std::string(),
776 jpegBufferSize,
777 -1,
778 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
779 RequestAvailableDynamicRangeProfilesMap::
780 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
781
782 streams[0] = stream;
783 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
784 jpegBufferSize);
785 config.streamConfigCounter = streamConfigCounter++;
786 halConfigs.clear();
787 ret = mSession->configureStreams(config, &halConfigs);
788 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
789 ret.getServiceSpecificError());
790
791 stream = {streamId++,
792 StreamType::OUTPUT,
793 it.width,
794 it.height,
795 static_cast<PixelFormat>(it.format),
796 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
797 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
798 Dataspace::UNKNOWN,
799 static_cast<StreamRotation>(UINT32_MAX),
800 std::string(),
801 jpegBufferSize,
802 -1,
803 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
804 RequestAvailableDynamicRangeProfilesMap::
805 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
806
807 streams[0] = stream;
808 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
809 jpegBufferSize);
810
811 config.streamConfigCounter = streamConfigCounter++;
812 halConfigs.clear();
813 ret = mSession->configureStreams(config, &halConfigs);
814 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
815 ret.getServiceSpecificError());
816 }
817
818 ret = mSession->close();
819 mSession = nullptr;
820 ASSERT_TRUE(ret.isOk());
821 }
822}
823
824// Check whether all supported ZSL output stream combinations can be
825// configured successfully.
826TEST_P(CameraAidlTest, configureStreamsZSLInputOutputs) {
827 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
828 std::vector<AvailableStream> inputStreams;
829 std::vector<AvailableZSLInputOutput> inputOutputMap;
830
831 for (const auto& name : cameraDeviceNames) {
832 CameraMetadata meta;
833 std::shared_ptr<ICameraDevice> cameraDevice;
834
835 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
836 &cameraDevice /*out*/);
837 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
838
839 Status rc = isZSLModeAvailable(staticMeta);
840 if (Status::OPERATION_NOT_SUPPORTED == rc) {
841 ndk::ScopedAStatus ret = mSession->close();
842 mSession = nullptr;
843 ASSERT_TRUE(ret.isOk());
844 continue;
845 }
846 ASSERT_EQ(Status::OK, rc);
847
848 inputStreams.clear();
849 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, inputStreams));
850 ASSERT_NE(0u, inputStreams.size());
851
852 inputOutputMap.clear();
853 ASSERT_EQ(Status::OK, getZSLInputOutputMap(staticMeta, inputOutputMap));
854 ASSERT_NE(0u, inputOutputMap.size());
855
856 bool supportMonoY8 = false;
857 if (Status::OK == isMonochromeCamera(staticMeta)) {
858 for (auto& it : inputStreams) {
859 if (it.format == static_cast<uint32_t>(PixelFormat::Y8)) {
860 supportMonoY8 = true;
861 break;
862 }
863 }
864 }
865
866 int32_t jpegBufferSize = 0;
867 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
868 ASSERT_NE(0u, jpegBufferSize);
869
870 int32_t streamId = 0;
871 bool hasPrivToY8 = false, hasY8ToY8 = false, hasY8ToBlob = false;
872 uint32_t streamConfigCounter = 0;
873 for (auto& inputIter : inputOutputMap) {
874 AvailableStream input;
875 ASSERT_EQ(Status::OK, findLargestSize(inputStreams, inputIter.inputFormat, input));
876 ASSERT_NE(0u, inputStreams.size());
877
878 if (inputIter.inputFormat ==
879 static_cast<uint32_t>(PixelFormat::IMPLEMENTATION_DEFINED) &&
880 inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
881 hasPrivToY8 = true;
882 } else if (inputIter.inputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
883 if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::BLOB)) {
884 hasY8ToBlob = true;
885 } else if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
886 hasY8ToY8 = true;
887 }
888 }
889 AvailableStream outputThreshold = {INT32_MAX, INT32_MAX, inputIter.outputFormat};
890 std::vector<AvailableStream> outputStreams;
891 ASSERT_EQ(Status::OK,
892 getAvailableOutputStreams(staticMeta, outputStreams, &outputThreshold));
893 for (auto& outputIter : outputStreams) {
894 Dataspace outputDataSpace =
895 getDataspace(static_cast<PixelFormat>(outputIter.format));
896 Stream zslStream = {
897 streamId++,
898 StreamType::OUTPUT,
899 input.width,
900 input.height,
901 static_cast<PixelFormat>(input.format),
902 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
903 GRALLOC_USAGE_HW_CAMERA_ZSL),
904 Dataspace::UNKNOWN,
905 StreamRotation::ROTATION_0,
906 std::string(),
907 jpegBufferSize,
908 -1,
909 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
910 RequestAvailableDynamicRangeProfilesMap::
911 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
912 Stream inputStream = {
913 streamId++,
914 StreamType::INPUT,
915 input.width,
916 input.height,
917 static_cast<PixelFormat>(input.format),
918 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(0),
919 Dataspace::UNKNOWN,
920 StreamRotation::ROTATION_0,
921 std::string(),
922 jpegBufferSize,
923 -1,
924 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
925 RequestAvailableDynamicRangeProfilesMap::
926 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
927 Stream outputStream = {
928 streamId++,
929 StreamType::OUTPUT,
930 outputIter.width,
931 outputIter.height,
932 static_cast<PixelFormat>(outputIter.format),
933 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
934 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
935 outputDataSpace,
936 StreamRotation::ROTATION_0,
937 std::string(),
938 jpegBufferSize,
939 -1,
940 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
941 RequestAvailableDynamicRangeProfilesMap::
942 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
943
944 std::vector<Stream> streams = {inputStream, zslStream, outputStream};
945
946 StreamConfiguration config;
947 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
948 jpegBufferSize);
949
950 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
951 /*expectStreamCombQuery*/ false);
952
953 config.streamConfigCounter = streamConfigCounter++;
954 std::vector<HalStream> halConfigs;
955 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
956 ASSERT_TRUE(ret.isOk());
957 ASSERT_EQ(3u, halConfigs.size());
958 }
959 }
960
961 if (supportMonoY8) {
962 if (Status::OK == isZSLModeAvailable(staticMeta, PRIV_REPROCESS)) {
963 ASSERT_TRUE(hasPrivToY8);
964 }
965 if (Status::OK == isZSLModeAvailable(staticMeta, YUV_REPROCESS)) {
966 ASSERT_TRUE(hasY8ToY8);
967 ASSERT_TRUE(hasY8ToBlob);
968 }
969 }
970
971 ndk::ScopedAStatus ret = mSession->close();
972 mSession = nullptr;
973 ASSERT_TRUE(ret.isOk());
974 }
975}
976
977// Check whether session parameters are supported. If HAL support for them
978// exists, then try to configure a preview stream using them.
979TEST_P(CameraAidlTest, configureStreamsWithSessionParameters) {
980 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
981 std::vector<AvailableStream> outputPreviewStreams;
982 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
983 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
984
985 for (const auto& name : cameraDeviceNames) {
986 CameraMetadata meta;
987
988 std::shared_ptr<ICameraDevice> unusedCameraDevice;
989 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
990 &unusedCameraDevice /*out*/);
991 camera_metadata_t* staticMetaBuffer =
992 reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
993
994 std::unordered_set<int32_t> availableSessionKeys;
995 auto rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
996 &availableSessionKeys);
997 ASSERT_TRUE(Status::OK == rc);
998 if (availableSessionKeys.empty()) {
999 ndk::ScopedAStatus ret = mSession->close();
1000 mSession = nullptr;
1001 ASSERT_TRUE(ret.isOk());
1002 continue;
1003 }
1004
1005 android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings;
1006 android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams,
1007 modifiedSessionParams;
1008 constructFilteredSettings(mSession, availableSessionKeys, RequestTemplate::PREVIEW,
1009 &previewRequestSettings, &sessionParams);
1010 if (sessionParams.isEmpty()) {
1011 ndk::ScopedAStatus ret = mSession->close();
1012 mSession = nullptr;
1013 ASSERT_TRUE(ret.isOk());
1014 continue;
1015 }
1016
1017 outputPreviewStreams.clear();
1018
1019 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams,
1020 &previewThreshold));
1021 ASSERT_NE(0u, outputPreviewStreams.size());
1022
1023 Stream previewStream = {
1024 0,
1025 StreamType::OUTPUT,
1026 outputPreviewStreams[0].width,
1027 outputPreviewStreams[0].height,
1028 static_cast<PixelFormat>(outputPreviewStreams[0].format),
1029 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1030 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
1031 Dataspace::UNKNOWN,
1032 StreamRotation::ROTATION_0,
1033 std::string(),
1034 /*bufferSize*/ 0,
1035 /*groupId*/ -1,
1036 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1037 RequestAvailableDynamicRangeProfilesMap::
1038 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1039
1040 std::vector<Stream> streams = {previewStream};
1041 StreamConfiguration config;
1042
1043 config.streams = streams;
1044 config.operationMode = StreamConfigurationMode::NORMAL_MODE;
1045 modifiedSessionParams = sessionParams;
1046 auto sessionParamsBuffer = sessionParams.release();
1047 std::vector<uint8_t> rawSessionParam =
1048 std::vector(reinterpret_cast<uint8_t*>(sessionParamsBuffer),
1049 reinterpret_cast<uint8_t*>(sessionParamsBuffer) +
1050 get_camera_metadata_size(sessionParamsBuffer));
1051
1052 config.sessionParams.metadata = rawSessionParam;
1053 config.streamConfigCounter = 0;
1054 config.streams = {previewStream};
1055 config.streamConfigCounter = 0;
1056 config.multiResolutionInputImage = false;
1057
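        // Remove one advertised session key to build a modified parameter set, then
        // query whether the change would require a stream reconfiguration.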
1058 bool newSessionParamsAvailable = false;
1059 for (const auto& it : availableSessionKeys) {
1060 if (modifiedSessionParams.exists(it)) {
1061 modifiedSessionParams.erase(it);
1062 newSessionParamsAvailable = true;
1063 break;
1064 }
1065 }
1066 if (newSessionParamsAvailable) {
1067 auto modifiedSessionParamsBuffer = modifiedSessionParams.release();
1068 verifySessionReconfigurationQuery(mSession, sessionParamsBuffer,
1069 modifiedSessionParamsBuffer);
1070 modifiedSessionParams.acquire(modifiedSessionParamsBuffer);
1071 }
1072
1073 std::vector<HalStream> halConfigs;
1074 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1075 ASSERT_TRUE(ret.isOk());
1076 ASSERT_EQ(1u, halConfigs.size());
1077
1078 sessionParams.acquire(sessionParamsBuffer);
1079 ret = mSession->close();
1080 mSession = nullptr;
1081 ASSERT_TRUE(ret.isOk());
1082 }
1083}
1084
1085// Verify that all supported preview + still capture stream combinations
1086// can be configured successfully.
1087TEST_P(CameraAidlTest, configureStreamsPreviewStillOutputs) {
1088 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1089 std::vector<AvailableStream> outputBlobStreams;
1090 std::vector<AvailableStream> outputPreviewStreams;
1091 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
1092 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
1093 AvailableStream blobThreshold = {INT32_MAX, INT32_MAX, static_cast<int32_t>(PixelFormat::BLOB)};
1094
1095 for (const auto& name : cameraDeviceNames) {
1096 CameraMetadata meta;
1097
1098 std::shared_ptr<ICameraDevice> cameraDevice;
1099 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
1100 &cameraDevice /*out*/);
1101
1102 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1103
1104 // Check if the camera supports depth only
1105 if (isDepthOnly(staticMeta)) {
1106 ndk::ScopedAStatus ret = mSession->close();
1107 mSession = nullptr;
1108 ASSERT_TRUE(ret.isOk());
1109 continue;
1110 }
1111
1112 outputBlobStreams.clear();
1113 ASSERT_EQ(Status::OK,
1114 getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold));
1115 ASSERT_NE(0u, outputBlobStreams.size());
1116
1117 outputPreviewStreams.clear();
1118 ASSERT_EQ(Status::OK,
1119 getAvailableOutputStreams(staticMeta, outputPreviewStreams, &previewThreshold));
1120 ASSERT_NE(0u, outputPreviewStreams.size());
1121
1122 int32_t jpegBufferSize = 0;
1123 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
1124 ASSERT_NE(0u, jpegBufferSize);
1125
1126 int32_t streamId = 0;
1127 uint32_t streamConfigCounter = 0;
1128
1129 for (auto& blobIter : outputBlobStreams) {
1130 for (auto& previewIter : outputPreviewStreams) {
1131 Stream previewStream = {
1132 streamId++,
1133 StreamType::OUTPUT,
1134 previewIter.width,
1135 previewIter.height,
1136 static_cast<PixelFormat>(previewIter.format),
1137 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1138 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
1139 Dataspace::UNKNOWN,
1140 StreamRotation::ROTATION_0,
1141 std::string(),
1142 /*bufferSize*/ 0,
1143 /*groupId*/ -1,
1144 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1145 RequestAvailableDynamicRangeProfilesMap::
1146 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1147 Stream blobStream = {
1148 streamId++,
1149 StreamType::OUTPUT,
1150 blobIter.width,
1151 blobIter.height,
1152 static_cast<PixelFormat>(blobIter.format),
1153 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1154 GRALLOC1_CONSUMER_USAGE_CPU_READ),
1155 Dataspace::JFIF,
1156 StreamRotation::ROTATION_0,
1157 std::string(),
1158 /*bufferSize*/ 0,
1159 /*groupId*/ -1,
1160 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1161 RequestAvailableDynamicRangeProfilesMap::
1162 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1163 std::vector<Stream> streams = {previewStream, blobStream};
1164 StreamConfiguration config;
1165
1166 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
1167 jpegBufferSize);
1168 config.streamConfigCounter = streamConfigCounter++;
1169 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
1170 /*expectStreamCombQuery*/ false);
1171
1172 std::vector<HalStream> halConfigs;
1173 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1174 ASSERT_TRUE(ret.isOk());
1175 ASSERT_EQ(2u, halConfigs.size());
1176 }
1177 }
1178
1179 ndk::ScopedAStatus ret = mSession->close();
1180 mSession = nullptr;
1181 ASSERT_TRUE(ret.isOk());
1182 }
1183}
1184
1185// In case constrained mode is supported, test whether it can be
1186// configured. Additionally check for common invalid inputs when
1187// using this mode.
1188TEST_P(CameraAidlTest, configureStreamsConstrainedOutputs) {
1189 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1190
1191 for (const auto& name : cameraDeviceNames) {
1192 CameraMetadata meta;
1193 std::shared_ptr<ICameraDevice> cameraDevice;
1194
1195 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
1196 &cameraDevice /*out*/);
1197 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1198
1199 Status rc = isConstrainedModeAvailable(staticMeta);
1200 if (Status::OPERATION_NOT_SUPPORTED == rc) {
1201 ndk::ScopedAStatus ret = mSession->close();
1202 mSession = nullptr;
1203 ASSERT_TRUE(ret.isOk());
1204 continue;
1205 }
1206 ASSERT_EQ(Status::OK, rc);
1207
1208 AvailableStream hfrStream;
1209 rc = pickConstrainedModeSize(staticMeta, hfrStream);
1210 ASSERT_EQ(Status::OK, rc);
1211
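        // Configure a single video-encoder stream at the advertised constrained
        // high-speed size, then retry with invalid sizes and formats, which must fail.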
1212 int32_t streamId = 0;
1213 uint32_t streamConfigCounter = 0;
1214 Stream stream = {streamId,
1215 StreamType::OUTPUT,
1216 hfrStream.width,
1217 hfrStream.height,
1218 static_cast<PixelFormat>(hfrStream.format),
1219 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1220 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1221 Dataspace::UNKNOWN,
1222 StreamRotation::ROTATION_0,
1223 std::string(),
1224 /*bufferSize*/ 0,
1225 /*groupId*/ -1,
1226 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1227 RequestAvailableDynamicRangeProfilesMap::
1228 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1229 std::vector<Stream> streams = {stream};
1230 StreamConfiguration config;
1231 createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
1232 &config);
1233
1234 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
1235 /*expectStreamCombQuery*/ false);
1236
1237 config.streamConfigCounter = streamConfigCounter++;
1238 std::vector<HalStream> halConfigs;
1239 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1240 ASSERT_TRUE(ret.isOk());
1241 ASSERT_EQ(1u, halConfigs.size());
1242 ASSERT_EQ(halConfigs[0].id, streamId);
1243
1244 stream = {streamId++,
1245 StreamType::OUTPUT,
1246 static_cast<uint32_t>(0),
1247 static_cast<uint32_t>(0),
1248 static_cast<PixelFormat>(hfrStream.format),
1249 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1250 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1251 Dataspace::UNKNOWN,
1252 StreamRotation::ROTATION_0,
1253 std::string(),
1254 /*bufferSize*/ 0,
1255 /*groupId*/ -1,
1256 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1257 RequestAvailableDynamicRangeProfilesMap::
1258 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1259 streams[0] = stream;
1260 createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
1261 &config);
1262
1263 config.streamConfigCounter = streamConfigCounter++;
1264 std::vector<HalStream> halConfig;
1265 ret = mSession->configureStreams(config, &halConfig);
1266 ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
1267 ret.getServiceSpecificError() ||
1268 static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError());
1269
1270 stream = {streamId++,
1271 StreamType::OUTPUT,
1272 INT32_MAX,
1273 INT32_MAX,
1274 static_cast<PixelFormat>(hfrStream.format),
1275 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1276 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1277 Dataspace::UNKNOWN,
1278 StreamRotation::ROTATION_0,
1279 std::string(),
1280 /*bufferSize*/ 0,
1281 /*groupId*/ -1,
1282 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1283 RequestAvailableDynamicRangeProfilesMap::
1284 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1285 streams[0] = stream;
1286 createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
1287 &config);
1288
1289 config.streamConfigCounter = streamConfigCounter++;
1290 halConfigs.clear();
1291 ret = mSession->configureStreams(config, &halConfigs);
1292 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
1293
1294 stream = {streamId++,
1295 StreamType::OUTPUT,
1296 hfrStream.width,
1297 hfrStream.height,
1298 static_cast<PixelFormat>(UINT32_MAX),
1299 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1300 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1301 Dataspace::UNKNOWN,
1302 StreamRotation::ROTATION_0,
1303 std::string(),
1304 /*bufferSize*/ 0,
1305 /*groupId*/ -1,
1306 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1307 RequestAvailableDynamicRangeProfilesMap::
1308 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1309 streams[0] = stream;
1310 createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
1311 &config);
1312
1313 config.streamConfigCounter = streamConfigCounter++;
1314 halConfigs.clear();
1315 ret = mSession->configureStreams(config, &halConfigs);
1316 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
1317
1318 ret = mSession->close();
1319 mSession = nullptr;
1320 ASSERT_TRUE(ret.isOk());
1321 }
1322}
1323
1324// Verify that all supported video + snapshot stream combinations can
1325// be configured successfully.
1326TEST_P(CameraAidlTest, configureStreamsVideoStillOutputs) {
1327 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1328 std::vector<AvailableStream> outputBlobStreams;
1329 std::vector<AvailableStream> outputVideoStreams;
1330 AvailableStream videoThreshold = {kMaxVideoWidth, kMaxVideoHeight,
1331 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
1332 AvailableStream blobThreshold = {kMaxVideoWidth, kMaxVideoHeight,
1333 static_cast<int32_t>(PixelFormat::BLOB)};
1334
1335 for (const auto& name : cameraDeviceNames) {
1336 CameraMetadata meta;
1337 std::shared_ptr<ICameraDevice> cameraDevice;
1338
1339 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
1340 &cameraDevice /*out*/);
1341
1342 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1343
1344 // Check if the camera supports depth only
1345 if (isDepthOnly(staticMeta)) {
1346 ndk::ScopedAStatus ret = mSession->close();
1347 mSession = nullptr;
1348 ASSERT_TRUE(ret.isOk());
1349 continue;
1350 }
1351
1352 outputBlobStreams.clear();
1353 ASSERT_EQ(Status::OK,
1354 getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold));
1355 ASSERT_NE(0u, outputBlobStreams.size());
1356
1357 outputVideoStreams.clear();
1358 ASSERT_EQ(Status::OK,
1359 getAvailableOutputStreams(staticMeta, outputVideoStreams, &videoThreshold));
1360 ASSERT_NE(0u, outputVideoStreams.size());
1361
1362 int32_t jpegBufferSize = 0;
1363 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
1364 ASSERT_NE(0u, jpegBufferSize);
1365
1366 int32_t streamId = 0;
1367 uint32_t streamConfigCounter = 0;
1368 for (auto& blobIter : outputBlobStreams) {
1369 for (auto& videoIter : outputVideoStreams) {
1370 Stream videoStream = {
1371 streamId++,
1372 StreamType::OUTPUT,
1373 videoIter.width,
1374 videoIter.height,
1375 static_cast<PixelFormat>(videoIter.format),
1376 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1377 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1378 Dataspace::UNKNOWN,
1379 StreamRotation::ROTATION_0,
1380 std::string(),
1381 jpegBufferSize,
1382 /*groupId*/ -1,
1383 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1384 RequestAvailableDynamicRangeProfilesMap::
1385 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1386 Stream blobStream = {
1387 streamId++,
1388 StreamType::OUTPUT,
1389 blobIter.width,
1390 blobIter.height,
1391 static_cast<PixelFormat>(blobIter.format),
1392 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1393 GRALLOC1_CONSUMER_USAGE_CPU_READ),
1394 Dataspace::JFIF,
1395 StreamRotation::ROTATION_0,
1396 std::string(),
1397 jpegBufferSize,
1398 /*groupId*/ -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001399 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1400 RequestAvailableDynamicRangeProfilesMap::
1401 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08001402 std::vector<Stream> streams = {videoStream, blobStream};
1403 StreamConfiguration config;
1404
1405 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
1406 jpegBufferSize);
1407 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
1408 /*expectStreamCombQuery*/ false);
1409
1410 config.streamConfigCounter = streamConfigCounter++;
1411 std::vector<HalStream> halConfigs;
1412 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1413 ASSERT_TRUE(ret.isOk());
1414 ASSERT_EQ(2u, halConfigs.size());
1415 }
1416 }
1417
1418 ndk::ScopedAStatus ret = mSession->close();
1419 mSession = nullptr;
1420 ASSERT_TRUE(ret.isOk());
1421 }
1422}
1423
1424// Generate and verify a camera capture request
1425TEST_P(CameraAidlTest, processCaptureRequestPreview) {
1426 // TODO(b/220897574): Failing with BUFFER_ERROR
1427 processCaptureRequestInternal(GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, RequestTemplate::PREVIEW,
1428 false /*secureOnlyCameras*/);
1429}
1430
1431// Generate and verify a secure camera capture request
1432TEST_P(CameraAidlTest, processSecureCaptureRequest) {
1433 processCaptureRequestInternal(GRALLOC1_PRODUCER_USAGE_PROTECTED, RequestTemplate::STILL_CAPTURE,
1434 true /*secureOnlyCameras*/);
1435}
1436
1437TEST_P(CameraAidlTest, processCaptureRequestPreviewStabilization) {
1438 std::unordered_map<std::string, nsecs_t> cameraDeviceToTimeLag;
1439 processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ false,
1440 cameraDeviceToTimeLag);
1441 processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ true,
1442 cameraDeviceToTimeLag);
1443}
1444
1445// Generate and verify a multi-camera capture request
1446TEST_P(CameraAidlTest, processMultiCaptureRequestPreview) {
1447 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1448 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
1449 static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
1450 int64_t bufferId = 1;
1451 uint32_t frameNumber = 1;
1452 std::vector<uint8_t> settings;
1453 std::vector<uint8_t> emptySettings;
1454 std::string invalidPhysicalId = "-1";
1455
1456 for (const auto& name : cameraDeviceNames) {
1457 std::string version, deviceId;
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +00001458 ALOGI("processMultiCaptureRequestPreview: Test device %s", name.c_str());
Avichal Rakesh362242f2022-02-08 12:40:53 -08001459 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1460 CameraMetadata metadata;
1461
1462 std::shared_ptr<ICameraDevice> unusedDevice;
1463 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &metadata /*out*/,
1464 &unusedDevice /*out*/);
1465
1466 camera_metadata_t* staticMeta =
1467 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
1468 Status rc = isLogicalMultiCamera(staticMeta);
1469 if (Status::OPERATION_NOT_SUPPORTED == rc) {
1470 ndk::ScopedAStatus ret = mSession->close();
1471 mSession = nullptr;
1472 ASSERT_TRUE(ret.isOk());
1473 continue;
1474 }
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +00001475 ASSERT_EQ(Status::OK, rc);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001476
1477 std::unordered_set<std::string> physicalIds;
1478 rc = getPhysicalCameraIds(staticMeta, &physicalIds);
1479 ASSERT_TRUE(Status::OK == rc);
1480 ASSERT_TRUE(physicalIds.size() > 1);
1481
1482 std::unordered_set<int32_t> physicalRequestKeyIDs;
1483 rc = getSupportedKeys(staticMeta, ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS,
1484 &physicalRequestKeyIDs);
1485 ASSERT_TRUE(Status::OK == rc);
1486 if (physicalRequestKeyIDs.empty()) {
1487 ndk::ScopedAStatus ret = mSession->close();
1488 mSession = nullptr;
1489 ASSERT_TRUE(ret.isOk());
1490 // The logical camera doesn't support any individual physical requests.
1491 continue;
1492 }
1493
1494 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultPreviewSettings;
1495 android::hardware::camera::common::V1_0::helper::CameraMetadata filteredSettings;
1496 constructFilteredSettings(mSession, physicalRequestKeyIDs, RequestTemplate::PREVIEW,
1497 &defaultPreviewSettings, &filteredSettings);
1498 if (filteredSettings.isEmpty()) {
1499 // No physical device settings in default request.
1500 ndk::ScopedAStatus ret = mSession->close();
1501 mSession = nullptr;
1502 ASSERT_TRUE(ret.isOk());
1503 continue;
1504 }
1505
1506 const camera_metadata_t* settingsBuffer = defaultPreviewSettings.getAndLock();
1507 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1508 settings.assign(rawSettingsBuffer,
1509 rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1510 CameraMetadata settingsMetadata = {settings};
1511 overrideRotateAndCrop(&settingsMetadata);
1512
1513 ndk::ScopedAStatus ret = mSession->close();
1514 mSession = nullptr;
1515 ASSERT_TRUE(ret.isOk());
1516
1517 // Leave only 2 physical devices in the id set.
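// After the first it++, 'it' refers to the second id in iteration order; the
// pre-increment inside erase() advances it to the third, so everything from the
// third id onward is removed and exactly two ids (including physicalDeviceId) remain.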
1518 auto it = physicalIds.begin();
1519 std::string physicalDeviceId = *it;
1520 it++;
1521 physicalIds.erase(++it, physicalIds.end());
1522 ASSERT_EQ(physicalIds.size(), 2u);
1523
1524 std::vector<HalStream> halStreams;
1525 bool supportsPartialResults = false;
1526 bool useHalBufManager = false;
1527 int32_t partialResultCount = 0;
1528 Stream previewStream;
1529 std::shared_ptr<DeviceCb> cb;
1530
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +00001531 configurePreviewStreams(
1532 name, mProvider, &previewThreshold, physicalIds, &mSession, &previewStream,
1533 &halStreams /*out*/, &supportsPartialResults /*out*/, &partialResultCount /*out*/,
1534 &useHalBufManager /*out*/, &cb /*out*/, 0 /*streamConfigCounter*/, true);
1535 if (mSession == nullptr) {
1536 // Stream combination not supported by the HAL; skip the test for this device
1537 continue;
1538 }
Avichal Rakesh362242f2022-02-08 12:40:53 -08001539
1540 ::aidl::android::hardware::common::fmq::MQDescriptor<
1541 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1542 descriptor;
1543 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1544 ASSERT_TRUE(resultQueueRet.isOk());
1545 std::shared_ptr<ResultMetadataQueue> resultQueue =
1546 std::make_shared<ResultMetadataQueue>(descriptor);
1547 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1548 ALOGE("%s: HAL returned an empty result metadata fmq, not using it", __func__);
1549 resultQueue = nullptr;
1550 // Don't use the queue onwards.
1551 }
1552
1553 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1554 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1555 partialResultCount, physicalIds, resultQueue);
1556
1557 std::vector<CaptureRequest> requests(1);
1558 CaptureRequest& request = requests[0];
1559 request.frameNumber = frameNumber;
1560 request.fmqSettingsSize = 0;
Emilian Peev3d919f92022-04-20 13:50:59 -07001561 request.settings = settingsMetadata;
Avichal Rakesh362242f2022-02-08 12:40:53 -08001562
1563 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1564
1565 std::vector<buffer_handle_t> graphicBuffers;
1566 graphicBuffers.reserve(halStreams.size());
1567 outputBuffers.resize(halStreams.size());
1568 size_t k = 0;
1569 for (const auto& halStream : halStreams) {
1570 buffer_handle_t buffer_handle;
1571 if (useHalBufManager) {
1572 outputBuffers[k] = {halStream.id, /*bufferId*/ 0, NativeHandle(),
1573 BufferStatus::OK, NativeHandle(), NativeHandle()};
1574 } else {
1575 allocateGraphicBuffer(previewStream.width, previewStream.height,
1576 android_convertGralloc1To0Usage(
1577 static_cast<uint64_t>(halStream.producerUsage),
1578 static_cast<uint64_t>(halStream.consumerUsage)),
1579 halStream.overrideFormat, &buffer_handle);
1580 graphicBuffers.push_back(buffer_handle);
1581 outputBuffers[k] = {
1582 halStream.id, bufferId, ::android::makeToAidl(buffer_handle),
1583 BufferStatus::OK, NativeHandle(), NativeHandle()};
1584 bufferId++;
1585 }
1586 k++;
1587 }
1588
1589 std::vector<PhysicalCameraSetting> camSettings(1);
1590 const camera_metadata_t* filteredSettingsBuffer = filteredSettings.getAndLock();
1591 uint8_t* rawFilteredSettingsBuffer = (uint8_t*)filteredSettingsBuffer;
1592 camSettings[0].settings = {std::vector(
1593 rawFilteredSettingsBuffer,
1594 rawFilteredSettingsBuffer + get_camera_metadata_size(filteredSettingsBuffer))};
1595 overrideRotateAndCrop(&camSettings[0].settings);
1596 camSettings[0].fmqSettingsSize = 0;
1597 camSettings[0].physicalCameraId = physicalDeviceId;
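// request.settings applies to the logical camera as a whole, while camSettings
// targets only physicalDeviceId; per the camera AIDL contract, only keys listed in
// ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS may be overridden this way.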
1598
1599 request.inputBuffer = {
1600 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
1601 request.physicalCameraSettings = camSettings;
1602
1603 {
1604 std::unique_lock<std::mutex> l(mLock);
1605 mInflightMap.clear();
1606 mInflightMap[frameNumber] = inflightReq;
1607 }
1608
1609 int32_t numRequestProcessed = 0;
1610 std::vector<BufferCache> cachesToRemove;
1611 ndk::ScopedAStatus returnStatus =
1612 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1613 ASSERT_TRUE(returnStatus.isOk());
1614 ASSERT_EQ(numRequestProcessed, 1u);
1615
1616 {
1617 std::unique_lock<std::mutex> l(mLock);
1618 while (!inflightReq->errorCodeValid &&
1619 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1620 auto timeout = std::chrono::system_clock::now() +
1621 std::chrono::seconds(kStreamBufferTimeoutSec);
1622 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1623 }
1624
1625 ASSERT_FALSE(inflightReq->errorCodeValid);
1626 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1627
1628 request.frameNumber++;
1629 // Empty settings should be supported after the first call
1630 // for repeating requests.
1631 request.settings.metadata.clear();
1632 request.physicalCameraSettings[0].settings.metadata.clear();
1633 // The buffer has been registered to HAL by bufferId, so per
1634 // API contract we should send a null handle for this buffer
1635 request.outputBuffers[0].buffer = NativeHandle();
1636 mInflightMap.clear();
1637 inflightReq = std::make_shared<InFlightRequest>(
1638 static_cast<ssize_t>(physicalIds.size()), false, supportsPartialResults,
1639 partialResultCount, physicalIds, resultQueue);
1640 mInflightMap[request.frameNumber] = inflightReq;
1641 }
1642
1643 returnStatus =
1644 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1645 ASSERT_TRUE(returnStatus.isOk());
1646 ASSERT_EQ(numRequestProcessed, 1u);
1647
1648 {
1649 std::unique_lock<std::mutex> l(mLock);
1650 while (!inflightReq->errorCodeValid &&
1651 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1652 auto timeout = std::chrono::system_clock::now() +
1653 std::chrono::seconds(kStreamBufferTimeoutSec);
1654 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1655 }
1656
1657 ASSERT_FALSE(inflightReq->errorCodeValid);
1658 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1659 }
1660
1661 // An invalid physical camera id should cause processCaptureRequest to fail
1662 frameNumber++;
1663 camSettings[0].physicalCameraId = invalidPhysicalId;
1664 camSettings[0].settings.metadata = settings;
1665
1666 request.physicalCameraSettings = camSettings; // Invalid camera settings
1667 returnStatus =
1668 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1669 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
1670 returnStatus.getServiceSpecificError());
1671
1672 defaultPreviewSettings.unlock(settingsBuffer);
1673 filteredSettings.unlock(filteredSettingsBuffer);
1674
1675 if (useHalBufManager) {
1676 std::vector<int32_t> streamIds(halStreams.size());
1677 for (size_t i = 0; i < streamIds.size(); i++) {
1678 streamIds[i] = halStreams[i].id;
1679 }
1680 verifyBuffersReturned(mSession, streamIds, cb);
1681 }
1682
1683 ret = mSession->close();
1684 mSession = nullptr;
1685 ASSERT_TRUE(ret.isOk());
1686 }
1687}
1688
1689// Generate and verify an ultra high resolution capture request
1690TEST_P(CameraAidlTest, processUltraHighResolutionRequest) {
1691 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1692 int64_t bufferId = 1;
1693 int32_t frameNumber = 1;
1694 CameraMetadata settings;
1695
1696 for (const auto& name : cameraDeviceNames) {
1697 std::string version, deviceId;
1698 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1699 CameraMetadata meta;
1700
1701 std::shared_ptr<ICameraDevice> unusedDevice;
1702 openEmptyDeviceSession(name, mProvider, &mSession, &meta, &unusedDevice);
1703 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1704 if (!isUltraHighResolution(staticMeta)) {
1705 ndk::ScopedAStatus ret = mSession->close();
1706 mSession = nullptr;
1707 ASSERT_TRUE(ret.isOk());
1708 continue;
1709 }
1710 CameraMetadata req;
1711 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
1712 ndk::ScopedAStatus ret =
1713 mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE, &req);
1714 ASSERT_TRUE(ret.isOk());
1715
1716 const camera_metadata_t* metadata =
1717 reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
1718 size_t expectedSize = req.metadata.size();
1719 int result = validate_camera_metadata_structure(metadata, &expectedSize);
1720 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
1721
1722 size_t entryCount = get_camera_metadata_entry_count(metadata);
1723 ASSERT_GT(entryCount, 0u);
1724 defaultSettings = metadata;
1725 uint8_t sensorPixelMode =
1726 static_cast<uint8_t>(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
1727 ASSERT_EQ(::android::OK,
1728 defaultSettings.update(ANDROID_SENSOR_PIXEL_MODE, &sensorPixelMode, 1));
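// ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION asks the HAL to run the sensor at its
// full pixel-array resolution (rather than the default, typically binned, mode), which
// matches the maxResolution stream configuration requested below.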
1729
1730 const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
1731 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1732 settings.metadata = std::vector(
1733 rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1734 overrideRotateAndCrop(&settings);
1735
1736 ret = mSession->close();
1737 mSession = nullptr;
1738 ASSERT_TRUE(ret.isOk());
1739
1740 std::vector<HalStream> halStreams;
1741 bool supportsPartialResults = false;
1742 bool useHalBufManager = false;
1743 int32_t partialResultCount = 0;
1744 Stream previewStream;
1745 std::shared_ptr<DeviceCb> cb;
1746
1747 std::list<PixelFormat> pixelFormats = {PixelFormat::YCBCR_420_888, PixelFormat::RAW16};
1748 for (PixelFormat format : pixelFormats) {
Emilian Peevdda1eb72022-07-28 16:37:40 -07001749 previewStream.usage =
1750 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1751 GRALLOC1_CONSUMER_USAGE_CPU_READ);
1752 previewStream.dataSpace = Dataspace::UNKNOWN;
Avichal Rakesh362242f2022-02-08 12:40:53 -08001753 configureStreams(name, mProvider, format, &mSession, &previewStream, &halStreams,
1754 &supportsPartialResults, &partialResultCount, &useHalBufManager, &cb,
1755 0, /*maxResolution*/ true);
1756 ASSERT_NE(mSession, nullptr);
1757
1758 ::aidl::android::hardware::common::fmq::MQDescriptor<
1759 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1760 descriptor;
1761 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1762 ASSERT_TRUE(resultQueueRet.isOk());
1763
1764 std::shared_ptr<ResultMetadataQueue> resultQueue =
1765 std::make_shared<ResultMetadataQueue>(descriptor);
1766 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1767 ALOGE("%s: HAL returned an empty result metadata fmq, not using it", __func__);
1768 resultQueue = nullptr;
1769 // Don't use the queue onwards.
1770 }
1771
1772 std::vector<buffer_handle_t> graphicBuffers;
1773 graphicBuffers.reserve(halStreams.size());
1774 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1775 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1776 partialResultCount, std::unordered_set<std::string>(), resultQueue);
1777
1778 std::vector<CaptureRequest> requests(1);
1779 CaptureRequest& request = requests[0];
1780 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1781 outputBuffers.resize(halStreams.size());
1782
1783 size_t k = 0;
1784 for (const auto& halStream : halStreams) {
1785 buffer_handle_t buffer_handle;
1786 if (useHalBufManager) {
1787 outputBuffers[k] = {halStream.id, 0,
1788 NativeHandle(), BufferStatus::OK,
1789 NativeHandle(), NativeHandle()};
1790 } else {
1791 allocateGraphicBuffer(previewStream.width, previewStream.height,
1792 android_convertGralloc1To0Usage(
1793 static_cast<uint64_t>(halStream.producerUsage),
1794 static_cast<uint64_t>(halStream.consumerUsage)),
1795 halStream.overrideFormat, &buffer_handle);
1796 graphicBuffers.push_back(buffer_handle);
1797 outputBuffers[k] = {
1798 halStream.id, bufferId, ::android::makeToAidl(buffer_handle),
1799 BufferStatus::OK, NativeHandle(), NativeHandle()};
1800 bufferId++;
1801 }
1802 k++;
1803 }
1804
1805 request.inputBuffer = {
1806 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
1807 request.frameNumber = frameNumber;
1808 request.fmqSettingsSize = 0;
1809 request.settings = settings;
1810 request.inputWidth = 0;
1811 request.inputHeight = 0;
1812
1813 {
1814 std::unique_lock<std::mutex> l(mLock);
1815 mInflightMap.clear();
1816 mInflightMap[frameNumber] = inflightReq;
1817 }
1818
1819 int32_t numRequestProcessed = 0;
1820 std::vector<BufferCache> cachesToRemove;
1821 ndk::ScopedAStatus returnStatus =
1822 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1823 ASSERT_TRUE(returnStatus.isOk());
1824 ASSERT_EQ(numRequestProcessed, 1u);
1825
1826 {
1827 std::unique_lock<std::mutex> l(mLock);
1828 while (!inflightReq->errorCodeValid &&
1829 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1830 auto timeout = std::chrono::system_clock::now() +
1831 std::chrono::seconds(kStreamBufferTimeoutSec);
1832 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1833 }
1834
1835 ASSERT_FALSE(inflightReq->errorCodeValid);
1836 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1837 }
1838 if (useHalBufManager) {
1839 std::vector<int32_t> streamIds(halStreams.size());
1840 for (size_t i = 0; i < streamIds.size(); i++) {
1841 streamIds[i] = halStreams[i].id;
1842 }
1843 verifyBuffersReturned(mSession, streamIds, cb);
1844 }
1845
1846 ret = mSession->close();
1847 mSession = nullptr;
1848 ASSERT_TRUE(ret.isOk());
1849 }
1850 }
1851}
1852
1853 // Generate and verify a 10-bit dynamic range request
1854TEST_P(CameraAidlTest, process10BitDynamicRangeRequest) {
1855 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001856 CameraMetadata settings;
1857
1858 for (const auto& name : cameraDeviceNames) {
1859 std::string version, deviceId;
1860 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1861 CameraMetadata meta;
1862 std::shared_ptr<ICameraDevice> device;
1863 openEmptyDeviceSession(name, mProvider, &mSession, &meta, &device);
1864 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1865 if (!is10BitDynamicRangeCapable(staticMeta)) {
1866 ndk::ScopedAStatus ret = mSession->close();
1867 mSession = nullptr;
1868 ASSERT_TRUE(ret.isOk());
1869 continue;
1870 }
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001871 std::vector<RequestAvailableDynamicRangeProfilesMap> profileList;
Avichal Rakesh362242f2022-02-08 12:40:53 -08001872 get10BitDynamicRangeProfiles(staticMeta, &profileList);
1873 ASSERT_FALSE(profileList.empty());
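// The profiles come from the device's advertised 10-bit dynamic range profile map
// (HLG10, HDR10, HDR10+, Dolby Vision variants, etc.); each one is configured and
// captured in the loop below, and verify10BitMetadata() checks the returned buffers.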
1874
1875 CameraMetadata req;
1876 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
1877 ndk::ScopedAStatus ret =
Emilian Peevdda1eb72022-07-28 16:37:40 -07001878 mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &req);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001879 ASSERT_TRUE(ret.isOk());
1880
1881 const camera_metadata_t* metadata =
1882 reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
1883 size_t expectedSize = req.metadata.size();
1884 int result = validate_camera_metadata_structure(metadata, &expectedSize);
1885 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
1886
1887 size_t entryCount = get_camera_metadata_entry_count(metadata);
1888 ASSERT_GT(entryCount, 0u);
1889 defaultSettings = metadata;
1890
1891 const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
1892 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1893 settings.metadata = std::vector(
1894 rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1895 overrideRotateAndCrop(&settings);
1896
1897 ret = mSession->close();
1898 mSession = nullptr;
1899 ASSERT_TRUE(ret.isOk());
1900
1901 std::vector<HalStream> halStreams;
1902 bool supportsPartialResults = false;
1903 bool useHalBufManager = false;
1904 int32_t partialResultCount = 0;
1905 Stream previewStream;
1906 std::shared_ptr<DeviceCb> cb;
1907 for (const auto& profile : profileList) {
Emilian Peevdda1eb72022-07-28 16:37:40 -07001908 previewStream.usage =
1909 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1910 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
1911 previewStream.dataSpace = getDataspace(PixelFormat::IMPLEMENTATION_DEFINED);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001912 configureStreams(name, mProvider, PixelFormat::IMPLEMENTATION_DEFINED, &mSession,
1913 &previewStream, &halStreams, &supportsPartialResults,
1914 &partialResultCount, &useHalBufManager, &cb, 0,
1915 /*maxResolution*/ false, profile);
1916 ASSERT_NE(mSession, nullptr);
1917
1918 ::aidl::android::hardware::common::fmq::MQDescriptor<
1919 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1920 descriptor;
1921 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1922 ASSERT_TRUE(resultQueueRet.isOk());
1923
1924 std::shared_ptr<ResultMetadataQueue> resultQueue =
1925 std::make_shared<ResultMetadataQueue>(descriptor);
1926 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1927 ALOGE("%s: HAL returned an empty result metadata fmq, not using it", __func__);
1928 resultQueue = nullptr;
1929 // Don't use the queue onwards.
1930 }
1931
Emilian Peevdda1eb72022-07-28 16:37:40 -07001932 mInflightMap.clear();
1933 // Stream for as long as needed to fill the HAL in-flight queue
1934 std::vector<CaptureRequest> requests(halStreams[0].maxBuffers);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001935
Emilian Peev470d1382023-01-18 11:09:09 -08001936 for (int32_t requestId = 0; requestId < requests.size(); requestId++) {
Emilian Peevdda1eb72022-07-28 16:37:40 -07001937 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1938 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1939 partialResultCount, std::unordered_set<std::string>(), resultQueue);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001940
Emilian Peev470d1382023-01-18 11:09:09 -08001941 CaptureRequest& request = requests[requestId];
Emilian Peevdda1eb72022-07-28 16:37:40 -07001942 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1943 outputBuffers.resize(halStreams.size());
Avichal Rakesh362242f2022-02-08 12:40:53 -08001944
Emilian Peevdda1eb72022-07-28 16:37:40 -07001945 size_t k = 0;
1946 inflightReq->mOutstandingBufferIds.resize(halStreams.size());
1947 std::vector<buffer_handle_t> graphicBuffers;
1948 graphicBuffers.reserve(halStreams.size());
Avichal Rakesh362242f2022-02-08 12:40:53 -08001949
Emilian Peev470d1382023-01-18 11:09:09 -08001950 auto bufferId = requestId + 1; // Buffer id value 0 is not valid
Emilian Peevdda1eb72022-07-28 16:37:40 -07001951 for (const auto& halStream : halStreams) {
1952 buffer_handle_t buffer_handle;
1953 if (useHalBufManager) {
1954 outputBuffers[k] = {halStream.id, 0,
1955 NativeHandle(), BufferStatus::OK,
1956 NativeHandle(), NativeHandle()};
1957 } else {
1958 auto usage = android_convertGralloc1To0Usage(
1959 static_cast<uint64_t>(halStream.producerUsage),
1960 static_cast<uint64_t>(halStream.consumerUsage));
1961 allocateGraphicBuffer(previewStream.width, previewStream.height, usage,
1962 halStream.overrideFormat, &buffer_handle);
1963
1964 inflightReq->mOutstandingBufferIds[halStream.id][bufferId] = buffer_handle;
1965 graphicBuffers.push_back(buffer_handle);
1966 outputBuffers[k] = {halStream.id, bufferId,
1967 android::makeToAidl(buffer_handle), BufferStatus::OK, NativeHandle(),
1968 NativeHandle()};
Emilian Peevdda1eb72022-07-28 16:37:40 -07001969 }
1970 k++;
Avichal Rakesh362242f2022-02-08 12:40:53 -08001971 }
Avichal Rakesh362242f2022-02-08 12:40:53 -08001972
Emilian Peevdda1eb72022-07-28 16:37:40 -07001973 request.inputBuffer = {
1974 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
Emilian Peev470d1382023-01-18 11:09:09 -08001975 request.frameNumber = bufferId;
Emilian Peevdda1eb72022-07-28 16:37:40 -07001976 request.fmqSettingsSize = 0;
1977 request.settings = settings;
1978 request.inputWidth = 0;
1979 request.inputHeight = 0;
Avichal Rakesh362242f2022-02-08 12:40:53 -08001980
Emilian Peevdda1eb72022-07-28 16:37:40 -07001981 {
1982 std::unique_lock<std::mutex> l(mLock);
Emilian Peev470d1382023-01-18 11:09:09 -08001983 mInflightMap[bufferId] = inflightReq;
Emilian Peevdda1eb72022-07-28 16:37:40 -07001984 }
1985
Avichal Rakesh362242f2022-02-08 12:40:53 -08001986 }
1987
1988 int32_t numRequestProcessed = 0;
1989 std::vector<BufferCache> cachesToRemove;
1990 ndk::ScopedAStatus returnStatus =
Emilian Peevdda1eb72022-07-28 16:37:40 -07001991 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001992 ASSERT_TRUE(returnStatus.isOk());
Emilian Peevdda1eb72022-07-28 16:37:40 -07001993 ASSERT_EQ(numRequestProcessed, requests.size());
Avichal Rakesh362242f2022-02-08 12:40:53 -08001994
Emilian Peevdda1eb72022-07-28 16:37:40 -07001995 returnStatus = mSession->repeatingRequestEnd(requests.size() - 1,
1996 std::vector<int32_t> {halStreams[0].id});
1997 ASSERT_TRUE(returnStatus.isOk());
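// repeatingRequestEnd() is expected to tell the HAL that the repeating sequence queued
// above ends at the given frame number on the listed stream, so it can drain its
// pipeline instead of waiting for further requests.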
1998
Emilian Peev470d1382023-01-18 11:09:09 -08001999 // We are keeping frame numbers and buffer ids consistent. A buffer id value of 0
2000 // is used to indicate a buffer that is not present/available, so buffer ids as well
2001 // as frame numbers begin with 1.
2002 for (int32_t frameNumber = 1; frameNumber <= requests.size(); frameNumber++) {
Emilian Peevdda1eb72022-07-28 16:37:40 -07002003 const auto& inflightReq = mInflightMap[frameNumber];
Avichal Rakesh362242f2022-02-08 12:40:53 -08002004 std::unique_lock<std::mutex> l(mLock);
2005 while (!inflightReq->errorCodeValid &&
2006 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2007 auto timeout = std::chrono::system_clock::now() +
2008 std::chrono::seconds(kStreamBufferTimeoutSec);
2009 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2010 }
2011
Shuzhen Wang0f56c562023-04-03 16:58:59 -07002012 waitForReleaseFence(inflightReq->resultOutputBuffers);
2013
Avichal Rakesh362242f2022-02-08 12:40:53 -08002014 ASSERT_FALSE(inflightReq->errorCodeValid);
2015 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2016 verify10BitMetadata(mHandleImporter, *inflightReq, profile);
2017 }
Emilian Peevdda1eb72022-07-28 16:37:40 -07002018
Avichal Rakesh362242f2022-02-08 12:40:53 -08002019 if (useHalBufManager) {
2020 std::vector<int32_t> streamIds(halStreams.size());
2021 for (size_t i = 0; i < streamIds.size(); i++) {
2022 streamIds[i] = halStreams[i].id;
2023 }
2024 mSession->signalStreamFlush(streamIds, /*streamConfigCounter*/ 0);
2025 cb->waitForBuffersReturned();
2026 }
2027
2028 ret = mSession->close();
2029 mSession = nullptr;
2030 ASSERT_TRUE(ret.isOk());
2031 }
2032 }
2033}
2034
Austin Borger4728fc42022-07-15 11:27:53 -07002035TEST_P(CameraAidlTest, process8BitColorSpaceRequests) {
Austin Borger54b22362023-03-22 11:25:06 -07002036 static int profiles[] = {ColorSpaceNamed::DISPLAY_P3, ColorSpaceNamed::SRGB};
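// DISPLAY_P3 and SRGB are the 8-bit color spaces exercised here; each is combined
// with the STANDARD dynamic range profile in processColorSpaceRequest() below.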
Austin Borger4728fc42022-07-15 11:27:53 -07002037
2038 for (int32_t i = 0; i < sizeof(profiles) / sizeof(profiles[0]); i++) {
2039 processColorSpaceRequest(static_cast<RequestAvailableColorSpaceProfilesMap>(profiles[i]),
2040 static_cast<RequestAvailableDynamicRangeProfilesMap>(
2041 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD));
2042 }
2043}
2044
2045TEST_P(CameraAidlTest, process10BitColorSpaceRequests) {
2046 static const camera_metadata_enum_android_request_available_dynamic_range_profiles_map
2047 dynamicRangeProfiles[] = {
2048 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10,
2049 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10,
2050 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS,
2051 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF,
2052 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO,
2053 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM,
2054 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO,
2055 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF,
2056 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF_PO,
2057 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM,
2058 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM_PO
2059 };
2060
Austin Borger54b22362023-03-22 11:25:06 -07002061 // Process all dynamic range profiles with BT2020_HLG
Austin Borger4728fc42022-07-15 11:27:53 -07002062 for (int32_t i = 0; i < sizeof(dynamicRangeProfiles) / sizeof(dynamicRangeProfiles[0]); i++) {
2063 processColorSpaceRequest(
Austin Borger54b22362023-03-22 11:25:06 -07002064 static_cast<RequestAvailableColorSpaceProfilesMap>(ColorSpaceNamed::BT2020_HLG),
Austin Borger4728fc42022-07-15 11:27:53 -07002065 static_cast<RequestAvailableDynamicRangeProfilesMap>(dynamicRangeProfiles[i]));
2066 }
2067}
2068
Shuzhen Wang4dd6a512022-11-08 20:47:20 +00002069TEST_P(CameraAidlTest, processZoomSettingsOverrideRequests) {
2070 const int32_t kFrameCount = 5;
2071 const int32_t kTestCases = 2;
Shuzhen Wang38ddb272023-05-22 09:40:28 -07002072 const bool kOverrideSequence[kTestCases][kFrameCount] = {// ZOOM, ZOOM, ZOOM, ZOOM, ZOOM;
2073 {true, true, true, true, true},
2074 // OFF, ZOOM, ZOOM, ZOOM, OFF;
2075 {false, true, true, true, false}};
Shuzhen Wang4dd6a512022-11-08 20:47:20 +00002076 const bool kExpectedOverrideResults[kTestCases][kFrameCount] = {
Shuzhen Wang38ddb272023-05-22 09:40:28 -07002077 // All results should be overridden except the last one. The last result's
2078 // zoom doesn't have speed-up.
2079 {true, true, true, true, false},
2080 // Because we require at least 1 frame speed-up, request #1, #2 and #3
2081 // will be overridden.
2082 {true, true, true, false, false}};
Shuzhen Wang4dd6a512022-11-08 20:47:20 +00002083
2084 for (int i = 0; i < kTestCases; i++) {
2085 processZoomSettingsOverrideRequests(kFrameCount, kOverrideSequence[i],
2086 kExpectedOverrideResults[i]);
2087 }
2088}
2089
Avichal Rakesh362242f2022-02-08 12:40:53 -08002090// Generate and verify a burst containing alternating sensor sensitivity values
2091TEST_P(CameraAidlTest, processCaptureRequestBurstISO) {
2092 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2093 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2094 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2095 int64_t bufferId = 1;
2096 int32_t frameNumber = 1;
2097 float isoTol = .03f;
2098 CameraMetadata settings;
2099
2100 for (const auto& name : cameraDeviceNames) {
2101 CameraMetadata meta;
2102 settings.metadata.clear();
2103 std::shared_ptr<ICameraDevice> unusedDevice;
2104 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2105 &unusedDevice /*out*/);
2106 camera_metadata_t* staticMetaBuffer =
2107 clone_camera_metadata(reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
2108 ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta(
2109 staticMetaBuffer);
2110
2111 camera_metadata_entry_t hwLevel = staticMeta.find(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL);
2112 ASSERT_TRUE(0 < hwLevel.count);
2113 if (ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED == hwLevel.data.u8[0] ||
2114 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL == hwLevel.data.u8[0]) {
2115 // Limited/External devices can skip this test
2116 ndk::ScopedAStatus ret = mSession->close();
2117 mSession = nullptr;
2118 ASSERT_TRUE(ret.isOk());
2119 continue;
2120 }
2121
2122 camera_metadata_entry_t isoRange = staticMeta.find(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE);
2123 ASSERT_EQ(isoRange.count, 2u);
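// The burst below alternates between the minimum and maximum advertised sensitivities
// and then checks that every result's ANDROID_SENSOR_SENSITIVITY is within
// isoTol (3%) of the requested value.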
2124
2125 ndk::ScopedAStatus ret = mSession->close();
2126 mSession = nullptr;
2127 ASSERT_TRUE(ret.isOk());
2128
2129 bool supportsPartialResults = false;
2130 bool useHalBufManager = false;
2131 int32_t partialResultCount = 0;
2132 Stream previewStream;
2133 std::vector<HalStream> halStreams;
2134 std::shared_ptr<DeviceCb> cb;
2135 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2136 &previewStream /*out*/, &halStreams /*out*/,
2137 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2138 &useHalBufManager /*out*/, &cb /*out*/);
2139
2140 ::aidl::android::hardware::common::fmq::MQDescriptor<
2141 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2142 descriptor;
2143 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2144 std::shared_ptr<ResultMetadataQueue> resultQueue =
2145 std::make_shared<ResultMetadataQueue>(descriptor);
2146 ASSERT_TRUE(resultQueueRet.isOk());
2147 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2148 ALOGE("%s: HAL returned an empty result metadata fmq, not using it", __func__);
2149 resultQueue = nullptr;
2150 // Don't use the queue onwards.
2151 }
2152
2153 ret = mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &settings);
2154 ASSERT_TRUE(ret.isOk());
2155
2156 ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
2157 std::vector<CaptureRequest> requests(kBurstFrameCount);
2158 std::vector<buffer_handle_t> buffers(kBurstFrameCount);
2159 std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount);
2160 std::vector<int32_t> isoValues(kBurstFrameCount);
2161 std::vector<CameraMetadata> requestSettings(kBurstFrameCount);
2162
2163 for (int32_t i = 0; i < kBurstFrameCount; i++) {
2164 std::unique_lock<std::mutex> l(mLock);
2165 CaptureRequest& request = requests[i];
2166 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2167 outputBuffers.resize(1);
2168 StreamBuffer& outputBuffer = outputBuffers[0];
2169
2170 isoValues[i] = ((i % 2) == 0) ? isoRange.data.i32[0] : isoRange.data.i32[1];
2171 if (useHalBufManager) {
2172 outputBuffer = {halStreams[0].id, 0,
2173 NativeHandle(), BufferStatus::OK,
2174 NativeHandle(), NativeHandle()};
2175 } else {
2176 allocateGraphicBuffer(previewStream.width, previewStream.height,
2177 android_convertGralloc1To0Usage(
2178 static_cast<uint64_t>(halStreams[0].producerUsage),
2179 static_cast<uint64_t>(halStreams[0].consumerUsage)),
2180 halStreams[0].overrideFormat, &buffers[i]);
2181 outputBuffer = {halStreams[0].id, bufferId + i, ::android::makeToAidl(buffers[i]),
2182 BufferStatus::OK, NativeHandle(), NativeHandle()};
2183 }
2184
2185 requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));
2186
2187 // Disable all 3A routines
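// With ANDROID_CONTROL_MODE_OFF, auto-exposure cannot override the manual
// sensitivity set below, so the requested ISO must show up in the capture result.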
2188 uint8_t mode = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF);
2189 ASSERT_EQ(::android::OK, requestMeta.update(ANDROID_CONTROL_MODE, &mode, 1));
2190 ASSERT_EQ(::android::OK,
2191 requestMeta.update(ANDROID_SENSOR_SENSITIVITY, &isoValues[i], 1));
2192 camera_metadata_t* metaBuffer = requestMeta.release();
2193 uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2194 requestSettings[i].metadata = std::vector(
2195 rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2196 overrideRotateAndCrop(&(requestSettings[i]));
2197
2198 request.frameNumber = frameNumber + i;
2199 request.fmqSettingsSize = 0;
2200 request.settings = requestSettings[i];
2201 request.inputBuffer = {
2202 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2203
2204 inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2205 partialResultCount, resultQueue);
2206 mInflightMap[frameNumber + i] = inflightReqs[i];
2207 }
2208
2209 int32_t numRequestProcessed = 0;
2210 std::vector<BufferCache> cachesToRemove;
2211
2212 ndk::ScopedAStatus returnStatus =
2213 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2214 ASSERT_TRUE(returnStatus.isOk());
2215 ASSERT_EQ(numRequestProcessed, kBurstFrameCount);
2216
2217 for (size_t i = 0; i < kBurstFrameCount; i++) {
2218 std::unique_lock<std::mutex> l(mLock);
2219 while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
2220 (!inflightReqs[i]->haveResultMetadata))) {
2221 auto timeout = std::chrono::system_clock::now() +
2222 std::chrono::seconds(kStreamBufferTimeoutSec);
2223 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2224 }
2225
2226 ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
2227 ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
2228 ASSERT_EQ(previewStream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
2229 ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
2230 ASSERT_TRUE(inflightReqs[i]->collectedResult.exists(ANDROID_SENSOR_SENSITIVITY));
2231 camera_metadata_entry_t isoResult =
2232 inflightReqs[i]->collectedResult.find(ANDROID_SENSOR_SENSITIVITY);
2233 ASSERT_TRUE(std::abs(isoResult.data.i32[0] - isoValues[i]) <=
2234 std::round(isoValues[i] * isoTol));
2235 }
2236
2237 if (useHalBufManager) {
2238 verifyBuffersReturned(mSession, previewStream.id, cb);
2239 }
2240 ret = mSession->close();
2241 mSession = nullptr;
2242 ASSERT_TRUE(ret.isOk());
2243 }
2244}
2245
2246// Test whether an incorrect capture request with missing settings will
2247// be reported correctly.
2248TEST_P(CameraAidlTest, processCaptureRequestInvalidSinglePreview) {
2249 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2250 std::vector<AvailableStream> outputPreviewStreams;
2251 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2252 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2253 int64_t bufferId = 1;
2254 int32_t frameNumber = 1;
2255 CameraMetadata settings;
2256
2257 for (const auto& name : cameraDeviceNames) {
2258 Stream previewStream;
2259 std::vector<HalStream> halStreams;
2260 std::shared_ptr<DeviceCb> cb;
2261 bool supportsPartialResults = false;
2262 bool useHalBufManager = false;
2263 int32_t partialResultCount = 0;
2264 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2265 &previewStream /*out*/, &halStreams /*out*/,
2266 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2267 &useHalBufManager /*out*/, &cb /*out*/);
2268 ASSERT_NE(mSession, nullptr);
2269 ASSERT_FALSE(halStreams.empty());
2270
2271 buffer_handle_t buffer_handle = nullptr;
2272
2273 if (useHalBufManager) {
2274 bufferId = 0;
2275 } else {
2276 allocateGraphicBuffer(previewStream.width, previewStream.height,
2277 android_convertGralloc1To0Usage(
2278 static_cast<uint64_t>(halStreams[0].producerUsage),
2279 static_cast<uint64_t>(halStreams[0].consumerUsage)),
2280 halStreams[0].overrideFormat, &buffer_handle);
2281 }
2282
2283 std::vector<CaptureRequest> requests(1);
2284 CaptureRequest& request = requests[0];
2285 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2286 outputBuffers.resize(1);
2287 StreamBuffer& outputBuffer = outputBuffers[0];
2288
2289 outputBuffer = {
2290 halStreams[0].id,
2291 bufferId,
2292 buffer_handle == nullptr ? NativeHandle() : ::android::makeToAidl(buffer_handle),
2293 BufferStatus::OK,
2294 NativeHandle(),
2295 NativeHandle()};
2296
2297 request.inputBuffer = {
2298 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2299 request.frameNumber = frameNumber;
2300 request.fmqSettingsSize = 0;
2301 request.settings = settings;
2302
2303 // Settings were not correctly initialized, so this request should fail
2304 int32_t numRequestProcessed = 0;
2305 std::vector<BufferCache> cachesToRemove;
2306 ndk::ScopedAStatus ret =
2307 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2308 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
2309 ASSERT_EQ(numRequestProcessed, 0u);
2310
2311 ret = mSession->close();
2312 mSession = nullptr;
2313 ASSERT_TRUE(ret.isOk());
2314 }
2315}
2316
2317// Verify camera offline session behavior
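// Flow: submit a burst of still-capture requests, switch the JPEG stream to an
// offline session, close the original device session, and then drain the remaining
// results through the offline session's callback and result metadata queue.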
2318TEST_P(CameraAidlTest, switchToOffline) {
2319 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2320 AvailableStream threshold = {kMaxStillWidth, kMaxStillHeight,
2321 static_cast<int32_t>(PixelFormat::BLOB)};
2322 int64_t bufferId = 1;
2323 int32_t frameNumber = 1;
2324 CameraMetadata settings;
2325
2326 for (const auto& name : cameraDeviceNames) {
2327 CameraMetadata meta;
2328 {
2329 std::shared_ptr<ICameraDevice> unusedDevice;
2330 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2331 &unusedDevice);
2332 camera_metadata_t* staticMetaBuffer = clone_camera_metadata(
2333 reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
2334 ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta(
2335 staticMetaBuffer);
2336
2337 if (isOfflineSessionSupported(staticMetaBuffer) != Status::OK) {
2338 ndk::ScopedAStatus ret = mSession->close();
2339 mSession = nullptr;
2340 ASSERT_TRUE(ret.isOk());
2341 continue;
2342 }
2343 ndk::ScopedAStatus ret = mSession->close();
2344 mSession = nullptr;
2345 ASSERT_TRUE(ret.isOk());
2346 }
2347
2348 bool supportsPartialResults = false;
2349 int32_t partialResultCount = 0;
2350 Stream stream;
2351 std::vector<HalStream> halStreams;
2352 std::shared_ptr<DeviceCb> cb;
2353 int32_t jpegBufferSize;
2354 bool useHalBufManager;
2355 configureOfflineStillStream(name, mProvider, &threshold, &mSession /*out*/, &stream /*out*/,
2356 &halStreams /*out*/, &supportsPartialResults /*out*/,
2357 &partialResultCount /*out*/, &cb /*out*/,
2358 &jpegBufferSize /*out*/, &useHalBufManager /*out*/);
2359
2360 auto ret = mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE,
2361 &settings);
2362 ASSERT_TRUE(ret.isOk());
2363
2364 ::aidl::android::hardware::common::fmq::MQDescriptor<
2365 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2366 descriptor;
2367
2368 ndk::ScopedAStatus resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2369 ASSERT_TRUE(resultQueueRet.isOk());
2370 std::shared_ptr<ResultMetadataQueue> resultQueue =
2371 std::make_shared<ResultMetadataQueue>(descriptor);
2372 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2373 ALOGE("%s: HAL returned an empty result metadata fmq, not using it", __func__);
2374 resultQueue = nullptr;
2375 // Don't use the queue onwards.
2376 }
2377
2378 ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
2379
2380 std::vector<buffer_handle_t> buffers(kBurstFrameCount);
2381 std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount);
2382 std::vector<CameraMetadata> requestSettings(kBurstFrameCount);
2383
2384 std::vector<CaptureRequest> requests(kBurstFrameCount);
2385
2386 HalStream halStream = halStreams[0];
2387 for (uint32_t i = 0; i < kBurstFrameCount; i++) {
2388 CaptureRequest& request = requests[i];
2389 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2390 outputBuffers.resize(1);
2391 StreamBuffer& outputBuffer = outputBuffers[0];
2392
2393 std::unique_lock<std::mutex> l(mLock);
2394 if (useHalBufManager) {
2395 outputBuffer = {halStream.id, 0, NativeHandle(), BufferStatus::OK, NativeHandle(),
2396 NativeHandle()};
2397 } else {
2398 // jpeg buffer (w,h) = (blobLen, 1)
2399 allocateGraphicBuffer(jpegBufferSize, /*height*/ 1,
2400 android_convertGralloc1To0Usage(
2401 static_cast<uint64_t>(halStream.producerUsage),
2402 static_cast<uint64_t>(halStream.consumerUsage)),
2403 halStream.overrideFormat, &buffers[i]);
2404 outputBuffer = {halStream.id, bufferId + i, ::android::makeToAidl(buffers[i]),
2405 BufferStatus::OK, NativeHandle(), NativeHandle()};
2406 }
2407
2408 requestMeta.clear();
2409 requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));
2410
2411 camera_metadata_t* metaBuffer = requestMeta.release();
2412 uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2413 requestSettings[i].metadata = std::vector(
2414 rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2415 overrideRotateAndCrop(&requestSettings[i]);
2416
2417 request.frameNumber = frameNumber + i;
2418 request.fmqSettingsSize = 0;
2419 request.settings = requestSettings[i];
2420 request.inputBuffer = {/*streamId*/ -1,
2421 /*bufferId*/ 0, NativeHandle(),
2422 BufferStatus::ERROR, NativeHandle(),
2423 NativeHandle()};
2424
2425 inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2426 partialResultCount, resultQueue);
2427 mInflightMap[frameNumber + i] = inflightReqs[i];
2428 }
2429
2430 int32_t numRequestProcessed = 0;
2431 std::vector<BufferCache> cachesToRemove;
2432
2433 ndk::ScopedAStatus returnStatus =
2434 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2435 ASSERT_TRUE(returnStatus.isOk());
2436 ASSERT_EQ(numRequestProcessed, kBurstFrameCount);
2437
2438 std::vector<int32_t> offlineStreamIds = {halStream.id};
2439 CameraOfflineSessionInfo offlineSessionInfo;
2440 std::shared_ptr<ICameraOfflineSession> offlineSession;
2441 returnStatus =
2442 mSession->switchToOffline(offlineStreamIds, &offlineSessionInfo, &offlineSession);
2443
2444 if (!halStreams[0].supportOffline) {
2445 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
2446 returnStatus.getServiceSpecificError());
2447 ret = mSession->close();
2448 mSession = nullptr;
2449 ASSERT_TRUE(ret.isOk());
2450 continue;
2451 }
2452
2453 ASSERT_TRUE(returnStatus.isOk());
2454 // The HAL might be unable to find any requests that qualify for offline mode.
2455 if (offlineSession == nullptr) {
2456 ret = mSession->close();
2457 mSession = nullptr;
2458 ASSERT_TRUE(ret.isOk());
2459 continue;
2460 }
2461
2462 ASSERT_EQ(offlineSessionInfo.offlineStreams.size(), 1u);
2463 ASSERT_EQ(offlineSessionInfo.offlineStreams[0].id, halStream.id);
2464 ASSERT_NE(offlineSessionInfo.offlineRequests.size(), 0u);
2465
2466 // Close the device session to make sure the offline session does not rely on it
2467 ret = mSession->close();
2468 mSession = nullptr;
2469 ASSERT_TRUE(ret.isOk());
2470
2471 ::aidl::android::hardware::common::fmq::MQDescriptor<
2472 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2473 offlineResultDescriptor;
2474
2475 auto offlineResultQueueRet =
2476 offlineSession->getCaptureResultMetadataQueue(&offlineResultDescriptor);
2477 std::shared_ptr<ResultMetadataQueue> offlineResultQueue =
2478 std::make_shared<ResultMetadataQueue>(offlineResultDescriptor);
2479 if (!offlineResultQueue->isValid() || offlineResultQueue->availableToWrite() <= 0) {
2480 ALOGE("%s: offline session returned an empty result metadata fmq, not using it", __func__);
2481 offlineResultQueue = nullptr;
2482 // Don't use the queue onwards.
2483 }
2484 ASSERT_TRUE(offlineResultQueueRet.isOk());
2485
2486 updateInflightResultQueue(offlineResultQueue);
2487
2488 ret = offlineSession->setCallback(cb);
2489 ASSERT_TRUE(ret.isOk());
2490
2491 for (size_t i = 0; i < kBurstFrameCount; i++) {
2492 std::unique_lock<std::mutex> l(mLock);
2493 while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
2494 (!inflightReqs[i]->haveResultMetadata))) {
2495 auto timeout = std::chrono::system_clock::now() +
2496 std::chrono::seconds(kStreamBufferTimeoutSec);
2497 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2498 }
2499
2500 ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
2501 ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
2502 ASSERT_EQ(stream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
2503 ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
2504 }
2505
2506 ret = offlineSession->close();
2507 ASSERT_TRUE(ret.isOk());
2508 }
2509}
2510
2511// Check whether an invalid capture request with missing output buffers
2512// will be reported correctly.
2513TEST_P(CameraAidlTest, processCaptureRequestInvalidBuffer) {
2514 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2515 std::vector<AvailableStream> outputBlobStreams;
2516 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2517 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2518 int32_t frameNumber = 1;
2519 CameraMetadata settings;
2520
2521 for (const auto& name : cameraDeviceNames) {
2522 Stream previewStream;
2523 std::vector<HalStream> halStreams;
2524 std::shared_ptr<DeviceCb> cb;
2525 bool supportsPartialResults = false;
2526 bool useHalBufManager = false;
2527 int32_t partialResultCount = 0;
2528 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2529 &previewStream /*out*/, &halStreams /*out*/,
2530 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2531 &useHalBufManager /*out*/, &cb /*out*/);
2532
2533 RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
2534 ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings);
2535 ASSERT_TRUE(ret.isOk());
2536 overrideRotateAndCrop(&settings);
2537
2538 std::vector<CaptureRequest> requests(1);
2539 CaptureRequest& request = requests[0];
2540 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2541 outputBuffers.resize(1);
2542 // Empty output buffer
2543 outputBuffers[0] = {
2544 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2545
2546 request.inputBuffer = {
2547 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2548 request.frameNumber = frameNumber;
2549 request.fmqSettingsSize = 0;
2550 request.settings = settings;
2551
2552 // Output buffers are missing, so this request should fail
2553 int32_t numRequestProcessed = 0;
2554 std::vector<BufferCache> cachesToRemove;
2555 ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2556 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
2557 ASSERT_EQ(numRequestProcessed, 0u);
2558
2559 ret = mSession->close();
2560 mSession = nullptr;
2561 ASSERT_TRUE(ret.isOk());
2562 }
2563}
2564
2565// Generate, trigger and flush a preview request
2566TEST_P(CameraAidlTest, flushPreviewRequest) {
2567 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2568 std::vector<AvailableStream> outputPreviewStreams;
2569 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2570 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2571 int64_t bufferId = 1;
2572 int32_t frameNumber = 1;
2573 CameraMetadata settings;
2574
2575 for (const auto& name : cameraDeviceNames) {
2576 Stream previewStream;
2577 std::vector<HalStream> halStreams;
2578 std::shared_ptr<DeviceCb> cb;
2579 bool supportsPartialResults = false;
2580 bool useHalBufManager = false;
2581 int32_t partialResultCount = 0;
2582
2583 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2584 &previewStream /*out*/, &halStreams /*out*/,
2585 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2586 &useHalBufManager /*out*/, &cb /*out*/);
2587
2588 ASSERT_NE(mSession, nullptr);
2589 ASSERT_NE(cb, nullptr);
2590 ASSERT_FALSE(halStreams.empty());
2591
2592 ::aidl::android::hardware::common::fmq::MQDescriptor<
2593 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2594 descriptor;
2595
2596 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2597 std::shared_ptr<ResultMetadataQueue> resultQueue =
2598 std::make_shared<ResultMetadataQueue>(descriptor);
2599 ASSERT_TRUE(resultQueueRet.isOk());
2600 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2601 ALOGE("%s: HAL returned an empty result metadata fmq, not using it", __func__);
2602 resultQueue = nullptr;
2603 // Don't use the queue onwards.
2604 }
2605
2606 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
2607 1, false, supportsPartialResults, partialResultCount, resultQueue);
2608 RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
2609
2610 ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings);
2611 ASSERT_TRUE(ret.isOk());
2612 overrideRotateAndCrop(&settings);
2613
2614 buffer_handle_t buffer_handle;
2615 std::vector<CaptureRequest> requests(1);
2616 CaptureRequest& request = requests[0];
2617 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2618 outputBuffers.resize(1);
2619 StreamBuffer& outputBuffer = outputBuffers[0];
2620 if (useHalBufManager) {
2621 bufferId = 0;
2622 outputBuffer = {halStreams[0].id, bufferId, NativeHandle(),
2623 BufferStatus::OK, NativeHandle(), NativeHandle()};
2624 } else {
2625 allocateGraphicBuffer(previewStream.width, previewStream.height,
2626 android_convertGralloc1To0Usage(
2627 static_cast<uint64_t>(halStreams[0].producerUsage),
2628 static_cast<uint64_t>(halStreams[0].consumerUsage)),
2629 halStreams[0].overrideFormat, &buffer_handle);
2630 outputBuffer = {halStreams[0].id, bufferId, ::android::makeToAidl(buffer_handle),
2631 BufferStatus::OK, NativeHandle(), NativeHandle()};
2632 }
2633
2634 request.frameNumber = frameNumber;
2635 request.fmqSettingsSize = 0;
2636 request.settings = settings;
2637 request.inputBuffer = {
2638 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2639
2640 {
2641 std::unique_lock<std::mutex> l(mLock);
2642 mInflightMap.clear();
2643 mInflightMap[frameNumber] = inflightReq;
2644 }
2645
2646 int32_t numRequestProcessed = 0;
2647 std::vector<BufferCache> cachesToRemove;
2648 ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2649 ASSERT_TRUE(ret.isOk());
2650 ASSERT_EQ(numRequestProcessed, 1u);
2651
2652 // Flush before waiting for request to complete.
2653 ndk::ScopedAStatus returnStatus = mSession->flush();
2654 ASSERT_TRUE(returnStatus.isOk());
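// After flush(), an in-flight request may either complete normally with valid
// buffers or finish with a REQUEST/RESULT/BUFFER error notification; a DEVICE
// error is never acceptable, which is what the checks below enforce.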
2655
2656 {
2657 std::unique_lock<std::mutex> l(mLock);
2658 while (!inflightReq->errorCodeValid &&
2659 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2660 auto timeout = std::chrono::system_clock::now() +
2661 std::chrono::seconds(kStreamBufferTimeoutSec);
2662 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2663 }
2664
2665 if (!inflightReq->errorCodeValid) {
2666 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2667 ASSERT_EQ(previewStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId);
2668 } else {
2669 switch (inflightReq->errorCode) {
2670 case ErrorCode::ERROR_REQUEST:
2671 case ErrorCode::ERROR_RESULT:
2672 case ErrorCode::ERROR_BUFFER:
2673 // Expected
2674 break;
2675 case ErrorCode::ERROR_DEVICE:
2676 default:
2677 FAIL() << "Unexpected error:"
2678 << static_cast<uint32_t>(inflightReq->errorCode);
2679 }
2680 }
2681 }
2682
2683 if (useHalBufManager) {
2684 verifyBuffersReturned(mSession, previewStream.id, cb);
2685 }
2686
2687 ret = mSession->close();
2688 mSession = nullptr;
2689 ASSERT_TRUE(ret.isOk());
2690 }
2691}
2692
2693// Verify that camera flushes correctly without any pending requests.
2694TEST_P(CameraAidlTest, flushEmpty) {
2695 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2696 std::vector<AvailableStream> outputPreviewStreams;
2697 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2698 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2699
2700 for (const auto& name : cameraDeviceNames) {
2701 Stream previewStream;
2702 std::vector<HalStream> halStreams;
2703 std::shared_ptr<DeviceCb> cb;
2704 bool supportsPartialResults = false;
2705 bool useHalBufManager = false;
2706
2707 int32_t partialResultCount = 0;
2708 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2709 &previewStream /*out*/, &halStreams /*out*/,
2710 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2711 &useHalBufManager /*out*/, &cb /*out*/);
2712
2713 ndk::ScopedAStatus returnStatus = mSession->flush();
2714 ASSERT_TRUE(returnStatus.isOk());
2715
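        // No requests are pending, so the flush must not trigger any callbacks; the wait on
        // the result condition below is therefore expected to time out.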
2716 {
2717 std::unique_lock<std::mutex> l(mLock);
2718 auto timeout = std::chrono::system_clock::now() +
2719 std::chrono::milliseconds(kEmptyFlushTimeoutMSec);
2720 ASSERT_EQ(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2721 }
2722
2723 ndk::ScopedAStatus ret = mSession->close();
2724 mSession = nullptr;
2725 ASSERT_TRUE(ret.isOk());
2726 }
2727}
2728
2729// Test the camera provider device state notification method
2730TEST_P(CameraAidlTest, providerDeviceStateNotification) {
2731 notifyDeviceState(ICameraProvider::DEVICE_STATE_BACK_COVERED);
2732 notifyDeviceState(ICameraProvider::DEVICE_STATE_NORMAL);
2733}
2734
2735// Verify that all supported stream formats and sizes can be configured
2736// successfully for the injection camera.
2737TEST_P(CameraAidlTest, configureInjectionStreamsAvailableOutputs) {
2738 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2739 std::vector<AvailableStream> outputStreams;
2740
2741 for (const auto& name : cameraDeviceNames) {
2742 CameraMetadata metadata;
2743
2744 std::shared_ptr<ICameraInjectionSession> injectionSession;
2745 std::shared_ptr<ICameraDevice> unusedDevice;
2746 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2747 &unusedDevice /*out*/);
2748 if (injectionSession == nullptr) {
2749 continue;
2750 }
2751
2752 camera_metadata_t* staticMetaBuffer =
2753 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2754 CameraMetadata chars;
2755 chars.metadata = metadata.metadata;
2756
2757 outputStreams.clear();
2758 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams));
2759 ASSERT_NE(0u, outputStreams.size());
2760
2761 int32_t jpegBufferSize = 0;
2762 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize));
2763 ASSERT_NE(0u, jpegBufferSize);
2764
2765 int32_t streamId = 0;
2766 int32_t streamConfigCounter = 0;
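        // Configure every advertised output size/format as a standalone stream; each such
        // single-stream configuration is expected to be accepted by the injection session.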
2767 for (auto& it : outputStreams) {
2768 Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format));
2769 Stream stream = {streamId,
2770 StreamType::OUTPUT,
2771 it.width,
2772 it.height,
2773 static_cast<PixelFormat>(it.format),
2774 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2775 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2776 dataspace,
2777 StreamRotation::ROTATION_0,
2778 std::string(),
2779 jpegBufferSize,
2780 0,
2781 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2782 RequestAvailableDynamicRangeProfilesMap::
2783 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2784
2785 std::vector<Stream> streams = {stream};
2786 StreamConfiguration config;
2787 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2788 jpegBufferSize);
2789
2790 config.streamConfigCounter = streamConfigCounter++;
2791 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
2792 ASSERT_TRUE(s.isOk());
2793 streamId++;
2794 }
2795
2796 std::shared_ptr<ICameraDeviceSession> session;
2797 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2798 ASSERT_TRUE(ret.isOk());
2799 ASSERT_NE(session, nullptr);
2800 ret = session->close();
2801 ASSERT_TRUE(ret.isOk());
2802 }
2803}
2804
2805// Check for correct handling of invalid configuration parameters for the injection camera.
2806TEST_P(CameraAidlTest, configureInjectionStreamsInvalidOutputs) {
2807 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2808 std::vector<AvailableStream> outputStreams;
2809
2810 for (const auto& name : cameraDeviceNames) {
2811 CameraMetadata metadata;
2812 std::shared_ptr<ICameraInjectionSession> injectionSession;
2813 std::shared_ptr<ICameraDevice> unusedDevice;
2814 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2815 &unusedDevice);
2816 if (injectionSession == nullptr) {
2817 continue;
2818 }
2819
2820 camera_metadata_t* staticMetaBuffer =
2821 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2822 std::shared_ptr<ICameraDeviceSession> session;
2823 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2824 ASSERT_TRUE(ret.isOk());
2825 ASSERT_NE(session, nullptr);
2826
2827 CameraMetadata chars;
2828 chars.metadata = metadata.metadata;
2829
2830 outputStreams.clear();
2831 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams));
2832 ASSERT_NE(0u, outputStreams.size());
2833
2834 int32_t jpegBufferSize = 0;
2835 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize));
2836 ASSERT_NE(0u, jpegBufferSize);
2837
2838 int32_t streamId = 0;
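        // A 0x0 stream resolution is invalid and must be rejected with either ILLEGAL_ARGUMENT
        // or INTERNAL_ERROR.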
2839 Stream stream = {streamId++,
2840 StreamType::OUTPUT,
2841 0,
2842 0,
2843 static_cast<PixelFormat>(outputStreams[0].format),
2844 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2845 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2846 Dataspace::UNKNOWN,
2847 StreamRotation::ROTATION_0,
2848 std::string(),
2849 jpegBufferSize,
2850 0,
2851 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2852 RequestAvailableDynamicRangeProfilesMap::
2853 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2854
2855 int32_t streamConfigCounter = 0;
2856 std::vector<Stream> streams = {stream};
2857 StreamConfiguration config;
2858 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2859 jpegBufferSize);
2860
2861 config.streamConfigCounter = streamConfigCounter++;
2862 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
2863 ASSERT_TRUE(
2864 (static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) == s.getServiceSpecificError()) ||
2865 (static_cast<int32_t>(Status::INTERNAL_ERROR) == s.getServiceSpecificError()));
2866
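        // An INT32_MAX x INT32_MAX resolution exceeds any supported output size and must be
        // rejected with ILLEGAL_ARGUMENT.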
2867 stream = {streamId++,
2868 StreamType::OUTPUT,
2869 INT32_MAX,
2870 INT32_MAX,
2871 static_cast<PixelFormat>(outputStreams[0].format),
2872 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2873 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2874 Dataspace::UNKNOWN,
2875 StreamRotation::ROTATION_0,
2876 std::string(),
2877 jpegBufferSize,
2878 0,
2879 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2880 RequestAvailableDynamicRangeProfilesMap::
2881 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2882
2883 streams[0] = stream;
2884 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2885 jpegBufferSize);
2886 config.streamConfigCounter = streamConfigCounter++;
2887 s = injectionSession->configureInjectionStreams(config, chars);
2888 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
2889
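        // For every supported size, an unsupported pixel format and an unsupported stream
        // rotation must each cause the configuration to fail with ILLEGAL_ARGUMENT.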
2890 for (auto& it : outputStreams) {
2891 stream = {streamId++,
2892 StreamType::OUTPUT,
2893 it.width,
2894 it.height,
2895 static_cast<PixelFormat>(INT32_MAX),
2896 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2897 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2898 Dataspace::UNKNOWN,
2899 StreamRotation::ROTATION_0,
2900 std::string(),
2901 jpegBufferSize,
2902 0,
2903 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2904 RequestAvailableDynamicRangeProfilesMap::
2905 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2906 streams[0] = stream;
2907 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2908 jpegBufferSize);
2909 config.streamConfigCounter = streamConfigCounter++;
2910 s = injectionSession->configureInjectionStreams(config, chars);
2911 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
2912
2913 stream = {streamId++,
2914 StreamType::OUTPUT,
2915 it.width,
2916 it.height,
2917 static_cast<PixelFormat>(it.format),
2918 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2919 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2920 Dataspace::UNKNOWN,
2921 static_cast<StreamRotation>(INT32_MAX),
2922 std::string(),
2923 jpegBufferSize,
2924 0,
2925 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2926 RequestAvailableDynamicRangeProfilesMap::
2927 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2928 streams[0] = stream;
2929 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2930 jpegBufferSize);
2931 config.streamConfigCounter = streamConfigCounter++;
2932 s = injectionSession->configureInjectionStreams(config, chars);
2933 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
2934 }
2935
2936 ret = session->close();
2937 ASSERT_TRUE(ret.isOk());
2938 }
2939}
2940
2941// Check whether session parameters are supported for the injection camera. If the HAL
2942// supports them, try to configure a preview stream using them.
2943TEST_P(CameraAidlTest, configureInjectionStreamsWithSessionParameters) {
2944 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2945 std::vector<AvailableStream> outputPreviewStreams;
2946 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2947 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2948
2949 for (const auto& name : cameraDeviceNames) {
2950 CameraMetadata metadata;
2951 std::shared_ptr<ICameraInjectionSession> injectionSession;
2952 std::shared_ptr<ICameraDevice> unusedDevice;
2953 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2954 &unusedDevice /*out*/);
2955 if (injectionSession == nullptr) {
2956 continue;
2957 }
2958
2959 std::shared_ptr<ICameraDeviceSession> session;
2960 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2961 ASSERT_TRUE(ret.isOk());
2962 ASSERT_NE(session, nullptr);
2963
2964 camera_metadata_t* staticMetaBuffer =
2965 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2966 CameraMetadata chars;
2967 chars.metadata = metadata.metadata;
2968
2969 std::unordered_set<int32_t> availableSessionKeys;
2970 Status rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
2971 &availableSessionKeys);
2972 ASSERT_EQ(Status::OK, rc);
2973 if (availableSessionKeys.empty()) {
2974 ret = session->close();
2975 ASSERT_TRUE(ret.isOk());
2976 continue;
2977 }
2978
2979 android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings;
2980 android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams,
2981 modifiedSessionParams;
2982 constructFilteredSettings(session, availableSessionKeys, RequestTemplate::PREVIEW,
2983 &previewRequestSettings, &sessionParams);
2984 if (sessionParams.isEmpty()) {
2985 ret = session->close();
2986 ASSERT_TRUE(ret.isOk());
2987 continue;
2988 }
2989
2990 outputPreviewStreams.clear();
2991
2992 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams,
2993 &previewThreshold));
2994 ASSERT_NE(0u, outputPreviewStreams.size());
2995
2996 Stream previewStream = {
2997 0,
2998 StreamType::OUTPUT,
2999 outputPreviewStreams[0].width,
3000 outputPreviewStreams[0].height,
3001 static_cast<PixelFormat>(outputPreviewStreams[0].format),
3002 static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
3003 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
3004 Dataspace::UNKNOWN,
3005 StreamRotation::ROTATION_0,
3006 std::string(),
3007 0,
3008 -1,
3009 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
3010 RequestAvailableDynamicRangeProfilesMap::
3011 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
3012 std::vector<Stream> streams = {previewStream};
3013 StreamConfiguration config;
3014 config.streams = streams;
3015 config.operationMode = StreamConfigurationMode::NORMAL_MODE;
3016
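        // Serialize the filtered session parameters into the stream configuration so they
        // accompany the injection stream configuration call.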
3017 modifiedSessionParams = sessionParams;
3018 camera_metadata_t* sessionParamsBuffer = sessionParams.release();
3019 uint8_t* rawSessionParamsBuffer = reinterpret_cast<uint8_t*>(sessionParamsBuffer);
3020 config.sessionParams.metadata =
3021 std::vector(rawSessionParamsBuffer,
3022 rawSessionParamsBuffer + get_camera_metadata_size(sessionParamsBuffer));
3023
3024 config.streamConfigCounter = 0;
3026 config.multiResolutionInputImage = false;
3027
3028 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
3029 ASSERT_TRUE(s.isOk());
3030
3031 sessionParams.acquire(sessionParamsBuffer);
3032 free_camera_metadata(staticMetaBuffer);
3033 ret = session->close();
3034 ASSERT_TRUE(ret.isOk());
3035 }
3036}
3037
3038TEST_P(CameraAidlTest, configureStreamsUseCasesCroppedRaw) {
3039 AvailableStream rawStreamThreshold =
3040 {INT_MAX, INT_MAX, static_cast<int32_t>(PixelFormat::RAW16)};
3041 configureStreamUseCaseInternal(rawStreamThreshold);
3042}
3043
3044// Verify that valid stream use cases can be configured successfully, and invalid use cases
3045// fail stream configuration.
3046TEST_P(CameraAidlTest, configureStreamsUseCases) {
3047 AvailableStream previewStreamThreshold =
3048 {kMaxPreviewWidth, kMaxPreviewHeight, static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
3049 configureStreamUseCaseInternal(previewStreamThreshold);
3050}
3051
3052// Validate the integrity of stream configuration metadata
3053TEST_P(CameraAidlTest, validateStreamConfigurations) {
3054 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
3055 std::vector<AvailableStream> outputStreams;
3056
3057 const int32_t scalerSizesTag = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
3058 const int32_t scalerMinFrameDurationsTag = ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS;
3059 const int32_t scalerStallDurationsTag = ANDROID_SCALER_AVAILABLE_STALL_DURATIONS;
3060
3061 for (const auto& name : cameraDeviceNames) {
3062 CameraMetadata meta;
3063 std::shared_ptr<ICameraDevice> cameraDevice;
3064
3065 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
3066 &cameraDevice /*out*/);
3067 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
3068
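        // Devices capable of 10-bit dynamic range output must advertise P010 sizes along with
        // matching BLOB minimum frame durations and stall durations.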
3069 if (is10BitDynamicRangeCapable(staticMeta)) {
3070 std::vector<std::tuple<size_t, size_t>> supportedP010Sizes, supportedBlobSizes;
3071
3072 getSupportedSizes(staticMeta, scalerSizesTag, HAL_PIXEL_FORMAT_BLOB,
3073 &supportedBlobSizes);
3074 getSupportedSizes(staticMeta, scalerSizesTag, HAL_PIXEL_FORMAT_YCBCR_P010,
3075 &supportedP010Sizes);
3076 ASSERT_FALSE(supportedP010Sizes.empty());
3077
3078 std::vector<int64_t> blobMinDurations, blobStallDurations;
3079 getSupportedDurations(staticMeta, scalerMinFrameDurationsTag, HAL_PIXEL_FORMAT_BLOB,
3080 supportedP010Sizes, &blobMinDurations);
3081 getSupportedDurations(staticMeta, scalerStallDurationsTag, HAL_PIXEL_FORMAT_BLOB,
3082 supportedP010Sizes, &blobStallDurations);
3083 ASSERT_FALSE(blobStallDurations.empty());
3084 ASSERT_FALSE(blobMinDurations.empty());
3085 ASSERT_EQ(supportedP010Sizes.size(), blobMinDurations.size());
3086 ASSERT_EQ(blobMinDurations.size(), blobStallDurations.size());
3087 }
3088
3089 // TODO (b/280887191): Validate other aspects of stream configuration metadata...
3090
3091 ndk::ScopedAStatus ret = mSession->close();
3092 mSession = nullptr;
3093 ASSERT_TRUE(ret.isOk());
3094 }
3095}
3096
3097GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(CameraAidlTest);
3098INSTANTIATE_TEST_SUITE_P(
3099 PerInstance, CameraAidlTest,
3100 testing::ValuesIn(android::getAidlHalInstanceNames(ICameraProvider::descriptor)),
3101 android::hardware::PrintInstanceNameToString);