blob: aee53664b08022a2c1dbf15ea4d0e53710e30a4e [file] [log] [blame]
Avichal Rakesh362242f2022-02-08 12:40:53 -08001/*
2 * Copyright (C) 2022 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include <aidl/Vintf.h>
18#include <aidl/android/hardware/camera/common/VendorTagSection.h>
19#include <aidl/android/hardware/camera/device/ICameraDevice.h>
20#include <aidlcommonsupport/NativeHandle.h>
21#include <camera_aidl_test.h>
22#include <cutils/properties.h>
23#include <device_cb.h>
24#include <empty_device_cb.h>
25#include <grallocusage/GrallocUsageConversion.h>
26#include <gtest/gtest.h>
27#include <hardware/gralloc.h>
28#include <hardware/gralloc1.h>
29#include <hidl/GtestPrinter.h>
30#include <hidl/HidlSupport.h>
31#include <torch_provider_cb.h>
32#include <list>
33
34using ::aidl::android::hardware::camera::common::CameraDeviceStatus;
35using ::aidl::android::hardware::camera::common::CameraResourceCost;
36using ::aidl::android::hardware::camera::common::TorchModeStatus;
37using ::aidl::android::hardware::camera::common::VendorTagSection;
38using ::aidl::android::hardware::camera::device::ICameraDevice;
Austin Borger4728fc42022-07-15 11:27:53 -070039using ::aidl::android::hardware::camera::metadata::RequestAvailableColorSpaceProfilesMap;
Avichal Rakeshd3503a32022-02-25 06:23:14 +000040using ::aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap;
Avichal Rakesh362242f2022-02-08 12:40:53 -080041using ::aidl::android::hardware::camera::metadata::SensorPixelMode;
42using ::aidl::android::hardware::camera::provider::CameraIdAndStreamCombination;
Avichal Rakesh4bf91c72022-05-23 20:44:02 +000043using ::aidl::android::hardware::camera::provider::BnCameraProviderCallback;
Avichal Rakesh362242f2022-02-08 12:40:53 -080044
45using ::ndk::ScopedAStatus;
46
47namespace {
48const int32_t kBurstFrameCount = 10;
49const uint32_t kMaxStillWidth = 2048;
50const uint32_t kMaxStillHeight = 1536;
51
52const int64_t kEmptyFlushTimeoutMSec = 200;
53
Shuzhen Wang36efa712022-03-08 10:10:44 -080054const static std::vector<int64_t> kMandatoryUseCases = {
Avichal Rakesh362242f2022-02-08 12:40:53 -080055 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
56 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW,
57 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE,
58 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD,
59 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL,
60 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL};
61} // namespace
62
63TEST_P(CameraAidlTest, getCameraIdList) {
64 std::vector<std::string> idList;
65 ScopedAStatus ret = mProvider->getCameraIdList(&idList);
66 ASSERT_TRUE(ret.isOk());
67
68 for (size_t i = 0; i < idList.size(); i++) {
69 ALOGI("Camera Id[%zu] is %s", i, idList[i].c_str());
70 }
71}
72
73// Test if ICameraProvider::getVendorTags returns Status::OK
74TEST_P(CameraAidlTest, getVendorTags) {
75 std::vector<VendorTagSection> vendorTags;
76 ScopedAStatus ret = mProvider->getVendorTags(&vendorTags);
77
78 ASSERT_TRUE(ret.isOk());
79 for (size_t i = 0; i < vendorTags.size(); i++) {
80 ALOGI("Vendor tag section %zu name %s", i, vendorTags[i].sectionName.c_str());
81 for (auto& tag : vendorTags[i].tags) {
82 ALOGI("Vendor tag id %u name %s type %d", tag.tagId, tag.tagName.c_str(),
83 (int)tag.tagType);
84 }
85 }
86}
87
88// Test if ICameraProvider::setCallback returns Status::OK
89TEST_P(CameraAidlTest, setCallback) {
Avichal Rakesh4bf91c72022-05-23 20:44:02 +000090 struct ProviderCb : public BnCameraProviderCallback {
Avichal Rakesh362242f2022-02-08 12:40:53 -080091 ScopedAStatus cameraDeviceStatusChange(const std::string& cameraDeviceName,
92 CameraDeviceStatus newStatus) override {
93 ALOGI("camera device status callback name %s, status %d", cameraDeviceName.c_str(),
94 (int)newStatus);
95 return ScopedAStatus::ok();
96 }
97 ScopedAStatus torchModeStatusChange(const std::string& cameraDeviceName,
98 TorchModeStatus newStatus) override {
99 ALOGI("Torch mode status callback name %s, status %d", cameraDeviceName.c_str(),
100 (int)newStatus);
101 return ScopedAStatus::ok();
102 }
103 ScopedAStatus physicalCameraDeviceStatusChange(const std::string& cameraDeviceName,
104 const std::string& physicalCameraDeviceName,
105 CameraDeviceStatus newStatus) override {
106 ALOGI("physical camera device status callback name %s, physical camera name %s,"
107 " status %d",
108 cameraDeviceName.c_str(), physicalCameraDeviceName.c_str(), (int)newStatus);
109 return ScopedAStatus::ok();
110 }
111 };
112
Avichal Rakesh4bf91c72022-05-23 20:44:02 +0000113 std::shared_ptr<ProviderCb> cb = ndk::SharedRefBase::make<ProviderCb>();
Avichal Rakesh362242f2022-02-08 12:40:53 -0800114 ScopedAStatus ret = mProvider->setCallback(cb);
115 ASSERT_TRUE(ret.isOk());
116 ret = mProvider->setCallback(nullptr);
Avichal Rakesh4bf91c72022-05-23 20:44:02 +0000117 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
Avichal Rakesh362242f2022-02-08 12:40:53 -0800118}
119
120// Test if ICameraProvider::getCameraDeviceInterface returns Status::OK and non-null device
121TEST_P(CameraAidlTest, getCameraDeviceInterface) {
122 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
123
124 for (const auto& name : cameraDeviceNames) {
125 std::shared_ptr<ICameraDevice> cameraDevice;
126 ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &cameraDevice);
127 ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
128 ret.getServiceSpecificError());
129 ASSERT_TRUE(ret.isOk());
130 ASSERT_NE(cameraDevice, nullptr);
131 }
132}
133
134// Verify that the device resource cost can be retrieved and the values are
135// correct.
136TEST_P(CameraAidlTest, getResourceCost) {
137 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
138
139 for (const auto& deviceName : cameraDeviceNames) {
140 std::shared_ptr<ICameraDevice> cameraDevice;
141 ScopedAStatus ret = mProvider->getCameraDeviceInterface(deviceName, &cameraDevice);
142 ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
143 ret.getServiceSpecificError());
144 ASSERT_TRUE(ret.isOk());
145 ASSERT_NE(cameraDevice, nullptr);
146
147 CameraResourceCost resourceCost;
148 ret = cameraDevice->getResourceCost(&resourceCost);
149 ALOGI("getResourceCost returns: %d:%d", ret.getExceptionCode(),
150 ret.getServiceSpecificError());
151 ASSERT_TRUE(ret.isOk());
152
153 ALOGI(" Resource cost is %d", resourceCost.resourceCost);
154 ASSERT_LE(resourceCost.resourceCost, 100u);
155
156 for (const auto& name : resourceCost.conflictingDevices) {
157 ALOGI(" Conflicting device: %s", name.c_str());
158 }
159 }
160}
161
162TEST_P(CameraAidlTest, systemCameraTest) {
163 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
164 std::map<std::string, std::vector<SystemCameraKind>> hiddenPhysicalIdToLogicalMap;
165 for (const auto& name : cameraDeviceNames) {
166 std::shared_ptr<ICameraDevice> device;
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +0000167 ALOGI("systemCameraTest: Testing camera device %s", name.c_str());
Avichal Rakesh362242f2022-02-08 12:40:53 -0800168 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
169 ASSERT_TRUE(ret.isOk());
170 ASSERT_NE(device, nullptr);
171
172 CameraMetadata cameraCharacteristics;
173 ret = device->getCameraCharacteristics(&cameraCharacteristics);
174 ASSERT_TRUE(ret.isOk());
175
176 const camera_metadata_t* staticMeta =
177 reinterpret_cast<const camera_metadata_t*>(cameraCharacteristics.metadata.data());
178 Status rc = isLogicalMultiCamera(staticMeta);
179 if (rc == Status::OPERATION_NOT_SUPPORTED) {
180 return;
181 }
182
183 ASSERT_EQ(rc, Status::OK);
184 std::unordered_set<std::string> physicalIds;
185 ASSERT_EQ(getPhysicalCameraIds(staticMeta, &physicalIds), Status::OK);
186 SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
187 Status retStatus = getSystemCameraKind(staticMeta, &systemCameraKind);
188 ASSERT_EQ(retStatus, Status::OK);
189
190 for (auto physicalId : physicalIds) {
191 bool isPublicId = false;
192 for (auto& deviceName : cameraDeviceNames) {
193 std::string publicVersion, publicId;
194 ASSERT_TRUE(matchDeviceName(deviceName, mProviderType, &publicVersion, &publicId));
195 if (physicalId == publicId) {
196 isPublicId = true;
197 break;
198 }
199 }
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +0000200
Avichal Rakesh362242f2022-02-08 12:40:53 -0800201 // For hidden physical cameras, collect their associated logical cameras
202 // and store the system camera kind.
203 if (!isPublicId) {
204 auto it = hiddenPhysicalIdToLogicalMap.find(physicalId);
205 if (it == hiddenPhysicalIdToLogicalMap.end()) {
206 hiddenPhysicalIdToLogicalMap.insert(std::make_pair(
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +0000207 physicalId, std::vector<SystemCameraKind>({systemCameraKind})));
Avichal Rakesh362242f2022-02-08 12:40:53 -0800208 } else {
209 it->second.push_back(systemCameraKind);
210 }
211 }
212 }
213 }
214
215 // Check that the system camera kind of the logical cameras associated with
216 // each hidden physical camera is the same.
217 for (const auto& it : hiddenPhysicalIdToLogicalMap) {
218 SystemCameraKind neededSystemCameraKind = it.second.front();
219 for (auto foundSystemCamera : it.second) {
220 ASSERT_EQ(neededSystemCameraKind, foundSystemCamera);
221 }
222 }
223}
224
225// Verify that the static camera characteristics can be retrieved
226// successfully.
227TEST_P(CameraAidlTest, getCameraCharacteristics) {
228 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
229
230 for (const auto& name : cameraDeviceNames) {
231 std::shared_ptr<ICameraDevice> device;
232 ALOGI("getCameraCharacteristics: Testing camera device %s", name.c_str());
233 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
234 ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
235 ret.getServiceSpecificError());
236 ASSERT_TRUE(ret.isOk());
237 ASSERT_NE(device, nullptr);
238
239 CameraMetadata chars;
240 ret = device->getCameraCharacteristics(&chars);
241 ASSERT_TRUE(ret.isOk());
242 verifyCameraCharacteristics(chars);
243 verifyMonochromeCharacteristics(chars);
244 verifyRecommendedConfigs(chars);
245 verifyLogicalOrUltraHighResCameraMetadata(name, device, chars, cameraDeviceNames);
246
247 ASSERT_TRUE(ret.isOk());
248
249 // getPhysicalCameraCharacteristics will fail for publicly
250 // advertised camera IDs.
251 std::string version, cameraId;
252 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &cameraId));
253 CameraMetadata devChars;
254 ret = device->getPhysicalCameraCharacteristics(cameraId, &devChars);
255 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
256 ASSERT_EQ(0, devChars.metadata.size());
257 }
258}
259
260// Verify that the torch strength level can be set and retrieved successfully.
261TEST_P(CameraAidlTest, turnOnTorchWithStrengthLevel) {
262 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
263
264 std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this);
265 ndk::ScopedAStatus ret = mProvider->setCallback(cb);
266 ASSERT_TRUE(ret.isOk());
267
268 for (const auto& name : cameraDeviceNames) {
269 int32_t defaultLevel;
270 std::shared_ptr<ICameraDevice> device;
271 ALOGI("%s: Testing camera device %s", __FUNCTION__, name.c_str());
272
273 ret = mProvider->getCameraDeviceInterface(name, &device);
274 ASSERT_TRUE(ret.isOk());
275 ASSERT_NE(device, nullptr);
276
277 CameraMetadata chars;
278 ret = device->getCameraCharacteristics(&chars);
279 ASSERT_TRUE(ret.isOk());
280
281 const camera_metadata_t* staticMeta =
282 reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());
283 bool torchStrengthControlSupported = isTorchStrengthControlSupported(staticMeta);
284 camera_metadata_ro_entry entry;
285 int rc = find_camera_metadata_ro_entry(staticMeta,
286 ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL, &entry);
287 if (torchStrengthControlSupported) {
288 ASSERT_EQ(rc, 0);
289 ASSERT_GT(entry.count, 0);
290 defaultLevel = *entry.data.i32;
291 ALOGI("Default level is:%d", defaultLevel);
292 }
293
294 mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
295 ret = device->turnOnTorchWithStrengthLevel(2);
296 ALOGI("turnOnTorchWithStrengthLevel returns status: %d", ret.getServiceSpecificError());
297 // OPERATION_NOT_SUPPORTED check
298 if (!torchStrengthControlSupported) {
299 ALOGI("Torch strength control not supported.");
300 ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED),
301 ret.getServiceSpecificError());
302 } else {
303 {
304 ASSERT_TRUE(ret.isOk());
305 std::unique_lock<std::mutex> l(mTorchLock);
306 while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
307 auto timeout = std::chrono::system_clock::now() +
308 std::chrono::seconds(kTorchTimeoutSec);
309 ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
310 }
311 ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
312 mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
313 }
314 ALOGI("getTorchStrengthLevel: Testing");
315 int32_t strengthLevel;
316 ret = device->getTorchStrengthLevel(&strengthLevel);
317 ASSERT_TRUE(ret.isOk());
318 ALOGI("Torch strength level is : %d", strengthLevel);
319 ASSERT_EQ(strengthLevel, 2);
320
321 // Turn OFF the torch and verify torch strength level is reset to default level.
322 ALOGI("Testing torch strength level reset after turning the torch OFF.");
323 ret = device->setTorchMode(false);
324 ASSERT_TRUE(ret.isOk());
325 {
326 std::unique_lock<std::mutex> l(mTorchLock);
327 while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
328 auto timeout = std::chrono::system_clock::now() +
329 std::chrono::seconds(kTorchTimeoutSec);
330 ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
331 }
332 ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
333 }
334
335 ret = device->getTorchStrengthLevel(&strengthLevel);
336 ASSERT_TRUE(ret.isOk());
337 ALOGI("Torch strength level after turning OFF torch is : %d", strengthLevel);
338 ASSERT_EQ(strengthLevel, defaultLevel);
339 }
340 }
341}
342
343// In case it is supported verify that torch can be enabled.
344// Check for corresponding torch callbacks as well.
345TEST_P(CameraAidlTest, setTorchMode) {
346 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
347
348 std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this);
349 ndk::ScopedAStatus ret = mProvider->setCallback(cb);
350 ALOGI("setCallback returns status: %d", ret.getServiceSpecificError());
351 ASSERT_TRUE(ret.isOk());
352 ASSERT_NE(cb, nullptr);
353
354 for (const auto& name : cameraDeviceNames) {
355 std::shared_ptr<ICameraDevice> device;
356 ALOGI("setTorchMode: Testing camera device %s", name.c_str());
357 ret = mProvider->getCameraDeviceInterface(name, &device);
358 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
359 ret.getServiceSpecificError());
360 ASSERT_TRUE(ret.isOk());
361 ASSERT_NE(device, nullptr);
362
363 CameraMetadata metadata;
364 ret = device->getCameraCharacteristics(&metadata);
365 ALOGI("getCameraCharacteristics returns status:%d", ret.getServiceSpecificError());
366 ASSERT_TRUE(ret.isOk());
367 camera_metadata_t* staticMeta =
368 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
369 bool torchSupported = isTorchSupported(staticMeta);
370
371 mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
372 ret = device->setTorchMode(true);
373 ALOGI("setTorchMode returns status: %d", ret.getServiceSpecificError());
374 if (!torchSupported) {
375 ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED),
376 ret.getServiceSpecificError());
377 } else {
378 ASSERT_TRUE(ret.isOk());
379 {
380 std::unique_lock<std::mutex> l(mTorchLock);
381 while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
382 auto timeout = std::chrono::system_clock::now() +
383 std::chrono::seconds(kTorchTimeoutSec);
384 ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
385 }
386 ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
387 mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
388 }
389
390 ret = device->setTorchMode(false);
391 ASSERT_TRUE(ret.isOk());
392 {
393 std::unique_lock<std::mutex> l(mTorchLock);
394 while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
395 auto timeout = std::chrono::system_clock::now() +
396 std::chrono::seconds(kTorchTimeoutSec);
397 ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
398 }
399 ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
400 }
401 }
402 }
Avichal Rakesh362242f2022-02-08 12:40:53 -0800403}
404
405// Check dump functionality.
406TEST_P(CameraAidlTest, dump) {
407 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
408
409 for (const auto& name : cameraDeviceNames) {
410 std::shared_ptr<ICameraDevice> device;
411 ALOGI("dump: Testing camera device %s", name.c_str());
412
413 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
414 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
415 ret.getServiceSpecificError());
416 ASSERT_TRUE(ret.isOk());
417 ASSERT_NE(device, nullptr);
418
419 int raw_handle = open(kDumpOutput, O_RDWR);
420 ASSERT_GE(raw_handle, 0);
421
422 auto retStatus = device->dump(raw_handle, nullptr, 0);
423 ASSERT_EQ(retStatus, ::android::OK);
424 close(raw_handle);
425 }
426}
427
428// Open, dump, then close
429TEST_P(CameraAidlTest, openClose) {
430 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
431
432 for (const auto& name : cameraDeviceNames) {
433 std::shared_ptr<ICameraDevice> device;
434 ALOGI("openClose: Testing camera device %s", name.c_str());
435 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
436 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
437 ret.getServiceSpecificError());
438 ASSERT_TRUE(ret.isOk());
439 ASSERT_NE(device, nullptr);
440
441 std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
442
443 ret = device->open(cb, &mSession);
444 ASSERT_TRUE(ret.isOk());
445 ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
446 ret.getServiceSpecificError());
447 ASSERT_NE(mSession, nullptr);
448 int raw_handle = open(kDumpOutput, O_RDWR);
449 ASSERT_GE(raw_handle, 0);
450
451 auto retStatus = device->dump(raw_handle, nullptr, 0);
452 ASSERT_EQ(retStatus, ::android::OK);
453 close(raw_handle);
454
455 ret = mSession->close();
456 mSession = nullptr;
457 ASSERT_TRUE(ret.isOk());
458 // TODO: test all session API calls return INTERNAL_ERROR after close
459 // TODO: keep a wp copy here and verify session cannot be promoted out of this scope
460 }
461}
462
463// Check whether all common default request settings can be successfully
464// constructed.
465TEST_P(CameraAidlTest, constructDefaultRequestSettings) {
466 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
467
468 for (const auto& name : cameraDeviceNames) {
469 std::shared_ptr<ICameraDevice> device;
470 ALOGI("constructDefaultRequestSettings: Testing camera device %s", name.c_str());
471 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
472 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
473 ret.getServiceSpecificError());
474 ASSERT_TRUE(ret.isOk());
475 ASSERT_NE(device, nullptr);
476
477 std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
478 ret = device->open(cb, &mSession);
479 ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
480 ret.getServiceSpecificError());
481 ASSERT_TRUE(ret.isOk());
482 ASSERT_NE(mSession, nullptr);
483
484 for (int32_t t = (int32_t)RequestTemplate::PREVIEW; t <= (int32_t)RequestTemplate::MANUAL;
485 t++) {
486 RequestTemplate reqTemplate = (RequestTemplate)t;
487 CameraMetadata rawMetadata;
488 ret = mSession->constructDefaultRequestSettings(reqTemplate, &rawMetadata);
489 ALOGI("constructDefaultRequestSettings returns status:%d:%d", ret.getExceptionCode(),
490 ret.getServiceSpecificError());
Avichal Rakeshd3503a32022-02-25 06:23:14 +0000491
Avichal Rakesh362242f2022-02-08 12:40:53 -0800492 if (reqTemplate == RequestTemplate::ZERO_SHUTTER_LAG ||
493 reqTemplate == RequestTemplate::MANUAL) {
494 // optional templates
495 ASSERT_TRUE(ret.isOk() || static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
496 ret.getServiceSpecificError());
497 } else {
498 ASSERT_TRUE(ret.isOk());
499 }
500
501 if (ret.isOk()) {
502 const camera_metadata_t* metadata = (camera_metadata_t*)rawMetadata.metadata.data();
503 size_t expectedSize = rawMetadata.metadata.size();
504 int result = validate_camera_metadata_structure(metadata, &expectedSize);
505 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
506 verifyRequestTemplate(metadata, reqTemplate);
507 } else {
508 ASSERT_EQ(0u, rawMetadata.metadata.size());
509 }
510 }
511 ret = mSession->close();
512 mSession = nullptr;
513 ASSERT_TRUE(ret.isOk());
514 }
515}
516
517// Verify that all supported stream formats and sizes can be configured
518// successfully.
519TEST_P(CameraAidlTest, configureStreamsAvailableOutputs) {
520 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
521 std::vector<AvailableStream> outputStreams;
522
523 for (const auto& name : cameraDeviceNames) {
524 CameraMetadata meta;
525 std::shared_ptr<ICameraDevice> device;
526
527 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/, &device /*out*/);
528
529 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
530 outputStreams.clear();
531 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
532 ASSERT_NE(0u, outputStreams.size());
533
534 int32_t jpegBufferSize = 0;
535 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
536 ASSERT_NE(0u, jpegBufferSize);
537
538 int32_t streamId = 0;
539 int32_t streamConfigCounter = 0;
540 for (auto& it : outputStreams) {
541 Stream stream;
542 Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format));
543 stream.id = streamId;
544 stream.streamType = StreamType::OUTPUT;
545 stream.width = it.width;
546 stream.height = it.height;
547 stream.format = static_cast<PixelFormat>(it.format);
548 stream.dataSpace = dataspace;
549 stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
550 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
551 stream.rotation = StreamRotation::ROTATION_0;
Avichal Rakeshd3503a32022-02-25 06:23:14 +0000552 stream.dynamicRangeProfile = RequestAvailableDynamicRangeProfilesMap::
553 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
Avichal Rakesh362242f2022-02-08 12:40:53 -0800554
555 std::vector<Stream> streams = {stream};
556 StreamConfiguration config;
557 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
558 jpegBufferSize);
559
560 bool expectStreamCombQuery = (isLogicalMultiCamera(staticMeta) == Status::OK);
561 verifyStreamCombination(device, config, /*expectedStatus*/ true, expectStreamCombQuery);
562
563 config.streamConfigCounter = streamConfigCounter++;
564 std::vector<HalStream> halConfigs;
565 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
566 ASSERT_TRUE(ret.isOk());
567 ASSERT_EQ(halConfigs.size(), 1);
568 ASSERT_EQ(halConfigs[0].id, streamId);
569
570 streamId++;
571 }
572
573 ndk::ScopedAStatus ret = mSession->close();
574 mSession = nullptr;
575 ASSERT_TRUE(ret.isOk());
576 }
577}
578
579// Verify that mandatory concurrent streams and outputs are supported.
580TEST_P(CameraAidlTest, configureConcurrentStreamsAvailableOutputs) {
581 struct CameraTestInfo {
582 CameraMetadata staticMeta;
583 std::shared_ptr<ICameraDeviceSession> session;
584 std::shared_ptr<ICameraDevice> cameraDevice;
585 StreamConfiguration config;
586 };
587
588 std::map<std::string, std::string> idToNameMap = getCameraDeviceIdToNameMap(mProvider);
589 std::vector<ConcurrentCameraIdCombination> concurrentDeviceCombinations =
590 getConcurrentDeviceCombinations(mProvider);
591 std::vector<AvailableStream> outputStreams;
592 for (const auto& cameraDeviceIds : concurrentDeviceCombinations) {
593 std::vector<CameraIdAndStreamCombination> cameraIdsAndStreamCombinations;
594 std::vector<CameraTestInfo> cameraTestInfos;
595 size_t i = 0;
596 for (const auto& id : cameraDeviceIds.combination) {
597 CameraTestInfo cti;
598 auto it = idToNameMap.find(id);
599 ASSERT_TRUE(idToNameMap.end() != it);
600 std::string name = it->second;
601
602 openEmptyDeviceSession(name, mProvider, &cti.session /*out*/, &cti.staticMeta /*out*/,
603 &cti.cameraDevice /*out*/);
604
605 outputStreams.clear();
606 camera_metadata_t* staticMeta =
607 reinterpret_cast<camera_metadata_t*>(cti.staticMeta.metadata.data());
608 ASSERT_EQ(Status::OK, getMandatoryConcurrentStreams(staticMeta, &outputStreams));
609 ASSERT_NE(0u, outputStreams.size());
610
611 int32_t jpegBufferSize = 0;
612 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
613 ASSERT_NE(0u, jpegBufferSize);
614
615 int32_t streamId = 0;
616 std::vector<Stream> streams(outputStreams.size());
617 size_t j = 0;
618 for (const auto& s : outputStreams) {
619 Stream stream;
620 Dataspace dataspace = getDataspace(static_cast<PixelFormat>(s.format));
621 stream.id = streamId++;
622 stream.streamType = StreamType::OUTPUT;
623 stream.width = s.width;
624 stream.height = s.height;
625 stream.format = static_cast<PixelFormat>(s.format);
626 stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
627 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
628 stream.dataSpace = dataspace;
629 stream.rotation = StreamRotation::ROTATION_0;
630 stream.sensorPixelModesUsed = {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT};
Avichal Rakeshd3503a32022-02-25 06:23:14 +0000631 stream.dynamicRangeProfile = RequestAvailableDynamicRangeProfilesMap::
632 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
Avichal Rakesh362242f2022-02-08 12:40:53 -0800633 streams[j] = stream;
634 j++;
635 }
636
637 // Add the created stream configs to cameraIdsAndStreamCombinations
638 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &cti.config,
639 jpegBufferSize);
640
641 cti.config.streamConfigCounter = outputStreams.size();
642 CameraIdAndStreamCombination cameraIdAndStreamCombination;
643 cameraIdAndStreamCombination.cameraId = id;
644 cameraIdAndStreamCombination.streamConfiguration = cti.config;
645 cameraIdsAndStreamCombinations.push_back(cameraIdAndStreamCombination);
646 i++;
647 cameraTestInfos.push_back(cti);
648 }
649 // Now verify that concurrent streams are supported
650 bool combinationSupported;
651 ndk::ScopedAStatus ret = mProvider->isConcurrentStreamCombinationSupported(
652 cameraIdsAndStreamCombinations, &combinationSupported);
653 ASSERT_TRUE(ret.isOk());
654 ASSERT_EQ(combinationSupported, true);
655
656 // Test the stream can actually be configured
657 for (auto& cti : cameraTestInfos) {
658 if (cti.session != nullptr) {
659 camera_metadata_t* staticMeta =
660 reinterpret_cast<camera_metadata_t*>(cti.staticMeta.metadata.data());
661 bool expectStreamCombQuery = (isLogicalMultiCamera(staticMeta) == Status::OK);
662 verifyStreamCombination(cti.cameraDevice, cti.config, /*expectedStatus*/ true,
663 expectStreamCombQuery);
664 }
665
666 if (cti.session != nullptr) {
667 std::vector<HalStream> streamConfigs;
668 ret = cti.session->configureStreams(cti.config, &streamConfigs);
669 ASSERT_TRUE(ret.isOk());
670 ASSERT_EQ(cti.config.streams.size(), streamConfigs.size());
671 }
672 }
673
674 for (auto& cti : cameraTestInfos) {
675 ret = cti.session->close();
676 ASSERT_TRUE(ret.isOk());
677 }
678 }
679}
680
681// Check for correct handling of invalid/incorrect configuration parameters.
682TEST_P(CameraAidlTest, configureStreamsInvalidOutputs) {
683 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
684 std::vector<AvailableStream> outputStreams;
685
686 for (const auto& name : cameraDeviceNames) {
687 CameraMetadata meta;
688 std::shared_ptr<ICameraDevice> cameraDevice;
689
690 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
691 &cameraDevice /*out*/);
692 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
693 outputStreams.clear();
694
695 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
696 ASSERT_NE(0u, outputStreams.size());
697
698 int32_t jpegBufferSize = 0;
699 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
700 ASSERT_NE(0u, jpegBufferSize);
701
702 int32_t streamId = 0;
703 Stream stream = {streamId++,
704 StreamType::OUTPUT,
705 static_cast<uint32_t>(0),
706 static_cast<uint32_t>(0),
707 static_cast<PixelFormat>(outputStreams[0].format),
708 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
709 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
710 Dataspace::UNKNOWN,
711 StreamRotation::ROTATION_0,
712 std::string(),
713 jpegBufferSize,
714 -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +0000715 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
716 RequestAvailableDynamicRangeProfilesMap::
717 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -0800718 int32_t streamConfigCounter = 0;
719 std::vector<Stream> streams = {stream};
720 StreamConfiguration config;
721 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
722 jpegBufferSize);
723
724 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ false,
725 /*expectStreamCombQuery*/ false);
726
727 config.streamConfigCounter = streamConfigCounter++;
728 std::vector<HalStream> halConfigs;
729 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
730 ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
731 ret.getServiceSpecificError() ||
732 static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError());
733
734 stream = {streamId++,
735 StreamType::OUTPUT,
736 /*width*/ INT32_MAX,
737 /*height*/ INT32_MAX,
738 static_cast<PixelFormat>(outputStreams[0].format),
739 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
740 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
741 Dataspace::UNKNOWN,
742 StreamRotation::ROTATION_0,
743 std::string(),
744 jpegBufferSize,
745 -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +0000746 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
747 RequestAvailableDynamicRangeProfilesMap::
748 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -0800749
750 streams[0] = stream;
751 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
752 jpegBufferSize);
753
754 config.streamConfigCounter = streamConfigCounter++;
755 halConfigs.clear();
756 ret = mSession->configureStreams(config, &halConfigs);
757 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
758
759 for (auto& it : outputStreams) {
760 stream = {streamId++,
761 StreamType::OUTPUT,
762 it.width,
763 it.height,
764 static_cast<PixelFormat>(UINT32_MAX),
765 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
766 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
767 Dataspace::UNKNOWN,
768 StreamRotation::ROTATION_0,
769 std::string(),
770 jpegBufferSize,
771 -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +0000772 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
773 RequestAvailableDynamicRangeProfilesMap::
774 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -0800775
776 streams[0] = stream;
777 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
778 jpegBufferSize);
779 config.streamConfigCounter = streamConfigCounter++;
780 halConfigs.clear();
781 ret = mSession->configureStreams(config, &halConfigs);
782 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
783 ret.getServiceSpecificError());
784
785 stream = {streamId++,
786 StreamType::OUTPUT,
787 it.width,
788 it.height,
789 static_cast<PixelFormat>(it.format),
790 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
791 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
792 Dataspace::UNKNOWN,
793 static_cast<StreamRotation>(UINT32_MAX),
794 std::string(),
795 jpegBufferSize,
796 -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +0000797 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
798 RequestAvailableDynamicRangeProfilesMap::
799 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -0800800
801 streams[0] = stream;
802 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
803 jpegBufferSize);
804
805 config.streamConfigCounter = streamConfigCounter++;
806 halConfigs.clear();
807 ret = mSession->configureStreams(config, &halConfigs);
808 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
809 ret.getServiceSpecificError());
810 }
811
812 ret = mSession->close();
813 mSession = nullptr;
814 ASSERT_TRUE(ret.isOk());
815 }
816}
817
818// Check whether all supported ZSL output stream combinations can be
819// configured successfully.
820TEST_P(CameraAidlTest, configureStreamsZSLInputOutputs) {
821 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
822 std::vector<AvailableStream> inputStreams;
823 std::vector<AvailableZSLInputOutput> inputOutputMap;
824
825 for (const auto& name : cameraDeviceNames) {
826 CameraMetadata meta;
827 std::shared_ptr<ICameraDevice> cameraDevice;
828
829 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
830 &cameraDevice /*out*/);
831 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
832
833 Status rc = isZSLModeAvailable(staticMeta);
834 if (Status::OPERATION_NOT_SUPPORTED == rc) {
835 ndk::ScopedAStatus ret = mSession->close();
836 mSession = nullptr;
837 ASSERT_TRUE(ret.isOk());
838 continue;
839 }
840 ASSERT_EQ(Status::OK, rc);
841
842 inputStreams.clear();
843 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, inputStreams));
844 ASSERT_NE(0u, inputStreams.size());
845
846 inputOutputMap.clear();
847 ASSERT_EQ(Status::OK, getZSLInputOutputMap(staticMeta, inputOutputMap));
848 ASSERT_NE(0u, inputOutputMap.size());
849
850 bool supportMonoY8 = false;
851 if (Status::OK == isMonochromeCamera(staticMeta)) {
852 for (auto& it : inputStreams) {
853 if (it.format == static_cast<uint32_t>(PixelFormat::Y8)) {
854 supportMonoY8 = true;
855 break;
856 }
857 }
858 }
859
860 int32_t jpegBufferSize = 0;
861 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
862 ASSERT_NE(0u, jpegBufferSize);
863
864 int32_t streamId = 0;
865 bool hasPrivToY8 = false, hasY8ToY8 = false, hasY8ToBlob = false;
866 uint32_t streamConfigCounter = 0;
867 for (auto& inputIter : inputOutputMap) {
868 AvailableStream input;
869 ASSERT_EQ(Status::OK, findLargestSize(inputStreams, inputIter.inputFormat, input));
870 ASSERT_NE(0u, inputStreams.size());
871
872 if (inputIter.inputFormat ==
873 static_cast<uint32_t>(PixelFormat::IMPLEMENTATION_DEFINED) &&
874 inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
875 hasPrivToY8 = true;
876 } else if (inputIter.inputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
877 if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::BLOB)) {
878 hasY8ToBlob = true;
879 } else if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
880 hasY8ToY8 = true;
881 }
882 }
883 AvailableStream outputThreshold = {INT32_MAX, INT32_MAX, inputIter.outputFormat};
884 std::vector<AvailableStream> outputStreams;
885 ASSERT_EQ(Status::OK,
886 getAvailableOutputStreams(staticMeta, outputStreams, &outputThreshold));
887 for (auto& outputIter : outputStreams) {
888 Dataspace outputDataSpace =
889 getDataspace(static_cast<PixelFormat>(outputIter.format));
890 Stream zslStream = {
891 streamId++,
892 StreamType::OUTPUT,
893 input.width,
894 input.height,
895 static_cast<PixelFormat>(input.format),
896 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
897 GRALLOC_USAGE_HW_CAMERA_ZSL),
898 Dataspace::UNKNOWN,
899 StreamRotation::ROTATION_0,
900 std::string(),
901 jpegBufferSize,
902 -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +0000903 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
904 RequestAvailableDynamicRangeProfilesMap::
905 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -0800906 Stream inputStream = {
907 streamId++,
908 StreamType::INPUT,
909 input.width,
910 input.height,
911 static_cast<PixelFormat>(input.format),
912 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(0),
913 Dataspace::UNKNOWN,
914 StreamRotation::ROTATION_0,
915 std::string(),
916 jpegBufferSize,
917 -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +0000918 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
919 RequestAvailableDynamicRangeProfilesMap::
920 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -0800921 Stream outputStream = {
922 streamId++,
923 StreamType::OUTPUT,
924 outputIter.width,
925 outputIter.height,
926 static_cast<PixelFormat>(outputIter.format),
927 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
928 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
929 outputDataSpace,
930 StreamRotation::ROTATION_0,
931 std::string(),
932 jpegBufferSize,
933 -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +0000934 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
935 RequestAvailableDynamicRangeProfilesMap::
936 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -0800937
938 std::vector<Stream> streams = {inputStream, zslStream, outputStream};
939
940 StreamConfiguration config;
941 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
942 jpegBufferSize);
943
944 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
945 /*expectStreamCombQuery*/ false);
946
947 config.streamConfigCounter = streamConfigCounter++;
948 std::vector<HalStream> halConfigs;
949 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
950 ASSERT_TRUE(ret.isOk());
951 ASSERT_EQ(3u, halConfigs.size());
952 }
953 }
954
955 if (supportMonoY8) {
956 if (Status::OK == isZSLModeAvailable(staticMeta, PRIV_REPROCESS)) {
957 ASSERT_TRUE(hasPrivToY8);
958 }
959 if (Status::OK == isZSLModeAvailable(staticMeta, YUV_REPROCESS)) {
960 ASSERT_TRUE(hasY8ToY8);
961 ASSERT_TRUE(hasY8ToBlob);
962 }
963 }
964
965 ndk::ScopedAStatus ret = mSession->close();
966 mSession = nullptr;
967 ASSERT_TRUE(ret.isOk());
968 }
969}
970
971// Check whether session parameters are supported. If Hal support for them
972// exist, then try to configure a preview stream using them.
973TEST_P(CameraAidlTest, configureStreamsWithSessionParameters) {
974 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
975 std::vector<AvailableStream> outputPreviewStreams;
976 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
977 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
978
979 for (const auto& name : cameraDeviceNames) {
980 CameraMetadata meta;
981
982 std::shared_ptr<ICameraDevice> unusedCameraDevice;
983 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
984 &unusedCameraDevice /*out*/);
985 camera_metadata_t* staticMetaBuffer =
986 reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
987
988 std::unordered_set<int32_t> availableSessionKeys;
989 auto rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
990 &availableSessionKeys);
991 ASSERT_TRUE(Status::OK == rc);
992 if (availableSessionKeys.empty()) {
993 ndk::ScopedAStatus ret = mSession->close();
994 mSession = nullptr;
995 ASSERT_TRUE(ret.isOk());
996 continue;
997 }
998
999 android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings;
1000 android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams,
1001 modifiedSessionParams;
1002 constructFilteredSettings(mSession, availableSessionKeys, RequestTemplate::PREVIEW,
1003 &previewRequestSettings, &sessionParams);
1004 if (sessionParams.isEmpty()) {
1005 ndk::ScopedAStatus ret = mSession->close();
1006 mSession = nullptr;
1007 ASSERT_TRUE(ret.isOk());
1008 continue;
1009 }
1010
1011 outputPreviewStreams.clear();
1012
1013 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams,
1014 &previewThreshold));
1015 ASSERT_NE(0u, outputPreviewStreams.size());
1016
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001017 Stream previewStream = {
1018 0,
1019 StreamType::OUTPUT,
1020 outputPreviewStreams[0].width,
1021 outputPreviewStreams[0].height,
1022 static_cast<PixelFormat>(outputPreviewStreams[0].format),
1023 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1024 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
1025 Dataspace::UNKNOWN,
1026 StreamRotation::ROTATION_0,
1027 std::string(),
1028 /*bufferSize*/ 0,
1029 /*groupId*/ -1,
1030 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1031 RequestAvailableDynamicRangeProfilesMap::
1032 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08001033
1034 std::vector<Stream> streams = {previewStream};
1035 StreamConfiguration config;
1036
1037 config.streams = streams;
1038 config.operationMode = StreamConfigurationMode::NORMAL_MODE;
1039 modifiedSessionParams = sessionParams;
1040 auto sessionParamsBuffer = sessionParams.release();
1041 std::vector<uint8_t> rawSessionParam =
1042 std::vector(reinterpret_cast<uint8_t*>(sessionParamsBuffer),
1043 reinterpret_cast<uint8_t*>(sessionParamsBuffer) +
1044 get_camera_metadata_size(sessionParamsBuffer));
1045
1046 config.sessionParams.metadata = rawSessionParam;
1047 config.streamConfigCounter = 0;
1048 config.streams = {previewStream};
1049 config.streamConfigCounter = 0;
1050 config.multiResolutionInputImage = false;
1051
1052 bool newSessionParamsAvailable = false;
1053 for (const auto& it : availableSessionKeys) {
1054 if (modifiedSessionParams.exists(it)) {
1055 modifiedSessionParams.erase(it);
1056 newSessionParamsAvailable = true;
1057 break;
1058 }
1059 }
1060 if (newSessionParamsAvailable) {
1061 auto modifiedSessionParamsBuffer = modifiedSessionParams.release();
1062 verifySessionReconfigurationQuery(mSession, sessionParamsBuffer,
1063 modifiedSessionParamsBuffer);
1064 modifiedSessionParams.acquire(modifiedSessionParamsBuffer);
1065 }
1066
1067 std::vector<HalStream> halConfigs;
1068 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1069 ASSERT_TRUE(ret.isOk());
1070 ASSERT_EQ(1u, halConfigs.size());
1071
1072 sessionParams.acquire(sessionParamsBuffer);
1073 ret = mSession->close();
1074 mSession = nullptr;
1075 ASSERT_TRUE(ret.isOk());
1076 }
1077}
1078
1079// Verify that all supported preview + still capture stream combinations
1080// can be configured successfully.
1081TEST_P(CameraAidlTest, configureStreamsPreviewStillOutputs) {
1082 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1083 std::vector<AvailableStream> outputBlobStreams;
1084 std::vector<AvailableStream> outputPreviewStreams;
1085 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
1086 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
1087 AvailableStream blobThreshold = {INT32_MAX, INT32_MAX, static_cast<int32_t>(PixelFormat::BLOB)};
1088
1089 for (const auto& name : cameraDeviceNames) {
1090 CameraMetadata meta;
1091
1092 std::shared_ptr<ICameraDevice> cameraDevice;
1093 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
1094 &cameraDevice /*out*/);
1095
1096 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1097
1098 // Check if camera support depth only
1099 if (isDepthOnly(staticMeta)) {
1100 ndk::ScopedAStatus ret = mSession->close();
1101 mSession = nullptr;
1102 ASSERT_TRUE(ret.isOk());
1103 continue;
1104 }
1105
1106 outputBlobStreams.clear();
1107 ASSERT_EQ(Status::OK,
1108 getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold));
1109 ASSERT_NE(0u, outputBlobStreams.size());
1110
1111 outputPreviewStreams.clear();
1112 ASSERT_EQ(Status::OK,
1113 getAvailableOutputStreams(staticMeta, outputPreviewStreams, &previewThreshold));
1114 ASSERT_NE(0u, outputPreviewStreams.size());
1115
1116 int32_t jpegBufferSize = 0;
1117 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
1118 ASSERT_NE(0u, jpegBufferSize);
1119
1120 int32_t streamId = 0;
1121 uint32_t streamConfigCounter = 0;
1122
1123 for (auto& blobIter : outputBlobStreams) {
1124 for (auto& previewIter : outputPreviewStreams) {
1125 Stream previewStream = {
1126 streamId++,
1127 StreamType::OUTPUT,
1128 previewIter.width,
1129 previewIter.height,
1130 static_cast<PixelFormat>(previewIter.format),
1131 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1132 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
1133 Dataspace::UNKNOWN,
1134 StreamRotation::ROTATION_0,
1135 std::string(),
1136 /*bufferSize*/ 0,
1137 /*groupId*/ -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001138 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1139 RequestAvailableDynamicRangeProfilesMap::
1140 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08001141 Stream blobStream = {
1142 streamId++,
1143 StreamType::OUTPUT,
1144 blobIter.width,
1145 blobIter.height,
1146 static_cast<PixelFormat>(blobIter.format),
1147 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1148 GRALLOC1_CONSUMER_USAGE_CPU_READ),
1149 Dataspace::JFIF,
1150 StreamRotation::ROTATION_0,
1151 std::string(),
1152 /*bufferSize*/ 0,
1153 /*groupId*/ -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001154 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1155 RequestAvailableDynamicRangeProfilesMap::
1156 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08001157 std::vector<Stream> streams = {previewStream, blobStream};
1158 StreamConfiguration config;
1159
1160 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
1161 jpegBufferSize);
1162 config.streamConfigCounter = streamConfigCounter++;
1163 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
1164 /*expectStreamCombQuery*/ false);
1165
1166 std::vector<HalStream> halConfigs;
1167 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1168 ASSERT_TRUE(ret.isOk());
1169 ASSERT_EQ(2u, halConfigs.size());
1170 }
1171 }
1172
1173 ndk::ScopedAStatus ret = mSession->close();
1174 mSession = nullptr;
1175 ASSERT_TRUE(ret.isOk());
1176 }
1177}
1178
1179// In case constrained mode is supported, test whether it can be
1180// configured. Additionally check for common invalid inputs when
1181// using this mode.
1182TEST_P(CameraAidlTest, configureStreamsConstrainedOutputs) {
1183 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1184
1185 for (const auto& name : cameraDeviceNames) {
1186 CameraMetadata meta;
1187 std::shared_ptr<ICameraDevice> cameraDevice;
1188
1189 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
1190 &cameraDevice /*out*/);
1191 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1192
1193 Status rc = isConstrainedModeAvailable(staticMeta);
1194 if (Status::OPERATION_NOT_SUPPORTED == rc) {
1195 ndk::ScopedAStatus ret = mSession->close();
1196 mSession = nullptr;
1197 ASSERT_TRUE(ret.isOk());
1198 continue;
1199 }
1200 ASSERT_EQ(Status::OK, rc);
1201
1202 AvailableStream hfrStream;
1203 rc = pickConstrainedModeSize(staticMeta, hfrStream);
1204 ASSERT_EQ(Status::OK, rc);
1205
1206 int32_t streamId = 0;
1207 uint32_t streamConfigCounter = 0;
1208 Stream stream = {streamId,
1209 StreamType::OUTPUT,
1210 hfrStream.width,
1211 hfrStream.height,
1212 static_cast<PixelFormat>(hfrStream.format),
1213 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1214 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1215 Dataspace::UNKNOWN,
1216 StreamRotation::ROTATION_0,
1217 std::string(),
1218 /*bufferSize*/ 0,
1219 /*groupId*/ -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001220 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1221 RequestAvailableDynamicRangeProfilesMap::
1222 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08001223 std::vector<Stream> streams = {stream};
1224 StreamConfiguration config;
1225 createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
1226 &config);
1227
1228 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
1229 /*expectStreamCombQuery*/ false);
1230
1231 config.streamConfigCounter = streamConfigCounter++;
1232 std::vector<HalStream> halConfigs;
1233 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1234 ASSERT_TRUE(ret.isOk());
1235 ASSERT_EQ(1u, halConfigs.size());
1236 ASSERT_EQ(halConfigs[0].id, streamId);
1237
1238 stream = {streamId++,
1239 StreamType::OUTPUT,
1240 static_cast<uint32_t>(0),
1241 static_cast<uint32_t>(0),
1242 static_cast<PixelFormat>(hfrStream.format),
1243 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1244 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1245 Dataspace::UNKNOWN,
1246 StreamRotation::ROTATION_0,
1247 std::string(),
1248 /*bufferSize*/ 0,
1249 /*groupId*/ -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001250 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1251 RequestAvailableDynamicRangeProfilesMap::
1252 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08001253 streams[0] = stream;
1254 createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
1255 &config);
1256
1257 config.streamConfigCounter = streamConfigCounter++;
1258 std::vector<HalStream> halConfig;
1259 ret = mSession->configureStreams(config, &halConfig);
1260 ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
1261 ret.getServiceSpecificError() ||
1262 static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError());
1263
1264 stream = {streamId++,
1265 StreamType::OUTPUT,
1266 INT32_MAX,
1267 INT32_MAX,
1268 static_cast<PixelFormat>(hfrStream.format),
1269 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1270 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1271 Dataspace::UNKNOWN,
1272 StreamRotation::ROTATION_0,
1273 std::string(),
1274 /*bufferSize*/ 0,
1275 /*groupId*/ -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001276 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1277 RequestAvailableDynamicRangeProfilesMap::
1278 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08001279 streams[0] = stream;
1280 createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
1281 &config);
1282
1283 config.streamConfigCounter = streamConfigCounter++;
1284 halConfigs.clear();
1285 ret = mSession->configureStreams(config, &halConfigs);
1286 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
1287
1288 stream = {streamId++,
1289 StreamType::OUTPUT,
1290 hfrStream.width,
1291 hfrStream.height,
1292 static_cast<PixelFormat>(UINT32_MAX),
1293 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1294 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1295 Dataspace::UNKNOWN,
1296 StreamRotation::ROTATION_0,
1297 std::string(),
1298 /*bufferSize*/ 0,
1299 /*groupId*/ -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001300 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1301 RequestAvailableDynamicRangeProfilesMap::
1302 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08001303 streams[0] = stream;
1304 createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
1305 &config);
1306
1307 config.streamConfigCounter = streamConfigCounter++;
1308 halConfigs.clear();
1309 ret = mSession->configureStreams(config, &halConfigs);
1310 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
1311
1312 ret = mSession->close();
1313 mSession = nullptr;
1314 ASSERT_TRUE(ret.isOk());
1315 }
1316}
1317
1318// Verify that all supported video + snapshot stream combinations can
1319// be configured successfully.
1320TEST_P(CameraAidlTest, configureStreamsVideoStillOutputs) {
1321 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1322 std::vector<AvailableStream> outputBlobStreams;
1323 std::vector<AvailableStream> outputVideoStreams;
1324 AvailableStream videoThreshold = {kMaxVideoWidth, kMaxVideoHeight,
1325 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
1326 AvailableStream blobThreshold = {kMaxVideoWidth, kMaxVideoHeight,
1327 static_cast<int32_t>(PixelFormat::BLOB)};
1328
1329 for (const auto& name : cameraDeviceNames) {
1330 CameraMetadata meta;
1331 std::shared_ptr<ICameraDevice> cameraDevice;
1332
1333 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
1334 &cameraDevice /*out*/);
1335
1336 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1337
1338 // Check if camera support depth only
1339 if (isDepthOnly(staticMeta)) {
1340 ndk::ScopedAStatus ret = mSession->close();
1341 mSession = nullptr;
1342 ASSERT_TRUE(ret.isOk());
1343 continue;
1344 }
1345
1346 outputBlobStreams.clear();
1347 ASSERT_EQ(Status::OK,
1348 getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold));
1349 ASSERT_NE(0u, outputBlobStreams.size());
1350
1351 outputVideoStreams.clear();
1352 ASSERT_EQ(Status::OK,
1353 getAvailableOutputStreams(staticMeta, outputVideoStreams, &videoThreshold));
1354 ASSERT_NE(0u, outputVideoStreams.size());
1355
1356 int32_t jpegBufferSize = 0;
1357 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
1358 ASSERT_NE(0u, jpegBufferSize);
1359
1360 int32_t streamId = 0;
1361 uint32_t streamConfigCounter = 0;
1362 for (auto& blobIter : outputBlobStreams) {
1363 for (auto& videoIter : outputVideoStreams) {
1364 Stream videoStream = {
1365 streamId++,
1366 StreamType::OUTPUT,
1367 videoIter.width,
1368 videoIter.height,
1369 static_cast<PixelFormat>(videoIter.format),
1370 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1371 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1372 Dataspace::UNKNOWN,
1373 StreamRotation::ROTATION_0,
1374 std::string(),
1375 jpegBufferSize,
1376 /*groupId*/ -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001377 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1378 RequestAvailableDynamicRangeProfilesMap::
1379 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08001380 Stream blobStream = {
1381 streamId++,
1382 StreamType::OUTPUT,
1383 blobIter.width,
1384 blobIter.height,
1385 static_cast<PixelFormat>(blobIter.format),
1386 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1387 GRALLOC1_CONSUMER_USAGE_CPU_READ),
1388 Dataspace::JFIF,
1389 StreamRotation::ROTATION_0,
1390 std::string(),
1391 jpegBufferSize,
1392 /*groupId*/ -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001393 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1394 RequestAvailableDynamicRangeProfilesMap::
1395 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08001396 std::vector<Stream> streams = {videoStream, blobStream};
1397 StreamConfiguration config;
1398
1399 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
1400 jpegBufferSize);
1401 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
1402 /*expectStreamCombQuery*/ false);
1403
1404 config.streamConfigCounter = streamConfigCounter++;
1405 std::vector<HalStream> halConfigs;
1406 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1407 ASSERT_TRUE(ret.isOk());
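// configureStreams is expected to return exactly one HalStream per requested stream
// (one for video, one for blob).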
1408 ASSERT_EQ(2u, halConfigs.size());
1409 }
1410 }
1411
1412 ndk::ScopedAStatus ret = mSession->close();
1413 mSession = nullptr;
1414 ASSERT_TRUE(ret.isOk());
1415 }
1416}
1417
1418// Generate and verify a camera capture request
1419TEST_P(CameraAidlTest, processCaptureRequestPreview) {
1420 // TODO(b/220897574): Failing with BUFFER_ERROR
1421 processCaptureRequestInternal(GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, RequestTemplate::PREVIEW,
1422 false /*secureOnlyCameras*/);
1423}
1424
1425// Generate and verify a secure camera capture request
1426TEST_P(CameraAidlTest, processSecureCaptureRequest) {
1427 processCaptureRequestInternal(GRALLOC1_PRODUCER_USAGE_PROTECTED, RequestTemplate::STILL_CAPTURE,
1428 true /*secureOnlyCameras*/);
1429}
1430
1431TEST_P(CameraAidlTest, processCaptureRequestPreviewStabilization) {
1432 std::unordered_map<std::string, nsecs_t> cameraDeviceToTimeLag;
1433 processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ false,
1434 cameraDeviceToTimeLag);
1435 processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ true,
1436 cameraDeviceToTimeLag);
1437}
1438
1439// Generate and verify a multi-camera capture request
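// The test only exercises logical multi-camera devices: it configures preview streams that
// include two physical sub-cameras, submits requests carrying per-physical settings, and
// finally checks that an invalid physical camera id is rejected.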
1440TEST_P(CameraAidlTest, processMultiCaptureRequestPreview) {
1441 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1442 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
1443 static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
1444 int64_t bufferId = 1;
1445 uint32_t frameNumber = 1;
1446 std::vector<uint8_t> settings;
1447 std::vector<uint8_t> emptySettings;
1448 std::string invalidPhysicalId = "-1";
1449
1450 for (const auto& name : cameraDeviceNames) {
1451 std::string version, deviceId;
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +00001452 ALOGI("processMultiCaptureRequestPreview: Test device %s", name.c_str());
Avichal Rakesh362242f2022-02-08 12:40:53 -08001453 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1454 CameraMetadata metadata;
1455
1456 std::shared_ptr<ICameraDevice> unusedDevice;
1457 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &metadata /*out*/,
1458 &unusedDevice /*out*/);
1459
1460 camera_metadata_t* staticMeta =
1461 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
1462 Status rc = isLogicalMultiCamera(staticMeta);
1463 if (Status::OPERATION_NOT_SUPPORTED == rc) {
1464 ndk::ScopedAStatus ret = mSession->close();
1465 mSession = nullptr;
1466 ASSERT_TRUE(ret.isOk());
1467 continue;
1468 }
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +00001469 ASSERT_EQ(Status::OK, rc);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001470
1471 std::unordered_set<std::string> physicalIds;
1472 rc = getPhysicalCameraIds(staticMeta, &physicalIds);
1473 ASSERT_TRUE(Status::OK == rc);
1474 ASSERT_TRUE(physicalIds.size() > 1);
1475
1476 std::unordered_set<int32_t> physicalRequestKeyIDs;
1477 rc = getSupportedKeys(staticMeta, ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS,
1478 &physicalRequestKeyIDs);
1479 ASSERT_TRUE(Status::OK == rc);
1480 if (physicalRequestKeyIDs.empty()) {
1481 ndk::ScopedAStatus ret = mSession->close();
1482 mSession = nullptr;
1483 ASSERT_TRUE(ret.isOk());
1484 // The logical camera doesn't support any individual physical requests.
1485 continue;
1486 }
1487
1488 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultPreviewSettings;
1489 android::hardware::camera::common::V1_0::helper::CameraMetadata filteredSettings;
1490 constructFilteredSettings(mSession, physicalRequestKeyIDs, RequestTemplate::PREVIEW,
1491 &defaultPreviewSettings, &filteredSettings);
1492 if (filteredSettings.isEmpty()) {
1493 // No physical device settings in default request.
1494 ndk::ScopedAStatus ret = mSession->close();
1495 mSession = nullptr;
1496 ASSERT_TRUE(ret.isOk());
1497 continue;
1498 }
1499
1500 const camera_metadata_t* settingsBuffer = defaultPreviewSettings.getAndLock();
1501 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1502 settings.assign(rawSettingsBuffer,
1503 rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1504 CameraMetadata settingsMetadata = {settings};
1505 overrideRotateAndCrop(&settingsMetadata);
1506
1507 ndk::ScopedAStatus ret = mSession->close();
1508 mSession = nullptr;
1509 ASSERT_TRUE(ret.isOk());
1510
1511 // Leave only 2 physical devices in the id set.
1512 auto it = physicalIds.begin();
1513 std::string physicalDeviceId = *it;
1514 it++;
1515 physicalIds.erase(++it, physicalIds.end());
1516 ASSERT_EQ(physicalIds.size(), 2u);
1517
1518 std::vector<HalStream> halStreams;
1519 bool supportsPartialResults = false;
1520 bool useHalBufManager = false;
1521 int32_t partialResultCount = 0;
1522 Stream previewStream;
1523 std::shared_ptr<DeviceCb> cb;
1524
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +00001525 configurePreviewStreams(
1526 name, mProvider, &previewThreshold, physicalIds, &mSession, &previewStream,
1527 &halStreams /*out*/, &supportsPartialResults /*out*/, &partialResultCount /*out*/,
1528 &useHalBufManager /*out*/, &cb /*out*/, 0 /*streamConfigCounter*/, true);
1529 if (mSession == nullptr) {
1530 // stream combination not supported by HAL, skip test for device
1531 continue;
1532 }
Avichal Rakesh362242f2022-02-08 12:40:53 -08001533
1534 ::aidl::android::hardware::common::fmq::MQDescriptor<
1535 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1536 descriptor;
1537 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1538 ASSERT_TRUE(resultQueueRet.isOk());
1539 std::shared_ptr<ResultMetadataQueue> resultQueue =
1540 std::make_shared<ResultMetadataQueue>(descriptor);
1541 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1542 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
1543 resultQueue = nullptr;
1544 // Don't use the queue onwards.
1545 }
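// Without a usable FMQ, result metadata is expected to arrive inline in the capture results
// delivered through the callback.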
1546
1547 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1548 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1549 partialResultCount, physicalIds, resultQueue);
1550
1551 std::vector<CaptureRequest> requests(1);
1552 CaptureRequest& request = requests[0];
1553 request.frameNumber = frameNumber;
1554 request.fmqSettingsSize = 0;
Emilian Peev3d919f92022-04-20 13:50:59 -07001555 request.settings = settingsMetadata;
Avichal Rakesh362242f2022-02-08 12:40:53 -08001556
1557 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1558
1559 std::vector<buffer_handle_t> graphicBuffers;
1560 graphicBuffers.reserve(halStreams.size());
1561 outputBuffers.resize(halStreams.size());
1562 size_t k = 0;
1563 for (const auto& halStream : halStreams) {
1564 buffer_handle_t buffer_handle;
1565 if (useHalBufManager) {
1566 outputBuffers[k] = {halStream.id, /*bufferId*/ 0, NativeHandle(),
1567 BufferStatus::OK, NativeHandle(), NativeHandle()};
1568 } else {
1569 allocateGraphicBuffer(previewStream.width, previewStream.height,
1570 android_convertGralloc1To0Usage(
1571 static_cast<uint64_t>(halStream.producerUsage),
1572 static_cast<uint64_t>(halStream.consumerUsage)),
1573 halStream.overrideFormat, &buffer_handle);
1574 graphicBuffers.push_back(buffer_handle);
1575 outputBuffers[k] = {
1576 halStream.id, bufferId, ::android::makeToAidl(buffer_handle),
1577 BufferStatus::OK, NativeHandle(), NativeHandle()};
1578 bufferId++;
1579 }
1580 k++;
1581 }
1582
1583 std::vector<PhysicalCameraSetting> camSettings(1);
1584 const camera_metadata_t* filteredSettingsBuffer = filteredSettings.getAndLock();
1585 uint8_t* rawFilteredSettingsBuffer = (uint8_t*)filteredSettingsBuffer;
1586 camSettings[0].settings = {std::vector(
1587 rawFilteredSettingsBuffer,
1588 rawFilteredSettingsBuffer + get_camera_metadata_size(filteredSettingsBuffer))};
1589 overrideRotateAndCrop(&camSettings[0].settings);
1590 camSettings[0].fmqSettingsSize = 0;
1591 camSettings[0].physicalCameraId = physicalDeviceId;
1592
1593 request.inputBuffer = {
1594 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
1595 request.physicalCameraSettings = camSettings;
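// The request carries the logical camera settings plus a single PhysicalCameraSetting entry
// targeting one physical sub-device.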
1596
1597 {
1598 std::unique_lock<std::mutex> l(mLock);
1599 mInflightMap.clear();
1600 mInflightMap[frameNumber] = inflightReq;
1601 }
1602
1603 int32_t numRequestProcessed = 0;
1604 std::vector<BufferCache> cachesToRemove;
1605 ndk::ScopedAStatus returnStatus =
1606 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1607 ASSERT_TRUE(returnStatus.isOk());
1608 ASSERT_EQ(numRequestProcessed, 1u);
1609
1610 {
1611 std::unique_lock<std::mutex> l(mLock);
1612 while (!inflightReq->errorCodeValid &&
1613 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1614 auto timeout = std::chrono::system_clock::now() +
1615 std::chrono::seconds(kStreamBufferTimeoutSec);
1616 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1617 }
1618
1619 ASSERT_FALSE(inflightReq->errorCodeValid);
1620 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1621
1622 request.frameNumber++;
1623 // Empty settings should be supported after the first call
1624 // for repeating requests.
1625 request.settings.metadata.clear();
1626 request.physicalCameraSettings[0].settings.metadata.clear();
1627 // The buffer has been registered to HAL by bufferId, so per
1628 // API contract we should send a null handle for this buffer
1629 request.outputBuffers[0].buffer = NativeHandle();
1630 mInflightMap.clear();
1631 inflightReq = std::make_shared<InFlightRequest>(
1632 static_cast<ssize_t>(physicalIds.size()), false, supportsPartialResults,
1633 partialResultCount, physicalIds, resultQueue);
1634 mInflightMap[request.frameNumber] = inflightReq;
1635 }
1636
1637 returnStatus =
1638 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1639 ASSERT_TRUE(returnStatus.isOk());
1640 ASSERT_EQ(numRequestProcessed, 1u);
1641
1642 {
1643 std::unique_lock<std::mutex> l(mLock);
1644 while (!inflightReq->errorCodeValid &&
1645 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1646 auto timeout = std::chrono::system_clock::now() +
1647 std::chrono::seconds(kStreamBufferTimeoutSec);
1648 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1649 }
1650
1651 ASSERT_FALSE(inflightReq->errorCodeValid);
1652 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1653 }
1654
1655 // A request targeting an invalid physical camera id should fail
1656 frameNumber++;
1657 camSettings[0].physicalCameraId = invalidPhysicalId;
1658 camSettings[0].settings.metadata = settings;
1659
1660 request.physicalCameraSettings = camSettings; // Invalid camera settings
1661 returnStatus =
1662 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1663 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
1664 returnStatus.getServiceSpecificError());
1665
1666 defaultPreviewSettings.unlock(settingsBuffer);
1667 filteredSettings.unlock(filteredSettingsBuffer);
1668
1669 if (useHalBufManager) {
1670 std::vector<int32_t> streamIds(halStreams.size());
1671 for (size_t i = 0; i < streamIds.size(); i++) {
1672 streamIds[i] = halStreams[i].id;
1673 }
1674 verifyBuffersReturned(mSession, streamIds, cb);
1675 }
1676
1677 ret = mSession->close();
1678 mSession = nullptr;
1679 ASSERT_TRUE(ret.isOk());
1680 }
1681}
1682
1683 // Generate and verify an ultra-high-resolution capture request
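// Only cameras that report ultra high resolution support are exercised; the request sets
// ANDROID_SENSOR_PIXEL_MODE to MAXIMUM_RESOLUTION and streams are configured at the
// maximum-resolution sizes for YCBCR_420_888 and RAW16.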
1684TEST_P(CameraAidlTest, processUltraHighResolutionRequest) {
1685 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1686 int64_t bufferId = 1;
1687 int32_t frameNumber = 1;
1688 CameraMetadata settings;
1689
1690 for (const auto& name : cameraDeviceNames) {
1691 std::string version, deviceId;
1692 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1693 CameraMetadata meta;
1694
1695 std::shared_ptr<ICameraDevice> unusedDevice;
1696 openEmptyDeviceSession(name, mProvider, &mSession, &meta, &unusedDevice);
1697 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1698 if (!isUltraHighResolution(staticMeta)) {
1699 ndk::ScopedAStatus ret = mSession->close();
1700 mSession = nullptr;
1701 ASSERT_TRUE(ret.isOk());
1702 continue;
1703 }
1704 CameraMetadata req;
1705 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
1706 ndk::ScopedAStatus ret =
1707 mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE, &req);
1708 ASSERT_TRUE(ret.isOk());
1709
1710 const camera_metadata_t* metadata =
1711 reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
1712 size_t expectedSize = req.metadata.size();
1713 int result = validate_camera_metadata_structure(metadata, &expectedSize);
1714 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
1715
1716 size_t entryCount = get_camera_metadata_entry_count(metadata);
1717 ASSERT_GT(entryCount, 0u);
1718 defaultSettings = metadata;
1719 uint8_t sensorPixelMode =
1720 static_cast<uint8_t>(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
1721 ASSERT_EQ(::android::OK,
1722 defaultSettings.update(ANDROID_SENSOR_PIXEL_MODE, &sensorPixelMode, 1));
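// MAXIMUM_RESOLUTION asks the sensor to output its full (non-binned) pixel array resolution.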
1723
1724 const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
1725 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1726 settings.metadata = std::vector(
1727 rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1728 overrideRotateAndCrop(&settings);
1729
1730 ret = mSession->close();
1731 mSession = nullptr;
1732 ASSERT_TRUE(ret.isOk());
1733
1734 std::vector<HalStream> halStreams;
1735 bool supportsPartialResults = false;
1736 bool useHalBufManager = false;
1737 int32_t partialResultCount = 0;
1738 Stream previewStream;
1739 std::shared_ptr<DeviceCb> cb;
1740
1741 std::list<PixelFormat> pixelFormats = {PixelFormat::YCBCR_420_888, PixelFormat::RAW16};
1742 for (PixelFormat format : pixelFormats) {
Emilian Peevdda1eb72022-07-28 16:37:40 -07001743 previewStream.usage =
1744 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1745 GRALLOC1_CONSUMER_USAGE_CPU_READ);
1746 previewStream.dataSpace = Dataspace::UNKNOWN;
Avichal Rakesh362242f2022-02-08 12:40:53 -08001747 configureStreams(name, mProvider, format, &mSession, &previewStream, &halStreams,
1748 &supportsPartialResults, &partialResultCount, &useHalBufManager, &cb,
1749 0, /*maxResolution*/ true);
1750 ASSERT_NE(mSession, nullptr);
1751
1752 ::aidl::android::hardware::common::fmq::MQDescriptor<
1753 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1754 descriptor;
1755 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1756 ASSERT_TRUE(resultQueueRet.isOk());
1757
1758 std::shared_ptr<ResultMetadataQueue> resultQueue =
1759 std::make_shared<ResultMetadataQueue>(descriptor);
1760 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1761 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
1762 resultQueue = nullptr;
1763 // Don't use the queue onwards.
1764 }
1765
1766 std::vector<buffer_handle_t> graphicBuffers;
1767 graphicBuffers.reserve(halStreams.size());
1768 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1769 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1770 partialResultCount, std::unordered_set<std::string>(), resultQueue);
1771
1772 std::vector<CaptureRequest> requests(1);
1773 CaptureRequest& request = requests[0];
1774 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1775 outputBuffers.resize(halStreams.size());
1776
1777 size_t k = 0;
1778 for (const auto& halStream : halStreams) {
1779 buffer_handle_t buffer_handle;
1780 if (useHalBufManager) {
1781 outputBuffers[k] = {halStream.id, 0,
1782 NativeHandle(), BufferStatus::OK,
1783 NativeHandle(), NativeHandle()};
1784 } else {
1785 allocateGraphicBuffer(previewStream.width, previewStream.height,
1786 android_convertGralloc1To0Usage(
1787 static_cast<uint64_t>(halStream.producerUsage),
1788 static_cast<uint64_t>(halStream.consumerUsage)),
1789 halStream.overrideFormat, &buffer_handle);
1790 graphicBuffers.push_back(buffer_handle);
1791 outputBuffers[k] = {
1792 halStream.id, bufferId, ::android::makeToAidl(buffer_handle),
1793 BufferStatus::OK, NativeHandle(), NativeHandle()};
1794 bufferId++;
1795 }
1796 k++;
1797 }
1798
1799 request.inputBuffer = {
1800 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
1801 request.frameNumber = frameNumber;
1802 request.fmqSettingsSize = 0;
1803 request.settings = settings;
1804 request.inputWidth = 0;
1805 request.inputHeight = 0;
1806
1807 {
1808 std::unique_lock<std::mutex> l(mLock);
1809 mInflightMap.clear();
1810 mInflightMap[frameNumber] = inflightReq;
1811 }
1812
1813 int32_t numRequestProcessed = 0;
1814 std::vector<BufferCache> cachesToRemove;
1815 ndk::ScopedAStatus returnStatus =
1816 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1817 ASSERT_TRUE(returnStatus.isOk());
1818 ASSERT_EQ(numRequestProcessed, 1u);
1819
1820 {
1821 std::unique_lock<std::mutex> l(mLock);
1822 while (!inflightReq->errorCodeValid &&
1823 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1824 auto timeout = std::chrono::system_clock::now() +
1825 std::chrono::seconds(kStreamBufferTimeoutSec);
1826 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1827 }
1828
1829 ASSERT_FALSE(inflightReq->errorCodeValid);
1830 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1831 }
1832 if (useHalBufManager) {
1833 std::vector<int32_t> streamIds(halStreams.size());
1834 for (size_t i = 0; i < streamIds.size(); i++) {
1835 streamIds[i] = halStreams[i].id;
1836 }
1837 verifyBuffersReturned(mSession, streamIds, cb);
1838 }
1839
1840 ret = mSession->close();
1841 mSession = nullptr;
1842 ASSERT_TRUE(ret.isOk());
1843 }
1844 }
1845}
1846
1847 // Generate and verify a 10-bit dynamic range request
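// For every advertised 10-bit dynamic range profile, the test configures an
// IMPLEMENTATION_DEFINED stream with that profile, submits a burst of preview requests, and
// verifies the 10-bit (HDR) metadata attached to the returned buffers.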
1848TEST_P(CameraAidlTest, process10BitDynamicRangeRequest) {
1849 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001850 CameraMetadata settings;
1851
1852 for (const auto& name : cameraDeviceNames) {
1853 std::string version, deviceId;
1854 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1855 CameraMetadata meta;
1856 std::shared_ptr<ICameraDevice> device;
1857 openEmptyDeviceSession(name, mProvider, &mSession, &meta, &device);
1858 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1859 if (!is10BitDynamicRangeCapable(staticMeta)) {
1860 ndk::ScopedAStatus ret = mSession->close();
1861 mSession = nullptr;
1862 ASSERT_TRUE(ret.isOk());
1863 continue;
1864 }
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001865 std::vector<RequestAvailableDynamicRangeProfilesMap> profileList;
Avichal Rakesh362242f2022-02-08 12:40:53 -08001866 get10BitDynamicRangeProfiles(staticMeta, &profileList);
1867 ASSERT_FALSE(profileList.empty());
1868
1869 CameraMetadata req;
1870 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
1871 ndk::ScopedAStatus ret =
Emilian Peevdda1eb72022-07-28 16:37:40 -07001872 mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &req);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001873 ASSERT_TRUE(ret.isOk());
1874
1875 const camera_metadata_t* metadata =
1876 reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
1877 size_t expectedSize = req.metadata.size();
1878 int result = validate_camera_metadata_structure(metadata, &expectedSize);
1879 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
1880
1881 size_t entryCount = get_camera_metadata_entry_count(metadata);
1882 ASSERT_GT(entryCount, 0u);
1883 defaultSettings = metadata;
1884
1885 const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
1886 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1887 settings.metadata = std::vector(
1888 rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1889 overrideRotateAndCrop(&settings);
1890
1891 ret = mSession->close();
1892 mSession = nullptr;
1893 ASSERT_TRUE(ret.isOk());
1894
1895 std::vector<HalStream> halStreams;
1896 bool supportsPartialResults = false;
1897 bool useHalBufManager = false;
1898 int32_t partialResultCount = 0;
1899 Stream previewStream;
1900 std::shared_ptr<DeviceCb> cb;
1901 for (const auto& profile : profileList) {
Emilian Peevdda1eb72022-07-28 16:37:40 -07001902 previewStream.usage =
1903 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1904 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
1905 previewStream.dataSpace = getDataspace(PixelFormat::IMPLEMENTATION_DEFINED);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001906 configureStreams(name, mProvider, PixelFormat::IMPLEMENTATION_DEFINED, &mSession,
1907 &previewStream, &halStreams, &supportsPartialResults,
1908 &partialResultCount, &useHalBufManager, &cb, 0,
1909 /*maxResolution*/ false, profile);
1910 ASSERT_NE(mSession, nullptr);
1911
1912 ::aidl::android::hardware::common::fmq::MQDescriptor<
1913 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1914 descriptor;
1915 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1916 ASSERT_TRUE(resultQueueRet.isOk());
1917
1918 std::shared_ptr<ResultMetadataQueue> resultQueue =
1919 std::make_shared<ResultMetadataQueue>(descriptor);
1920 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1921 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
1922 resultQueue = nullptr;
1923 // Don't use the queue onwards.
1924 }
1925
Emilian Peevdda1eb72022-07-28 16:37:40 -07001926 mInflightMap.clear();
1927 // Submit enough requests to fill the HAL's in-flight queue
1928 std::vector<CaptureRequest> requests(halStreams[0].maxBuffers);
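// maxBuffers is the per-stream limit of buffers the HAL may hold at once, so this many
// requests should be enough to keep its pipeline full.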
Avichal Rakesh362242f2022-02-08 12:40:53 -08001929
Emilian Peev470d1382023-01-18 11:09:09 -08001930 for (int32_t requestId = 0; requestId < requests.size(); requestId++) {
Emilian Peevdda1eb72022-07-28 16:37:40 -07001931 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1932 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1933 partialResultCount, std::unordered_set<std::string>(), resultQueue);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001934
Emilian Peev470d1382023-01-18 11:09:09 -08001935 CaptureRequest& request = requests[requestId];
Emilian Peevdda1eb72022-07-28 16:37:40 -07001936 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1937 outputBuffers.resize(halStreams.size());
Avichal Rakesh362242f2022-02-08 12:40:53 -08001938
Emilian Peevdda1eb72022-07-28 16:37:40 -07001939 size_t k = 0;
1940 inflightReq->mOutstandingBufferIds.resize(halStreams.size());
1941 std::vector<buffer_handle_t> graphicBuffers;
1942 graphicBuffers.reserve(halStreams.size());
Avichal Rakesh362242f2022-02-08 12:40:53 -08001943
Emilian Peev470d1382023-01-18 11:09:09 -08001944 auto bufferId = requestId + 1; // Buffer id value 0 is not valid
Emilian Peevdda1eb72022-07-28 16:37:40 -07001945 for (const auto& halStream : halStreams) {
1946 buffer_handle_t buffer_handle;
1947 if (useHalBufManager) {
1948 outputBuffers[k] = {halStream.id, 0,
1949 NativeHandle(), BufferStatus::OK,
1950 NativeHandle(), NativeHandle()};
1951 } else {
1952 auto usage = android_convertGralloc1To0Usage(
1953 static_cast<uint64_t>(halStream.producerUsage),
1954 static_cast<uint64_t>(halStream.consumerUsage));
1955 allocateGraphicBuffer(previewStream.width, previewStream.height, usage,
1956 halStream.overrideFormat, &buffer_handle);
1957
1958 inflightReq->mOutstandingBufferIds[halStream.id][bufferId] = buffer_handle;
1959 graphicBuffers.push_back(buffer_handle);
1960 outputBuffers[k] = {halStream.id, bufferId,
1961 android::makeToAidl(buffer_handle), BufferStatus::OK, NativeHandle(),
1962 NativeHandle()};
Emilian Peevdda1eb72022-07-28 16:37:40 -07001963 }
1964 k++;
Avichal Rakesh362242f2022-02-08 12:40:53 -08001965 }
Avichal Rakesh362242f2022-02-08 12:40:53 -08001966
Emilian Peevdda1eb72022-07-28 16:37:40 -07001967 request.inputBuffer = {
1968 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
Emilian Peev470d1382023-01-18 11:09:09 -08001969 request.frameNumber = bufferId;
Emilian Peevdda1eb72022-07-28 16:37:40 -07001970 request.fmqSettingsSize = 0;
1971 request.settings = settings;
1972 request.inputWidth = 0;
1973 request.inputHeight = 0;
Avichal Rakesh362242f2022-02-08 12:40:53 -08001974
Emilian Peevdda1eb72022-07-28 16:37:40 -07001975 {
1976 std::unique_lock<std::mutex> l(mLock);
Emilian Peev470d1382023-01-18 11:09:09 -08001977 mInflightMap[bufferId] = inflightReq;
Emilian Peevdda1eb72022-07-28 16:37:40 -07001978 }
1979
Avichal Rakesh362242f2022-02-08 12:40:53 -08001980 }
1981
1982 int32_t numRequestProcessed = 0;
1983 std::vector<BufferCache> cachesToRemove;
1984 ndk::ScopedAStatus returnStatus =
Emilian Peevdda1eb72022-07-28 16:37:40 -07001985 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001986 ASSERT_TRUE(returnStatus.isOk());
Emilian Peevdda1eb72022-07-28 16:37:40 -07001987 ASSERT_EQ(numRequestProcessed, requests.size());
Avichal Rakesh362242f2022-02-08 12:40:53 -08001988
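// Notify the HAL that the repeating burst ends with the last submitted frame number on the
// first configured stream.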
Emilian Peevdda1eb72022-07-28 16:37:40 -07001989 returnStatus = mSession->repeatingRequestEnd(requests.size() - 1,
1990 std::vector<int32_t> {halStreams[0].id});
1991 ASSERT_TRUE(returnStatus.isOk());
1992
Emilian Peev470d1382023-01-18 11:09:09 -08001993 // Frame numbers and buffer ids are kept consistent. Buffer id 0 indicates a
1994 // buffer that is not present/available, so both buffer ids and frame numbers
1995 // begin with 1.
1996 for (int32_t frameNumber = 1; frameNumber <= requests.size(); frameNumber++) {
Emilian Peevdda1eb72022-07-28 16:37:40 -07001997 const auto& inflightReq = mInflightMap[frameNumber];
Avichal Rakesh362242f2022-02-08 12:40:53 -08001998 std::unique_lock<std::mutex> l(mLock);
1999 while (!inflightReq->errorCodeValid &&
2000 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2001 auto timeout = std::chrono::system_clock::now() +
2002 std::chrono::seconds(kStreamBufferTimeoutSec);
2003 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2004 }
2005
Shuzhen Wang0f56c562023-04-03 16:58:59 -07002006 waitForReleaseFence(inflightReq->resultOutputBuffers);
2007
Avichal Rakesh362242f2022-02-08 12:40:53 -08002008 ASSERT_FALSE(inflightReq->errorCodeValid);
2009 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2010 verify10BitMetadata(mHandleImporter, *inflightReq, profile);
2011 }
Emilian Peevdda1eb72022-07-28 16:37:40 -07002012
Avichal Rakesh362242f2022-02-08 12:40:53 -08002013 if (useHalBufManager) {
2014 std::vector<int32_t> streamIds(halStreams.size());
2015 for (size_t i = 0; i < streamIds.size(); i++) {
2016 streamIds[i] = halStreams[i].id;
2017 }
2018 mSession->signalStreamFlush(streamIds, /*streamConfigCounter*/ 0);
2019 cb->waitForBuffersReturned();
2020 }
2021
2022 ret = mSession->close();
2023 mSession = nullptr;
2024 ASSERT_TRUE(ret.isOk());
2025 }
2026 }
2027}
2028
Austin Borger4728fc42022-07-15 11:27:53 -07002029TEST_P(CameraAidlTest, process8BitColorSpaceRequests) {
Austin Borger54b22362023-03-22 11:25:06 -07002030 static int profiles[] = {ColorSpaceNamed::DISPLAY_P3, ColorSpaceNamed::SRGB};
Austin Borger4728fc42022-07-15 11:27:53 -07002031
2032 for (int32_t i = 0; i < sizeof(profiles) / sizeof(profiles[0]); i++) {
2033 processColorSpaceRequest(static_cast<RequestAvailableColorSpaceProfilesMap>(profiles[i]),
2034 static_cast<RequestAvailableDynamicRangeProfilesMap>(
2035 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD));
2036 }
2037}
2038
2039TEST_P(CameraAidlTest, process10BitColorSpaceRequests) {
2040 static const camera_metadata_enum_android_request_available_dynamic_range_profiles_map
2041 dynamicRangeProfiles[] = {
2042 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10,
2043 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10,
2044 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS,
2045 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF,
2046 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO,
2047 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM,
2048 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO,
2049 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF,
2050 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF_PO,
2051 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM,
2052 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM_PO
2053 };
2054
Austin Borger54b22362023-03-22 11:25:06 -07002055 // Process all dynamic range profiles with BT2020_HLG
Austin Borger4728fc42022-07-15 11:27:53 -07002056 for (int32_t i = 0; i < sizeof(dynamicRangeProfiles) / sizeof(dynamicRangeProfiles[0]); i++) {
2057 processColorSpaceRequest(
Austin Borger54b22362023-03-22 11:25:06 -07002058 static_cast<RequestAvailableColorSpaceProfilesMap>(ColorSpaceNamed::BT2020_HLG),
Austin Borger4728fc42022-07-15 11:27:53 -07002059 static_cast<RequestAvailableDynamicRangeProfilesMap>(dynamicRangeProfiles[i]));
2060 }
2061}
2062
Shuzhen Wang4dd6a512022-11-08 20:47:20 +00002063TEST_P(CameraAidlTest, processZoomSettingsOverrideRequests) {
2064 const int32_t kFrameCount = 5;
2065 const int32_t kTestCases = 2;
Shuzhen Wang38ddb272023-05-22 09:40:28 -07002066 const bool kOverrideSequence[kTestCases][kFrameCount] = {// ZOOM, ZOOM, ZOOM, ZOOM, ZOOM;
2067 {true, true, true, true, true},
2068 // OFF, ZOOM, ZOOM, ZOOM, OFF;
2069 {false, true, true, true, false}};
Shuzhen Wang4dd6a512022-11-08 20:47:20 +00002070 const bool kExpectedOverrideResults[kTestCases][kFrameCount] = {
Shuzhen Wang38ddb272023-05-22 09:40:28 -07002071 // All results should be overridden except the last one. The last result's
2072 // zoom does not have any speed-up.
2073 {true, true, true, true, false},
2074 // Because we require at least 1 frame speed-up, request #1, #2 and #3
2075 // Because we require at least 1 frame of speed-up, requests #1, #2 and #3
2076 {true, true, true, false, false}};
Shuzhen Wang4dd6a512022-11-08 20:47:20 +00002077
2078 for (int i = 0; i < kTestCases; i++) {
2079 processZoomSettingsOverrideRequests(kFrameCount, kOverrideSequence[i],
2080 kExpectedOverrideResults[i]);
2081 }
2082}
2083
Avichal Rakesh362242f2022-02-08 12:40:53 -08002084// Generate and verify a burst containing alternating sensor sensitivity values
2085TEST_P(CameraAidlTest, processCaptureRequestBurstISO) {
2086 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2087 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2088 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2089 int64_t bufferId = 1;
2090 int32_t frameNumber = 1;
2091 float isoTol = .03f;
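// Allow roughly 3% deviation between the requested and the reported sensor sensitivity.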
2092 CameraMetadata settings;
2093
2094 for (const auto& name : cameraDeviceNames) {
2095 CameraMetadata meta;
2096 settings.metadata.clear();
2097 std::shared_ptr<ICameraDevice> unusedDevice;
2098 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2099 &unusedDevice /*out*/);
2100 camera_metadata_t* staticMetaBuffer =
2101 clone_camera_metadata(reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
2102 ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta(
2103 staticMetaBuffer);
2104
2105 camera_metadata_entry_t hwLevel = staticMeta.find(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL);
2106 ASSERT_TRUE(0 < hwLevel.count);
2107 if (ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED == hwLevel.data.u8[0] ||
2108 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL == hwLevel.data.u8[0]) {
2109 // Limited/External devices can skip this test
2110 ndk::ScopedAStatus ret = mSession->close();
2111 mSession = nullptr;
2112 ASSERT_TRUE(ret.isOk());
2113 continue;
2114 }
2115
2116 camera_metadata_entry_t isoRange = staticMeta.find(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE);
2117 ASSERT_EQ(isoRange.count, 2u);
2118
2119 ndk::ScopedAStatus ret = mSession->close();
2120 mSession = nullptr;
2121 ASSERT_TRUE(ret.isOk());
2122
2123 bool supportsPartialResults = false;
2124 bool useHalBufManager = false;
2125 int32_t partialResultCount = 0;
2126 Stream previewStream;
2127 std::vector<HalStream> halStreams;
2128 std::shared_ptr<DeviceCb> cb;
2129 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2130 &previewStream /*out*/, &halStreams /*out*/,
2131 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2132 &useHalBufManager /*out*/, &cb /*out*/);
2133
2134 ::aidl::android::hardware::common::fmq::MQDescriptor<
2135 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2136 descriptor;
2137 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2138 std::shared_ptr<ResultMetadataQueue> resultQueue =
2139 std::make_shared<ResultMetadataQueue>(descriptor);
2140 ASSERT_TRUE(resultQueueRet.isOk());
2141 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2142 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
2143 resultQueue = nullptr;
2144 // Don't use the queue onwards.
2145 }
2146
2147 ret = mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &settings);
2148 ASSERT_TRUE(ret.isOk());
2149
2150 ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
2151 std::vector<CaptureRequest> requests(kBurstFrameCount);
2152 std::vector<buffer_handle_t> buffers(kBurstFrameCount);
2153 std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount);
2154 std::vector<int32_t> isoValues(kBurstFrameCount);
2155 std::vector<CameraMetadata> requestSettings(kBurstFrameCount);
2156
2157 for (int32_t i = 0; i < kBurstFrameCount; i++) {
2158 std::unique_lock<std::mutex> l(mLock);
2159 CaptureRequest& request = requests[i];
2160 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2161 outputBuffers.resize(1);
2162 StreamBuffer& outputBuffer = outputBuffers[0];
2163
2164 isoValues[i] = ((i % 2) == 0) ? isoRange.data.i32[0] : isoRange.data.i32[1];
2165 if (useHalBufManager) {
2166 outputBuffer = {halStreams[0].id, 0,
2167 NativeHandle(), BufferStatus::OK,
2168 NativeHandle(), NativeHandle()};
2169 } else {
2170 allocateGraphicBuffer(previewStream.width, previewStream.height,
2171 android_convertGralloc1To0Usage(
2172 static_cast<uint64_t>(halStreams[0].producerUsage),
2173 static_cast<uint64_t>(halStreams[0].consumerUsage)),
2174 halStreams[0].overrideFormat, &buffers[i]);
2175 outputBuffer = {halStreams[0].id, bufferId + i, ::android::makeToAidl(buffers[i]),
2176 BufferStatus::OK, NativeHandle(), NativeHandle()};
2177 }
2178
2179 requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));
2180
2181 // Disable all 3A routines
2182 uint8_t mode = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF);
2183 ASSERT_EQ(::android::OK, requestMeta.update(ANDROID_CONTROL_MODE, &mode, 1));
2184 ASSERT_EQ(::android::OK,
2185 requestMeta.update(ANDROID_SENSOR_SENSITIVITY, &isoValues[i], 1));
2186 camera_metadata_t* metaBuffer = requestMeta.release();
2187 uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2188 requestSettings[i].metadata = std::vector(
2189 rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2190 overrideRotateAndCrop(&(requestSettings[i]));
2191
2192 request.frameNumber = frameNumber + i;
2193 request.fmqSettingsSize = 0;
2194 request.settings = requestSettings[i];
2195 request.inputBuffer = {
2196 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2197
2198 inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2199 partialResultCount, resultQueue);
2200 mInflightMap[frameNumber + i] = inflightReqs[i];
2201 }
2202
2203 int32_t numRequestProcessed = 0;
2204 std::vector<BufferCache> cachesToRemove;
2205
2206 ndk::ScopedAStatus returnStatus =
2207 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2208 ASSERT_TRUE(returnStatus.isOk());
2209 ASSERT_EQ(numRequestProcessed, kBurstFrameCount);
2210
2211 for (size_t i = 0; i < kBurstFrameCount; i++) {
2212 std::unique_lock<std::mutex> l(mLock);
2213 while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
2214 (!inflightReqs[i]->haveResultMetadata))) {
2215 auto timeout = std::chrono::system_clock::now() +
2216 std::chrono::seconds(kStreamBufferTimeoutSec);
2217 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2218 }
2219
2220 ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
2221 ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
2222 ASSERT_EQ(previewStream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
2223 ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
2224 ASSERT_TRUE(inflightReqs[i]->collectedResult.exists(ANDROID_SENSOR_SENSITIVITY));
2225 camera_metadata_entry_t isoResult =
2226 inflightReqs[i]->collectedResult.find(ANDROID_SENSOR_SENSITIVITY);
2227 ASSERT_TRUE(std::abs(isoResult.data.i32[0] - isoValues[i]) <=
2228 std::round(isoValues[i] * isoTol));
2229 }
2230
2231 if (useHalBufManager) {
2232 verifyBuffersReturned(mSession, previewStream.id, cb);
2233 }
2234 ret = mSession->close();
2235 mSession = nullptr;
2236 ASSERT_TRUE(ret.isOk());
2237 }
2238}
2239
2240// Test whether an incorrect capture request with missing settings will
2241// be reported correctly.
2242TEST_P(CameraAidlTest, processCaptureRequestInvalidSinglePreview) {
2243 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2244 std::vector<AvailableStream> outputPreviewStreams;
2245 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2246 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2247 int64_t bufferId = 1;
2248 int32_t frameNumber = 1;
2249 CameraMetadata settings;
2250
2251 for (const auto& name : cameraDeviceNames) {
2252 Stream previewStream;
2253 std::vector<HalStream> halStreams;
2254 std::shared_ptr<DeviceCb> cb;
2255 bool supportsPartialResults = false;
2256 bool useHalBufManager = false;
2257 int32_t partialResultCount = 0;
2258 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2259 &previewStream /*out*/, &halStreams /*out*/,
2260 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2261 &useHalBufManager /*out*/, &cb /*out*/);
2262 ASSERT_NE(mSession, nullptr);
2263 ASSERT_FALSE(halStreams.empty());
2264
2265 buffer_handle_t buffer_handle = nullptr;
2266
2267 if (useHalBufManager) {
2268 bufferId = 0;
2269 } else {
2270 allocateGraphicBuffer(previewStream.width, previewStream.height,
2271 android_convertGralloc1To0Usage(
2272 static_cast<uint64_t>(halStreams[0].producerUsage),
2273 static_cast<uint64_t>(halStreams[0].consumerUsage)),
2274 halStreams[0].overrideFormat, &buffer_handle);
2275 }
2276
2277 std::vector<CaptureRequest> requests(1);
2278 CaptureRequest& request = requests[0];
2279 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2280 outputBuffers.resize(1);
2281 StreamBuffer& outputBuffer = outputBuffers[0];
2282
2283 outputBuffer = {
2284 halStreams[0].id,
2285 bufferId,
2286 buffer_handle == nullptr ? NativeHandle() : ::android::makeToAidl(buffer_handle),
2287 BufferStatus::OK,
2288 NativeHandle(),
2289 NativeHandle()};
2290
2291 request.inputBuffer = {
2292 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2293 request.frameNumber = frameNumber;
2294 request.fmqSettingsSize = 0;
2295 request.settings = settings;
2296
2297 // Settings were not initialized, so the request should fail here
2298 int32_t numRequestProcessed = 0;
2299 std::vector<BufferCache> cachesToRemove;
2300 ndk::ScopedAStatus ret =
2301 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2302 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
2303 ASSERT_EQ(numRequestProcessed, 0u);
2304
2305 ret = mSession->close();
2306 mSession = nullptr;
2307 ASSERT_TRUE(ret.isOk());
2308 }
2309}
2310
2311// Verify camera offline session behavior
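// The test queues a burst of still capture requests, switches the session to offline mode for
// the BLOB stream, closes the original session, and then drains the remaining results through
// the offline session.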
2312TEST_P(CameraAidlTest, switchToOffline) {
2313 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2314 AvailableStream threshold = {kMaxStillWidth, kMaxStillHeight,
2315 static_cast<int32_t>(PixelFormat::BLOB)};
2316 int64_t bufferId = 1;
2317 int32_t frameNumber = 1;
2318 CameraMetadata settings;
2319
2320 for (const auto& name : cameraDeviceNames) {
2321 CameraMetadata meta;
2322 {
2323 std::shared_ptr<ICameraDevice> unusedDevice;
2324 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2325 &unusedDevice);
2326 camera_metadata_t* staticMetaBuffer = clone_camera_metadata(
2327 reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
2328 ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta(
2329 staticMetaBuffer);
2330
2331 if (isOfflineSessionSupported(staticMetaBuffer) != Status::OK) {
2332 ndk::ScopedAStatus ret = mSession->close();
2333 mSession = nullptr;
2334 ASSERT_TRUE(ret.isOk());
2335 continue;
2336 }
2337 ndk::ScopedAStatus ret = mSession->close();
2338 mSession = nullptr;
2339 ASSERT_TRUE(ret.isOk());
2340 }
2341
2342 bool supportsPartialResults = false;
2343 int32_t partialResultCount = 0;
2344 Stream stream;
2345 std::vector<HalStream> halStreams;
2346 std::shared_ptr<DeviceCb> cb;
2347 int32_t jpegBufferSize;
2348 bool useHalBufManager;
2349 configureOfflineStillStream(name, mProvider, &threshold, &mSession /*out*/, &stream /*out*/,
2350 &halStreams /*out*/, &supportsPartialResults /*out*/,
2351 &partialResultCount /*out*/, &cb /*out*/,
2352 &jpegBufferSize /*out*/, &useHalBufManager /*out*/);
2353
2354 auto ret = mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE,
2355 &settings);
2356 ASSERT_TRUE(ret.isOk());
2357
2358 ::aidl::android::hardware::common::fmq::MQDescriptor<
2359 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2360 descriptor;
2361
2362 ndk::ScopedAStatus resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2363 ASSERT_TRUE(resultQueueRet.isOk());
2364 std::shared_ptr<ResultMetadataQueue> resultQueue =
2365 std::make_shared<ResultMetadataQueue>(descriptor);
2366 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2367 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
2368 resultQueue = nullptr;
2369 // Don't use the queue onwards.
2370 }
2371
2372 ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
2373
2374 std::vector<buffer_handle_t> buffers(kBurstFrameCount);
2375 std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount);
2376 std::vector<CameraMetadata> requestSettings(kBurstFrameCount);
2377
2378 std::vector<CaptureRequest> requests(kBurstFrameCount);
2379
2380 HalStream halStream = halStreams[0];
2381 for (uint32_t i = 0; i < kBurstFrameCount; i++) {
2382 CaptureRequest& request = requests[i];
2383 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2384 outputBuffers.resize(1);
2385 StreamBuffer& outputBuffer = outputBuffers[0];
2386
2387 std::unique_lock<std::mutex> l(mLock);
2388 if (useHalBufManager) {
2389 outputBuffer = {halStream.id, 0, NativeHandle(), BufferStatus::OK, NativeHandle(),
2390 NativeHandle()};
2391 } else {
2392 // jpeg buffer (w,h) = (blobLen, 1)
2393 allocateGraphicBuffer(jpegBufferSize, /*height*/ 1,
2394 android_convertGralloc1To0Usage(
2395 static_cast<uint64_t>(halStream.producerUsage),
2396 static_cast<uint64_t>(halStream.consumerUsage)),
2397 halStream.overrideFormat, &buffers[i]);
2398 outputBuffer = {halStream.id, bufferId + i, ::android::makeToAidl(buffers[i]),
2399 BufferStatus::OK, NativeHandle(), NativeHandle()};
2400 }
2401
2402 requestMeta.clear();
2403 requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));
2404
2405 camera_metadata_t* metaBuffer = requestMeta.release();
2406 uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2407 requestSettings[i].metadata = std::vector(
2408 rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2409 overrideRotateAndCrop(&requestSettings[i]);
2410
2411 request.frameNumber = frameNumber + i;
2412 request.fmqSettingsSize = 0;
2413 request.settings = requestSettings[i];
2414 request.inputBuffer = {/*streamId*/ -1,
2415 /*bufferId*/ 0, NativeHandle(),
2416 BufferStatus::ERROR, NativeHandle(),
2417 NativeHandle()};
2418
2419 inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2420 partialResultCount, resultQueue);
2421 mInflightMap[frameNumber + i] = inflightReqs[i];
2422 }
2423
2424 int32_t numRequestProcessed = 0;
2425 std::vector<BufferCache> cachesToRemove;
2426
2427 ndk::ScopedAStatus returnStatus =
2428 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2429 ASSERT_TRUE(returnStatus.isOk());
2430 ASSERT_EQ(numRequestProcessed, kBurstFrameCount);
2431
2432 std::vector<int32_t> offlineStreamIds = {halStream.id};
2433 CameraOfflineSessionInfo offlineSessionInfo;
2434 std::shared_ptr<ICameraOfflineSession> offlineSession;
2435 returnStatus =
2436 mSession->switchToOffline(offlineStreamIds, &offlineSessionInfo, &offlineSession);
2437
2438 if (!halStreams[0].supportOffline) {
2439 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
2440 returnStatus.getServiceSpecificError());
2441 ret = mSession->close();
2442 mSession = nullptr;
2443 ASSERT_TRUE(ret.isOk());
2444 continue;
2445 }
2446
2447 ASSERT_TRUE(returnStatus.isOk());
2448 // The HAL might not find any requests that qualify for offline mode.
2449 if (offlineSession == nullptr) {
2450 ret = mSession->close();
2451 mSession = nullptr;
2452 ASSERT_TRUE(ret.isOk());
2453 continue;
2454 }
2455
2456 ASSERT_EQ(offlineSessionInfo.offlineStreams.size(), 1u);
2457 ASSERT_EQ(offlineSessionInfo.offlineStreams[0].id, halStream.id);
2458 ASSERT_NE(offlineSessionInfo.offlineRequests.size(), 0u);
2459
2460 // Close the device session to make sure the offline session does not rely on it
2461 ret = mSession->close();
2462 mSession = nullptr;
2463 ASSERT_TRUE(ret.isOk());
2464
2465 ::aidl::android::hardware::common::fmq::MQDescriptor<
2466 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2467 offlineResultDescriptor;
2468
2469 auto offlineResultQueueRet =
2470 offlineSession->getCaptureResultMetadataQueue(&offlineResultDescriptor);
2471 std::shared_ptr<ResultMetadataQueue> offlineResultQueue =
2472 std::make_shared<ResultMetadataQueue>(descriptor);
2473 if (!offlineResultQueue->isValid() || offlineResultQueue->availableToWrite() <= 0) {
2474 ALOGE("%s: offline session returns empty result metadata fmq, not use it", __func__);
2475 offlineResultQueue = nullptr;
2476 // Don't use the queue onwards.
2477 }
2478 ASSERT_TRUE(offlineResultQueueRet.isOk());
2479
2480 updateInflightResultQueue(offlineResultQueue);
2481
2482 ret = offlineSession->setCallback(cb);
2483 ASSERT_TRUE(ret.isOk());
2484
2485 for (size_t i = 0; i < kBurstFrameCount; i++) {
2486 std::unique_lock<std::mutex> l(mLock);
2487 while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
2488 (!inflightReqs[i]->haveResultMetadata))) {
2489 auto timeout = std::chrono::system_clock::now() +
2490 std::chrono::seconds(kStreamBufferTimeoutSec);
2491 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2492 }
2493
2494 ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
2495 ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
2496 ASSERT_EQ(stream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
2497 ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
2498 }
2499
2500 ret = offlineSession->close();
2501 ASSERT_TRUE(ret.isOk());
2502 }
2503}
2504
2505// Check whether an invalid capture request with missing output buffers
2506// will be reported correctly.
2507TEST_P(CameraAidlTest, processCaptureRequestInvalidBuffer) {
2508 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2509 std::vector<AvailableStream> outputBlobStreams;
2510 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2511 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2512 int32_t frameNumber = 1;
2513 CameraMetadata settings;
2514
2515 for (const auto& name : cameraDeviceNames) {
2516 Stream previewStream;
2517 std::vector<HalStream> halStreams;
2518 std::shared_ptr<DeviceCb> cb;
2519 bool supportsPartialResults = false;
2520 bool useHalBufManager = false;
2521 int32_t partialResultCount = 0;
2522 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2523 &previewStream /*out*/, &halStreams /*out*/,
2524 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2525 &useHalBufManager /*out*/, &cb /*out*/);
2526
2527 RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
2528 ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings);
2529 ASSERT_TRUE(ret.isOk());
2530 overrideRotateAndCrop(&settings);
2531
2532 std::vector<CaptureRequest> requests(1);
2533 CaptureRequest& request = requests[0];
2534 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2535 outputBuffers.resize(1);
2536 // Empty output buffer
2537 outputBuffers[0] = {
2538 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2539
2540 request.inputBuffer = {
2541 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2542 request.frameNumber = frameNumber;
2543 request.fmqSettingsSize = 0;
2544 request.settings = settings;
2545
2546 // Output buffers are missing, so the request should fail here
2547 int32_t numRequestProcessed = 0;
2548 std::vector<BufferCache> cachesToRemove;
2549 ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2550 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
2551 ASSERT_EQ(numRequestProcessed, 0u);
2552
2553 ret = mSession->close();
2554 mSession = nullptr;
2555 ASSERT_TRUE(ret.isOk());
2556 }
2557}
2558
2559// Generate, trigger and flush a preview request
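// flush() is issued right after the request is submitted, so the request may either complete
// normally or finish with ERROR_REQUEST/ERROR_RESULT/ERROR_BUFFER; both outcomes are accepted.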
2560TEST_P(CameraAidlTest, flushPreviewRequest) {
2561 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2562 std::vector<AvailableStream> outputPreviewStreams;
2563 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2564 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2565 int64_t bufferId = 1;
2566 int32_t frameNumber = 1;
2567 CameraMetadata settings;
2568
2569 for (const auto& name : cameraDeviceNames) {
2570 Stream previewStream;
2571 std::vector<HalStream> halStreams;
2572 std::shared_ptr<DeviceCb> cb;
2573 bool supportsPartialResults = false;
2574 bool useHalBufManager = false;
2575 int32_t partialResultCount = 0;
2576
2577 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2578 &previewStream /*out*/, &halStreams /*out*/,
2579 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2580 &useHalBufManager /*out*/, &cb /*out*/);
2581
2582 ASSERT_NE(mSession, nullptr);
2583 ASSERT_NE(cb, nullptr);
2584 ASSERT_FALSE(halStreams.empty());
2585
2586 ::aidl::android::hardware::common::fmq::MQDescriptor<
2587 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2588 descriptor;
2589
2590 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2591 std::shared_ptr<ResultMetadataQueue> resultQueue =
2592 std::make_shared<ResultMetadataQueue>(descriptor);
2593 ASSERT_TRUE(resultQueueRet.isOk());
2594 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2595 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
2596 resultQueue = nullptr;
2597 // Don't use the queue onwards.
2598 }
2599
2600 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
2601 1, false, supportsPartialResults, partialResultCount, resultQueue);
2602 RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
2603
2604 ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings);
2605 ASSERT_TRUE(ret.isOk());
2606 overrideRotateAndCrop(&settings);
2607
2608 buffer_handle_t buffer_handle;
2609 std::vector<CaptureRequest> requests(1);
2610 CaptureRequest& request = requests[0];
2611 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2612 outputBuffers.resize(1);
2613 StreamBuffer& outputBuffer = outputBuffers[0];
2614 if (useHalBufManager) {
2615 bufferId = 0;
2616 outputBuffer = {halStreams[0].id, bufferId, NativeHandle(),
2617 BufferStatus::OK, NativeHandle(), NativeHandle()};
2618 } else {
2619 allocateGraphicBuffer(previewStream.width, previewStream.height,
2620 android_convertGralloc1To0Usage(
2621 static_cast<uint64_t>(halStreams[0].producerUsage),
2622 static_cast<uint64_t>(halStreams[0].consumerUsage)),
2623 halStreams[0].overrideFormat, &buffer_handle);
2624 outputBuffer = {halStreams[0].id, bufferId, ::android::makeToAidl(buffer_handle),
2625 BufferStatus::OK, NativeHandle(), NativeHandle()};
2626 }
2627
2628 request.frameNumber = frameNumber;
2629 request.fmqSettingsSize = 0;
2630 request.settings = settings;
2631 request.inputBuffer = {
2632 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2633
2634 {
2635 std::unique_lock<std::mutex> l(mLock);
2636 mInflightMap.clear();
2637 mInflightMap[frameNumber] = inflightReq;
2638 }
2639
2640 int32_t numRequestProcessed = 0;
2641 std::vector<BufferCache> cachesToRemove;
2642 ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2643 ASSERT_TRUE(ret.isOk());
2644 ASSERT_EQ(numRequestProcessed, 1u);
2645
2646 // Flush before waiting for request to complete.
2647 ndk::ScopedAStatus returnStatus = mSession->flush();
2648 ASSERT_TRUE(returnStatus.isOk());
2649
2650 {
2651 std::unique_lock<std::mutex> l(mLock);
2652 while (!inflightReq->errorCodeValid &&
2653 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2654 auto timeout = std::chrono::system_clock::now() +
2655 std::chrono::seconds(kStreamBufferTimeoutSec);
2656 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2657 }
2658
2659 if (!inflightReq->errorCodeValid) {
2660 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2661 ASSERT_EQ(previewStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId);
2662 } else {
2663 switch (inflightReq->errorCode) {
2664 case ErrorCode::ERROR_REQUEST:
2665 case ErrorCode::ERROR_RESULT:
2666 case ErrorCode::ERROR_BUFFER:
2667 // Expected
2668 break;
2669 case ErrorCode::ERROR_DEVICE:
2670 default:
2671 FAIL() << "Unexpected error:"
2672 << static_cast<uint32_t>(inflightReq->errorCode);
2673 }
2674 }
2675 }
2676
2677 if (useHalBufManager) {
2678 verifyBuffersReturned(mSession, previewStream.id, cb);
2679 }
2680
2681 ret = mSession->close();
2682 mSession = nullptr;
2683 ASSERT_TRUE(ret.isOk());
2684 }
2685}
2686
2687// Verify that camera flushes correctly without any pending requests.
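// With no requests in flight, flush() must succeed and no result callbacks should arrive
// within kEmptyFlushTimeoutMSec.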
2688TEST_P(CameraAidlTest, flushEmpty) {
2689 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2690 std::vector<AvailableStream> outputPreviewStreams;
2691 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2692 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2693
2694 for (const auto& name : cameraDeviceNames) {
2695 Stream previewStream;
2696 std::vector<HalStream> halStreams;
2697 std::shared_ptr<DeviceCb> cb;
2698 bool supportsPartialResults = false;
2699 bool useHalBufManager = false;
2700
2701 int32_t partialResultCount = 0;
2702 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2703 &previewStream /*out*/, &halStreams /*out*/,
2704 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2705 &useHalBufManager /*out*/, &cb /*out*/);
2706
2707 ndk::ScopedAStatus returnStatus = mSession->flush();
2708 ASSERT_TRUE(returnStatus.isOk());
2709
2710 {
2711 std::unique_lock<std::mutex> l(mLock);
2712 auto timeout = std::chrono::system_clock::now() +
2713 std::chrono::milliseconds(kEmptyFlushTimeoutMSec);
2714 ASSERT_EQ(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2715 }
2716
2717 ndk::ScopedAStatus ret = mSession->close();
2718 mSession = nullptr;
2719 ASSERT_TRUE(ret.isOk());
2720 }
2721}
2722
2723// Test camera provider notify method
2724TEST_P(CameraAidlTest, providerDeviceStateNotification) {
2725 notifyDeviceState(ICameraProvider::DEVICE_STATE_BACK_COVERED);
2726 notifyDeviceState(ICameraProvider::DEVICE_STATE_NORMAL);
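    // Device states form a bit field, so combined states could also be exercised, e.g.
    // (assuming DEVICE_STATE_FOLDED is defined for the tested interface version):
    //     notifyDeviceState(ICameraProvider::DEVICE_STATE_FOLDED |
    //                       ICameraProvider::DEVICE_STATE_BACK_COVERED);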
2727}
2728
2729// Verify that all supported stream formats and sizes can be configured
2730// successfully for the injection camera.
2731TEST_P(CameraAidlTest, configureInjectionStreamsAvailableOutputs) {
2732 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2733 std::vector<AvailableStream> outputStreams;
2734
2735 for (const auto& name : cameraDeviceNames) {
2736 CameraMetadata metadata;
2737
2738 std::shared_ptr<ICameraInjectionSession> injectionSession;
2739 std::shared_ptr<ICameraDevice> unusedDevice;
2740 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2741 &unusedDevice /*out*/);
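// Injection sessions are optional; devices that do not provide one are skipped.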
2742 if (injectionSession == nullptr) {
2743 continue;
2744 }
2745
2746 camera_metadata_t* staticMetaBuffer =
2747 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2748 CameraMetadata chars;
2749 chars.metadata = metadata.metadata;
2750
2751 outputStreams.clear();
2752 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams));
2753 ASSERT_NE(0u, outputStreams.size());
2754
2755 int32_t jpegBufferSize = 0;
2756 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize));
2757 ASSERT_NE(0u, jpegBufferSize);
2758
2759 int32_t streamId = 0;
2760 int32_t streamConfigCounter = 0;
2761 for (auto& it : outputStreams) {
2762 Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format));
2763 Stream stream = {streamId,
2764 StreamType::OUTPUT,
2765 it.width,
2766 it.height,
2767 static_cast<PixelFormat>(it.format),
2768 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2769 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2770 dataspace,
2771 StreamRotation::ROTATION_0,
2772 std::string(),
2773 jpegBufferSize,
2774 0,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00002775 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2776 RequestAvailableDynamicRangeProfilesMap::
2777 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08002778
2779 std::vector<Stream> streams = {stream};
2780 StreamConfiguration config;
2781 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2782 jpegBufferSize);
2783
2784 config.streamConfigCounter = streamConfigCounter++;
2785 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
2786 ASSERT_TRUE(s.isOk());
2787 streamId++;
2788 }
2789
2790 std::shared_ptr<ICameraDeviceSession> session;
2791 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2792 ASSERT_TRUE(ret.isOk());
2793 ASSERT_NE(session, nullptr);
2794 ret = session->close();
2795 ASSERT_TRUE(ret.isOk());
2796 }
2797}
2798
2799// Check for correct handling of invalid configuration parameters for the injection camera.
2800TEST_P(CameraAidlTest, configureInjectionStreamsInvalidOutputs) {
2801 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2802 std::vector<AvailableStream> outputStreams;
2803
2804 for (const auto& name : cameraDeviceNames) {
2805 CameraMetadata metadata;
2806 std::shared_ptr<ICameraInjectionSession> injectionSession;
2807 std::shared_ptr<ICameraDevice> unusedDevice;
2808 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2809 &unusedDevice);
2810 if (injectionSession == nullptr) {
2811 continue;
2812 }
2813
2814 camera_metadata_t* staticMetaBuffer =
2815 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2816 std::shared_ptr<ICameraDeviceSession> session;
2817 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2818 ASSERT_TRUE(ret.isOk());
2819 ASSERT_NE(session, nullptr);
2820
2821 CameraMetadata chars;
2822 chars.metadata = metadata.metadata;
2823
2824 outputStreams.clear();
2825 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams));
2826 ASSERT_NE(0u, outputStreams.size());
2827
2828 int32_t jpegBufferSize = 0;
2829 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize));
2830 ASSERT_NE(0u, jpegBufferSize);
2831
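// Case 1: a stream with zero width and height must be rejected.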
2832 int32_t streamId = 0;
2833 Stream stream = {streamId++,
2834 StreamType::OUTPUT,
2835 0,
2836 0,
2837 static_cast<PixelFormat>(outputStreams[0].format),
2838 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2839 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2840 Dataspace::UNKNOWN,
2841 StreamRotation::ROTATION_0,
2842 std::string(),
2843 jpegBufferSize,
2844 0,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00002845 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2846 RequestAvailableDynamicRangeProfilesMap::
2847 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08002848
2849 int32_t streamConfigCounter = 0;
2850 std::vector<Stream> streams = {stream};
2851 StreamConfiguration config;
2852 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2853 jpegBufferSize);
2854
2855 config.streamConfigCounter = streamConfigCounter++;
2856 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
2857 ASSERT_TRUE(
2858 (static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) == s.getServiceSpecificError()) ||
2859 (static_cast<int32_t>(Status::INTERNAL_ERROR) == s.getServiceSpecificError()));
2860
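// Case 2: out-of-range (INT32_MAX) stream dimensions must be rejected.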
2861 stream = {streamId++,
2862 StreamType::OUTPUT,
2863 INT32_MAX,
2864 INT32_MAX,
2865 static_cast<PixelFormat>(outputStreams[0].format),
2866 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2867 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2868 Dataspace::UNKNOWN,
2869 StreamRotation::ROTATION_0,
2870 std::string(),
2871 jpegBufferSize,
2872 0,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00002873 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2874 RequestAvailableDynamicRangeProfilesMap::
2875 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2876
Avichal Rakesh362242f2022-02-08 12:40:53 -08002877 streams[0] = stream;
2878 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2879 jpegBufferSize);
2880 config.streamConfigCounter = streamConfigCounter++;
2881 s = injectionSession->configureInjectionStreams(config, chars);
2882 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
2883
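// Case 3: for each supported size, an unsupported pixel format and an invalid stream
// rotation must both be rejected.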
2884 for (auto& it : outputStreams) {
2885 stream = {streamId++,
2886 StreamType::OUTPUT,
2887 it.width,
2888 it.height,
2889 static_cast<PixelFormat>(INT32_MAX),
2890 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2891 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2892 Dataspace::UNKNOWN,
2893 StreamRotation::ROTATION_0,
2894 std::string(),
2895 jpegBufferSize,
2896 0,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00002897 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2898 RequestAvailableDynamicRangeProfilesMap::
2899 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08002900 streams[0] = stream;
2901 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2902 jpegBufferSize);
2903 config.streamConfigCounter = streamConfigCounter++;
2904 s = injectionSession->configureInjectionStreams(config, chars);
2905 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
2906
2907 stream = {streamId++,
2908 StreamType::OUTPUT,
2909 it.width,
2910 it.height,
2911 static_cast<PixelFormat>(it.format),
2912 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2913 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2914 Dataspace::UNKNOWN,
2915 static_cast<StreamRotation>(INT32_MAX),
2916 std::string(),
2917 jpegBufferSize,
2918 0,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00002919 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2920 RequestAvailableDynamicRangeProfilesMap::
2921 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08002922 streams[0] = stream;
2923 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2924 jpegBufferSize);
2925 config.streamConfigCounter = streamConfigCounter++;
2926 s = injectionSession->configureInjectionStreams(config, chars);
2927 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
2928 }
2929
2930 ret = session->close();
2931 ASSERT_TRUE(ret.isOk());
2932 }
2933}
2934
2935// Check whether session parameters are supported for the injection camera. If the HAL supports
2936// them, try to configure a preview stream using them.
2937TEST_P(CameraAidlTest, configureInjectionStreamsWithSessionParameters) {
2938 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2939 std::vector<AvailableStream> outputPreviewStreams;
2940 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2941 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2942
2943 for (const auto& name : cameraDeviceNames) {
2944 CameraMetadata metadata;
2945 std::shared_ptr<ICameraInjectionSession> injectionSession;
2946 std::shared_ptr<ICameraDevice> unusedDevice;
2947 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2948 &unusedDevice /*out*/);
2949 if (injectionSession == nullptr) {
2950 continue;
2951 }
2952
2953 std::shared_ptr<ICameraDeviceSession> session;
2954 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2955 ASSERT_TRUE(ret.isOk());
2956 ASSERT_NE(session, nullptr);
2957
2958 camera_metadata_t* staticMetaBuffer =
2959 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2960 CameraMetadata chars;
2961 chars.metadata = metadata.metadata;
2962
2963 std::unordered_set<int32_t> availableSessionKeys;
2964 Status rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
2965 &availableSessionKeys);
2966 ASSERT_EQ(Status::OK, rc);
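// Skip devices that advertise no session keys; there are no session parameters to exercise.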
2967 if (availableSessionKeys.empty()) {
2968 ret = session->close();
2969 ASSERT_TRUE(ret.isOk());
2970 continue;
2971 }
2972
2973 android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings;
2974 android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams,
2975 modifiedSessionParams;
2976 constructFilteredSettings(session, availableSessionKeys, RequestTemplate::PREVIEW,
2977 &previewRequestSettings, &sessionParams);
2978 if (sessionParams.isEmpty()) {
2979 ret = session->close();
2980 ASSERT_TRUE(ret.isOk());
2981 continue;
2982 }
2983
2984 outputPreviewStreams.clear();
2985
2986 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams,
2987 &previewThreshold));
2988 ASSERT_NE(0u, outputPreviewStreams.size());
2989
2990 Stream previewStream = {
2991 0,
2992 StreamType::OUTPUT,
2993 outputPreviewStreams[0].width,
2994 outputPreviewStreams[0].height,
2995 static_cast<PixelFormat>(outputPreviewStreams[0].format),
2996 static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
2997 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2998 Dataspace::UNKNOWN,
2999 StreamRotation::ROTATION_0,
3000 std::string(),
3001 0,
3002 -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00003003 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
3004 RequestAvailableDynamicRangeProfilesMap::
3005 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08003006 std::vector<Stream> streams = {previewStream};
3007 StreamConfiguration config;
3008 config.streams = streams;
3009 config.operationMode = StreamConfigurationMode::NORMAL_MODE;
3010
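// Attach the filtered session parameters to the stream configuration as a raw metadata
// buffer; the buffer is re-acquired after configuration so it can be freed correctly.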
3011 modifiedSessionParams = sessionParams;
3012 camera_metadata_t* sessionParamsBuffer = sessionParams.release();
3013 uint8_t* rawSessionParamsBuffer = reinterpret_cast<uint8_t*>(sessionParamsBuffer);
3014 config.sessionParams.metadata =
3015 std::vector(rawSessionParamsBuffer,
3016 rawSessionParamsBuffer + get_camera_metadata_size(sessionParamsBuffer));
3017
3018 config.streamConfigCounter = 0;
3020 config.multiResolutionInputImage = false;
3021
3022 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
3023 ASSERT_TRUE(s.isOk());
3024
3025 sessionParams.acquire(sessionParamsBuffer);
3026 free_camera_metadata(staticMetaBuffer);
3027 ret = session->close();
3028 ASSERT_TRUE(ret.isOk());
3029 }
3030}
3031
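// Repeat the stream use case checks with a RAW16 stream threshold so that RAW-specific use
// cases (such as cropped RAW, where supported) are also covered.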
Jayant Chowdharyde1909e2022-11-23 17:18:38 +00003032TEST_P(CameraAidlTest, configureStreamsUseCasesCroppedRaw) {
3033 AvailableStream rawStreamThreshold =
3034 {INT_MAX, INT_MAX, static_cast<int32_t>(PixelFormat::RAW16)};
3035 configureStreamUseCaseInternal(rawStreamThreshold);
3036}
3037
Avichal Rakesh362242f2022-02-08 12:40:53 -08003038// Verify that valid stream use cases can be configured successfully, and invalid use cases
3039// fail stream configuration.
3040TEST_P(CameraAidlTest, configureStreamsUseCases) {
Jayant Chowdharyde1909e2022-11-23 17:18:38 +00003041 AvailableStream previewStreamThreshold =
3042 {kMaxPreviewWidth, kMaxPreviewHeight, static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
3043 configureStreamUseCaseInternal(previewStreamThreshold);
Avichal Rakesh362242f2022-02-08 12:40:53 -08003044}
3045
Austin Borger0918fc82023-03-21 18:48:18 -07003046// Validate the integrity of stream configuration metadata
3047TEST_P(CameraAidlTest, validateStreamConfigurations) {
3048 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
3049 std::vector<AvailableStream> outputStreams;
3050
3051 const int32_t scalerSizesTag = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
3052 const int32_t scalerMinFrameDurationsTag = ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS;
3053 const int32_t scalerStallDurationsTag = ANDROID_SCALER_AVAILABLE_STALL_DURATIONS;
3054
3055 for (const auto& name : cameraDeviceNames) {
3056 CameraMetadata meta;
3057 std::shared_ptr<ICameraDevice> cameraDevice;
3058
3059 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
3060 &cameraDevice /*out*/);
3061 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
3062
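// 10-bit dynamic range capable devices must advertise BLOB minimum-frame and stall
// durations for every supported P010 size.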
3063 if (is10BitDynamicRangeCapable(staticMeta)) {
3064 std::vector<std::tuple<size_t, size_t>> supportedP010Sizes, supportedBlobSizes;
3065
3066 getSupportedSizes(staticMeta, scalerSizesTag, HAL_PIXEL_FORMAT_BLOB,
3067 &supportedBlobSizes);
3068 getSupportedSizes(staticMeta, scalerSizesTag, HAL_PIXEL_FORMAT_YCBCR_P010,
3069 &supportedP010Sizes);
3070 ASSERT_FALSE(supportedP010Sizes.empty());
3071
3072 std::vector<int64_t> blobMinDurations, blobStallDurations;
3073 getSupportedDurations(staticMeta, scalerMinFrameDurationsTag, HAL_PIXEL_FORMAT_BLOB,
3074 supportedP010Sizes, &blobMinDurations);
3075 getSupportedDurations(staticMeta, scalerStallDurationsTag, HAL_PIXEL_FORMAT_BLOB,
3076 supportedP010Sizes, &blobStallDurations);
3077 ASSERT_FALSE(blobStallDurations.empty());
3078 ASSERT_FALSE(blobMinDurations.empty());
3079 ASSERT_EQ(supportedP010Sizes.size(), blobMinDurations.size());
3080 ASSERT_EQ(blobMinDurations.size(), blobStallDurations.size());
3081 }
3082
Austin Borger8e9ac022023-05-04 11:17:26 -07003083 // TODO (b/280887191): Validate other aspects of stream configuration metadata...
3084
3085 ndk::ScopedAStatus ret = mSession->close();
3086 mSession = nullptr;
3087 ASSERT_TRUE(ret.isOk());
Austin Borger0918fc82023-03-21 18:48:18 -07003088 }
3089}
3090
Avichal Rakesh362242f2022-02-08 12:40:53 -08003091GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(CameraAidlTest);
3092INSTANTIATE_TEST_SUITE_P(
3093 PerInstance, CameraAidlTest,
3094 testing::ValuesIn(android::getAidlHalInstanceNames(ICameraProvider::descriptor)),
Jayant Chowdharyde1909e2022-11-23 17:18:38 +00003095 android::hardware::PrintInstanceNameToString);