/*
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <aidl/Vintf.h>
#include <aidl/android/hardware/camera/common/VendorTagSection.h>
#include <aidl/android/hardware/camera/device/ICameraDevice.h>
#include <aidlcommonsupport/NativeHandle.h>
#include <camera_aidl_test.h>
#include <cutils/properties.h>
#include <device_cb.h>
#include <empty_device_cb.h>
#include <grallocusage/GrallocUsageConversion.h>
#include <gtest/gtest.h>
#include <hardware/gralloc.h>
#include <hardware/gralloc1.h>
#include <hidl/GtestPrinter.h>
#include <hidl/HidlSupport.h>
#include <torch_provider_cb.h>
#include <list>

using ::aidl::android::hardware::camera::common::CameraDeviceStatus;
using ::aidl::android::hardware::camera::common::CameraResourceCost;
using ::aidl::android::hardware::camera::common::TorchModeStatus;
using ::aidl::android::hardware::camera::common::VendorTagSection;
using ::aidl::android::hardware::camera::device::ICameraDevice;
using ::aidl::android::hardware::camera::metadata::RequestAvailableColorSpaceProfilesMap;
using ::aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap;
using ::aidl::android::hardware::camera::metadata::SensorPixelMode;
using ::aidl::android::hardware::camera::provider::CameraIdAndStreamCombination;
using ::aidl::android::hardware::camera::provider::BnCameraProviderCallback;

using ::ndk::ScopedAStatus;

namespace {
const int32_t kBurstFrameCount = 10;
const uint32_t kMaxStillWidth = 2048;
const uint32_t kMaxStillHeight = 1536;

const int64_t kEmptyFlushTimeoutMSec = 200;

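// Stream use cases that a camera device advertising stream use case support is
// expected to handle.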
const static std::vector<int64_t> kMandatoryUseCases = {
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL};
}  // namespace

TEST_P(CameraAidlTest, getCameraIdList) {
    std::vector<std::string> idList;
    ScopedAStatus ret = mProvider->getCameraIdList(&idList);
    ASSERT_TRUE(ret.isOk());

    for (size_t i = 0; i < idList.size(); i++) {
        ALOGI("Camera Id[%zu] is %s", i, idList[i].c_str());
    }
}

// Test if ICameraProvider::getVendorTags returns Status::OK
TEST_P(CameraAidlTest, getVendorTags) {
    std::vector<VendorTagSection> vendorTags;
    ScopedAStatus ret = mProvider->getVendorTags(&vendorTags);

    ASSERT_TRUE(ret.isOk());
    for (size_t i = 0; i < vendorTags.size(); i++) {
        ALOGI("Vendor tag section %zu name %s", i, vendorTags[i].sectionName.c_str());
        for (auto& tag : vendorTags[i].tags) {
            ALOGI("Vendor tag id %u name %s type %d", tag.tagId, tag.tagName.c_str(),
                  (int)tag.tagType);
        }
    }
}

// Test if ICameraProvider::setCallback returns Status::OK
TEST_P(CameraAidlTest, setCallback) {
    struct ProviderCb : public BnCameraProviderCallback {
        ScopedAStatus cameraDeviceStatusChange(const std::string& cameraDeviceName,
                                               CameraDeviceStatus newStatus) override {
            ALOGI("camera device status callback name %s, status %d", cameraDeviceName.c_str(),
                  (int)newStatus);
            return ScopedAStatus::ok();
        }
        ScopedAStatus torchModeStatusChange(const std::string& cameraDeviceName,
                                            TorchModeStatus newStatus) override {
            ALOGI("Torch mode status callback name %s, status %d", cameraDeviceName.c_str(),
                  (int)newStatus);
            return ScopedAStatus::ok();
        }
        ScopedAStatus physicalCameraDeviceStatusChange(const std::string& cameraDeviceName,
                                                       const std::string& physicalCameraDeviceName,
                                                       CameraDeviceStatus newStatus) override {
            ALOGI("physical camera device status callback name %s, physical camera name %s,"
                  " status %d",
                  cameraDeviceName.c_str(), physicalCameraDeviceName.c_str(), (int)newStatus);
            return ScopedAStatus::ok();
        }
    };

    std::shared_ptr<ProviderCb> cb = ndk::SharedRefBase::make<ProviderCb>();
    ScopedAStatus ret = mProvider->setCallback(cb);
    ASSERT_TRUE(ret.isOk());
    ret = mProvider->setCallback(nullptr);
    ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
}

// Test if ICameraProvider::getCameraDeviceInterface returns Status::OK and non-null device
TEST_P(CameraAidlTest, getCameraDeviceInterface) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> cameraDevice;
        ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &cameraDevice);
        ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(cameraDevice, nullptr);
    }
}

// Verify that the device resource cost can be retrieved and the values are
// correct.
TEST_P(CameraAidlTest, getResourceCost) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& deviceName : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> cameraDevice;
        ScopedAStatus ret = mProvider->getCameraDeviceInterface(deviceName, &cameraDevice);
        ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(cameraDevice, nullptr);

        CameraResourceCost resourceCost;
        ret = cameraDevice->getResourceCost(&resourceCost);
        ALOGI("getResourceCost returns: %d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());

        ALOGI("    Resource cost is %d", resourceCost.resourceCost);
        ASSERT_LE(resourceCost.resourceCost, 100u);

        for (const auto& name : resourceCost.conflictingDevices) {
            ALOGI("    Conflicting device: %s", name.c_str());
        }
    }
}

TEST_P(CameraAidlTest, systemCameraTest) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::map<std::string, std::vector<SystemCameraKind>> hiddenPhysicalIdToLogicalMap;
    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("systemCameraTest: Testing camera device %s", name.c_str());
        ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        CameraMetadata cameraCharacteristics;
        ret = device->getCameraCharacteristics(&cameraCharacteristics);
        ASSERT_TRUE(ret.isOk());

        const camera_metadata_t* staticMeta =
                reinterpret_cast<const camera_metadata_t*>(cameraCharacteristics.metadata.data());
        Status rc = isLogicalMultiCamera(staticMeta);
        if (rc == Status::OPERATION_NOT_SUPPORTED) {
            return;
        }

        ASSERT_EQ(rc, Status::OK);
        std::unordered_set<std::string> physicalIds;
        ASSERT_EQ(getPhysicalCameraIds(staticMeta, &physicalIds), Status::OK);
        SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
        Status retStatus = getSystemCameraKind(staticMeta, &systemCameraKind);
        ASSERT_EQ(retStatus, Status::OK);

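        // Determine whether each physical camera id is also advertised as a
        // publicly accessible camera device.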
        for (auto physicalId : physicalIds) {
            bool isPublicId = false;
            for (auto& deviceName : cameraDeviceNames) {
                std::string publicVersion, publicId;
                ASSERT_TRUE(matchDeviceName(deviceName, mProviderType, &publicVersion, &publicId));
                if (physicalId == publicId) {
                    isPublicId = true;
                    break;
                }
            }

            // For hidden physical cameras, collect their associated logical cameras
            // and store the system camera kind.
            if (!isPublicId) {
                auto it = hiddenPhysicalIdToLogicalMap.find(physicalId);
                if (it == hiddenPhysicalIdToLogicalMap.end()) {
                    hiddenPhysicalIdToLogicalMap.insert(std::make_pair(
                            physicalId, std::vector<SystemCameraKind>({systemCameraKind})));
                } else {
                    it->second.push_back(systemCameraKind);
                }
            }
        }
    }

    // Check that the system camera kind of the logical cameras associated with
    // each hidden physical camera is the same.
    for (const auto& it : hiddenPhysicalIdToLogicalMap) {
        SystemCameraKind neededSystemCameraKind = it.second.front();
        for (auto foundSystemCamera : it.second) {
            ASSERT_EQ(neededSystemCameraKind, foundSystemCamera);
        }
    }
}

// Verify that the static camera characteristics can be retrieved
// successfully.
TEST_P(CameraAidlTest, getCameraCharacteristics) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("getCameraCharacteristics: Testing camera device %s", name.c_str());
        ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        CameraMetadata chars;
        ret = device->getCameraCharacteristics(&chars);
        ASSERT_TRUE(ret.isOk());
        verifyCameraCharacteristics(chars);
        verifyMonochromeCharacteristics(chars);
        verifyRecommendedConfigs(chars);
        verifyLogicalOrUltraHighResCameraMetadata(name, device, chars, cameraDeviceNames);

        ASSERT_TRUE(ret.isOk());

        // getPhysicalCameraCharacteristics will fail for publicly
        // advertised camera IDs.
        std::string version, cameraId;
        ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &cameraId));
        CameraMetadata devChars;
        ret = device->getPhysicalCameraCharacteristics(cameraId, &devChars);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
        ASSERT_EQ(0, devChars.metadata.size());
    }
}

// Verify that the torch strength level can be set and retrieved successfully.
TEST_P(CameraAidlTest, turnOnTorchWithStrengthLevel) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this);
    ndk::ScopedAStatus ret = mProvider->setCallback(cb);
    ASSERT_TRUE(ret.isOk());

    for (const auto& name : cameraDeviceNames) {
        int32_t defaultLevel;
        std::shared_ptr<ICameraDevice> device;
        ALOGI("%s: Testing camera device %s", __FUNCTION__, name.c_str());

        ret = mProvider->getCameraDeviceInterface(name, &device);
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        CameraMetadata chars;
        ret = device->getCameraCharacteristics(&chars);
        ASSERT_TRUE(ret.isOk());

        const camera_metadata_t* staticMeta =
                reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());
        bool torchStrengthControlSupported = isTorchStrengthControlSupported(staticMeta);
        camera_metadata_ro_entry entry;
        int rc = find_camera_metadata_ro_entry(staticMeta,
                                               ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL, &entry);
        if (torchStrengthControlSupported) {
            ASSERT_EQ(rc, 0);
            ASSERT_GT(entry.count, 0);
            defaultLevel = *entry.data.i32;
            ALOGI("Default level is:%d", defaultLevel);
        }

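        // Request torch strength level 2 and wait for the provider callback to
        // report that the torch is on before querying the level back.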
        mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
        ret = device->turnOnTorchWithStrengthLevel(2);
        ALOGI("turnOnTorchWithStrengthLevel returns status: %d", ret.getServiceSpecificError());
        // OPERATION_NOT_SUPPORTED check
        if (!torchStrengthControlSupported) {
            ALOGI("Torch strength control not supported.");
            ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED),
                      ret.getServiceSpecificError());
        } else {
            {
                ASSERT_TRUE(ret.isOk());
                std::unique_lock<std::mutex> l(mTorchLock);
                while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kTorchTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
                }
                ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
                mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
            }
            ALOGI("getTorchStrengthLevel: Testing");
            int32_t strengthLevel;
            ret = device->getTorchStrengthLevel(&strengthLevel);
            ASSERT_TRUE(ret.isOk());
            ALOGI("Torch strength level is : %d", strengthLevel);
            ASSERT_EQ(strengthLevel, 2);

            // Turn OFF the torch and verify torch strength level is reset to default level.
            ALOGI("Testing torch strength level reset after turning the torch OFF.");
            ret = device->setTorchMode(false);
            ASSERT_TRUE(ret.isOk());
            {
                std::unique_lock<std::mutex> l(mTorchLock);
                while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kTorchTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
                }
                ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
            }

            ret = device->getTorchStrengthLevel(&strengthLevel);
            ASSERT_TRUE(ret.isOk());
            ALOGI("Torch strength level after turning OFF torch is : %d", strengthLevel);
            ASSERT_EQ(strengthLevel, defaultLevel);
        }
    }
}

// If the torch is supported, verify that it can be enabled and that the
// corresponding torch status callbacks are received.
TEST_P(CameraAidlTest, setTorchMode) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this);
    ndk::ScopedAStatus ret = mProvider->setCallback(cb);
    ALOGI("setCallback returns status: %d", ret.getServiceSpecificError());
    ASSERT_TRUE(ret.isOk());
    ASSERT_NE(cb, nullptr);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("setTorchMode: Testing camera device %s", name.c_str());
        ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        CameraMetadata metadata;
        ret = device->getCameraCharacteristics(&metadata);
        ALOGI("getCameraCharacteristics returns status:%d", ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        camera_metadata_t* staticMeta =
                reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
        bool torchSupported = isTorchSupported(staticMeta);

        mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
        ret = device->setTorchMode(true);
        ALOGI("setTorchMode returns status: %d", ret.getServiceSpecificError());
        if (!torchSupported) {
            ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED),
                      ret.getServiceSpecificError());
        } else {
            ASSERT_TRUE(ret.isOk());
            {
                std::unique_lock<std::mutex> l(mTorchLock);
                while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kTorchTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
                }
                ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
                mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
            }

            ret = device->setTorchMode(false);
            ASSERT_TRUE(ret.isOk());
            {
                std::unique_lock<std::mutex> l(mTorchLock);
                while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kTorchTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
                }
                ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
            }
        }
    }
}

// Check dump functionality.
TEST_P(CameraAidlTest, dump) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("dump: Testing camera device %s", name.c_str());

        ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        int raw_handle = open(kDumpOutput, O_RDWR);
        ASSERT_GE(raw_handle, 0);

        auto retStatus = device->dump(raw_handle, nullptr, 0);
        ASSERT_EQ(retStatus, ::android::OK);
        close(raw_handle);
    }
}

// Open, dump, then close
TEST_P(CameraAidlTest, openClose) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("openClose: Testing camera device %s", name.c_str());
        ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();

        ret = device->open(cb, &mSession);
        ASSERT_TRUE(ret.isOk());
        ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_NE(mSession, nullptr);
        int raw_handle = open(kDumpOutput, O_RDWR);
        ASSERT_GE(raw_handle, 0);

        auto retStatus = device->dump(raw_handle, nullptr, 0);
        ASSERT_EQ(retStatus, ::android::OK);
        close(raw_handle);

        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
        // TODO: test all session API calls return INTERNAL_ERROR after close
        // TODO: keep a wp copy here and verify session cannot be promoted out of this scope
    }
}

// Check whether all common default request settings can be successfully
// constructed.
TEST_P(CameraAidlTest, constructDefaultRequestSettings) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("constructDefaultRequestSettings: Testing camera device %s", name.c_str());
        ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
        ret = device->open(cb, &mSession);
        ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(mSession, nullptr);

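        // Construct default settings for every template from PREVIEW through
        // MANUAL; ZERO_SHUTTER_LAG and MANUAL are optional and may return
        // ILLEGAL_ARGUMENT.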
        for (int32_t t = (int32_t)RequestTemplate::PREVIEW; t <= (int32_t)RequestTemplate::MANUAL;
             t++) {
            RequestTemplate reqTemplate = (RequestTemplate)t;
            CameraMetadata rawMetadata;
            ret = mSession->constructDefaultRequestSettings(reqTemplate, &rawMetadata);
            ALOGI("constructDefaultRequestSettings returns status:%d:%d", ret.getExceptionCode(),
                  ret.getServiceSpecificError());

            if (reqTemplate == RequestTemplate::ZERO_SHUTTER_LAG ||
                reqTemplate == RequestTemplate::MANUAL) {
                // optional templates
                ASSERT_TRUE(ret.isOk() || static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
                                                  ret.getServiceSpecificError());
            } else {
                ASSERT_TRUE(ret.isOk());
            }

            if (ret.isOk()) {
                const camera_metadata_t* metadata = (camera_metadata_t*)rawMetadata.metadata.data();
                size_t expectedSize = rawMetadata.metadata.size();
                int result = validate_camera_metadata_structure(metadata, &expectedSize);
                ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
                verifyRequestTemplate(metadata, reqTemplate);
            } else {
                ASSERT_EQ(0u, rawMetadata.metadata.size());
            }
        }
        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// Verify that all supported stream formats and sizes can be configured
// successfully.
TEST_P(CameraAidlTest, configureStreamsAvailableOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputStreams;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> device;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/, &device /*out*/);

        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
        outputStreams.clear();
        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
        ASSERT_NE(0u, outputStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        int32_t streamConfigCounter = 0;
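        // Configure each available output size/format as a single stream and
        // verify that the returned HalStream matches the requested stream id.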
        for (auto& it : outputStreams) {
            Stream stream;
            Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format));
            stream.id = streamId;
            stream.streamType = StreamType::OUTPUT;
            stream.width = it.width;
            stream.height = it.height;
            stream.format = static_cast<PixelFormat>(it.format);
            stream.dataSpace = dataspace;
            stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                    GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
            stream.rotation = StreamRotation::ROTATION_0;
            stream.dynamicRangeProfile = RequestAvailableDynamicRangeProfilesMap::
                    ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
            stream.useCase = ScalerAvailableStreamUseCases::
                    ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
            stream.colorSpace = static_cast<int>(
                    RequestAvailableColorSpaceProfilesMap::
                            ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);

            std::vector<Stream> streams = {stream};
            StreamConfiguration config;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);

            bool expectStreamCombQuery = (isLogicalMultiCamera(staticMeta) == Status::OK);
            verifyStreamCombination(device, config, /*expectedStatus*/ true, expectStreamCombQuery);

            config.streamConfigCounter = streamConfigCounter++;
            std::vector<HalStream> halConfigs;
            ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
            ASSERT_TRUE(ret.isOk());
            ASSERT_EQ(halConfigs.size(), 1);
            ASSERT_EQ(halConfigs[0].id, streamId);

            streamId++;
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// Verify that mandatory concurrent streams and outputs are supported.
TEST_P(CameraAidlTest, configureConcurrentStreamsAvailableOutputs) {
    struct CameraTestInfo {
        CameraMetadata staticMeta;
        std::shared_ptr<ICameraDeviceSession> session;
        std::shared_ptr<ICameraDevice> cameraDevice;
        StreamConfiguration config;
    };

    std::map<std::string, std::string> idToNameMap = getCameraDeviceIdToNameMap(mProvider);
    std::vector<ConcurrentCameraIdCombination> concurrentDeviceCombinations =
            getConcurrentDeviceCombinations(mProvider);
    std::vector<AvailableStream> outputStreams;
    for (const auto& cameraDeviceIds : concurrentDeviceCombinations) {
        std::vector<CameraIdAndStreamCombination> cameraIdsAndStreamCombinations;
        std::vector<CameraTestInfo> cameraTestInfos;
        size_t i = 0;
        for (const auto& id : cameraDeviceIds.combination) {
            CameraTestInfo cti;
            auto it = idToNameMap.find(id);
            ASSERT_TRUE(idToNameMap.end() != it);
            std::string name = it->second;

            openEmptyDeviceSession(name, mProvider, &cti.session /*out*/, &cti.staticMeta /*out*/,
                                   &cti.cameraDevice /*out*/);

            outputStreams.clear();
            camera_metadata_t* staticMeta =
                    reinterpret_cast<camera_metadata_t*>(cti.staticMeta.metadata.data());
            ASSERT_EQ(Status::OK, getMandatoryConcurrentStreams(staticMeta, &outputStreams));
            ASSERT_NE(0u, outputStreams.size());

            int32_t jpegBufferSize = 0;
            ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
            ASSERT_NE(0u, jpegBufferSize);

            int32_t streamId = 0;
            std::vector<Stream> streams(outputStreams.size());
            size_t j = 0;
            for (const auto& s : outputStreams) {
                Stream stream;
                Dataspace dataspace = getDataspace(static_cast<PixelFormat>(s.format));
                stream.id = streamId++;
                stream.streamType = StreamType::OUTPUT;
                stream.width = s.width;
                stream.height = s.height;
                stream.format = static_cast<PixelFormat>(s.format);
                stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                        GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
                stream.dataSpace = dataspace;
                stream.rotation = StreamRotation::ROTATION_0;
                stream.sensorPixelModesUsed = {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT};
                stream.dynamicRangeProfile = RequestAvailableDynamicRangeProfilesMap::
                        ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
                streams[j] = stream;
                j++;
            }

            // Add the created stream configs to cameraIdsAndStreamCombinations
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &cti.config,
                                      jpegBufferSize);

            cti.config.streamConfigCounter = outputStreams.size();
            CameraIdAndStreamCombination cameraIdAndStreamCombination;
            cameraIdAndStreamCombination.cameraId = id;
            cameraIdAndStreamCombination.streamConfiguration = cti.config;
            cameraIdsAndStreamCombinations.push_back(cameraIdAndStreamCombination);
            i++;
            cameraTestInfos.push_back(cti);
        }
        // Now verify that concurrent streams are supported
        bool combinationSupported;
        ndk::ScopedAStatus ret = mProvider->isConcurrentStreamCombinationSupported(
                cameraIdsAndStreamCombinations, &combinationSupported);
        ASSERT_TRUE(ret.isOk());
        ASSERT_EQ(combinationSupported, true);

        // Test the stream can actually be configured
        for (auto& cti : cameraTestInfos) {
            if (cti.session != nullptr) {
                camera_metadata_t* staticMeta =
                        reinterpret_cast<camera_metadata_t*>(cti.staticMeta.metadata.data());
                bool expectStreamCombQuery = (isLogicalMultiCamera(staticMeta) == Status::OK);
                verifyStreamCombination(cti.cameraDevice, cti.config, /*expectedStatus*/ true,
                                        expectStreamCombQuery);
            }

            if (cti.session != nullptr) {
                std::vector<HalStream> streamConfigs;
                ret = cti.session->configureStreams(cti.config, &streamConfigs);
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(cti.config.streams.size(), streamConfigs.size());
            }
        }

        for (auto& cti : cameraTestInfos) {
            ret = cti.session->close();
            ASSERT_TRUE(ret.isOk());
        }
    }
}

// Check for correct handling of invalid/incorrect configuration parameters.
TEST_P(CameraAidlTest, configureStreamsInvalidOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputStreams;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
        outputStreams.clear();

        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
        ASSERT_NE(0u, outputStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
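        // A stream with zero width and height must be rejected.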
        Stream stream = {streamId++,
                         StreamType::OUTPUT,
                         static_cast<uint32_t>(0),
                         static_cast<uint32_t>(0),
                         static_cast<PixelFormat>(outputStreams[0].format),
                         static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                         Dataspace::UNKNOWN,
                         StreamRotation::ROTATION_0,
                         std::string(),
                         jpegBufferSize,
                         -1,
                         {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                         RequestAvailableDynamicRangeProfilesMap::
                                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        int32_t streamConfigCounter = 0;
        std::vector<Stream> streams = {stream};
        StreamConfiguration config;
        createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                  jpegBufferSize);

        verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ false,
                                /*expectStreamCombQuery*/ false);

        config.streamConfigCounter = streamConfigCounter++;
        std::vector<HalStream> halConfigs;
        ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
                            ret.getServiceSpecificError() ||
                    static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError());

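        // Oversized dimensions (INT32_MAX x INT32_MAX) must also be rejected.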
        stream = {streamId++,
                  StreamType::OUTPUT,
                  /*width*/ INT32_MAX,
                  /*height*/ INT32_MAX,
                  static_cast<PixelFormat>(outputStreams[0].format),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  jpegBufferSize,
                  -1,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                  jpegBufferSize);

        config.streamConfigCounter = streamConfigCounter++;
        halConfigs.clear();
        ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());

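        // For each supported size, an invalid pixel format and an invalid
        // rotation must each be rejected.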
        for (auto& it : outputStreams) {
            stream = {streamId++,
                      StreamType::OUTPUT,
                      it.width,
                      it.height,
                      static_cast<PixelFormat>(UINT32_MAX),
                      static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                              GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                      Dataspace::UNKNOWN,
                      StreamRotation::ROTATION_0,
                      std::string(),
                      jpegBufferSize,
                      -1,
                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                      RequestAvailableDynamicRangeProfilesMap::
                              ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

            streams[0] = stream;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);
            config.streamConfigCounter = streamConfigCounter++;
            halConfigs.clear();
            ret = mSession->configureStreams(config, &halConfigs);
            ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
                      ret.getServiceSpecificError());

            stream = {streamId++,
                      StreamType::OUTPUT,
                      it.width,
                      it.height,
                      static_cast<PixelFormat>(it.format),
                      static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                              GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                      Dataspace::UNKNOWN,
                      static_cast<StreamRotation>(UINT32_MAX),
                      std::string(),
                      jpegBufferSize,
                      -1,
                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                      RequestAvailableDynamicRangeProfilesMap::
                              ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

            streams[0] = stream;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);

            config.streamConfigCounter = streamConfigCounter++;
            halConfigs.clear();
            ret = mSession->configureStreams(config, &halConfigs);
            ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
                      ret.getServiceSpecificError());
        }

        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// Check whether all supported ZSL output stream combinations can be
// configured successfully.
TEST_P(CameraAidlTest, configureStreamsZSLInputOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> inputStreams;
    std::vector<AvailableZSLInputOutput> inputOutputMap;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        Status rc = isZSLModeAvailable(staticMeta);
        if (Status::OPERATION_NOT_SUPPORTED == rc) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }
        ASSERT_EQ(Status::OK, rc);

        inputStreams.clear();
        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, inputStreams));
        ASSERT_NE(0u, inputStreams.size());

        inputOutputMap.clear();
        ASSERT_EQ(Status::OK, getZSLInputOutputMap(staticMeta, inputOutputMap));
        ASSERT_NE(0u, inputOutputMap.size());

        bool supportMonoY8 = false;
        if (Status::OK == isMonochromeCamera(staticMeta)) {
            for (auto& it : inputStreams) {
                if (it.format == static_cast<uint32_t>(PixelFormat::Y8)) {
                    supportMonoY8 = true;
                    break;
                }
            }
        }

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        bool hasPrivToY8 = false, hasY8ToY8 = false, hasY8ToBlob = false;
        uint32_t streamConfigCounter = 0;
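        // For each ZSL input/output format pairing, configure an input stream,
        // a ZSL output stream, and a regular output stream together.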
        for (auto& inputIter : inputOutputMap) {
            AvailableStream input;
            ASSERT_EQ(Status::OK, findLargestSize(inputStreams, inputIter.inputFormat, input));
            ASSERT_NE(0u, inputStreams.size());

            if (inputIter.inputFormat ==
                        static_cast<uint32_t>(PixelFormat::IMPLEMENTATION_DEFINED) &&
                inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
                hasPrivToY8 = true;
            } else if (inputIter.inputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
                if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::BLOB)) {
                    hasY8ToBlob = true;
                } else if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
                    hasY8ToY8 = true;
                }
            }
            AvailableStream outputThreshold = {INT32_MAX, INT32_MAX, inputIter.outputFormat};
            std::vector<AvailableStream> outputStreams;
            ASSERT_EQ(Status::OK,
                      getAvailableOutputStreams(staticMeta, outputStreams, &outputThreshold));
            for (auto& outputIter : outputStreams) {
                Dataspace outputDataSpace =
                        getDataspace(static_cast<PixelFormat>(outputIter.format));
                Stream zslStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        input.width,
                        input.height,
                        static_cast<PixelFormat>(input.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC_USAGE_HW_CAMERA_ZSL),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                Stream inputStream = {
                        streamId++,
                        StreamType::INPUT,
                        input.width,
                        input.height,
                        static_cast<PixelFormat>(input.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(0),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                Stream outputStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        outputIter.width,
                        outputIter.height,
                        static_cast<PixelFormat>(outputIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                        outputDataSpace,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

                std::vector<Stream> streams = {inputStream, zslStream, outputStream};

                StreamConfiguration config;
                createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                          jpegBufferSize);

                verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
                                        /*expectStreamCombQuery*/ false);

                config.streamConfigCounter = streamConfigCounter++;
                std::vector<HalStream> halConfigs;
                ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(3u, halConfigs.size());
            }
        }

        if (supportMonoY8) {
            if (Status::OK == isZSLModeAvailable(staticMeta, PRIV_REPROCESS)) {
                ASSERT_TRUE(hasPrivToY8);
            }
            if (Status::OK == isZSLModeAvailable(staticMeta, YUV_REPROCESS)) {
                ASSERT_TRUE(hasY8ToY8);
                ASSERT_TRUE(hasY8ToBlob);
            }
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// Check whether session parameters are supported. If the HAL supports them,
// try to configure a preview stream using them.
TEST_P(CameraAidlTest, configureStreamsWithSessionParameters) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputPreviewStreams;
    AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
                                        static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;

        std::shared_ptr<ICameraDevice> unusedCameraDevice;
        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &unusedCameraDevice /*out*/);
        camera_metadata_t* staticMetaBuffer =
                reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        std::unordered_set<int32_t> availableSessionKeys;
        auto rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
                                   &availableSessionKeys);
        ASSERT_TRUE(Status::OK == rc);
        if (availableSessionKeys.empty()) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

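        // Build preview request settings and extract the session parameters
        // advertised via ANDROID_REQUEST_AVAILABLE_SESSION_KEYS; skip devices
        // that do not populate any of them.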
        android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings;
        android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams,
                modifiedSessionParams;
        constructFilteredSettings(mSession, availableSessionKeys, RequestTemplate::PREVIEW,
                                  &previewRequestSettings, &sessionParams);
        if (sessionParams.isEmpty()) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        outputPreviewStreams.clear();

        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams,
                                                        &previewThreshold));
        ASSERT_NE(0u, outputPreviewStreams.size());

        Stream previewStream = {
                0,
                StreamType::OUTPUT,
                outputPreviewStreams[0].width,
                outputPreviewStreams[0].height,
                static_cast<PixelFormat>(outputPreviewStreams[0].format),
                static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                        GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                Dataspace::UNKNOWN,
                StreamRotation::ROTATION_0,
                std::string(),
                /*bufferSize*/ 0,
                /*groupId*/ -1,
                {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                RequestAvailableDynamicRangeProfilesMap::
                        ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

        std::vector<Stream> streams = {previewStream};
        StreamConfiguration config;

        config.streams = streams;
        config.operationMode = StreamConfigurationMode::NORMAL_MODE;
        modifiedSessionParams = sessionParams;
        auto sessionParamsBuffer = sessionParams.release();
        std::vector<uint8_t> rawSessionParam =
                std::vector(reinterpret_cast<uint8_t*>(sessionParamsBuffer),
                            reinterpret_cast<uint8_t*>(sessionParamsBuffer) +
                                    get_camera_metadata_size(sessionParamsBuffer));

        config.sessionParams.metadata = rawSessionParam;
        config.streamConfigCounter = 0;
        config.streams = {previewStream};
        config.streamConfigCounter = 0;
        config.multiResolutionInputImage = false;

        bool newSessionParamsAvailable = false;
        for (const auto& it : availableSessionKeys) {
            if (modifiedSessionParams.exists(it)) {
                modifiedSessionParams.erase(it);
                newSessionParamsAvailable = true;
                break;
            }
        }
        if (newSessionParamsAvailable) {
            auto modifiedSessionParamsBuffer = modifiedSessionParams.release();
            verifySessionReconfigurationQuery(mSession, sessionParamsBuffer,
                                              modifiedSessionParamsBuffer);
            modifiedSessionParams.acquire(modifiedSessionParamsBuffer);
        }

        std::vector<HalStream> halConfigs;
        ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_TRUE(ret.isOk());
        ASSERT_EQ(1u, halConfigs.size());

        sessionParams.acquire(sessionParamsBuffer);
        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// Verify that all supported preview + still capture stream combinations
// can be configured successfully.
TEST_P(CameraAidlTest, configureStreamsPreviewStillOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputBlobStreams;
    std::vector<AvailableStream> outputPreviewStreams;
    AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
                                        static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
    AvailableStream blobThreshold = {INT32_MAX, INT32_MAX, static_cast<int32_t>(PixelFormat::BLOB)};

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;

        std::shared_ptr<ICameraDevice> cameraDevice;
        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);

        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        // Check if the camera supports depth-only output
        if (isDepthOnly(staticMeta)) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        outputBlobStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold));
        ASSERT_NE(0u, outputBlobStreams.size());

        outputPreviewStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputPreviewStreams, &previewThreshold));
        ASSERT_NE(0u, outputPreviewStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        uint32_t streamConfigCounter = 0;

        for (auto& blobIter : outputBlobStreams) {
            for (auto& previewIter : outputPreviewStreams) {
                Stream previewStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        previewIter.width,
                        previewIter.height,
                        static_cast<PixelFormat>(previewIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        /*bufferSize*/ 0,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                Stream blobStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        blobIter.width,
                        blobIter.height,
                        static_cast<PixelFormat>(blobIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_CPU_READ),
                        Dataspace::JFIF,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        /*bufferSize*/ 0,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                std::vector<Stream> streams = {previewStream, blobStream};
                StreamConfiguration config;

                createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                          jpegBufferSize);
                config.streamConfigCounter = streamConfigCounter++;
                verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
                                        /*expectStreamCombQuery*/ false);

                std::vector<HalStream> halConfigs;
                ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(2u, halConfigs.size());
            }
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// If constrained high speed mode is supported, verify that it can be
// configured. Additionally, check that common invalid inputs are rejected
// when using this mode.
TEST_P(CameraAidlTest, configureStreamsConstrainedOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        Status rc = isConstrainedModeAvailable(staticMeta);
        if (Status::OPERATION_NOT_SUPPORTED == rc) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }
        ASSERT_EQ(Status::OK, rc);

        AvailableStream hfrStream;
        rc = pickConstrainedModeSize(staticMeta, hfrStream);
        ASSERT_EQ(Status::OK, rc);

        int32_t streamId = 0;
        uint32_t streamConfigCounter = 0;
        Stream stream = {streamId,
                         StreamType::OUTPUT,
                         hfrStream.width,
                         hfrStream.height,
                         static_cast<PixelFormat>(hfrStream.format),
                         static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                         Dataspace::UNKNOWN,
                         StreamRotation::ROTATION_0,
                         std::string(),
                         /*bufferSize*/ 0,
                         /*groupId*/ -1,
                         {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                         RequestAvailableDynamicRangeProfilesMap::
                                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        std::vector<Stream> streams = {stream};
        StreamConfiguration config;
        createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
                                  &config);

        verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
                                /*expectStreamCombQuery*/ false);

        config.streamConfigCounter = streamConfigCounter++;
        std::vector<HalStream> halConfigs;
        ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_TRUE(ret.isOk());
        ASSERT_EQ(1u, halConfigs.size());
        ASSERT_EQ(halConfigs[0].id, streamId);
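
        // Streams with invalid dimensions (0x0 or INT32_MAX) or an invalid pixel
        // format must be rejected in constrained high speed mode as well.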
        stream = {streamId++,
                  StreamType::OUTPUT,
                  static_cast<uint32_t>(0),
                  static_cast<uint32_t>(0),
                  static_cast<PixelFormat>(hfrStream.format),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  /*bufferSize*/ 0,
                  /*groupId*/ -1,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
                                  &config);

        config.streamConfigCounter = streamConfigCounter++;
        std::vector<HalStream> halConfig;
        ret = mSession->configureStreams(config, &halConfig);
        ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
                            ret.getServiceSpecificError() ||
                    static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError());

        stream = {streamId++,
                  StreamType::OUTPUT,
                  INT32_MAX,
                  INT32_MAX,
                  static_cast<PixelFormat>(hfrStream.format),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  /*bufferSize*/ 0,
                  /*groupId*/ -1,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
                                  &config);

        config.streamConfigCounter = streamConfigCounter++;
        halConfigs.clear();
        ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());

        stream = {streamId++,
                  StreamType::OUTPUT,
                  hfrStream.width,
                  hfrStream.height,
                  static_cast<PixelFormat>(UINT32_MAX),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  /*bufferSize*/ 0,
                  /*groupId*/ -1,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
                                  &config);

        config.streamConfigCounter = streamConfigCounter++;
        halConfigs.clear();
        ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());

        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// Verify that all supported video + snapshot stream combinations can
// be configured successfully.
TEST_P(CameraAidlTest, configureStreamsVideoStillOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputBlobStreams;
    std::vector<AvailableStream> outputVideoStreams;
    AvailableStream videoThreshold = {kMaxVideoWidth, kMaxVideoHeight,
                                      static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
    AvailableStream blobThreshold = {kMaxVideoWidth, kMaxVideoHeight,
                                     static_cast<int32_t>(PixelFormat::BLOB)};

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);

        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        // Check if the camera supports depth-only output
        if (isDepthOnly(staticMeta)) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        outputBlobStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold));
        ASSERT_NE(0u, outputBlobStreams.size());

        outputVideoStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputVideoStreams, &videoThreshold));
        ASSERT_NE(0u, outputVideoStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        uint32_t streamConfigCounter = 0;
        for (auto& blobIter : outputBlobStreams) {
            for (auto& videoIter : outputVideoStreams) {
                Stream videoStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        videoIter.width,
                        videoIter.height,
                        static_cast<PixelFormat>(videoIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                Stream blobStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        blobIter.width,
                        blobIter.height,
                        static_cast<PixelFormat>(blobIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_CPU_READ),
                        Dataspace::JFIF,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                std::vector<Stream> streams = {videoStream, blobStream};
                StreamConfiguration config;

                createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                          jpegBufferSize);
                verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
                                        /*expectStreamCombQuery*/ false);

                config.streamConfigCounter = streamConfigCounter++;
                std::vector<HalStream> halConfigs;
                ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(2u, halConfigs.size());
            }
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// Generate and verify a camera capture request
TEST_P(CameraAidlTest, processCaptureRequestPreview) {
    // TODO(b/220897574): Failing with BUFFER_ERROR
    processCaptureRequestInternal(GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, RequestTemplate::PREVIEW,
                                  false /*secureOnlyCameras*/);
}

// Generate and verify a secure camera capture request
TEST_P(CameraAidlTest, processSecureCaptureRequest) {
    processCaptureRequestInternal(GRALLOC1_PRODUCER_USAGE_PROTECTED, RequestTemplate::STILL_CAPTURE,
                                  true /*secureOnlyCameras*/);
}

TEST_P(CameraAidlTest, processCaptureRequestPreviewStabilization) {
    std::unordered_map<std::string, nsecs_t> cameraDeviceToTimeLag;
    processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ false,
                                                      cameraDeviceToTimeLag);
    processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ true,
                                                      cameraDeviceToTimeLag);
}

// Generate and verify a multi-camera capture request
TEST_P(CameraAidlTest, processMultiCaptureRequestPreview) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
                                        static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
    int64_t bufferId = 1;
    uint32_t frameNumber = 1;
    std::vector<uint8_t> settings;
    std::vector<uint8_t> emptySettings;
    std::string invalidPhysicalId = "-1";

    for (const auto& name : cameraDeviceNames) {
        std::string version, deviceId;
        ALOGI("processMultiCaptureRequestPreview: Test device %s", name.c_str());
        ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
        CameraMetadata metadata;

        std::shared_ptr<ICameraDevice> unusedDevice;
        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &metadata /*out*/,
                               &unusedDevice /*out*/);

        camera_metadata_t* staticMeta =
                reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
        Status rc = isLogicalMultiCamera(staticMeta);
        if (Status::OPERATION_NOT_SUPPORTED == rc) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }
        ASSERT_EQ(Status::OK, rc);

        std::unordered_set<std::string> physicalIds;
        rc = getPhysicalCameraIds(staticMeta, &physicalIds);
        ASSERT_TRUE(Status::OK == rc);
        ASSERT_TRUE(physicalIds.size() > 1);

        std::unordered_set<int32_t> physicalRequestKeyIDs;
1482 rc = getSupportedKeys(staticMeta, ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS,
1483 &physicalRequestKeyIDs);
1484 ASSERT_TRUE(Status::OK == rc);
1485 if (physicalRequestKeyIDs.empty()) {
1486 ndk::ScopedAStatus ret = mSession->close();
1487 mSession = nullptr;
1488 ASSERT_TRUE(ret.isOk());
1489 // The logical camera doesn't support any individual physical requests.
1490 continue;
1491 }
1492
1493 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultPreviewSettings;
1494 android::hardware::camera::common::V1_0::helper::CameraMetadata filteredSettings;
1495 constructFilteredSettings(mSession, physicalRequestKeyIDs, RequestTemplate::PREVIEW,
1496 &defaultPreviewSettings, &filteredSettings);
1497 if (filteredSettings.isEmpty()) {
1498 // No physical device settings in default request.
1499 ndk::ScopedAStatus ret = mSession->close();
1500 mSession = nullptr;
1501 ASSERT_TRUE(ret.isOk());
1502 continue;
1503 }
1504
1505 const camera_metadata_t* settingsBuffer = defaultPreviewSettings.getAndLock();
1506 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1507 settings.assign(rawSettingsBuffer,
1508 rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1509 CameraMetadata settingsMetadata = {settings};
1510 overrideRotateAndCrop(&settingsMetadata);
1511
1512 ndk::ScopedAStatus ret = mSession->close();
1513 mSession = nullptr;
1514 ASSERT_TRUE(ret.isOk());
1515
1516 // Leave only 2 physical devices in the id set.
1517 auto it = physicalIds.begin();
1518 std::string physicalDeviceId = *it;
1519 it++;
1520 physicalIds.erase(++it, physicalIds.end());
1521 ASSERT_EQ(physicalIds.size(), 2u);
1522
1523 std::vector<HalStream> halStreams;
1524 bool supportsPartialResults = false;
1525 bool useHalBufManager = false;
1526 int32_t partialResultCount = 0;
1527 Stream previewStream;
1528 std::shared_ptr<DeviceCb> cb;
1529
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +00001530 configurePreviewStreams(
1531 name, mProvider, &previewThreshold, physicalIds, &mSession, &previewStream,
1532 &halStreams /*out*/, &supportsPartialResults /*out*/, &partialResultCount /*out*/,
1533 &useHalBufManager /*out*/, &cb /*out*/, 0 /*streamConfigCounter*/, true);
1534 if (mSession == nullptr) {
1535            // Stream combination not supported by the HAL; skip the test for this device.
1536 continue;
1537 }
Avichal Rakesh362242f2022-02-08 12:40:53 -08001538
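        // Fetch the fast message queue the HAL uses to return result metadata; if the
        // queue is unusable, the test falls back to metadata delivered in the capture result.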
1539 ::aidl::android::hardware::common::fmq::MQDescriptor<
1540 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1541 descriptor;
1542 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1543 ASSERT_TRUE(resultQueueRet.isOk());
1544 std::shared_ptr<ResultMetadataQueue> resultQueue =
1545 std::make_shared<ResultMetadataQueue>(descriptor);
1546 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1547            ALOGE("%s: HAL returned an empty result metadata fmq; not using it", __func__);
1548            resultQueue = nullptr;
1549            // Don't use the queue from here on.
1550 }
1551
1552 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1553 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1554 partialResultCount, physicalIds, resultQueue);
1555
1556 std::vector<CaptureRequest> requests(1);
1557 CaptureRequest& request = requests[0];
1558 request.frameNumber = frameNumber;
1559 request.fmqSettingsSize = 0;
Emilian Peev3d919f92022-04-20 13:50:59 -07001560 request.settings = settingsMetadata;
Avichal Rakesh362242f2022-02-08 12:40:53 -08001561
1562 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1563
1564 std::vector<buffer_handle_t> graphicBuffers;
1565 graphicBuffers.reserve(halStreams.size());
1566 outputBuffers.resize(halStreams.size());
1567 size_t k = 0;
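        // When the HAL buffer manager is in use the HAL requests buffers itself, so an
        // empty buffer handle (bufferId 0) is sent; otherwise a gralloc buffer is
        // allocated here and attached to the request for each configured stream.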
1568 for (const auto& halStream : halStreams) {
1569 buffer_handle_t buffer_handle;
1570 if (useHalBufManager) {
1571 outputBuffers[k] = {halStream.id, /*bufferId*/ 0, NativeHandle(),
1572 BufferStatus::OK, NativeHandle(), NativeHandle()};
1573 } else {
1574 allocateGraphicBuffer(previewStream.width, previewStream.height,
1575 android_convertGralloc1To0Usage(
1576 static_cast<uint64_t>(halStream.producerUsage),
1577 static_cast<uint64_t>(halStream.consumerUsage)),
1578 halStream.overrideFormat, &buffer_handle);
1579 graphicBuffers.push_back(buffer_handle);
1580 outputBuffers[k] = {
1581 halStream.id, bufferId, ::android::makeToAidl(buffer_handle),
1582 BufferStatus::OK, NativeHandle(), NativeHandle()};
1583 bufferId++;
1584 }
1585 k++;
1586 }
1587
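        // Attach individual settings for one of the two physical cameras; the filtered
        // settings contain only the keys advertised as per-physical request keys.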
1588 std::vector<PhysicalCameraSetting> camSettings(1);
1589 const camera_metadata_t* filteredSettingsBuffer = filteredSettings.getAndLock();
1590 uint8_t* rawFilteredSettingsBuffer = (uint8_t*)filteredSettingsBuffer;
1591 camSettings[0].settings = {std::vector(
1592 rawFilteredSettingsBuffer,
1593 rawFilteredSettingsBuffer + get_camera_metadata_size(filteredSettingsBuffer))};
1594 overrideRotateAndCrop(&camSettings[0].settings);
1595 camSettings[0].fmqSettingsSize = 0;
1596 camSettings[0].physicalCameraId = physicalDeviceId;
1597
1598 request.inputBuffer = {
1599 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
1600 request.physicalCameraSettings = camSettings;
1601
1602 {
1603 std::unique_lock<std::mutex> l(mLock);
1604 mInflightMap.clear();
1605 mInflightMap[frameNumber] = inflightReq;
1606 }
1607
1608 int32_t numRequestProcessed = 0;
1609 std::vector<BufferCache> cachesToRemove;
1610 ndk::ScopedAStatus returnStatus =
1611 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1612 ASSERT_TRUE(returnStatus.isOk());
1613 ASSERT_EQ(numRequestProcessed, 1u);
1614
1615 {
1616 std::unique_lock<std::mutex> l(mLock);
1617 while (!inflightReq->errorCodeValid &&
1618 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1619 auto timeout = std::chrono::system_clock::now() +
1620 std::chrono::seconds(kStreamBufferTimeoutSec);
1621 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1622 }
1623
1624 ASSERT_FALSE(inflightReq->errorCodeValid);
1625 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1626
1627 request.frameNumber++;
1628 // Empty settings should be supported after the first call
1629 // for repeating requests.
1630 request.settings.metadata.clear();
1631 request.physicalCameraSettings[0].settings.metadata.clear();
1632            // The buffer has been registered with the HAL by bufferId, so per the
1633            // API contract we should send a null handle for this buffer.
1634 request.outputBuffers[0].buffer = NativeHandle();
1635 mInflightMap.clear();
1636 inflightReq = std::make_shared<InFlightRequest>(
1637 static_cast<ssize_t>(physicalIds.size()), false, supportsPartialResults,
1638 partialResultCount, physicalIds, resultQueue);
1639 mInflightMap[request.frameNumber] = inflightReq;
1640 }
1641
1642 returnStatus =
1643 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1644 ASSERT_TRUE(returnStatus.isOk());
1645 ASSERT_EQ(numRequestProcessed, 1u);
1646
1647 {
1648 std::unique_lock<std::mutex> l(mLock);
1649 while (!inflightReq->errorCodeValid &&
1650 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1651 auto timeout = std::chrono::system_clock::now() +
1652 std::chrono::seconds(kStreamBufferTimeoutSec);
1653 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1654 }
1655
1656 ASSERT_FALSE(inflightReq->errorCodeValid);
1657 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1658 }
1659
1660        // An invalid physical camera id should cause processCaptureRequest to fail
1661 frameNumber++;
1662 camSettings[0].physicalCameraId = invalidPhysicalId;
1663 camSettings[0].settings.metadata = settings;
1664
1665 request.physicalCameraSettings = camSettings; // Invalid camera settings
1666 returnStatus =
1667 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1668 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
1669 returnStatus.getServiceSpecificError());
1670
1671 defaultPreviewSettings.unlock(settingsBuffer);
1672 filteredSettings.unlock(filteredSettingsBuffer);
1673
1674 if (useHalBufManager) {
1675 std::vector<int32_t> streamIds(halStreams.size());
1676 for (size_t i = 0; i < streamIds.size(); i++) {
1677 streamIds[i] = halStreams[i].id;
1678 }
1679 verifyBuffersReturned(mSession, streamIds, cb);
1680 }
1681
1682 ret = mSession->close();
1683 mSession = nullptr;
1684 ASSERT_TRUE(ret.isOk());
1685 }
1686}
1687
1688// Generate and verify an ultra high resolution capture request
1689TEST_P(CameraAidlTest, processUltraHighResolutionRequest) {
1690 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1691 int64_t bufferId = 1;
1692 int32_t frameNumber = 1;
1693 CameraMetadata settings;
1694
1695 for (const auto& name : cameraDeviceNames) {
1696 std::string version, deviceId;
1697 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1698 CameraMetadata meta;
1699
1700 std::shared_ptr<ICameraDevice> unusedDevice;
1701 openEmptyDeviceSession(name, mProvider, &mSession, &meta, &unusedDevice);
1702 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1703 if (!isUltraHighResolution(staticMeta)) {
1704 ndk::ScopedAStatus ret = mSession->close();
1705 mSession = nullptr;
1706 ASSERT_TRUE(ret.isOk());
1707 continue;
1708 }
1709 CameraMetadata req;
1710 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
1711 ndk::ScopedAStatus ret =
1712 mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE, &req);
1713 ASSERT_TRUE(ret.isOk());
1714
1715 const camera_metadata_t* metadata =
1716 reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
1717 size_t expectedSize = req.metadata.size();
1718 int result = validate_camera_metadata_structure(metadata, &expectedSize);
1719 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
1720
1721 size_t entryCount = get_camera_metadata_entry_count(metadata);
1722 ASSERT_GT(entryCount, 0u);
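        // Clone the default still-capture settings and force ANDROID_SENSOR_PIXEL_MODE to
        // MAXIMUM_RESOLUTION so the request targets the full sensor resolution.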
1723 defaultSettings = metadata;
1724 uint8_t sensorPixelMode =
1725 static_cast<uint8_t>(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
1726 ASSERT_EQ(::android::OK,
1727 defaultSettings.update(ANDROID_SENSOR_PIXEL_MODE, &sensorPixelMode, 1));
1728
1729 const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
1730 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1731 settings.metadata = std::vector(
1732 rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1733 overrideRotateAndCrop(&settings);
1734
1735 ret = mSession->close();
1736 mSession = nullptr;
1737 ASSERT_TRUE(ret.isOk());
1738
1739 std::vector<HalStream> halStreams;
1740 bool supportsPartialResults = false;
1741 bool useHalBufManager = false;
1742 int32_t partialResultCount = 0;
1743 Stream previewStream;
1744 std::shared_ptr<DeviceCb> cb;
1745
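        // Exercise both YUV and RAW16 outputs configured at maximum resolution.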
1746 std::list<PixelFormat> pixelFormats = {PixelFormat::YCBCR_420_888, PixelFormat::RAW16};
1747 for (PixelFormat format : pixelFormats) {
Emilian Peevdda1eb72022-07-28 16:37:40 -07001748 previewStream.usage =
1749 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1750 GRALLOC1_CONSUMER_USAGE_CPU_READ);
1751 previewStream.dataSpace = Dataspace::UNKNOWN;
Avichal Rakesh362242f2022-02-08 12:40:53 -08001752 configureStreams(name, mProvider, format, &mSession, &previewStream, &halStreams,
1753 &supportsPartialResults, &partialResultCount, &useHalBufManager, &cb,
1754 0, /*maxResolution*/ true);
1755 ASSERT_NE(mSession, nullptr);
1756
1757 ::aidl::android::hardware::common::fmq::MQDescriptor<
1758 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1759 descriptor;
1760 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1761 ASSERT_TRUE(resultQueueRet.isOk());
1762
1763 std::shared_ptr<ResultMetadataQueue> resultQueue =
1764 std::make_shared<ResultMetadataQueue>(descriptor);
1765 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1766                ALOGE("%s: HAL returned an empty result metadata fmq; not using it", __func__);
1767                resultQueue = nullptr;
1768                // Don't use the queue from here on.
1769 }
1770
1771 std::vector<buffer_handle_t> graphicBuffers;
1772 graphicBuffers.reserve(halStreams.size());
1773 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1774 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1775 partialResultCount, std::unordered_set<std::string>(), resultQueue);
1776
1777 std::vector<CaptureRequest> requests(1);
1778 CaptureRequest& request = requests[0];
1779 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1780 outputBuffers.resize(halStreams.size());
1781
1782 size_t k = 0;
1783 for (const auto& halStream : halStreams) {
1784 buffer_handle_t buffer_handle;
1785 if (useHalBufManager) {
1786 outputBuffers[k] = {halStream.id, 0,
1787 NativeHandle(), BufferStatus::OK,
1788 NativeHandle(), NativeHandle()};
1789 } else {
1790 allocateGraphicBuffer(previewStream.width, previewStream.height,
1791 android_convertGralloc1To0Usage(
1792 static_cast<uint64_t>(halStream.producerUsage),
1793 static_cast<uint64_t>(halStream.consumerUsage)),
1794 halStream.overrideFormat, &buffer_handle);
1795 graphicBuffers.push_back(buffer_handle);
1796 outputBuffers[k] = {
1797 halStream.id, bufferId, ::android::makeToAidl(buffer_handle),
1798 BufferStatus::OK, NativeHandle(), NativeHandle()};
1799 bufferId++;
1800 }
1801 k++;
1802 }
1803
1804 request.inputBuffer = {
1805 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
1806 request.frameNumber = frameNumber;
1807 request.fmqSettingsSize = 0;
1808 request.settings = settings;
1809 request.inputWidth = 0;
1810 request.inputHeight = 0;
1811
1812 {
1813 std::unique_lock<std::mutex> l(mLock);
1814 mInflightMap.clear();
1815 mInflightMap[frameNumber] = inflightReq;
1816 }
1817
1818 int32_t numRequestProcessed = 0;
1819 std::vector<BufferCache> cachesToRemove;
1820 ndk::ScopedAStatus returnStatus =
1821 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1822 ASSERT_TRUE(returnStatus.isOk());
1823 ASSERT_EQ(numRequestProcessed, 1u);
1824
1825 {
1826 std::unique_lock<std::mutex> l(mLock);
1827 while (!inflightReq->errorCodeValid &&
1828 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1829 auto timeout = std::chrono::system_clock::now() +
1830 std::chrono::seconds(kStreamBufferTimeoutSec);
1831 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1832 }
1833
1834 ASSERT_FALSE(inflightReq->errorCodeValid);
1835 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1836 }
1837 if (useHalBufManager) {
1838 std::vector<int32_t> streamIds(halStreams.size());
1839 for (size_t i = 0; i < streamIds.size(); i++) {
1840 streamIds[i] = halStreams[i].id;
1841 }
1842 verifyBuffersReturned(mSession, streamIds, cb);
1843 }
1844
1845 ret = mSession->close();
1846 mSession = nullptr;
1847 ASSERT_TRUE(ret.isOk());
1848 }
1849 }
1850}
1851
1852 // Generate and verify a 10-bit dynamic range request
1853TEST_P(CameraAidlTest, process10BitDynamicRangeRequest) {
1854 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001855 CameraMetadata settings;
1856
1857 for (const auto& name : cameraDeviceNames) {
1858 std::string version, deviceId;
1859 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1860 CameraMetadata meta;
1861 std::shared_ptr<ICameraDevice> device;
1862 openEmptyDeviceSession(name, mProvider, &mSession, &meta, &device);
1863 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1864 if (!is10BitDynamicRangeCapable(staticMeta)) {
1865 ndk::ScopedAStatus ret = mSession->close();
1866 mSession = nullptr;
1867 ASSERT_TRUE(ret.isOk());
1868 continue;
1869 }
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001870 std::vector<RequestAvailableDynamicRangeProfilesMap> profileList;
Avichal Rakesh362242f2022-02-08 12:40:53 -08001871 get10BitDynamicRangeProfiles(staticMeta, &profileList);
1872 ASSERT_FALSE(profileList.empty());
1873
1874 CameraMetadata req;
1875 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
1876 ndk::ScopedAStatus ret =
Emilian Peevdda1eb72022-07-28 16:37:40 -07001877 mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &req);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001878 ASSERT_TRUE(ret.isOk());
1879
1880 const camera_metadata_t* metadata =
1881 reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
1882 size_t expectedSize = req.metadata.size();
1883 int result = validate_camera_metadata_structure(metadata, &expectedSize);
1884 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
1885
1886 size_t entryCount = get_camera_metadata_entry_count(metadata);
1887 ASSERT_GT(entryCount, 0u);
1888 defaultSettings = metadata;
1889
1890 const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
1891 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1892 settings.metadata = std::vector(
1893 rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1894 overrideRotateAndCrop(&settings);
1895
1896 ret = mSession->close();
1897 mSession = nullptr;
1898 ASSERT_TRUE(ret.isOk());
1899
1900 std::vector<HalStream> halStreams;
1901 bool supportsPartialResults = false;
1902 bool useHalBufManager = false;
1903 int32_t partialResultCount = 0;
1904 Stream previewStream;
1905 std::shared_ptr<DeviceCb> cb;
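        // Configure a preview stream for each advertised 10-bit dynamic range profile and
        // submit enough requests to fill the HAL's in-flight queue (halStreams[0].maxBuffers).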
1906 for (const auto& profile : profileList) {
Emilian Peevdda1eb72022-07-28 16:37:40 -07001907 previewStream.usage =
1908 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1909 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
1910 previewStream.dataSpace = getDataspace(PixelFormat::IMPLEMENTATION_DEFINED);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001911 configureStreams(name, mProvider, PixelFormat::IMPLEMENTATION_DEFINED, &mSession,
1912 &previewStream, &halStreams, &supportsPartialResults,
1913 &partialResultCount, &useHalBufManager, &cb, 0,
1914 /*maxResolution*/ false, profile);
1915 ASSERT_NE(mSession, nullptr);
1916
1917 ::aidl::android::hardware::common::fmq::MQDescriptor<
1918 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1919 descriptor;
1920 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1921 ASSERT_TRUE(resultQueueRet.isOk());
1922
1923 std::shared_ptr<ResultMetadataQueue> resultQueue =
1924 std::make_shared<ResultMetadataQueue>(descriptor);
1925 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1926                ALOGE("%s: HAL returned an empty result metadata fmq; not using it", __func__);
1927                resultQueue = nullptr;
1928                // Don't use the queue from here on.
1929 }
1930
Emilian Peevdda1eb72022-07-28 16:37:40 -07001931 mInflightMap.clear();
1932            // Stream for as long as needed to fill the HAL's in-flight queue
1933 std::vector<CaptureRequest> requests(halStreams[0].maxBuffers);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001934
Emilian Peev470d1382023-01-18 11:09:09 -08001935 for (int32_t requestId = 0; requestId < requests.size(); requestId++) {
Emilian Peevdda1eb72022-07-28 16:37:40 -07001936 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1937 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1938 partialResultCount, std::unordered_set<std::string>(), resultQueue);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001939
Emilian Peev470d1382023-01-18 11:09:09 -08001940 CaptureRequest& request = requests[requestId];
Emilian Peevdda1eb72022-07-28 16:37:40 -07001941 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1942 outputBuffers.resize(halStreams.size());
Avichal Rakesh362242f2022-02-08 12:40:53 -08001943
Emilian Peevdda1eb72022-07-28 16:37:40 -07001944 size_t k = 0;
1945 inflightReq->mOutstandingBufferIds.resize(halStreams.size());
1946 std::vector<buffer_handle_t> graphicBuffers;
1947 graphicBuffers.reserve(halStreams.size());
Avichal Rakesh362242f2022-02-08 12:40:53 -08001948
Emilian Peev470d1382023-01-18 11:09:09 -08001949 auto bufferId = requestId + 1; // Buffer id value 0 is not valid
Emilian Peevdda1eb72022-07-28 16:37:40 -07001950 for (const auto& halStream : halStreams) {
1951 buffer_handle_t buffer_handle;
1952 if (useHalBufManager) {
1953 outputBuffers[k] = {halStream.id, 0,
1954 NativeHandle(), BufferStatus::OK,
1955 NativeHandle(), NativeHandle()};
1956 } else {
1957 auto usage = android_convertGralloc1To0Usage(
1958 static_cast<uint64_t>(halStream.producerUsage),
1959 static_cast<uint64_t>(halStream.consumerUsage));
1960 allocateGraphicBuffer(previewStream.width, previewStream.height, usage,
1961 halStream.overrideFormat, &buffer_handle);
1962
1963 inflightReq->mOutstandingBufferIds[halStream.id][bufferId] = buffer_handle;
1964 graphicBuffers.push_back(buffer_handle);
1965 outputBuffers[k] = {halStream.id, bufferId,
1966 android::makeToAidl(buffer_handle), BufferStatus::OK, NativeHandle(),
1967 NativeHandle()};
Emilian Peevdda1eb72022-07-28 16:37:40 -07001968 }
1969 k++;
Avichal Rakesh362242f2022-02-08 12:40:53 -08001970 }
Avichal Rakesh362242f2022-02-08 12:40:53 -08001971
Emilian Peevdda1eb72022-07-28 16:37:40 -07001972 request.inputBuffer = {
1973 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
Emilian Peev470d1382023-01-18 11:09:09 -08001974 request.frameNumber = bufferId;
Emilian Peevdda1eb72022-07-28 16:37:40 -07001975 request.fmqSettingsSize = 0;
1976 request.settings = settings;
1977 request.inputWidth = 0;
1978 request.inputHeight = 0;
Avichal Rakesh362242f2022-02-08 12:40:53 -08001979
Emilian Peevdda1eb72022-07-28 16:37:40 -07001980 {
1981 std::unique_lock<std::mutex> l(mLock);
Emilian Peev470d1382023-01-18 11:09:09 -08001982 mInflightMap[bufferId] = inflightReq;
Emilian Peevdda1eb72022-07-28 16:37:40 -07001983 }
1984
Avichal Rakesh362242f2022-02-08 12:40:53 -08001985 }
1986
1987 int32_t numRequestProcessed = 0;
1988 std::vector<BufferCache> cachesToRemove;
1989 ndk::ScopedAStatus returnStatus =
Emilian Peevdda1eb72022-07-28 16:37:40 -07001990 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001991 ASSERT_TRUE(returnStatus.isOk());
Emilian Peevdda1eb72022-07-28 16:37:40 -07001992 ASSERT_EQ(numRequestProcessed, requests.size());
Avichal Rakesh362242f2022-02-08 12:40:53 -08001993
Emilian Peevdda1eb72022-07-28 16:37:40 -07001994 returnStatus = mSession->repeatingRequestEnd(requests.size() - 1,
1995 std::vector<int32_t> {halStreams[0].id});
1996 ASSERT_TRUE(returnStatus.isOk());
1997
Emilian Peev470d1382023-01-18 11:09:09 -08001998 // We are keeping frame numbers and buffer ids consistent. Buffer id value of 0
1999 // is used to indicate a buffer that is not present/available so buffer ids as well
2000 // as frame numbers begin with 1.
2001 for (int32_t frameNumber = 1; frameNumber <= requests.size(); frameNumber++) {
Emilian Peevdda1eb72022-07-28 16:37:40 -07002002 const auto& inflightReq = mInflightMap[frameNumber];
Avichal Rakesh362242f2022-02-08 12:40:53 -08002003 std::unique_lock<std::mutex> l(mLock);
2004 while (!inflightReq->errorCodeValid &&
2005 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2006 auto timeout = std::chrono::system_clock::now() +
2007 std::chrono::seconds(kStreamBufferTimeoutSec);
2008 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2009 }
2010
Shuzhen Wang0f56c562023-04-03 16:58:59 -07002011 waitForReleaseFence(inflightReq->resultOutputBuffers);
2012
Avichal Rakesh362242f2022-02-08 12:40:53 -08002013 ASSERT_FALSE(inflightReq->errorCodeValid);
2014 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2015 verify10BitMetadata(mHandleImporter, *inflightReq, profile);
2016 }
Emilian Peevdda1eb72022-07-28 16:37:40 -07002017
Avichal Rakesh362242f2022-02-08 12:40:53 -08002018 if (useHalBufManager) {
2019 std::vector<int32_t> streamIds(halStreams.size());
2020 for (size_t i = 0; i < streamIds.size(); i++) {
2021 streamIds[i] = halStreams[i].id;
2022 }
2023 mSession->signalStreamFlush(streamIds, /*streamConfigCounter*/ 0);
2024 cb->waitForBuffersReturned();
2025 }
2026
2027 ret = mSession->close();
2028 mSession = nullptr;
2029 ASSERT_TRUE(ret.isOk());
2030 }
2031 }
2032}
2033
Austin Borger4728fc42022-07-15 11:27:53 -07002034TEST_P(CameraAidlTest, process8BitColorSpaceRequests) {
Austin Borger54b22362023-03-22 11:25:06 -07002035 static int profiles[] = {ColorSpaceNamed::DISPLAY_P3, ColorSpaceNamed::SRGB};
Austin Borger4728fc42022-07-15 11:27:53 -07002036
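    // Request DISPLAY_P3 and sRGB color space outputs using the STANDARD dynamic range profile.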
2037 for (int32_t i = 0; i < sizeof(profiles) / sizeof(profiles[0]); i++) {
2038 processColorSpaceRequest(static_cast<RequestAvailableColorSpaceProfilesMap>(profiles[i]),
2039 static_cast<RequestAvailableDynamicRangeProfilesMap>(
2040 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD));
2041 }
2042}
2043
2044TEST_P(CameraAidlTest, process10BitColorSpaceRequests) {
2045 static const camera_metadata_enum_android_request_available_dynamic_range_profiles_map
2046 dynamicRangeProfiles[] = {
2047 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10,
2048 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10,
2049 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS,
2050 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF,
2051 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO,
2052 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM,
2053 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO,
2054 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF,
2055 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF_PO,
2056 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM,
2057 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM_PO
2058 };
2059
Austin Borger54b22362023-03-22 11:25:06 -07002060 // Process all dynamic range profiles with BT2020_HLG
Austin Borger4728fc42022-07-15 11:27:53 -07002061 for (int32_t i = 0; i < sizeof(dynamicRangeProfiles) / sizeof(dynamicRangeProfiles[0]); i++) {
2062 processColorSpaceRequest(
Austin Borger54b22362023-03-22 11:25:06 -07002063 static_cast<RequestAvailableColorSpaceProfilesMap>(ColorSpaceNamed::BT2020_HLG),
Austin Borger4728fc42022-07-15 11:27:53 -07002064 static_cast<RequestAvailableDynamicRangeProfilesMap>(dynamicRangeProfiles[i]));
2065 }
2066}
2067
Shuzhen Wang4dd6a512022-11-08 20:47:20 +00002068TEST_P(CameraAidlTest, processZoomSettingsOverrideRequests) {
2069 const int32_t kFrameCount = 5;
2070 const int32_t kTestCases = 2;
Shuzhen Wang38ddb272023-05-22 09:40:28 -07002071 const bool kOverrideSequence[kTestCases][kFrameCount] = {// ZOOM, ZOOM, ZOOM, ZOOM, ZOOM;
2072 {true, true, true, true, true},
2073 // OFF, ZOOM, ZOOM, ZOOM, OFF;
2074 {false, true, true, true, false}};
Shuzhen Wang4dd6a512022-11-08 20:47:20 +00002075 const bool kExpectedOverrideResults[kTestCases][kFrameCount] = {
Shuzhen Wang38ddb272023-05-22 09:40:28 -07002076            // All results should be overridden except the last one. The last result's
2077 // zoom doesn't have speed-up.
2078 {true, true, true, true, false},
2079            // Because we require at least 1 frame speed-up, requests #1, #2 and #3
2080 // will be overridden.
2081 {true, true, true, false, false}};
Shuzhen Wang4dd6a512022-11-08 20:47:20 +00002082
2083 for (int i = 0; i < kTestCases; i++) {
2084 processZoomSettingsOverrideRequests(kFrameCount, kOverrideSequence[i],
2085 kExpectedOverrideResults[i]);
2086 }
2087}
2088
Avichal Rakesh362242f2022-02-08 12:40:53 -08002089// Generate and verify a burst containing alternating sensor sensitivity values
2090TEST_P(CameraAidlTest, processCaptureRequestBurstISO) {
2091 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2092 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2093 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2094 int64_t bufferId = 1;
2095 int32_t frameNumber = 1;
2096 float isoTol = .03f;
2097 CameraMetadata settings;
2098
2099 for (const auto& name : cameraDeviceNames) {
2100 CameraMetadata meta;
2101 settings.metadata.clear();
2102 std::shared_ptr<ICameraDevice> unusedDevice;
2103 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2104 &unusedDevice /*out*/);
2105 camera_metadata_t* staticMetaBuffer =
2106 clone_camera_metadata(reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
2107 ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta(
2108 staticMetaBuffer);
2109
2110 camera_metadata_entry_t hwLevel = staticMeta.find(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL);
2111 ASSERT_TRUE(0 < hwLevel.count);
2112 if (ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED == hwLevel.data.u8[0] ||
2113 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL == hwLevel.data.u8[0]) {
2114 // Limited/External devices can skip this test
2115 ndk::ScopedAStatus ret = mSession->close();
2116 mSession = nullptr;
2117 ASSERT_TRUE(ret.isOk());
2118 continue;
2119 }
2120
2121 camera_metadata_entry_t isoRange = staticMeta.find(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE);
2122 ASSERT_EQ(isoRange.count, 2u);
2123
2124 ndk::ScopedAStatus ret = mSession->close();
2125 mSession = nullptr;
2126 ASSERT_TRUE(ret.isOk());
2127
2128 bool supportsPartialResults = false;
2129 bool useHalBufManager = false;
2130 int32_t partialResultCount = 0;
2131 Stream previewStream;
2132 std::vector<HalStream> halStreams;
2133 std::shared_ptr<DeviceCb> cb;
2134 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2135 &previewStream /*out*/, &halStreams /*out*/,
2136 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2137 &useHalBufManager /*out*/, &cb /*out*/);
2138
2139 ::aidl::android::hardware::common::fmq::MQDescriptor<
2140 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2141 descriptor;
2142 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2143 std::shared_ptr<ResultMetadataQueue> resultQueue =
2144 std::make_shared<ResultMetadataQueue>(descriptor);
2145 ASSERT_TRUE(resultQueueRet.isOk());
2146 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2147            ALOGE("%s: HAL returned an empty result metadata fmq; not using it", __func__);
2148            resultQueue = nullptr;
2149            // Don't use the queue from here on.
2150 }
2151
2152 ret = mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &settings);
2153 ASSERT_TRUE(ret.isOk());
2154
2155 ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
2156 std::vector<CaptureRequest> requests(kBurstFrameCount);
2157 std::vector<buffer_handle_t> buffers(kBurstFrameCount);
2158 std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount);
2159 std::vector<int32_t> isoValues(kBurstFrameCount);
2160 std::vector<CameraMetadata> requestSettings(kBurstFrameCount);
2161
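        // Build a burst of kBurstFrameCount requests that alternate between the minimum and
        // maximum supported sensor sensitivity, with 3A disabled so the values apply directly.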
2162 for (int32_t i = 0; i < kBurstFrameCount; i++) {
2163 std::unique_lock<std::mutex> l(mLock);
2164 CaptureRequest& request = requests[i];
2165 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2166 outputBuffers.resize(1);
2167 StreamBuffer& outputBuffer = outputBuffers[0];
2168
2169 isoValues[i] = ((i % 2) == 0) ? isoRange.data.i32[0] : isoRange.data.i32[1];
2170 if (useHalBufManager) {
2171 outputBuffer = {halStreams[0].id, 0,
2172 NativeHandle(), BufferStatus::OK,
2173 NativeHandle(), NativeHandle()};
2174 } else {
2175 allocateGraphicBuffer(previewStream.width, previewStream.height,
2176 android_convertGralloc1To0Usage(
2177 static_cast<uint64_t>(halStreams[0].producerUsage),
2178 static_cast<uint64_t>(halStreams[0].consumerUsage)),
2179 halStreams[0].overrideFormat, &buffers[i]);
2180 outputBuffer = {halStreams[0].id, bufferId + i, ::android::makeToAidl(buffers[i]),
2181 BufferStatus::OK, NativeHandle(), NativeHandle()};
2182 }
2183
2184 requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));
2185
2186 // Disable all 3A routines
2187 uint8_t mode = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF);
2188 ASSERT_EQ(::android::OK, requestMeta.update(ANDROID_CONTROL_MODE, &mode, 1));
2189 ASSERT_EQ(::android::OK,
2190 requestMeta.update(ANDROID_SENSOR_SENSITIVITY, &isoValues[i], 1));
2191 camera_metadata_t* metaBuffer = requestMeta.release();
2192 uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2193 requestSettings[i].metadata = std::vector(
2194 rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2195 overrideRotateAndCrop(&(requestSettings[i]));
2196
2197 request.frameNumber = frameNumber + i;
2198 request.fmqSettingsSize = 0;
2199 request.settings = requestSettings[i];
2200 request.inputBuffer = {
2201 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2202
2203 inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2204 partialResultCount, resultQueue);
2205 mInflightMap[frameNumber + i] = inflightReqs[i];
2206 }
2207
2208 int32_t numRequestProcessed = 0;
2209 std::vector<BufferCache> cachesToRemove;
2210
2211 ndk::ScopedAStatus returnStatus =
2212 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2213 ASSERT_TRUE(returnStatus.isOk());
2214 ASSERT_EQ(numRequestProcessed, kBurstFrameCount);
2215
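        // Each result must report a sensitivity within isoTol (3%) of the requested value.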
2216 for (size_t i = 0; i < kBurstFrameCount; i++) {
2217 std::unique_lock<std::mutex> l(mLock);
2218 while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
2219 (!inflightReqs[i]->haveResultMetadata))) {
2220 auto timeout = std::chrono::system_clock::now() +
2221 std::chrono::seconds(kStreamBufferTimeoutSec);
2222 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2223 }
2224
2225 ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
2226 ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
2227 ASSERT_EQ(previewStream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
2228 ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
2229 ASSERT_TRUE(inflightReqs[i]->collectedResult.exists(ANDROID_SENSOR_SENSITIVITY));
2230 camera_metadata_entry_t isoResult =
2231 inflightReqs[i]->collectedResult.find(ANDROID_SENSOR_SENSITIVITY);
2232 ASSERT_TRUE(std::abs(isoResult.data.i32[0] - isoValues[i]) <=
2233 std::round(isoValues[i] * isoTol));
2234 }
2235
2236 if (useHalBufManager) {
2237 verifyBuffersReturned(mSession, previewStream.id, cb);
2238 }
2239 ret = mSession->close();
2240 mSession = nullptr;
2241 ASSERT_TRUE(ret.isOk());
2242 }
2243}
2244
2245// Test whether an incorrect capture request with missing settings will
2246// be reported correctly.
2247TEST_P(CameraAidlTest, processCaptureRequestInvalidSinglePreview) {
2248 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2249 std::vector<AvailableStream> outputPreviewStreams;
2250 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2251 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2252 int64_t bufferId = 1;
2253 int32_t frameNumber = 1;
2254 CameraMetadata settings;
2255
2256 for (const auto& name : cameraDeviceNames) {
2257 Stream previewStream;
2258 std::vector<HalStream> halStreams;
2259 std::shared_ptr<DeviceCb> cb;
2260 bool supportsPartialResults = false;
2261 bool useHalBufManager = false;
2262 int32_t partialResultCount = 0;
2263 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2264 &previewStream /*out*/, &halStreams /*out*/,
2265 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2266 &useHalBufManager /*out*/, &cb /*out*/);
2267 ASSERT_NE(mSession, nullptr);
2268 ASSERT_FALSE(halStreams.empty());
2269
2270 buffer_handle_t buffer_handle = nullptr;
2271
2272 if (useHalBufManager) {
2273 bufferId = 0;
2274 } else {
2275 allocateGraphicBuffer(previewStream.width, previewStream.height,
2276 android_convertGralloc1To0Usage(
2277 static_cast<uint64_t>(halStreams[0].producerUsage),
2278 static_cast<uint64_t>(halStreams[0].consumerUsage)),
2279 halStreams[0].overrideFormat, &buffer_handle);
2280 }
2281
2282 std::vector<CaptureRequest> requests(1);
2283 CaptureRequest& request = requests[0];
2284 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2285 outputBuffers.resize(1);
2286 StreamBuffer& outputBuffer = outputBuffers[0];
2287
2288 outputBuffer = {
2289 halStreams[0].id,
2290 bufferId,
2291 buffer_handle == nullptr ? NativeHandle() : ::android::makeToAidl(buffer_handle),
2292 BufferStatus::OK,
2293 NativeHandle(),
2294 NativeHandle()};
2295
2296 request.inputBuffer = {
2297 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2298 request.frameNumber = frameNumber;
2299 request.fmqSettingsSize = 0;
2300 request.settings = settings;
2301
2302        // Settings were not correctly initialized; the request should fail here
2303 int32_t numRequestProcessed = 0;
2304 std::vector<BufferCache> cachesToRemove;
2305 ndk::ScopedAStatus ret =
2306 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2307 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
2308 ASSERT_EQ(numRequestProcessed, 0u);
2309
2310 ret = mSession->close();
2311 mSession = nullptr;
2312 ASSERT_TRUE(ret.isOk());
2313 }
2314}
2315
2316// Verify camera offline session behavior
2317TEST_P(CameraAidlTest, switchToOffline) {
2318 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2319 AvailableStream threshold = {kMaxStillWidth, kMaxStillHeight,
2320 static_cast<int32_t>(PixelFormat::BLOB)};
2321 int64_t bufferId = 1;
2322 int32_t frameNumber = 1;
2323 CameraMetadata settings;
2324
2325 for (const auto& name : cameraDeviceNames) {
2326 CameraMetadata meta;
2327 {
2328 std::shared_ptr<ICameraDevice> unusedDevice;
2329 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2330 &unusedDevice);
2331 camera_metadata_t* staticMetaBuffer = clone_camera_metadata(
2332 reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
2333 ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta(
2334 staticMetaBuffer);
2335
2336 if (isOfflineSessionSupported(staticMetaBuffer) != Status::OK) {
2337 ndk::ScopedAStatus ret = mSession->close();
2338 mSession = nullptr;
2339 ASSERT_TRUE(ret.isOk());
2340 continue;
2341 }
2342 ndk::ScopedAStatus ret = mSession->close();
2343 mSession = nullptr;
2344 ASSERT_TRUE(ret.isOk());
2345 }
2346
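        // Configure a still-capture (BLOB) stream that may later be moved to an offline
        // session, then queue a burst of JPEG capture requests against it.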
2347 bool supportsPartialResults = false;
2348 int32_t partialResultCount = 0;
2349 Stream stream;
2350 std::vector<HalStream> halStreams;
2351 std::shared_ptr<DeviceCb> cb;
2352 int32_t jpegBufferSize;
2353 bool useHalBufManager;
2354 configureOfflineStillStream(name, mProvider, &threshold, &mSession /*out*/, &stream /*out*/,
2355 &halStreams /*out*/, &supportsPartialResults /*out*/,
2356 &partialResultCount /*out*/, &cb /*out*/,
2357 &jpegBufferSize /*out*/, &useHalBufManager /*out*/);
2358
2359 auto ret = mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE,
2360 &settings);
2361 ASSERT_TRUE(ret.isOk());
2362
2363 ::aidl::android::hardware::common::fmq::MQDescriptor<
2364 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2365 descriptor;
2366
2367 ndk::ScopedAStatus resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2368 ASSERT_TRUE(resultQueueRet.isOk());
2369 std::shared_ptr<ResultMetadataQueue> resultQueue =
2370 std::make_shared<ResultMetadataQueue>(descriptor);
2371 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2372            ALOGE("%s: HAL returned an empty result metadata fmq; not using it", __func__);
2373            resultQueue = nullptr;
2374            // Don't use the queue from here on.
2375 }
2376
2377 ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
2378
2379 std::vector<buffer_handle_t> buffers(kBurstFrameCount);
2380 std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount);
2381 std::vector<CameraMetadata> requestSettings(kBurstFrameCount);
2382
2383 std::vector<CaptureRequest> requests(kBurstFrameCount);
2384
2385 HalStream halStream = halStreams[0];
2386 for (uint32_t i = 0; i < kBurstFrameCount; i++) {
2387 CaptureRequest& request = requests[i];
2388 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2389 outputBuffers.resize(1);
2390 StreamBuffer& outputBuffer = outputBuffers[0];
2391
2392 std::unique_lock<std::mutex> l(mLock);
2393 if (useHalBufManager) {
2394 outputBuffer = {halStream.id, 0, NativeHandle(), BufferStatus::OK, NativeHandle(),
2395 NativeHandle()};
2396 } else {
2397 // jpeg buffer (w,h) = (blobLen, 1)
2398 allocateGraphicBuffer(jpegBufferSize, /*height*/ 1,
2399 android_convertGralloc1To0Usage(
2400 static_cast<uint64_t>(halStream.producerUsage),
2401 static_cast<uint64_t>(halStream.consumerUsage)),
2402 halStream.overrideFormat, &buffers[i]);
2403 outputBuffer = {halStream.id, bufferId + i, ::android::makeToAidl(buffers[i]),
2404 BufferStatus::OK, NativeHandle(), NativeHandle()};
2405 }
2406
2407 requestMeta.clear();
2408 requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));
2409
2410 camera_metadata_t* metaBuffer = requestMeta.release();
2411 uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2412 requestSettings[i].metadata = std::vector(
2413 rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2414 overrideRotateAndCrop(&requestSettings[i]);
2415
2416 request.frameNumber = frameNumber + i;
2417 request.fmqSettingsSize = 0;
2418 request.settings = requestSettings[i];
2419 request.inputBuffer = {/*streamId*/ -1,
2420 /*bufferId*/ 0, NativeHandle(),
2421 BufferStatus::ERROR, NativeHandle(),
2422 NativeHandle()};
2423
2424 inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2425 partialResultCount, resultQueue);
2426 mInflightMap[frameNumber + i] = inflightReqs[i];
2427 }
2428
2429 int32_t numRequestProcessed = 0;
2430 std::vector<BufferCache> cachesToRemove;
2431
2432 ndk::ScopedAStatus returnStatus =
2433 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2434 ASSERT_TRUE(returnStatus.isOk());
2435 ASSERT_EQ(numRequestProcessed, kBurstFrameCount);
2436
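        // Move the still stream to an offline session; HALs that do not support offline
        // processing for this stream are expected to return ILLEGAL_ARGUMENT.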
2437 std::vector<int32_t> offlineStreamIds = {halStream.id};
2438 CameraOfflineSessionInfo offlineSessionInfo;
2439 std::shared_ptr<ICameraOfflineSession> offlineSession;
2440 returnStatus =
2441 mSession->switchToOffline(offlineStreamIds, &offlineSessionInfo, &offlineSession);
2442
2443 if (!halStreams[0].supportOffline) {
2444 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
2445 returnStatus.getServiceSpecificError());
2446 ret = mSession->close();
2447 mSession = nullptr;
2448 ASSERT_TRUE(ret.isOk());
2449 continue;
2450 }
2451
2452 ASSERT_TRUE(returnStatus.isOk());
2453        // The HAL might be unable to find any requests qualified for offline mode.
2454 if (offlineSession == nullptr) {
2455 ret = mSession->close();
2456 mSession = nullptr;
2457 ASSERT_TRUE(ret.isOk());
2458 continue;
2459 }
2460
2461 ASSERT_EQ(offlineSessionInfo.offlineStreams.size(), 1u);
2462 ASSERT_EQ(offlineSessionInfo.offlineStreams[0].id, halStream.id);
2463 ASSERT_NE(offlineSessionInfo.offlineRequests.size(), 0u);
2464
2465        // Close the device session to make sure the offline session does not rely on it
2466 ret = mSession->close();
2467 mSession = nullptr;
2468 ASSERT_TRUE(ret.isOk());
2469
2470 ::aidl::android::hardware::common::fmq::MQDescriptor<
2471 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2472 offlineResultDescriptor;
2473
2474 auto offlineResultQueueRet =
2475 offlineSession->getCaptureResultMetadataQueue(&offlineResultDescriptor);
2476 std::shared_ptr<ResultMetadataQueue> offlineResultQueue =
2477                std::make_shared<ResultMetadataQueue>(offlineResultDescriptor);
2478 if (!offlineResultQueue->isValid() || offlineResultQueue->availableToWrite() <= 0) {
2479            ALOGE("%s: offline session returned an empty result metadata fmq; not using it", __func__);
2480            offlineResultQueue = nullptr;
2481            // Don't use the queue from here on.
2482 }
2483 ASSERT_TRUE(offlineResultQueueRet.isOk());
2484
2485 updateInflightResultQueue(offlineResultQueue);
2486
2487 ret = offlineSession->setCallback(cb);
2488 ASSERT_TRUE(ret.isOk());
2489
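        // Wait for every request in the burst to complete through the offline session.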
2490 for (size_t i = 0; i < kBurstFrameCount; i++) {
2491 std::unique_lock<std::mutex> l(mLock);
2492 while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
2493 (!inflightReqs[i]->haveResultMetadata))) {
2494 auto timeout = std::chrono::system_clock::now() +
2495 std::chrono::seconds(kStreamBufferTimeoutSec);
2496 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2497 }
2498
2499 ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
2500 ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
2501 ASSERT_EQ(stream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
2502 ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
2503 }
2504
2505 ret = offlineSession->close();
2506 ASSERT_TRUE(ret.isOk());
2507 }
2508}
2509
2510// Check whether an invalid capture request with missing output buffers
2511// will be reported correctly.
2512TEST_P(CameraAidlTest, processCaptureRequestInvalidBuffer) {
2513 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2514 std::vector<AvailableStream> outputBlobStreams;
2515 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2516 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2517 int32_t frameNumber = 1;
2518 CameraMetadata settings;
2519
2520 for (const auto& name : cameraDeviceNames) {
2521 Stream previewStream;
2522 std::vector<HalStream> halStreams;
2523 std::shared_ptr<DeviceCb> cb;
2524 bool supportsPartialResults = false;
2525 bool useHalBufManager = false;
2526 int32_t partialResultCount = 0;
2527 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2528 &previewStream /*out*/, &halStreams /*out*/,
2529 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2530 &useHalBufManager /*out*/, &cb /*out*/);
2531
2532 RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
2533 ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings);
2534 ASSERT_TRUE(ret.isOk());
2535 overrideRotateAndCrop(&settings);
2536
2537 std::vector<CaptureRequest> requests(1);
2538 CaptureRequest& request = requests[0];
2539 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2540 outputBuffers.resize(1);
2541 // Empty output buffer
2542 outputBuffers[0] = {
2543 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2544
2545 request.inputBuffer = {
2546 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2547 request.frameNumber = frameNumber;
2548 request.fmqSettingsSize = 0;
2549 request.settings = settings;
2550
2551        // Output buffers are missing; the request should fail here
2552 int32_t numRequestProcessed = 0;
2553 std::vector<BufferCache> cachesToRemove;
2554 ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2555 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
2556 ASSERT_EQ(numRequestProcessed, 0u);
2557
2558 ret = mSession->close();
2559 mSession = nullptr;
2560 ASSERT_TRUE(ret.isOk());
2561 }
2562}
2563
2564// Generate, trigger and flush a preview request
2565TEST_P(CameraAidlTest, flushPreviewRequest) {
2566 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2567 std::vector<AvailableStream> outputPreviewStreams;
2568 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2569 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2570 int64_t bufferId = 1;
2571 int32_t frameNumber = 1;
2572 CameraMetadata settings;
2573
2574 for (const auto& name : cameraDeviceNames) {
2575 Stream previewStream;
2576 std::vector<HalStream> halStreams;
2577 std::shared_ptr<DeviceCb> cb;
2578 bool supportsPartialResults = false;
2579 bool useHalBufManager = false;
2580 int32_t partialResultCount = 0;
2581
2582 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2583 &previewStream /*out*/, &halStreams /*out*/,
2584 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2585 &useHalBufManager /*out*/, &cb /*out*/);
2586
2587 ASSERT_NE(mSession, nullptr);
2588 ASSERT_NE(cb, nullptr);
2589 ASSERT_FALSE(halStreams.empty());
2590
2591 ::aidl::android::hardware::common::fmq::MQDescriptor<
2592 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2593 descriptor;
2594
2595 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2596 std::shared_ptr<ResultMetadataQueue> resultQueue =
2597 std::make_shared<ResultMetadataQueue>(descriptor);
2598 ASSERT_TRUE(resultQueueRet.isOk());
2599 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2600            ALOGE("%s: HAL returned an empty result metadata fmq; not using it", __func__);
2601            resultQueue = nullptr;
2602            // Don't use the queue from here on.
2603 }
2604
2605 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
2606 1, false, supportsPartialResults, partialResultCount, resultQueue);
2607 RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
2608
2609 ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings);
2610 ASSERT_TRUE(ret.isOk());
2611 overrideRotateAndCrop(&settings);
2612
2613 buffer_handle_t buffer_handle;
2614 std::vector<CaptureRequest> requests(1);
2615 CaptureRequest& request = requests[0];
2616 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2617 outputBuffers.resize(1);
2618 StreamBuffer& outputBuffer = outputBuffers[0];
2619 if (useHalBufManager) {
2620 bufferId = 0;
2621 outputBuffer = {halStreams[0].id, bufferId, NativeHandle(),
2622 BufferStatus::OK, NativeHandle(), NativeHandle()};
2623 } else {
2624 allocateGraphicBuffer(previewStream.width, previewStream.height,
2625 android_convertGralloc1To0Usage(
2626 static_cast<uint64_t>(halStreams[0].producerUsage),
2627 static_cast<uint64_t>(halStreams[0].consumerUsage)),
2628 halStreams[0].overrideFormat, &buffer_handle);
2629 outputBuffer = {halStreams[0].id, bufferId, ::android::makeToAidl(buffer_handle),
2630 BufferStatus::OK, NativeHandle(), NativeHandle()};
2631 }
2632
2633 request.frameNumber = frameNumber;
2634 request.fmqSettingsSize = 0;
2635 request.settings = settings;
2636 request.inputBuffer = {
2637 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2638
2639 {
2640 std::unique_lock<std::mutex> l(mLock);
2641 mInflightMap.clear();
2642 mInflightMap[frameNumber] = inflightReq;
2643 }
2644
2645 int32_t numRequestProcessed = 0;
2646 std::vector<BufferCache> cachesToRemove;
2647 ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2648 ASSERT_TRUE(ret.isOk());
2649 ASSERT_EQ(numRequestProcessed, 1u);
2650
2651        // Flush before waiting for the request to complete.
2652 ndk::ScopedAStatus returnStatus = mSession->flush();
2653 ASSERT_TRUE(returnStatus.isOk());
2654
2655 {
2656 std::unique_lock<std::mutex> l(mLock);
2657 while (!inflightReq->errorCodeValid &&
2658 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2659 auto timeout = std::chrono::system_clock::now() +
2660 std::chrono::seconds(kStreamBufferTimeoutSec);
2661 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2662 }
2663
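            // A flushed request may either complete normally or be dropped with a
            // request/result/buffer error; both outcomes are acceptable here.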
2664 if (!inflightReq->errorCodeValid) {
2665 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2666 ASSERT_EQ(previewStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId);
2667 } else {
2668 switch (inflightReq->errorCode) {
2669 case ErrorCode::ERROR_REQUEST:
2670 case ErrorCode::ERROR_RESULT:
2671 case ErrorCode::ERROR_BUFFER:
2672 // Expected
2673 break;
2674 case ErrorCode::ERROR_DEVICE:
2675 default:
2676 FAIL() << "Unexpected error:"
2677 << static_cast<uint32_t>(inflightReq->errorCode);
2678 }
2679 }
2680 }
2681
2682 if (useHalBufManager) {
2683 verifyBuffersReturned(mSession, previewStream.id, cb);
2684 }
2685
2686 ret = mSession->close();
2687 mSession = nullptr;
2688 ASSERT_TRUE(ret.isOk());
2689 }
2690}
2691
2692 // Verify that the camera flushes correctly without any pending requests.
2693TEST_P(CameraAidlTest, flushEmpty) {
2694 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2695 std::vector<AvailableStream> outputPreviewStreams;
2696 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2697 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2698
2699 for (const auto& name : cameraDeviceNames) {
2700 Stream previewStream;
2701 std::vector<HalStream> halStreams;
2702 std::shared_ptr<DeviceCb> cb;
2703 bool supportsPartialResults = false;
2704 bool useHalBufManager = false;
2705
2706 int32_t partialResultCount = 0;
2707 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2708 &previewStream /*out*/, &halStreams /*out*/,
2709 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2710 &useHalBufManager /*out*/, &cb /*out*/);
2711
2712 ndk::ScopedAStatus returnStatus = mSession->flush();
2713 ASSERT_TRUE(returnStatus.isOk());
2714
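        // With no requests in flight, flush() must not generate any result or error callbacks,
        // so the wait below is expected to time out.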
2715 {
2716 std::unique_lock<std::mutex> l(mLock);
2717 auto timeout = std::chrono::system_clock::now() +
2718 std::chrono::milliseconds(kEmptyFlushTimeoutMSec);
2719 ASSERT_EQ(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2720 }
2721
2722 ndk::ScopedAStatus ret = mSession->close();
2723 mSession = nullptr;
2724 ASSERT_TRUE(ret.isOk());
2725 }
2726}
2727
2728// Test camera provider notify method
2729TEST_P(CameraAidlTest, providerDeviceStateNotification) {
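    // Cycle the provider through a covered and then a normal device state.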
2730 notifyDeviceState(ICameraProvider::DEVICE_STATE_BACK_COVERED);
2731 notifyDeviceState(ICameraProvider::DEVICE_STATE_NORMAL);
2732}
2733
2734// Verify that all supported stream formats and sizes can be configured
2735// successfully for an injection camera.
2736TEST_P(CameraAidlTest, configureInjectionStreamsAvailableOutputs) {
2737 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2738 std::vector<AvailableStream> outputStreams;
2739
2740 for (const auto& name : cameraDeviceNames) {
2741 CameraMetadata metadata;
2742
2743 std::shared_ptr<ICameraInjectionSession> injectionSession;
2744 std::shared_ptr<ICameraDevice> unusedDevice;
2745 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2746 &unusedDevice /*out*/);
2747 if (injectionSession == nullptr) {
2748 continue;
2749 }
2750
2751 camera_metadata_t* staticMetaBuffer =
2752 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2753 CameraMetadata chars;
2754 chars.metadata = metadata.metadata;
2755
2756 outputStreams.clear();
2757 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams));
2758 ASSERT_NE(0u, outputStreams.size());
2759
2760 int32_t jpegBufferSize = 0;
2761 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize));
2762 ASSERT_NE(0u, jpegBufferSize);
2763
2764 int32_t streamId = 0;
2765 int32_t streamConfigCounter = 0;
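        // Every output size/format advertised in the static metadata must be accepted as a
        // single-stream injection configuration.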
2766 for (auto& it : outputStreams) {
2767 Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format));
2768 Stream stream = {streamId,
2769 StreamType::OUTPUT,
2770 it.width,
2771 it.height,
2772 static_cast<PixelFormat>(it.format),
2773 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2774 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2775 dataspace,
2776 StreamRotation::ROTATION_0,
2777 std::string(),
2778 jpegBufferSize,
2779 0,
2780                         {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2781 RequestAvailableDynamicRangeProfilesMap::
2782 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2783
2784 std::vector<Stream> streams = {stream};
2785 StreamConfiguration config;
2786 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2787 jpegBufferSize);
2788
2789 config.streamConfigCounter = streamConfigCounter++;
2790 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
2791 ASSERT_TRUE(s.isOk());
2792 streamId++;
2793 }
2794
2795 std::shared_ptr<ICameraDeviceSession> session;
2796 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2797 ASSERT_TRUE(ret.isOk());
2798 ASSERT_NE(session, nullptr);
2799 ret = session->close();
2800 ASSERT_TRUE(ret.isOk());
2801 }
2802}
2803
2804// Check for correct handling of invalid/incorrect configuration parameters for an injection camera.
2805TEST_P(CameraAidlTest, configureInjectionStreamsInvalidOutputs) {
2806 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2807 std::vector<AvailableStream> outputStreams;
2808
2809 for (const auto& name : cameraDeviceNames) {
2810 CameraMetadata metadata;
2811 std::shared_ptr<ICameraInjectionSession> injectionSession;
2812 std::shared_ptr<ICameraDevice> unusedDevice;
2813 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2814 &unusedDevice);
2815 if (injectionSession == nullptr) {
2816 continue;
2817 }
2818
2819 camera_metadata_t* staticMetaBuffer =
2820 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2821 std::shared_ptr<ICameraDeviceSession> session;
2822 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2823 ASSERT_TRUE(ret.isOk());
2824 ASSERT_NE(session, nullptr);
2825
2826 CameraMetadata chars;
2827 chars.metadata = metadata.metadata;
2828
2829 outputStreams.clear();
2830 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams));
2831 ASSERT_NE(0u, outputStreams.size());
2832
2833 int32_t jpegBufferSize = 0;
2834 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize));
2835 ASSERT_NE(0u, jpegBufferSize);
2836
2837 int32_t streamId = 0;
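        // A stream with 0x0 dimensions is invalid and must be rejected.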
2838 Stream stream = {streamId++,
2839 StreamType::OUTPUT,
2840 0,
2841 0,
2842 static_cast<PixelFormat>(outputStreams[0].format),
2843 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2844 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2845 Dataspace::UNKNOWN,
2846 StreamRotation::ROTATION_0,
2847 std::string(),
2848 jpegBufferSize,
2849 0,
2850                         {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2851 RequestAvailableDynamicRangeProfilesMap::
2852 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2853
2854 int32_t streamConfigCounter = 0;
2855 std::vector<Stream> streams = {stream};
2856 StreamConfiguration config;
2857 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2858 jpegBufferSize);
2859
2860 config.streamConfigCounter = streamConfigCounter++;
2861 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
2862 ASSERT_TRUE(
2863 (static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) == s.getServiceSpecificError()) ||
2864 (static_cast<int32_t>(Status::INTERNAL_ERROR) == s.getServiceSpecificError()));
2865
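        // Oversized (INT32_MAX) dimensions must also be rejected.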
2866 stream = {streamId++,
2867 StreamType::OUTPUT,
2868 INT32_MAX,
2869 INT32_MAX,
2870 static_cast<PixelFormat>(outputStreams[0].format),
2871 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2872 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2873 Dataspace::UNKNOWN,
2874 StreamRotation::ROTATION_0,
2875 std::string(),
2876 jpegBufferSize,
2877 0,
2878                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2879 RequestAvailableDynamicRangeProfilesMap::
2880 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2881
2882        streams[0] = stream;
2883 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2884 jpegBufferSize);
2885 config.streamConfigCounter = streamConfigCounter++;
2886 s = injectionSession->configureInjectionStreams(config, chars);
2887 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
2888
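        // For every supported size, an unsupported pixel format and an invalid stream rotation
        // must each be rejected with ILLEGAL_ARGUMENT.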
2889 for (auto& it : outputStreams) {
2890 stream = {streamId++,
2891 StreamType::OUTPUT,
2892 it.width,
2893 it.height,
2894 static_cast<PixelFormat>(INT32_MAX),
2895 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2896 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2897 Dataspace::UNKNOWN,
2898 StreamRotation::ROTATION_0,
2899 std::string(),
2900 jpegBufferSize,
2901 0,
2902                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2903 RequestAvailableDynamicRangeProfilesMap::
2904 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2905            streams[0] = stream;
2906 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2907 jpegBufferSize);
2908 config.streamConfigCounter = streamConfigCounter++;
2909 s = injectionSession->configureInjectionStreams(config, chars);
2910 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
2911
2912 stream = {streamId++,
2913 StreamType::OUTPUT,
2914 it.width,
2915 it.height,
2916 static_cast<PixelFormat>(it.format),
2917 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2918 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2919 Dataspace::UNKNOWN,
2920 static_cast<StreamRotation>(INT32_MAX),
2921 std::string(),
2922 jpegBufferSize,
2923 0,
2924                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2925 RequestAvailableDynamicRangeProfilesMap::
2926 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2927            streams[0] = stream;
2928 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2929 jpegBufferSize);
2930 config.streamConfigCounter = streamConfigCounter++;
2931 s = injectionSession->configureInjectionStreams(config, chars);
2932 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
2933 }
2934
2935 ret = session->close();
2936 ASSERT_TRUE(ret.isOk());
2937 }
2938}
2939
2940// Check whether session parameters are supported for an injection camera. If the HAL supports
2941// them, try to configure a preview stream using them.
2942TEST_P(CameraAidlTest, configureInjectionStreamsWithSessionParameters) {
2943 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2944 std::vector<AvailableStream> outputPreviewStreams;
2945 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2946 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2947
2948 for (const auto& name : cameraDeviceNames) {
2949 CameraMetadata metadata;
2950 std::shared_ptr<ICameraInjectionSession> injectionSession;
2951 std::shared_ptr<ICameraDevice> unusedDevice;
2952 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2953 &unusedDevice /*out*/);
2954 if (injectionSession == nullptr) {
2955 continue;
2956 }
2957
2958 std::shared_ptr<ICameraDeviceSession> session;
2959 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2960 ASSERT_TRUE(ret.isOk());
2961 ASSERT_NE(session, nullptr);
2962
2963 camera_metadata_t* staticMetaBuffer =
2964 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2965 CameraMetadata chars;
2966 chars.metadata = metadata.metadata;
2967
2968 std::unordered_set<int32_t> availableSessionKeys;
2969 Status rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
2970 &availableSessionKeys);
2971 ASSERT_EQ(Status::OK, rc);
2972 if (availableSessionKeys.empty()) {
2973 ret = session->close();
2974 ASSERT_TRUE(ret.isOk());
2975 continue;
2976 }
2977
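        // Build preview request settings and split out the session parameters listed in
        // ANDROID_REQUEST_AVAILABLE_SESSION_KEYS; devices that report none are skipped.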
2978 android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings;
2979 android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams,
2980 modifiedSessionParams;
2981 constructFilteredSettings(session, availableSessionKeys, RequestTemplate::PREVIEW,
2982 &previewRequestSettings, &sessionParams);
2983 if (sessionParams.isEmpty()) {
2984 ret = session->close();
2985 ASSERT_TRUE(ret.isOk());
2986 continue;
2987 }
2988
2989 outputPreviewStreams.clear();
2990
2991 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams,
2992 &previewThreshold));
2993 ASSERT_NE(0u, outputPreviewStreams.size());
2994
2995 Stream previewStream = {
2996 0,
2997 StreamType::OUTPUT,
2998 outputPreviewStreams[0].width,
2999 outputPreviewStreams[0].height,
3000 static_cast<PixelFormat>(outputPreviewStreams[0].format),
3001 static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
3002 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
3003 Dataspace::UNKNOWN,
3004 StreamRotation::ROTATION_0,
3005 std::string(),
3006 0,
3007 -1,
3008                {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
3009 RequestAvailableDynamicRangeProfilesMap::
3010 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
3011        std::vector<Stream> streams = {previewStream};
3012 StreamConfiguration config;
3013 config.streams = streams;
3014 config.operationMode = StreamConfigurationMode::NORMAL_MODE;
3015
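        // Serialize the session parameters into the raw metadata buffer carried by
        // StreamConfiguration::sessionParams.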
3016 modifiedSessionParams = sessionParams;
3017 camera_metadata_t* sessionParamsBuffer = sessionParams.release();
3018 uint8_t* rawSessionParamsBuffer = reinterpret_cast<uint8_t*>(sessionParamsBuffer);
3019 config.sessionParams.metadata =
3020 std::vector(rawSessionParamsBuffer,
3021 rawSessionParamsBuffer + get_camera_metadata_size(sessionParamsBuffer));
3022
3023        config.streamConfigCounter = 0;
3025 config.multiResolutionInputImage = false;
3026
3027 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
3028 ASSERT_TRUE(s.isOk());
3029
3030 sessionParams.acquire(sessionParamsBuffer);
3031 free_camera_metadata(staticMetaBuffer);
3032 ret = session->close();
3033 ASSERT_TRUE(ret.isOk());
3034 }
3035}
3036
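// Verify stream use case configuration for RAW16 streams, including the CROPPED_RAW use case.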
3037TEST_P(CameraAidlTest, configureStreamsUseCasesCroppedRaw) {
3038 AvailableStream rawStreamThreshold =
3039 {INT_MAX, INT_MAX, static_cast<int32_t>(PixelFormat::RAW16)};
3040 configureStreamUseCaseInternal(rawStreamThreshold);
3041}
3042
3043// Verify that valid stream use cases can be configured successfully, and invalid use cases
3044// fail stream configuration.
3045TEST_P(CameraAidlTest, configureStreamsUseCases) {
3046    AvailableStream previewStreamThreshold =
3047 {kMaxPreviewWidth, kMaxPreviewHeight, static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
3048 configureStreamUseCaseInternal(previewStreamThreshold);
3049}
3050
3051// Validate the integrity of stream configuration metadata
3052TEST_P(CameraAidlTest, validateStreamConfigurations) {
3053 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
3054 std::vector<AvailableStream> outputStreams;
3055
3056 const int32_t scalerSizesTag = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
3057 const int32_t scalerMinFrameDurationsTag = ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS;
3058 const int32_t scalerStallDurationsTag = ANDROID_SCALER_AVAILABLE_STALL_DURATIONS;
3059
3060 for (const auto& name : cameraDeviceNames) {
3061 CameraMetadata meta;
3062 std::shared_ptr<ICameraDevice> cameraDevice;
3063
3064 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
3065 &cameraDevice /*out*/);
3066 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
3067
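        // Devices capable of 10-bit dynamic range output must advertise BLOB minimum frame
        // durations and stall durations for every supported P010 size.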
3068 if (is10BitDynamicRangeCapable(staticMeta)) {
3069 std::vector<std::tuple<size_t, size_t>> supportedP010Sizes, supportedBlobSizes;
3070
3071 getSupportedSizes(staticMeta, scalerSizesTag, HAL_PIXEL_FORMAT_BLOB,
3072 &supportedBlobSizes);
3073 getSupportedSizes(staticMeta, scalerSizesTag, HAL_PIXEL_FORMAT_YCBCR_P010,
3074 &supportedP010Sizes);
3075 ASSERT_FALSE(supportedP010Sizes.empty());
3076
3077 std::vector<int64_t> blobMinDurations, blobStallDurations;
3078 getSupportedDurations(staticMeta, scalerMinFrameDurationsTag, HAL_PIXEL_FORMAT_BLOB,
3079 supportedP010Sizes, &blobMinDurations);
3080 getSupportedDurations(staticMeta, scalerStallDurationsTag, HAL_PIXEL_FORMAT_BLOB,
3081 supportedP010Sizes, &blobStallDurations);
3082 ASSERT_FALSE(blobStallDurations.empty());
3083 ASSERT_FALSE(blobMinDurations.empty());
3084 ASSERT_EQ(supportedP010Sizes.size(), blobMinDurations.size());
3085 ASSERT_EQ(blobMinDurations.size(), blobStallDurations.size());
3086 }
3087
3088        // TODO (b/280887191): Validate other aspects of stream configuration metadata...
3089
3090 ndk::ScopedAStatus ret = mSession->close();
3091 mSession = nullptr;
3092 ASSERT_TRUE(ret.isOk());
3093    }
3094}
3095
3096GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(CameraAidlTest);
3097INSTANTIATE_TEST_SUITE_P(
3098 PerInstance, CameraAidlTest,
3099 testing::ValuesIn(android::getAidlHalInstanceNames(ICameraProvider::descriptor)),
3100        android::hardware::PrintInstanceNameToString);