blob: 462698c87a4ac799b064fe79437709624bce500f [file] [log] [blame]
/*
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17#include <aidl/Vintf.h>
18#include <aidl/android/hardware/camera/common/VendorTagSection.h>
19#include <aidl/android/hardware/camera/device/ICameraDevice.h>
20#include <aidlcommonsupport/NativeHandle.h>
21#include <camera_aidl_test.h>
22#include <cutils/properties.h>
23#include <device_cb.h>
24#include <empty_device_cb.h>
25#include <grallocusage/GrallocUsageConversion.h>
26#include <gtest/gtest.h>
27#include <hardware/gralloc.h>
28#include <hardware/gralloc1.h>
29#include <hidl/GtestPrinter.h>
30#include <hidl/HidlSupport.h>
31#include <torch_provider_cb.h>
32#include <list>
33
34using ::aidl::android::hardware::camera::common::CameraDeviceStatus;
35using ::aidl::android::hardware::camera::common::CameraResourceCost;
36using ::aidl::android::hardware::camera::common::TorchModeStatus;
37using ::aidl::android::hardware::camera::common::VendorTagSection;
38using ::aidl::android::hardware::camera::device::ICameraDevice;
Avichal Rakeshd3503a32022-02-25 06:23:14 +000039using ::aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap;
Avichal Rakesh362242f2022-02-08 12:40:53 -080040using ::aidl::android::hardware::camera::metadata::SensorPixelMode;
41using ::aidl::android::hardware::camera::provider::CameraIdAndStreamCombination;
42using ::aidl::android::hardware::camera::provider::ICameraProviderCallbackDefault;
43
44using ::ndk::ScopedAStatus;
45
46namespace {
47const int32_t kBurstFrameCount = 10;
48const uint32_t kMaxStillWidth = 2048;
49const uint32_t kMaxStillHeight = 1536;
50
51const int64_t kEmptyFlushTimeoutMSec = 200;
52
Shuzhen Wang36efa712022-03-08 10:10:44 -080053const static std::vector<int64_t> kMandatoryUseCases = {
Avichal Rakesh362242f2022-02-08 12:40:53 -080054 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
55 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW,
56 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE,
57 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD,
58 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL,
59 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL};
60} // namespace
61
62TEST_P(CameraAidlTest, getCameraIdList) {
63 std::vector<std::string> idList;
64 ScopedAStatus ret = mProvider->getCameraIdList(&idList);
65 ASSERT_TRUE(ret.isOk());
66
67 for (size_t i = 0; i < idList.size(); i++) {
68 ALOGI("Camera Id[%zu] is %s", i, idList[i].c_str());
69 }
70}
71
72// Test if ICameraProvider::getVendorTags returns Status::OK
73TEST_P(CameraAidlTest, getVendorTags) {
74 std::vector<VendorTagSection> vendorTags;
75 ScopedAStatus ret = mProvider->getVendorTags(&vendorTags);
76
77 ASSERT_TRUE(ret.isOk());
78 for (size_t i = 0; i < vendorTags.size(); i++) {
79 ALOGI("Vendor tag section %zu name %s", i, vendorTags[i].sectionName.c_str());
80 for (auto& tag : vendorTags[i].tags) {
81 ALOGI("Vendor tag id %u name %s type %d", tag.tagId, tag.tagName.c_str(),
82 (int)tag.tagType);
83 }
84 }
85}
86
87// Test if ICameraProvider::setCallback returns Status::OK
88TEST_P(CameraAidlTest, setCallback) {
89 struct ProviderCb : public ICameraProviderCallbackDefault {
90 ScopedAStatus cameraDeviceStatusChange(const std::string& cameraDeviceName,
91 CameraDeviceStatus newStatus) override {
92 ALOGI("camera device status callback name %s, status %d", cameraDeviceName.c_str(),
93 (int)newStatus);
94 return ScopedAStatus::ok();
95 }
96 ScopedAStatus torchModeStatusChange(const std::string& cameraDeviceName,
97 TorchModeStatus newStatus) override {
98 ALOGI("Torch mode status callback name %s, status %d", cameraDeviceName.c_str(),
99 (int)newStatus);
100 return ScopedAStatus::ok();
101 }
102 ScopedAStatus physicalCameraDeviceStatusChange(const std::string& cameraDeviceName,
103 const std::string& physicalCameraDeviceName,
104 CameraDeviceStatus newStatus) override {
105 ALOGI("physical camera device status callback name %s, physical camera name %s,"
106 " status %d",
107 cameraDeviceName.c_str(), physicalCameraDeviceName.c_str(), (int)newStatus);
108 return ScopedAStatus::ok();
109 }
110 };
111
112 std::shared_ptr<ProviderCb> cb = ProviderCb::make<ProviderCb>();
113 ScopedAStatus ret = mProvider->setCallback(cb);
114 ASSERT_TRUE(ret.isOk());
115 ret = mProvider->setCallback(nullptr);
116 ASSERT_TRUE(ret.isOk());
117}
118
119// Test if ICameraProvider::getCameraDeviceInterface returns Status::OK and non-null device
120TEST_P(CameraAidlTest, getCameraDeviceInterface) {
121 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
122
123 for (const auto& name : cameraDeviceNames) {
124 std::shared_ptr<ICameraDevice> cameraDevice;
125 ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &cameraDevice);
126 ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
127 ret.getServiceSpecificError());
128 ASSERT_TRUE(ret.isOk());
129 ASSERT_NE(cameraDevice, nullptr);
130 }
131}
132
133// Verify that the device resource cost can be retrieved and the values are
134// correct.
135TEST_P(CameraAidlTest, getResourceCost) {
136 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
137
138 for (const auto& deviceName : cameraDeviceNames) {
139 std::shared_ptr<ICameraDevice> cameraDevice;
140 ScopedAStatus ret = mProvider->getCameraDeviceInterface(deviceName, &cameraDevice);
141 ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
142 ret.getServiceSpecificError());
143 ASSERT_TRUE(ret.isOk());
144 ASSERT_NE(cameraDevice, nullptr);
145
146 CameraResourceCost resourceCost;
147 ret = cameraDevice->getResourceCost(&resourceCost);
148 ALOGI("getResourceCost returns: %d:%d", ret.getExceptionCode(),
149 ret.getServiceSpecificError());
150 ASSERT_TRUE(ret.isOk());
151
152 ALOGI(" Resource cost is %d", resourceCost.resourceCost);
153 ASSERT_LE(resourceCost.resourceCost, 100u);
154
155 for (const auto& name : resourceCost.conflictingDevices) {
156 ALOGI(" Conflicting device: %s", name.c_str());
157 }
158 }
159}
160
161TEST_P(CameraAidlTest, systemCameraTest) {
162 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
163 std::map<std::string, std::vector<SystemCameraKind>> hiddenPhysicalIdToLogicalMap;
164 for (const auto& name : cameraDeviceNames) {
165 std::shared_ptr<ICameraDevice> device;
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +0000166 ALOGI("systemCameraTest: Testing camera device %s", name.c_str());
Avichal Rakesh362242f2022-02-08 12:40:53 -0800167 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
168 ASSERT_TRUE(ret.isOk());
169 ASSERT_NE(device, nullptr);
170
171 CameraMetadata cameraCharacteristics;
172 ret = device->getCameraCharacteristics(&cameraCharacteristics);
173 ASSERT_TRUE(ret.isOk());
174
175 const camera_metadata_t* staticMeta =
176 reinterpret_cast<const camera_metadata_t*>(cameraCharacteristics.metadata.data());
177 Status rc = isLogicalMultiCamera(staticMeta);
178 if (rc == Status::OPERATION_NOT_SUPPORTED) {
179 return;
180 }
181
182 ASSERT_EQ(rc, Status::OK);
183 std::unordered_set<std::string> physicalIds;
184 ASSERT_EQ(getPhysicalCameraIds(staticMeta, &physicalIds), Status::OK);
185 SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
186 Status retStatus = getSystemCameraKind(staticMeta, &systemCameraKind);
187 ASSERT_EQ(retStatus, Status::OK);
188
189 for (auto physicalId : physicalIds) {
190 bool isPublicId = false;
191 for (auto& deviceName : cameraDeviceNames) {
192 std::string publicVersion, publicId;
193 ASSERT_TRUE(matchDeviceName(deviceName, mProviderType, &publicVersion, &publicId));
194 if (physicalId == publicId) {
195 isPublicId = true;
196 break;
197 }
198 }
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +0000199
Avichal Rakesh362242f2022-02-08 12:40:53 -0800200 // For hidden physical cameras, collect their associated logical cameras
201 // and store the system camera kind.
202 if (!isPublicId) {
203 auto it = hiddenPhysicalIdToLogicalMap.find(physicalId);
204 if (it == hiddenPhysicalIdToLogicalMap.end()) {
205 hiddenPhysicalIdToLogicalMap.insert(std::make_pair(
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +0000206 physicalId, std::vector<SystemCameraKind>({systemCameraKind})));
Avichal Rakesh362242f2022-02-08 12:40:53 -0800207 } else {
208 it->second.push_back(systemCameraKind);
209 }
210 }
211 }
212 }
213
214 // Check that the system camera kind of the logical cameras associated with
215 // each hidden physical camera is the same.
216 for (const auto& it : hiddenPhysicalIdToLogicalMap) {
217 SystemCameraKind neededSystemCameraKind = it.second.front();
218 for (auto foundSystemCamera : it.second) {
219 ASSERT_EQ(neededSystemCameraKind, foundSystemCamera);
220 }
221 }
222}
223
224// Verify that the static camera characteristics can be retrieved
225// successfully.
226TEST_P(CameraAidlTest, getCameraCharacteristics) {
227 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
228
229 for (const auto& name : cameraDeviceNames) {
230 std::shared_ptr<ICameraDevice> device;
231 ALOGI("getCameraCharacteristics: Testing camera device %s", name.c_str());
232 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
233 ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
234 ret.getServiceSpecificError());
235 ASSERT_TRUE(ret.isOk());
236 ASSERT_NE(device, nullptr);
237
238 CameraMetadata chars;
239 ret = device->getCameraCharacteristics(&chars);
240 ASSERT_TRUE(ret.isOk());
241 verifyCameraCharacteristics(chars);
242 verifyMonochromeCharacteristics(chars);
243 verifyRecommendedConfigs(chars);
244 verifyLogicalOrUltraHighResCameraMetadata(name, device, chars, cameraDeviceNames);
245
246 ASSERT_TRUE(ret.isOk());
247
248 // getPhysicalCameraCharacteristics will fail for publicly
249 // advertised camera IDs.
250 std::string version, cameraId;
251 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &cameraId));
252 CameraMetadata devChars;
253 ret = device->getPhysicalCameraCharacteristics(cameraId, &devChars);
254 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
255 ASSERT_EQ(0, devChars.metadata.size());
256 }
257}
258
259// Verify that the torch strength level can be set and retrieved successfully.
260TEST_P(CameraAidlTest, turnOnTorchWithStrengthLevel) {
261 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
262
263 std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this);
264 ndk::ScopedAStatus ret = mProvider->setCallback(cb);
265 ASSERT_TRUE(ret.isOk());
266
267 for (const auto& name : cameraDeviceNames) {
268 int32_t defaultLevel;
269 std::shared_ptr<ICameraDevice> device;
270 ALOGI("%s: Testing camera device %s", __FUNCTION__, name.c_str());
271
272 ret = mProvider->getCameraDeviceInterface(name, &device);
273 ASSERT_TRUE(ret.isOk());
274 ASSERT_NE(device, nullptr);
275
276 CameraMetadata chars;
277 ret = device->getCameraCharacteristics(&chars);
278 ASSERT_TRUE(ret.isOk());
279
280 const camera_metadata_t* staticMeta =
281 reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());
282 bool torchStrengthControlSupported = isTorchStrengthControlSupported(staticMeta);
283 camera_metadata_ro_entry entry;
284 int rc = find_camera_metadata_ro_entry(staticMeta,
285 ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL, &entry);
286 if (torchStrengthControlSupported) {
287 ASSERT_EQ(rc, 0);
288 ASSERT_GT(entry.count, 0);
289 defaultLevel = *entry.data.i32;
290 ALOGI("Default level is:%d", defaultLevel);
291 }
292
293 mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
294 ret = device->turnOnTorchWithStrengthLevel(2);
295 ALOGI("turnOnTorchWithStrengthLevel returns status: %d", ret.getServiceSpecificError());
296 // OPERATION_NOT_SUPPORTED check
297 if (!torchStrengthControlSupported) {
298 ALOGI("Torch strength control not supported.");
299 ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED),
300 ret.getServiceSpecificError());
301 } else {
302 {
303 ASSERT_TRUE(ret.isOk());
304 std::unique_lock<std::mutex> l(mTorchLock);
305 while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
306 auto timeout = std::chrono::system_clock::now() +
307 std::chrono::seconds(kTorchTimeoutSec);
308 ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
309 }
310 ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
311 mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
312 }
313 ALOGI("getTorchStrengthLevel: Testing");
314 int32_t strengthLevel;
315 ret = device->getTorchStrengthLevel(&strengthLevel);
316 ASSERT_TRUE(ret.isOk());
317 ALOGI("Torch strength level is : %d", strengthLevel);
318 ASSERT_EQ(strengthLevel, 2);
319
320 // Turn OFF the torch and verify torch strength level is reset to default level.
321 ALOGI("Testing torch strength level reset after turning the torch OFF.");
322 ret = device->setTorchMode(false);
323 ASSERT_TRUE(ret.isOk());
324 {
325 std::unique_lock<std::mutex> l(mTorchLock);
326 while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
327 auto timeout = std::chrono::system_clock::now() +
328 std::chrono::seconds(kTorchTimeoutSec);
329 ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
330 }
331 ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
332 }
333
334 ret = device->getTorchStrengthLevel(&strengthLevel);
335 ASSERT_TRUE(ret.isOk());
336 ALOGI("Torch strength level after turning OFF torch is : %d", strengthLevel);
337 ASSERT_EQ(strengthLevel, defaultLevel);
338 }
339 }
340}
341
// In case it is supported verify that torch can be enabled.
// Check for corresponding torch callbacks as well.
TEST_P(CameraAidlTest, setTorchMode) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    // The wait loops below block on mTorchCond until mTorchStatus leaves
    // NOT_AVAILABLE; the registered TorchProviderCb is expected to update
    // both on torch status callbacks (see torch_provider_cb.h).
    std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this);
    ndk::ScopedAStatus ret = mProvider->setCallback(cb);
    ALOGI("setCallback returns status: %d", ret.getServiceSpecificError());
    ASSERT_TRUE(ret.isOk());
    ASSERT_NE(cb, nullptr);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("setTorchMode: Testing camera device %s", name.c_str());
        ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        CameraMetadata metadata;
        ret = device->getCameraCharacteristics(&metadata);
        ALOGI("getCameraCharacteristics returns status:%d", ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        camera_metadata_t* staticMeta =
                reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
        // Torch is optional: devices without it must reject setTorchMode with
        // OPERATION_NOT_SUPPORTED (checked in the first branch below).
        bool torchSupported = isTorchSupported(staticMeta);

        // Reset the shared status before issuing the call so the wait loop
        // only exits on a fresh callback.
        mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
        ret = device->setTorchMode(true);
        ALOGI("setTorchMode returns status: %d", ret.getServiceSpecificError());
        if (!torchSupported) {
            ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED),
                      ret.getServiceSpecificError());
        } else {
            ASSERT_TRUE(ret.isOk());
            {
                // Bounded wait (kTorchTimeoutSec per iteration) for the ON callback.
                std::unique_lock<std::mutex> l(mTorchLock);
                while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kTorchTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
                }
                ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
                mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
            }

            // Turn the torch back off and wait for the matching OFF callback.
            ret = device->setTorchMode(false);
            ASSERT_TRUE(ret.isOk());
            {
                std::unique_lock<std::mutex> l(mTorchLock);
                while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kTorchTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
                }
                ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
            }
        }
    }

    // Deregister the provider callback before the test ends.
    ret = mProvider->setCallback(nullptr);
    ASSERT_TRUE(ret.isOk());
}
406
407// Check dump functionality.
408TEST_P(CameraAidlTest, dump) {
409 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
410
411 for (const auto& name : cameraDeviceNames) {
412 std::shared_ptr<ICameraDevice> device;
413 ALOGI("dump: Testing camera device %s", name.c_str());
414
415 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
416 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
417 ret.getServiceSpecificError());
418 ASSERT_TRUE(ret.isOk());
419 ASSERT_NE(device, nullptr);
420
421 int raw_handle = open(kDumpOutput, O_RDWR);
422 ASSERT_GE(raw_handle, 0);
423
424 auto retStatus = device->dump(raw_handle, nullptr, 0);
425 ASSERT_EQ(retStatus, ::android::OK);
426 close(raw_handle);
427 }
428}
429
// Open, dump, then close
TEST_P(CameraAidlTest, openClose) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("openClose: Testing camera device %s", name.c_str());
        ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        // EmptyDeviceCb: device callback stub; this test never submits capture
        // requests, so no callbacks are expected (see empty_device_cb.h).
        std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();

        // open() stores the new session in the fixture member mSession.
        ret = device->open(cb, &mSession);
        ASSERT_TRUE(ret.isOk());
        ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_NE(mSession, nullptr);
        // Exercise dump() while a session is open.
        int raw_handle = open(kDumpOutput, O_RDWR);
        ASSERT_GE(raw_handle, 0);

        auto retStatus = device->dump(raw_handle, nullptr, 0);
        ASSERT_EQ(retStatus, ::android::OK);
        close(raw_handle);

        // Close the session and drop the fixture reference before the next
        // device is opened.
        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
        // TODO: test all session API calls return INTERNAL_ERROR after close
        // TODO: keep a wp copy here and verify session cannot be promoted out of this scope
    }
}
464
// Check whether all common default request settings can be successfully
// constructed.
TEST_P(CameraAidlTest, constructDefaultRequestSettings) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("constructDefaultRequestSettings: Testing camera device %s", name.c_str());
        ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
        ret = device->open(cb, &mSession);
        ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(mSession, nullptr);

        // Iterate over every RequestTemplate enum value from PREVIEW up to and
        // including MANUAL.
        for (int32_t t = (int32_t)RequestTemplate::PREVIEW; t <= (int32_t)RequestTemplate::MANUAL;
             t++) {
            RequestTemplate reqTemplate = (RequestTemplate)t;
            CameraMetadata rawMetadata;
            ret = mSession->constructDefaultRequestSettings(reqTemplate, &rawMetadata);
            ALOGI("constructDefaultRequestSettings returns status:%d:%d", ret.getExceptionCode(),
                  ret.getServiceSpecificError());

            if (reqTemplate == RequestTemplate::ZERO_SHUTTER_LAG ||
                reqTemplate == RequestTemplate::MANUAL) {
                // optional templates
                ASSERT_TRUE(ret.isOk() || static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
                                                  ret.getServiceSpecificError());
            } else {
                // All other templates are mandatory and must succeed.
                ASSERT_TRUE(ret.isOk());
            }

            if (ret.isOk()) {
                // Returned settings must be a structurally valid camera
                // metadata buffer of the reported size.
                const camera_metadata_t* metadata = (camera_metadata_t*)rawMetadata.metadata.data();
                size_t expectedSize = rawMetadata.metadata.size();
                int result = validate_camera_metadata_structure(metadata, &expectedSize);
                ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
                verifyRequestTemplate(metadata, reqTemplate);
            } else {
                // On failure the HAL must not return any metadata.
                ASSERT_EQ(0u, rawMetadata.metadata.size());
            }
        }
        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
518
// Verify that all supported stream formats and sizes can be configured
// successfully.
TEST_P(CameraAidlTest, configureStreamsAvailableOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputStreams;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> device;

        // Opens a session on the device and returns its static metadata
        // (asserts internally on failure).
        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/, &device /*out*/);

        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
        outputStreams.clear();
        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
        ASSERT_NE(0u, outputStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        int32_t streamConfigCounter = 0;
        // Configure each advertised (format, width, height) combination as a
        // single output stream and verify the HAL accepts it.
        for (auto& it : outputStreams) {
            Stream stream;
            Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format));
            stream.id = streamId;
            stream.streamType = StreamType::OUTPUT;
            stream.width = it.width;
            stream.height = it.height;
            stream.format = static_cast<PixelFormat>(it.format);
            stream.dataSpace = dataspace;
            stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                    GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
            stream.rotation = StreamRotation::ROTATION_0;
            stream.dynamicRangeProfile = RequestAvailableDynamicRangeProfilesMap::
                    ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;

            std::vector<Stream> streams = {stream};
            StreamConfiguration config;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);

            // Logical multi-cameras answer the stream-combination query path.
            bool expectStreamCombQuery = (isLogicalMultiCamera(staticMeta) == Status::OK);
            verifyStreamCombination(device, config, /*expectedStatus*/ true, expectStreamCombQuery);

            config.streamConfigCounter = streamConfigCounter++;
            std::vector<HalStream> halConfigs;
            ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
            ASSERT_TRUE(ret.isOk());
            // Exactly one HAL stream must come back, with a matching id.
            ASSERT_EQ(halConfigs.size(), 1);
            ASSERT_EQ(halConfigs[0].id, streamId);

            streamId++;
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
580
581// Verify that mandatory concurrent streams and outputs are supported.
582TEST_P(CameraAidlTest, configureConcurrentStreamsAvailableOutputs) {
583 struct CameraTestInfo {
584 CameraMetadata staticMeta;
585 std::shared_ptr<ICameraDeviceSession> session;
586 std::shared_ptr<ICameraDevice> cameraDevice;
587 StreamConfiguration config;
588 };
589
590 std::map<std::string, std::string> idToNameMap = getCameraDeviceIdToNameMap(mProvider);
591 std::vector<ConcurrentCameraIdCombination> concurrentDeviceCombinations =
592 getConcurrentDeviceCombinations(mProvider);
593 std::vector<AvailableStream> outputStreams;
594 for (const auto& cameraDeviceIds : concurrentDeviceCombinations) {
595 std::vector<CameraIdAndStreamCombination> cameraIdsAndStreamCombinations;
596 std::vector<CameraTestInfo> cameraTestInfos;
597 size_t i = 0;
598 for (const auto& id : cameraDeviceIds.combination) {
599 CameraTestInfo cti;
600 auto it = idToNameMap.find(id);
601 ASSERT_TRUE(idToNameMap.end() != it);
602 std::string name = it->second;
603
604 openEmptyDeviceSession(name, mProvider, &cti.session /*out*/, &cti.staticMeta /*out*/,
605 &cti.cameraDevice /*out*/);
606
607 outputStreams.clear();
608 camera_metadata_t* staticMeta =
609 reinterpret_cast<camera_metadata_t*>(cti.staticMeta.metadata.data());
610 ASSERT_EQ(Status::OK, getMandatoryConcurrentStreams(staticMeta, &outputStreams));
611 ASSERT_NE(0u, outputStreams.size());
612
613 int32_t jpegBufferSize = 0;
614 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
615 ASSERT_NE(0u, jpegBufferSize);
616
617 int32_t streamId = 0;
618 std::vector<Stream> streams(outputStreams.size());
619 size_t j = 0;
620 for (const auto& s : outputStreams) {
621 Stream stream;
622 Dataspace dataspace = getDataspace(static_cast<PixelFormat>(s.format));
623 stream.id = streamId++;
624 stream.streamType = StreamType::OUTPUT;
625 stream.width = s.width;
626 stream.height = s.height;
627 stream.format = static_cast<PixelFormat>(s.format);
628 stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
629 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
630 stream.dataSpace = dataspace;
631 stream.rotation = StreamRotation::ROTATION_0;
632 stream.sensorPixelModesUsed = {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT};
Avichal Rakeshd3503a32022-02-25 06:23:14 +0000633 stream.dynamicRangeProfile = RequestAvailableDynamicRangeProfilesMap::
634 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
Avichal Rakesh362242f2022-02-08 12:40:53 -0800635 streams[j] = stream;
636 j++;
637 }
638
639 // Add the created stream configs to cameraIdsAndStreamCombinations
640 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &cti.config,
641 jpegBufferSize);
642
643 cti.config.streamConfigCounter = outputStreams.size();
644 CameraIdAndStreamCombination cameraIdAndStreamCombination;
645 cameraIdAndStreamCombination.cameraId = id;
646 cameraIdAndStreamCombination.streamConfiguration = cti.config;
647 cameraIdsAndStreamCombinations.push_back(cameraIdAndStreamCombination);
648 i++;
649 cameraTestInfos.push_back(cti);
650 }
651 // Now verify that concurrent streams are supported
652 bool combinationSupported;
653 ndk::ScopedAStatus ret = mProvider->isConcurrentStreamCombinationSupported(
654 cameraIdsAndStreamCombinations, &combinationSupported);
655 ASSERT_TRUE(ret.isOk());
656 ASSERT_EQ(combinationSupported, true);
657
658 // Test the stream can actually be configured
659 for (auto& cti : cameraTestInfos) {
660 if (cti.session != nullptr) {
661 camera_metadata_t* staticMeta =
662 reinterpret_cast<camera_metadata_t*>(cti.staticMeta.metadata.data());
663 bool expectStreamCombQuery = (isLogicalMultiCamera(staticMeta) == Status::OK);
664 verifyStreamCombination(cti.cameraDevice, cti.config, /*expectedStatus*/ true,
665 expectStreamCombQuery);
666 }
667
668 if (cti.session != nullptr) {
669 std::vector<HalStream> streamConfigs;
670 ret = cti.session->configureStreams(cti.config, &streamConfigs);
671 ASSERT_TRUE(ret.isOk());
672 ASSERT_EQ(cti.config.streams.size(), streamConfigs.size());
673 }
674 }
675
676 for (auto& cti : cameraTestInfos) {
677 ret = cti.session->close();
678 ASSERT_TRUE(ret.isOk());
679 }
680 }
681}
682
// Check for correct handling of invalid/incorrect configuration parameters.
TEST_P(CameraAidlTest, configureStreamsInvalidOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputStreams;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
        outputStreams.clear();

        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
        ASSERT_NE(0u, outputStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        // Invalid case 1: a 0x0 resolution stream must be rejected.
        int32_t streamId = 0;
        Stream stream = {streamId++,
                         StreamType::OUTPUT,
                         static_cast<uint32_t>(0),
                         static_cast<uint32_t>(0),
                         static_cast<PixelFormat>(outputStreams[0].format),
                         static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                         Dataspace::UNKNOWN,
                         StreamRotation::ROTATION_0,
                         std::string(),
                         jpegBufferSize,
                         -1,
                         {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                         RequestAvailableDynamicRangeProfilesMap::
                                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        int32_t streamConfigCounter = 0;
        std::vector<Stream> streams = {stream};
        StreamConfiguration config;
        createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                  jpegBufferSize);

        verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ false,
                                /*expectStreamCombQuery*/ false);

        config.streamConfigCounter = streamConfigCounter++;
        std::vector<HalStream> halConfigs;
        ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
        // 0x0 may surface as either ILLEGAL_ARGUMENT or INTERNAL_ERROR.
        ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
                            ret.getServiceSpecificError() ||
                    static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError());

        // Invalid case 2: an absurdly large resolution must be rejected with
        // ILLEGAL_ARGUMENT.
        stream = {streamId++,
                  StreamType::OUTPUT,
                  /*width*/ INT32_MAX,
                  /*height*/ INT32_MAX,
                  static_cast<PixelFormat>(outputStreams[0].format),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  jpegBufferSize,
                  -1,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                  jpegBufferSize);

        config.streamConfigCounter = streamConfigCounter++;
        halConfigs.clear();
        ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());

        // For every advertised size, also check two more invalid variants:
        // an undefined pixel format and an undefined rotation.
        for (auto& it : outputStreams) {
            stream = {streamId++,
                      StreamType::OUTPUT,
                      it.width,
                      it.height,
                      static_cast<PixelFormat>(UINT32_MAX),
                      static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                              GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                      Dataspace::UNKNOWN,
                      StreamRotation::ROTATION_0,
                      std::string(),
                      jpegBufferSize,
                      -1,
                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                      RequestAvailableDynamicRangeProfilesMap::
                              ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

            streams[0] = stream;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);
            config.streamConfigCounter = streamConfigCounter++;
            halConfigs.clear();
            ret = mSession->configureStreams(config, &halConfigs);
            ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
                      ret.getServiceSpecificError());

            stream = {streamId++,
                      StreamType::OUTPUT,
                      it.width,
                      it.height,
                      static_cast<PixelFormat>(it.format),
                      static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                              GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                      Dataspace::UNKNOWN,
                      static_cast<StreamRotation>(UINT32_MAX),
                      std::string(),
                      jpegBufferSize,
                      -1,
                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                      RequestAvailableDynamicRangeProfilesMap::
                              ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

            streams[0] = stream;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);

            config.streamConfigCounter = streamConfigCounter++;
            halConfigs.clear();
            ret = mSession->configureStreams(config, &halConfigs);
            ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
                      ret.getServiceSpecificError());
        }

        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
819
820// Check whether all supported ZSL output stream combinations can be
821// configured successfully.
TEST_P(CameraAidlTest, configureStreamsZSLInputOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> inputStreams;
    std::vector<AvailableZSLInputOutput> inputOutputMap;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        // Devices without ZSL reprocessing support are skipped, not failed.
        Status rc = isZSLModeAvailable(staticMeta);
        if (Status::OPERATION_NOT_SUPPORTED == rc) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }
        ASSERT_EQ(Status::OK, rc);

        // ZSL input resolutions/formats are drawn from the device's output
        // stream list, hence the reuse of getAvailableOutputStreams() here.
        inputStreams.clear();
        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, inputStreams));
        ASSERT_NE(0u, inputStreams.size());

        inputOutputMap.clear();
        ASSERT_EQ(Status::OK, getZSLInputOutputMap(staticMeta, inputOutputMap));
        ASSERT_NE(0u, inputOutputMap.size());

        // Monochrome cameras advertising a Y8 stream must also support the
        // corresponding Y8 reprocessing paths (verified after the loop below).
        bool supportMonoY8 = false;
        if (Status::OK == isMonochromeCamera(staticMeta)) {
            for (auto& it : inputStreams) {
                if (it.format == static_cast<uint32_t>(PixelFormat::Y8)) {
                    supportMonoY8 = true;
                    break;
                }
            }
        }

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        bool hasPrivToY8 = false, hasY8ToY8 = false, hasY8ToBlob = false;
        uint32_t streamConfigCounter = 0;
        // For every advertised (inputFormat -> outputFormat) ZSL pair, try a
        // 3-stream combination: input + raw ZSL output + processed output.
        for (auto& inputIter : inputOutputMap) {
            AvailableStream input;
            ASSERT_EQ(Status::OK, findLargestSize(inputStreams, inputIter.inputFormat, input));
            ASSERT_NE(0u, inputStreams.size());

            // Track which reprocessing paths were observed so the monochrome
            // requirements can be checked once the map is exhausted.
            if (inputIter.inputFormat ==
                        static_cast<uint32_t>(PixelFormat::IMPLEMENTATION_DEFINED) &&
                inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
                hasPrivToY8 = true;
            } else if (inputIter.inputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
                if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::BLOB)) {
                    hasY8ToBlob = true;
                } else if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
                    hasY8ToY8 = true;
                }
            }
            AvailableStream outputThreshold = {INT32_MAX, INT32_MAX, inputIter.outputFormat};
            std::vector<AvailableStream> outputStreams;
            ASSERT_EQ(Status::OK,
                      getAvailableOutputStreams(staticMeta, outputStreams, &outputThreshold));
            for (auto& outputIter : outputStreams) {
                Dataspace outputDataSpace =
                        getDataspace(static_cast<PixelFormat>(outputIter.format));
                // ZSL output: same size/format as the input stream, tagged
                // with the camera-ZSL gralloc usage.
                Stream zslStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        input.width,
                        input.height,
                        static_cast<PixelFormat>(input.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC_USAGE_HW_CAMERA_ZSL),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                // Reprocessing input stream mirroring the ZSL output.
                Stream inputStream = {
                        streamId++,
                        StreamType::INPUT,
                        input.width,
                        input.height,
                        static_cast<PixelFormat>(input.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(0),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                // Processed output stream for the reprocessed result.
                Stream outputStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        outputIter.width,
                        outputIter.height,
                        static_cast<PixelFormat>(outputIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                        outputDataSpace,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

                std::vector<Stream> streams = {inputStream, zslStream, outputStream};

                StreamConfiguration config;
                createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                          jpegBufferSize);

                verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
                                        /*expectStreamCombQuery*/ false);

                config.streamConfigCounter = streamConfigCounter++;
                std::vector<HalStream> halConfigs;
                ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
                ASSERT_TRUE(ret.isOk());
                // One HalStream is expected back per requested stream.
                ASSERT_EQ(3u, halConfigs.size());
            }
        }

        if (supportMonoY8) {
            if (Status::OK == isZSLModeAvailable(staticMeta, PRIV_REPROCESS)) {
                ASSERT_TRUE(hasPrivToY8);
            }
            if (Status::OK == isZSLModeAvailable(staticMeta, YUV_REPROCESS)) {
                ASSERT_TRUE(hasY8ToY8);
                ASSERT_TRUE(hasY8ToBlob);
            }
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
972
973// Check whether session parameters are supported. If Hal support for them
974// exist, then try to configure a preview stream using them.
975TEST_P(CameraAidlTest, configureStreamsWithSessionParameters) {
976 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
977 std::vector<AvailableStream> outputPreviewStreams;
978 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
979 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
980
981 for (const auto& name : cameraDeviceNames) {
982 CameraMetadata meta;
983
984 std::shared_ptr<ICameraDevice> unusedCameraDevice;
985 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
986 &unusedCameraDevice /*out*/);
987 camera_metadata_t* staticMetaBuffer =
988 reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
989
990 std::unordered_set<int32_t> availableSessionKeys;
991 auto rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
992 &availableSessionKeys);
993 ASSERT_TRUE(Status::OK == rc);
994 if (availableSessionKeys.empty()) {
995 ndk::ScopedAStatus ret = mSession->close();
996 mSession = nullptr;
997 ASSERT_TRUE(ret.isOk());
998 continue;
999 }
1000
1001 android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings;
1002 android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams,
1003 modifiedSessionParams;
1004 constructFilteredSettings(mSession, availableSessionKeys, RequestTemplate::PREVIEW,
1005 &previewRequestSettings, &sessionParams);
1006 if (sessionParams.isEmpty()) {
1007 ndk::ScopedAStatus ret = mSession->close();
1008 mSession = nullptr;
1009 ASSERT_TRUE(ret.isOk());
1010 continue;
1011 }
1012
1013 outputPreviewStreams.clear();
1014
1015 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams,
1016 &previewThreshold));
1017 ASSERT_NE(0u, outputPreviewStreams.size());
1018
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001019 Stream previewStream = {
1020 0,
1021 StreamType::OUTPUT,
1022 outputPreviewStreams[0].width,
1023 outputPreviewStreams[0].height,
1024 static_cast<PixelFormat>(outputPreviewStreams[0].format),
1025 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1026 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
1027 Dataspace::UNKNOWN,
1028 StreamRotation::ROTATION_0,
1029 std::string(),
1030 /*bufferSize*/ 0,
1031 /*groupId*/ -1,
1032 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1033 RequestAvailableDynamicRangeProfilesMap::
1034 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08001035
1036 std::vector<Stream> streams = {previewStream};
1037 StreamConfiguration config;
1038
1039 config.streams = streams;
1040 config.operationMode = StreamConfigurationMode::NORMAL_MODE;
1041 modifiedSessionParams = sessionParams;
1042 auto sessionParamsBuffer = sessionParams.release();
1043 std::vector<uint8_t> rawSessionParam =
1044 std::vector(reinterpret_cast<uint8_t*>(sessionParamsBuffer),
1045 reinterpret_cast<uint8_t*>(sessionParamsBuffer) +
1046 get_camera_metadata_size(sessionParamsBuffer));
1047
1048 config.sessionParams.metadata = rawSessionParam;
1049 config.streamConfigCounter = 0;
1050 config.streams = {previewStream};
1051 config.streamConfigCounter = 0;
1052 config.multiResolutionInputImage = false;
1053
1054 bool newSessionParamsAvailable = false;
1055 for (const auto& it : availableSessionKeys) {
1056 if (modifiedSessionParams.exists(it)) {
1057 modifiedSessionParams.erase(it);
1058 newSessionParamsAvailable = true;
1059 break;
1060 }
1061 }
1062 if (newSessionParamsAvailable) {
1063 auto modifiedSessionParamsBuffer = modifiedSessionParams.release();
1064 verifySessionReconfigurationQuery(mSession, sessionParamsBuffer,
1065 modifiedSessionParamsBuffer);
1066 modifiedSessionParams.acquire(modifiedSessionParamsBuffer);
1067 }
1068
1069 std::vector<HalStream> halConfigs;
1070 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1071 ASSERT_TRUE(ret.isOk());
1072 ASSERT_EQ(1u, halConfigs.size());
1073
1074 sessionParams.acquire(sessionParamsBuffer);
1075 ret = mSession->close();
1076 mSession = nullptr;
1077 ASSERT_TRUE(ret.isOk());
1078 }
1079}
1080
1081// Verify that all supported preview + still capture stream combinations
1082// can be configured successfully.
TEST_P(CameraAidlTest, configureStreamsPreviewStillOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputBlobStreams;
    std::vector<AvailableStream> outputPreviewStreams;
    AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
                                        static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
    AvailableStream blobThreshold = {INT32_MAX, INT32_MAX, static_cast<int32_t>(PixelFormat::BLOB)};

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;

        std::shared_ptr<ICameraDevice> cameraDevice;
        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);

        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        // Check if camera support depth only; such devices have no
        // preview/still color streams to combine, so they are skipped.
        if (isDepthOnly(staticMeta)) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        outputBlobStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold));
        ASSERT_NE(0u, outputBlobStreams.size());

        outputPreviewStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputPreviewStreams, &previewThreshold));
        ASSERT_NE(0u, outputPreviewStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        uint32_t streamConfigCounter = 0;

        // Exercise the cross product of every supported BLOB (still capture)
        // stream with every supported preview stream.
        for (auto& blobIter : outputBlobStreams) {
            for (auto& previewIter : outputPreviewStreams) {
                Stream previewStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        previewIter.width,
                        previewIter.height,
                        static_cast<PixelFormat>(previewIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        /*bufferSize*/ 0,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                // JPEG still-capture stream, CPU-readable, JFIF dataspace.
                Stream blobStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        blobIter.width,
                        blobIter.height,
                        static_cast<PixelFormat>(blobIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_CPU_READ),
                        Dataspace::JFIF,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        /*bufferSize*/ 0,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                std::vector<Stream> streams = {previewStream, blobStream};
                StreamConfiguration config;

                createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                          jpegBufferSize);
                config.streamConfigCounter = streamConfigCounter++;
                verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
                                        /*expectStreamCombQuery*/ false);

                // Every preview+still combination must configure successfully
                // and return one HalStream per requested stream.
                std::vector<HalStream> halConfigs;
                ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(2u, halConfigs.size());
            }
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
1180
1181// In case constrained mode is supported, test whether it can be
1182// configured. Additionally check for common invalid inputs when
1183// using this mode.
TEST_P(CameraAidlTest, configureStreamsConstrainedOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        // Constrained high-speed mode is optional; skip devices without it.
        Status rc = isConstrainedModeAvailable(staticMeta);
        if (Status::OPERATION_NOT_SUPPORTED == rc) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }
        ASSERT_EQ(Status::OK, rc);

        AvailableStream hfrStream;
        rc = pickConstrainedModeSize(staticMeta, hfrStream);
        ASSERT_EQ(Status::OK, rc);

        int32_t streamId = 0;
        uint32_t streamConfigCounter = 0;
        // Positive case: a valid high-frame-rate video stream must configure.
        Stream stream = {streamId,
                         StreamType::OUTPUT,
                         hfrStream.width,
                         hfrStream.height,
                         static_cast<PixelFormat>(hfrStream.format),
                         static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                         Dataspace::UNKNOWN,
                         StreamRotation::ROTATION_0,
                         std::string(),
                         /*bufferSize*/ 0,
                         /*groupId*/ -1,
                         {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                         RequestAvailableDynamicRangeProfilesMap::
                                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        std::vector<Stream> streams = {stream};
        StreamConfiguration config;
        createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
                                  &config);

        verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
                                /*expectStreamCombQuery*/ false);

        config.streamConfigCounter = streamConfigCounter++;
        std::vector<HalStream> halConfigs;
        ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_TRUE(ret.isOk());
        ASSERT_EQ(1u, halConfigs.size());
        ASSERT_EQ(halConfigs[0].id, streamId);

        // Negative case: zero width/height must be rejected. Some HALs report
        // ILLEGAL_ARGUMENT, others INTERNAL_ERROR; both are accepted below.
        stream = {streamId++,
                  StreamType::OUTPUT,
                  static_cast<uint32_t>(0),
                  static_cast<uint32_t>(0),
                  static_cast<PixelFormat>(hfrStream.format),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  /*bufferSize*/ 0,
                  /*groupId*/ -1,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
                                  &config);

        config.streamConfigCounter = streamConfigCounter++;
        std::vector<HalStream> halConfig;
        ret = mSession->configureStreams(config, &halConfig);
        ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
                            ret.getServiceSpecificError() ||
                    static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError());

        // Negative case: unreasonably large dimensions must be rejected.
        stream = {streamId++,
                  StreamType::OUTPUT,
                  INT32_MAX,
                  INT32_MAX,
                  static_cast<PixelFormat>(hfrStream.format),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  /*bufferSize*/ 0,
                  /*groupId*/ -1,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
                                  &config);

        config.streamConfigCounter = streamConfigCounter++;
        halConfigs.clear();
        ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());

        // Negative case: an invalid pixel format must be rejected.
        stream = {streamId++,
                  StreamType::OUTPUT,
                  hfrStream.width,
                  hfrStream.height,
                  static_cast<PixelFormat>(UINT32_MAX),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  /*bufferSize*/ 0,
                  /*groupId*/ -1,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
                                  &config);

        config.streamConfigCounter = streamConfigCounter++;
        halConfigs.clear();
        ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());

        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
1319
1320// Verify that all supported video + snapshot stream combinations can
1321// be configured successfully.
TEST_P(CameraAidlTest, configureStreamsVideoStillOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputBlobStreams;
    std::vector<AvailableStream> outputVideoStreams;
    AvailableStream videoThreshold = {kMaxVideoWidth, kMaxVideoHeight,
                                      static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
    AvailableStream blobThreshold = {kMaxVideoWidth, kMaxVideoHeight,
                                     static_cast<int32_t>(PixelFormat::BLOB)};

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);

        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        // Check if camera support depth only; such devices have no
        // video/still color streams to combine, so they are skipped.
        if (isDepthOnly(staticMeta)) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        outputBlobStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold));
        ASSERT_NE(0u, outputBlobStreams.size());

        outputVideoStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputVideoStreams, &videoThreshold));
        ASSERT_NE(0u, outputVideoStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        uint32_t streamConfigCounter = 0;
        // Exercise the cross product of every supported BLOB (snapshot)
        // stream with every supported video stream.
        for (auto& blobIter : outputBlobStreams) {
            for (auto& videoIter : outputVideoStreams) {
                // Recording stream consumed by the video encoder.
                Stream videoStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        videoIter.width,
                        videoIter.height,
                        static_cast<PixelFormat>(videoIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                // JPEG snapshot stream, CPU-readable, JFIF dataspace.
                Stream blobStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        blobIter.width,
                        blobIter.height,
                        static_cast<PixelFormat>(blobIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_CPU_READ),
                        Dataspace::JFIF,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                std::vector<Stream> streams = {videoStream, blobStream};
                StreamConfiguration config;

                createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                          jpegBufferSize);
                verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
                                        /*expectStreamCombQuery*/ false);

                // Every video+snapshot combination must configure successfully
                // and return one HalStream per requested stream.
                config.streamConfigCounter = streamConfigCounter++;
                std::vector<HalStream> halConfigs;
                ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(2u, halConfigs.size());
            }
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
1419
1420// Generate and verify a camera capture request
1421TEST_P(CameraAidlTest, processCaptureRequestPreview) {
1422 // TODO(b/220897574): Failing with BUFFER_ERROR
1423 processCaptureRequestInternal(GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, RequestTemplate::PREVIEW,
1424 false /*secureOnlyCameras*/);
1425}
1426
1427// Generate and verify a secure camera capture request
1428TEST_P(CameraAidlTest, processSecureCaptureRequest) {
1429 processCaptureRequestInternal(GRALLOC1_PRODUCER_USAGE_PROTECTED, RequestTemplate::STILL_CAPTURE,
1430 true /*secureOnlyCameras*/);
1431}
1432
1433TEST_P(CameraAidlTest, processCaptureRequestPreviewStabilization) {
1434 std::unordered_map<std::string, nsecs_t> cameraDeviceToTimeLag;
1435 processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ false,
1436 cameraDeviceToTimeLag);
1437 processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ true,
1438 cameraDeviceToTimeLag);
1439}
1440
1441// Generate and verify a multi-camera capture request
1442TEST_P(CameraAidlTest, processMultiCaptureRequestPreview) {
1443 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1444 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
1445 static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
1446 int64_t bufferId = 1;
1447 uint32_t frameNumber = 1;
1448 std::vector<uint8_t> settings;
1449 std::vector<uint8_t> emptySettings;
1450 std::string invalidPhysicalId = "-1";
1451
1452 for (const auto& name : cameraDeviceNames) {
1453 std::string version, deviceId;
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +00001454 ALOGI("processMultiCaptureRequestPreview: Test device %s", name.c_str());
Avichal Rakesh362242f2022-02-08 12:40:53 -08001455 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1456 CameraMetadata metadata;
1457
1458 std::shared_ptr<ICameraDevice> unusedDevice;
1459 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &metadata /*out*/,
1460 &unusedDevice /*out*/);
1461
1462 camera_metadata_t* staticMeta =
1463 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
1464 Status rc = isLogicalMultiCamera(staticMeta);
1465 if (Status::OPERATION_NOT_SUPPORTED == rc) {
1466 ndk::ScopedAStatus ret = mSession->close();
1467 mSession = nullptr;
1468 ASSERT_TRUE(ret.isOk());
1469 continue;
1470 }
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +00001471 ASSERT_EQ(Status::OK, rc);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001472
1473 std::unordered_set<std::string> physicalIds;
1474 rc = getPhysicalCameraIds(staticMeta, &physicalIds);
1475 ASSERT_TRUE(Status::OK == rc);
1476 ASSERT_TRUE(physicalIds.size() > 1);
1477
1478 std::unordered_set<int32_t> physicalRequestKeyIDs;
1479 rc = getSupportedKeys(staticMeta, ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS,
1480 &physicalRequestKeyIDs);
1481 ASSERT_TRUE(Status::OK == rc);
1482 if (physicalRequestKeyIDs.empty()) {
1483 ndk::ScopedAStatus ret = mSession->close();
1484 mSession = nullptr;
1485 ASSERT_TRUE(ret.isOk());
1486 // The logical camera doesn't support any individual physical requests.
1487 continue;
1488 }
1489
1490 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultPreviewSettings;
1491 android::hardware::camera::common::V1_0::helper::CameraMetadata filteredSettings;
1492 constructFilteredSettings(mSession, physicalRequestKeyIDs, RequestTemplate::PREVIEW,
1493 &defaultPreviewSettings, &filteredSettings);
1494 if (filteredSettings.isEmpty()) {
1495 // No physical device settings in default request.
1496 ndk::ScopedAStatus ret = mSession->close();
1497 mSession = nullptr;
1498 ASSERT_TRUE(ret.isOk());
1499 continue;
1500 }
1501
1502 const camera_metadata_t* settingsBuffer = defaultPreviewSettings.getAndLock();
1503 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1504 settings.assign(rawSettingsBuffer,
1505 rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1506 CameraMetadata settingsMetadata = {settings};
1507 overrideRotateAndCrop(&settingsMetadata);
1508
1509 ndk::ScopedAStatus ret = mSession->close();
1510 mSession = nullptr;
1511 ASSERT_TRUE(ret.isOk());
1512
1513 // Leave only 2 physical devices in the id set.
1514 auto it = physicalIds.begin();
1515 std::string physicalDeviceId = *it;
1516 it++;
1517 physicalIds.erase(++it, physicalIds.end());
1518 ASSERT_EQ(physicalIds.size(), 2u);
1519
1520 std::vector<HalStream> halStreams;
1521 bool supportsPartialResults = false;
1522 bool useHalBufManager = false;
1523 int32_t partialResultCount = 0;
1524 Stream previewStream;
1525 std::shared_ptr<DeviceCb> cb;
1526
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +00001527 configurePreviewStreams(
1528 name, mProvider, &previewThreshold, physicalIds, &mSession, &previewStream,
1529 &halStreams /*out*/, &supportsPartialResults /*out*/, &partialResultCount /*out*/,
1530 &useHalBufManager /*out*/, &cb /*out*/, 0 /*streamConfigCounter*/, true);
1531 if (mSession == nullptr) {
1532 // stream combination not supported by HAL, skip test for device
1533 continue;
1534 }
Avichal Rakesh362242f2022-02-08 12:40:53 -08001535
1536 ::aidl::android::hardware::common::fmq::MQDescriptor<
1537 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1538 descriptor;
1539 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1540 ASSERT_TRUE(resultQueueRet.isOk());
1541 std::shared_ptr<ResultMetadataQueue> resultQueue =
1542 std::make_shared<ResultMetadataQueue>(descriptor);
1543 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1544 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
1545 resultQueue = nullptr;
1546 // Don't use the queue onwards.
1547 }
1548
1549 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1550 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1551 partialResultCount, physicalIds, resultQueue);
1552
1553 std::vector<CaptureRequest> requests(1);
1554 CaptureRequest& request = requests[0];
1555 request.frameNumber = frameNumber;
1556 request.fmqSettingsSize = 0;
Emilian Peev3d919f92022-04-20 13:50:59 -07001557 request.settings = settingsMetadata;
Avichal Rakesh362242f2022-02-08 12:40:53 -08001558
1559 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1560
1561 std::vector<buffer_handle_t> graphicBuffers;
1562 graphicBuffers.reserve(halStreams.size());
1563 outputBuffers.resize(halStreams.size());
1564 size_t k = 0;
1565 for (const auto& halStream : halStreams) {
1566 buffer_handle_t buffer_handle;
1567 if (useHalBufManager) {
1568 outputBuffers[k] = {halStream.id, /*bufferId*/ 0, NativeHandle(),
1569 BufferStatus::OK, NativeHandle(), NativeHandle()};
1570 } else {
1571 allocateGraphicBuffer(previewStream.width, previewStream.height,
1572 android_convertGralloc1To0Usage(
1573 static_cast<uint64_t>(halStream.producerUsage),
1574 static_cast<uint64_t>(halStream.consumerUsage)),
1575 halStream.overrideFormat, &buffer_handle);
1576 graphicBuffers.push_back(buffer_handle);
1577 outputBuffers[k] = {
1578 halStream.id, bufferId, ::android::makeToAidl(buffer_handle),
1579 BufferStatus::OK, NativeHandle(), NativeHandle()};
1580 bufferId++;
1581 }
1582 k++;
1583 }
1584
1585 std::vector<PhysicalCameraSetting> camSettings(1);
1586 const camera_metadata_t* filteredSettingsBuffer = filteredSettings.getAndLock();
1587 uint8_t* rawFilteredSettingsBuffer = (uint8_t*)filteredSettingsBuffer;
1588 camSettings[0].settings = {std::vector(
1589 rawFilteredSettingsBuffer,
1590 rawFilteredSettingsBuffer + get_camera_metadata_size(filteredSettingsBuffer))};
1591 overrideRotateAndCrop(&camSettings[0].settings);
1592 camSettings[0].fmqSettingsSize = 0;
1593 camSettings[0].physicalCameraId = physicalDeviceId;
1594
1595 request.inputBuffer = {
1596 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
1597 request.physicalCameraSettings = camSettings;
1598
1599 {
1600 std::unique_lock<std::mutex> l(mLock);
1601 mInflightMap.clear();
1602 mInflightMap[frameNumber] = inflightReq;
1603 }
1604
1605 int32_t numRequestProcessed = 0;
1606 std::vector<BufferCache> cachesToRemove;
1607 ndk::ScopedAStatus returnStatus =
1608 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1609 ASSERT_TRUE(returnStatus.isOk());
1610 ASSERT_EQ(numRequestProcessed, 1u);
1611
1612 {
1613 std::unique_lock<std::mutex> l(mLock);
1614 while (!inflightReq->errorCodeValid &&
1615 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1616 auto timeout = std::chrono::system_clock::now() +
1617 std::chrono::seconds(kStreamBufferTimeoutSec);
1618 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1619 }
1620
1621 ASSERT_FALSE(inflightReq->errorCodeValid);
1622 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1623
1624 request.frameNumber++;
1625 // Empty settings should be supported after the first call
1626 // for repeating requests.
1627 request.settings.metadata.clear();
1628 request.physicalCameraSettings[0].settings.metadata.clear();
1629 // The buffer has been registered to HAL by bufferId, so per
1630 // API contract we should send a null handle for this buffer
1631 request.outputBuffers[0].buffer = NativeHandle();
1632 mInflightMap.clear();
1633 inflightReq = std::make_shared<InFlightRequest>(
1634 static_cast<ssize_t>(physicalIds.size()), false, supportsPartialResults,
1635 partialResultCount, physicalIds, resultQueue);
1636 mInflightMap[request.frameNumber] = inflightReq;
1637 }
1638
1639 returnStatus =
1640 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1641 ASSERT_TRUE(returnStatus.isOk());
1642 ASSERT_EQ(numRequestProcessed, 1u);
1643
1644 {
1645 std::unique_lock<std::mutex> l(mLock);
1646 while (!inflightReq->errorCodeValid &&
1647 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1648 auto timeout = std::chrono::system_clock::now() +
1649 std::chrono::seconds(kStreamBufferTimeoutSec);
1650 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1651 }
1652
1653 ASSERT_FALSE(inflightReq->errorCodeValid);
1654 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1655 }
1656
1657 // Invalid physical camera id should fail process requests
1658 frameNumber++;
1659 camSettings[0].physicalCameraId = invalidPhysicalId;
1660 camSettings[0].settings.metadata = settings;
1661
1662 request.physicalCameraSettings = camSettings; // Invalid camera settings
1663 returnStatus =
1664 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1665 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
1666 returnStatus.getServiceSpecificError());
1667
1668 defaultPreviewSettings.unlock(settingsBuffer);
1669 filteredSettings.unlock(filteredSettingsBuffer);
1670
1671 if (useHalBufManager) {
1672 std::vector<int32_t> streamIds(halStreams.size());
1673 for (size_t i = 0; i < streamIds.size(); i++) {
1674 streamIds[i] = halStreams[i].id;
1675 }
1676 verifyBuffersReturned(mSession, streamIds, cb);
1677 }
1678
1679 ret = mSession->close();
1680 mSession = nullptr;
1681 ASSERT_TRUE(ret.isOk());
1682 }
1683}
1684
// Generate and verify an ultra high resolution capture request
//
// For every camera that reports ultra-high-resolution support
// (isUltraHighResolution()), this test:
//   1. Builds STILL_CAPTURE default settings and forces
//      ANDROID_SENSOR_PIXEL_MODE to MAXIMUM_RESOLUTION.
//   2. Configures a max-resolution stream set once per pixel format
//      (YCBCR_420_888 and RAW16).
//   3. Submits a single capture request per configuration and verifies the
//      HAL returns output buffers without signaling an error.
TEST_P(CameraAidlTest, processUltraHighResolutionRequest) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    int64_t bufferId = 1;
    int32_t frameNumber = 1;
    CameraMetadata settings;

    for (const auto& name : cameraDeviceNames) {
        std::string version, deviceId;
        ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
        CameraMetadata meta;

        // Open a throwaway session just to read static metadata and build the
        // request settings; it is closed again before stream configuration.
        std::shared_ptr<ICameraDevice> unusedDevice;
        openEmptyDeviceSession(name, mProvider, &mSession, &meta, &unusedDevice);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
        if (!isUltraHighResolution(staticMeta)) {
            // Device does not advertise ultra high resolution; skip it.
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }
        CameraMetadata req;
        android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
        ndk::ScopedAStatus ret =
                mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE, &req);
        ASSERT_TRUE(ret.isOk());

        // Validate the returned default-settings blob before trusting it.
        const camera_metadata_t* metadata =
                reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
        size_t expectedSize = req.metadata.size();
        int result = validate_camera_metadata_structure(metadata, &expectedSize);
        ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));

        size_t entryCount = get_camera_metadata_entry_count(metadata);
        ASSERT_GT(entryCount, 0u);
        defaultSettings = metadata;
        // Ask the sensor for full (unbinned) resolution output.
        uint8_t sensorPixelMode =
                static_cast<uint8_t>(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
        ASSERT_EQ(::android::OK,
                  defaultSettings.update(ANDROID_SENSOR_PIXEL_MODE, &sensorPixelMode, 1));

        // Serialize the edited settings into the AIDL request payload.
        // NOTE(review): settingsBuffer is never unlock()ed here; the helper
        // CameraMetadata destructor reclaims the buffer at end of iteration —
        // confirm no further update() calls are added while it stays locked.
        const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
        uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
        settings.metadata = std::vector(
                rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
        overrideRotateAndCrop(&settings);

        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());

        std::vector<HalStream> halStreams;
        bool supportsPartialResults = false;
        bool useHalBufManager = false;
        int32_t partialResultCount = 0;
        Stream previewStream;
        std::shared_ptr<DeviceCb> cb;

        // Exercise both a YUV and a RAW max-resolution stream configuration.
        std::list<PixelFormat> pixelFormats = {PixelFormat::YCBCR_420_888, PixelFormat::RAW16};
        for (PixelFormat format : pixelFormats) {
            configureStreams(name, mProvider, format, &mSession, &previewStream, &halStreams,
                             &supportsPartialResults, &partialResultCount, &useHalBufManager, &cb,
                             0, /*maxResolution*/ true);
            ASSERT_NE(mSession, nullptr);

            ::aidl::android::hardware::common::fmq::MQDescriptor<
                    int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
                    descriptor;
            auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
            ASSERT_TRUE(resultQueueRet.isOk());

            std::shared_ptr<ResultMetadataQueue> resultQueue =
                    std::make_shared<ResultMetadataQueue>(descriptor);
            if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
                ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
                resultQueue = nullptr;
                // Don't use the queue onwards.
            }

            std::vector<buffer_handle_t> graphicBuffers;
            graphicBuffers.reserve(halStreams.size());
            // One in-flight entry expecting a buffer from every configured stream.
            std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
                    static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
                    partialResultCount, std::unordered_set<std::string>(), resultQueue);

            std::vector<CaptureRequest> requests(1);
            CaptureRequest& request = requests[0];
            std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
            outputBuffers.resize(halStreams.size());

            size_t k = 0;
            for (const auto& halStream : halStreams) {
                buffer_handle_t buffer_handle;
                if (useHalBufManager) {
                    // HAL buffer manager mode: send an empty handle, the HAL
                    // fetches its own buffers.
                    outputBuffers[k] = {halStream.id, 0,
                                        NativeHandle(), BufferStatus::OK,
                                        NativeHandle(), NativeHandle()};
                } else {
                    allocateGraphicBuffer(previewStream.width, previewStream.height,
                                          android_convertGralloc1To0Usage(
                                                  static_cast<uint64_t>(halStream.producerUsage),
                                                  static_cast<uint64_t>(halStream.consumerUsage)),
                                          halStream.overrideFormat, &buffer_handle);
                    graphicBuffers.push_back(buffer_handle);
                    outputBuffers[k] = {
                            halStream.id, bufferId, ::android::makeToAidl(buffer_handle),
                            BufferStatus::OK, NativeHandle(), NativeHandle()};
                    bufferId++;
                }
                k++;
            }

            // No reprocess input: streamId -1 with an ERROR status placeholder.
            request.inputBuffer = {
                    -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
            request.frameNumber = frameNumber;
            request.fmqSettingsSize = 0;
            request.settings = settings;
            request.inputWidth = 0;
            request.inputHeight = 0;

            {
                std::unique_lock<std::mutex> l(mLock);
                mInflightMap.clear();
                mInflightMap[frameNumber] = inflightReq;
            }

            int32_t numRequestProcessed = 0;
            std::vector<BufferCache> cachesToRemove;
            ndk::ScopedAStatus returnStatus =
                    mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
            ASSERT_TRUE(returnStatus.isOk());
            ASSERT_EQ(numRequestProcessed, 1u);

            {
                // Block (with per-wake timeout) until all buffers and the final
                // result metadata for the request have been delivered, or an
                // error notification arrives.
                std::unique_lock<std::mutex> l(mLock);
                while (!inflightReq->errorCodeValid &&
                       ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kStreamBufferTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
                }

                ASSERT_FALSE(inflightReq->errorCodeValid);
                ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
            }
            if (useHalBufManager) {
                // Make sure the HAL hands back every stream's buffers.
                std::vector<int32_t> streamIds(halStreams.size());
                for (size_t i = 0; i < streamIds.size(); i++) {
                    streamIds[i] = halStreams[i].id;
                }
                verifyBuffersReturned(mSession, streamIds, cb);
            }

            ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
        }
    }
}
1844
// Generate and verify 10-bit dynamic range request
//
// For every camera that reports 10-bit dynamic range capability
// (is10BitDynamicRangeCapable()), this test configures an
// IMPLEMENTATION_DEFINED stream once per advertised dynamic range profile,
// submits one STILL_CAPTURE request per configuration, and checks via
// verify10BitMetadata() that the returned buffers are consistent with the
// requested profile.
TEST_P(CameraAidlTest, process10BitDynamicRangeRequest) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    int64_t bufferId = 1;
    int32_t frameNumber = 1;
    CameraMetadata settings;

    for (const auto& name : cameraDeviceNames) {
        std::string version, deviceId;
        ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> device;
        openEmptyDeviceSession(name, mProvider, &mSession, &meta, &device);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
        if (!is10BitDynamicRangeCapable(staticMeta)) {
            // Device cannot produce 10-bit output; skip it.
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }
        // A capable device must advertise at least one 10-bit profile.
        std::vector<RequestAvailableDynamicRangeProfilesMap> profileList;
        get10BitDynamicRangeProfiles(staticMeta, &profileList);
        ASSERT_FALSE(profileList.empty());

        CameraMetadata req;
        android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
        ndk::ScopedAStatus ret =
                mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE, &req);
        ASSERT_TRUE(ret.isOk());

        // Validate the returned default-settings blob before trusting it.
        const camera_metadata_t* metadata =
                reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
        size_t expectedSize = req.metadata.size();
        int result = validate_camera_metadata_structure(metadata, &expectedSize);
        ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));

        size_t entryCount = get_camera_metadata_entry_count(metadata);
        ASSERT_GT(entryCount, 0u);
        defaultSettings = metadata;

        // Serialize the settings into the AIDL request payload.
        // NOTE(review): settingsBuffer is never unlock()ed; the helper
        // CameraMetadata destructor reclaims it at end of iteration.
        const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
        uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
        settings.metadata = std::vector(
                rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
        overrideRotateAndCrop(&settings);

        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());

        std::vector<HalStream> halStreams;
        bool supportsPartialResults = false;
        bool useHalBufManager = false;
        int32_t partialResultCount = 0;
        Stream previewStream;
        std::shared_ptr<DeviceCb> cb;
        // One full configure/capture/close cycle per dynamic range profile.
        for (const auto& profile : profileList) {
            configureStreams(name, mProvider, PixelFormat::IMPLEMENTATION_DEFINED, &mSession,
                             &previewStream, &halStreams, &supportsPartialResults,
                             &partialResultCount, &useHalBufManager, &cb, 0,
                             /*maxResolution*/ false, profile);
            ASSERT_NE(mSession, nullptr);

            ::aidl::android::hardware::common::fmq::MQDescriptor<
                    int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
                    descriptor;
            auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
            ASSERT_TRUE(resultQueueRet.isOk());

            std::shared_ptr<ResultMetadataQueue> resultQueue =
                    std::make_shared<ResultMetadataQueue>(descriptor);
            if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
                ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
                resultQueue = nullptr;
                // Don't use the queue onwards.
            }

            std::vector<buffer_handle_t> graphicBuffers;
            graphicBuffers.reserve(halStreams.size());

            // One in-flight entry expecting a buffer from every configured stream.
            std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
                    static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
                    partialResultCount, std::unordered_set<std::string>(), resultQueue);

            std::vector<CaptureRequest> requests(1);
            CaptureRequest& request = requests[0];
            std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
            outputBuffers.resize(halStreams.size());

            size_t k = 0;
            for (const auto& halStream : halStreams) {
                buffer_handle_t buffer_handle;
                if (useHalBufManager) {
                    // HAL buffer manager mode: send an empty handle, the HAL
                    // fetches its own buffers.
                    outputBuffers[k] = {halStream.id, 0,
                                        NativeHandle(), BufferStatus::OK,
                                        NativeHandle(), NativeHandle()};
                } else {
                    allocateGraphicBuffer(previewStream.width, previewStream.height,
                                          android_convertGralloc1To0Usage(
                                                  static_cast<uint64_t>(halStream.producerUsage),
                                                  static_cast<uint64_t>(halStream.consumerUsage)),
                                          halStream.overrideFormat, &buffer_handle);

                    graphicBuffers.push_back(buffer_handle);
                    outputBuffers[k] = {
                            halStream.id, bufferId, android::makeToAidl(buffer_handle),
                            BufferStatus::OK, NativeHandle(), NativeHandle()};
                    bufferId++;
                }
                k++;
            }

            // No reprocess input: streamId -1 with an ERROR status placeholder.
            request.inputBuffer = {
                    -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
            request.frameNumber = frameNumber;
            request.fmqSettingsSize = 0;
            request.settings = settings;
            request.inputWidth = 0;
            request.inputHeight = 0;

            {
                std::unique_lock<std::mutex> l(mLock);
                mInflightMap.clear();
                mInflightMap[frameNumber] = inflightReq;
            }

            int32_t numRequestProcessed = 0;
            std::vector<BufferCache> cachesToRemove;
            ndk::ScopedAStatus returnStatus =
                    mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
            ASSERT_TRUE(returnStatus.isOk());
            ASSERT_EQ(numRequestProcessed, 1u);

            {
                // Block (with per-wake timeout) until all buffers and the final
                // result metadata for the request have been delivered, or an
                // error notification arrives.
                std::unique_lock<std::mutex> l(mLock);
                while (!inflightReq->errorCodeValid &&
                       ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kStreamBufferTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
                }

                ASSERT_FALSE(inflightReq->errorCodeValid);
                ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
                // Check the returned buffers against the requested 10-bit profile.
                verify10BitMetadata(mHandleImporter, *inflightReq, profile);
            }
            if (useHalBufManager) {
                // Flush streams and wait for the HAL to hand buffers back.
                std::vector<int32_t> streamIds(halStreams.size());
                for (size_t i = 0; i < streamIds.size(); i++) {
                    streamIds[i] = halStreams[i].id;
                }
                mSession->signalStreamFlush(streamIds, /*streamConfigCounter*/ 0);
                cb->waitForBuffersReturned();
            }

            ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
        }
    }
}
2006
2007// Generate and verify a burst containing alternating sensor sensitivity values
2008TEST_P(CameraAidlTest, processCaptureRequestBurstISO) {
2009 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2010 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2011 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2012 int64_t bufferId = 1;
2013 int32_t frameNumber = 1;
2014 float isoTol = .03f;
2015 CameraMetadata settings;
2016
2017 for (const auto& name : cameraDeviceNames) {
2018 CameraMetadata meta;
2019 settings.metadata.clear();
2020 std::shared_ptr<ICameraDevice> unusedDevice;
2021 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2022 &unusedDevice /*out*/);
2023 camera_metadata_t* staticMetaBuffer =
2024 clone_camera_metadata(reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
2025 ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta(
2026 staticMetaBuffer);
2027
2028 camera_metadata_entry_t hwLevel = staticMeta.find(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL);
2029 ASSERT_TRUE(0 < hwLevel.count);
2030 if (ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED == hwLevel.data.u8[0] ||
2031 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL == hwLevel.data.u8[0]) {
2032 // Limited/External devices can skip this test
2033 ndk::ScopedAStatus ret = mSession->close();
2034 mSession = nullptr;
2035 ASSERT_TRUE(ret.isOk());
2036 continue;
2037 }
2038
2039 camera_metadata_entry_t isoRange = staticMeta.find(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE);
2040 ASSERT_EQ(isoRange.count, 2u);
2041
2042 ndk::ScopedAStatus ret = mSession->close();
2043 mSession = nullptr;
2044 ASSERT_TRUE(ret.isOk());
2045
2046 bool supportsPartialResults = false;
2047 bool useHalBufManager = false;
2048 int32_t partialResultCount = 0;
2049 Stream previewStream;
2050 std::vector<HalStream> halStreams;
2051 std::shared_ptr<DeviceCb> cb;
2052 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2053 &previewStream /*out*/, &halStreams /*out*/,
2054 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2055 &useHalBufManager /*out*/, &cb /*out*/);
2056
2057 ::aidl::android::hardware::common::fmq::MQDescriptor<
2058 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2059 descriptor;
2060 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2061 std::shared_ptr<ResultMetadataQueue> resultQueue =
2062 std::make_shared<ResultMetadataQueue>(descriptor);
2063 ASSERT_TRUE(resultQueueRet.isOk());
2064 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2065 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
2066 resultQueue = nullptr;
2067 // Don't use the queue onwards.
2068 }
2069
2070 ret = mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &settings);
2071 ASSERT_TRUE(ret.isOk());
2072
2073 ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
2074 std::vector<CaptureRequest> requests(kBurstFrameCount);
2075 std::vector<buffer_handle_t> buffers(kBurstFrameCount);
2076 std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount);
2077 std::vector<int32_t> isoValues(kBurstFrameCount);
2078 std::vector<CameraMetadata> requestSettings(kBurstFrameCount);
2079
2080 for (int32_t i = 0; i < kBurstFrameCount; i++) {
2081 std::unique_lock<std::mutex> l(mLock);
2082 CaptureRequest& request = requests[i];
2083 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2084 outputBuffers.resize(1);
2085 StreamBuffer& outputBuffer = outputBuffers[0];
2086
2087 isoValues[i] = ((i % 2) == 0) ? isoRange.data.i32[0] : isoRange.data.i32[1];
2088 if (useHalBufManager) {
2089 outputBuffer = {halStreams[0].id, 0,
2090 NativeHandle(), BufferStatus::OK,
2091 NativeHandle(), NativeHandle()};
2092 } else {
2093 allocateGraphicBuffer(previewStream.width, previewStream.height,
2094 android_convertGralloc1To0Usage(
2095 static_cast<uint64_t>(halStreams[0].producerUsage),
2096 static_cast<uint64_t>(halStreams[0].consumerUsage)),
2097 halStreams[0].overrideFormat, &buffers[i]);
2098 outputBuffer = {halStreams[0].id, bufferId + i, ::android::makeToAidl(buffers[i]),
2099 BufferStatus::OK, NativeHandle(), NativeHandle()};
2100 }
2101
2102 requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));
2103
2104 // Disable all 3A routines
2105 uint8_t mode = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF);
2106 ASSERT_EQ(::android::OK, requestMeta.update(ANDROID_CONTROL_MODE, &mode, 1));
2107 ASSERT_EQ(::android::OK,
2108 requestMeta.update(ANDROID_SENSOR_SENSITIVITY, &isoValues[i], 1));
2109 camera_metadata_t* metaBuffer = requestMeta.release();
2110 uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2111 requestSettings[i].metadata = std::vector(
2112 rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2113 overrideRotateAndCrop(&(requestSettings[i]));
2114
2115 request.frameNumber = frameNumber + i;
2116 request.fmqSettingsSize = 0;
2117 request.settings = requestSettings[i];
2118 request.inputBuffer = {
2119 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2120
2121 inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2122 partialResultCount, resultQueue);
2123 mInflightMap[frameNumber + i] = inflightReqs[i];
2124 }
2125
2126 int32_t numRequestProcessed = 0;
2127 std::vector<BufferCache> cachesToRemove;
2128
2129 ndk::ScopedAStatus returnStatus =
2130 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2131 ASSERT_TRUE(returnStatus.isOk());
2132 ASSERT_EQ(numRequestProcessed, kBurstFrameCount);
2133
2134 for (size_t i = 0; i < kBurstFrameCount; i++) {
2135 std::unique_lock<std::mutex> l(mLock);
2136 while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
2137 (!inflightReqs[i]->haveResultMetadata))) {
2138 auto timeout = std::chrono::system_clock::now() +
2139 std::chrono::seconds(kStreamBufferTimeoutSec);
2140 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2141 }
2142
2143 ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
2144 ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
2145 ASSERT_EQ(previewStream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
2146 ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
2147 ASSERT_TRUE(inflightReqs[i]->collectedResult.exists(ANDROID_SENSOR_SENSITIVITY));
2148 camera_metadata_entry_t isoResult =
2149 inflightReqs[i]->collectedResult.find(ANDROID_SENSOR_SENSITIVITY);
2150 ASSERT_TRUE(std::abs(isoResult.data.i32[0] - isoValues[i]) <=
2151 std::round(isoValues[i] * isoTol));
2152 }
2153
2154 if (useHalBufManager) {
2155 verifyBuffersReturned(mSession, previewStream.id, cb);
2156 }
2157 ret = mSession->close();
2158 mSession = nullptr;
2159 ASSERT_TRUE(ret.isOk());
2160 }
2161}
2162
2163// Test whether an incorrect capture request with missing settings will
2164// be reported correctly.
2165TEST_P(CameraAidlTest, processCaptureRequestInvalidSinglePreview) {
2166 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2167 std::vector<AvailableStream> outputPreviewStreams;
2168 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2169 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2170 int64_t bufferId = 1;
2171 int32_t frameNumber = 1;
2172 CameraMetadata settings;
2173
2174 for (const auto& name : cameraDeviceNames) {
2175 Stream previewStream;
2176 std::vector<HalStream> halStreams;
2177 std::shared_ptr<DeviceCb> cb;
2178 bool supportsPartialResults = false;
2179 bool useHalBufManager = false;
2180 int32_t partialResultCount = 0;
2181 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2182 &previewStream /*out*/, &halStreams /*out*/,
2183 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2184 &useHalBufManager /*out*/, &cb /*out*/);
2185 ASSERT_NE(mSession, nullptr);
2186 ASSERT_FALSE(halStreams.empty());
2187
2188 buffer_handle_t buffer_handle = nullptr;
2189
2190 if (useHalBufManager) {
2191 bufferId = 0;
2192 } else {
2193 allocateGraphicBuffer(previewStream.width, previewStream.height,
2194 android_convertGralloc1To0Usage(
2195 static_cast<uint64_t>(halStreams[0].producerUsage),
2196 static_cast<uint64_t>(halStreams[0].consumerUsage)),
2197 halStreams[0].overrideFormat, &buffer_handle);
2198 }
2199
2200 std::vector<CaptureRequest> requests(1);
2201 CaptureRequest& request = requests[0];
2202 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2203 outputBuffers.resize(1);
2204 StreamBuffer& outputBuffer = outputBuffers[0];
2205
2206 outputBuffer = {
2207 halStreams[0].id,
2208 bufferId,
2209 buffer_handle == nullptr ? NativeHandle() : ::android::makeToAidl(buffer_handle),
2210 BufferStatus::OK,
2211 NativeHandle(),
2212 NativeHandle()};
2213
2214 request.inputBuffer = {
2215 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2216 request.frameNumber = frameNumber;
2217 request.fmqSettingsSize = 0;
2218 request.settings = settings;
2219
2220 // Settings were not correctly initialized, we should fail here
2221 int32_t numRequestProcessed = 0;
2222 std::vector<BufferCache> cachesToRemove;
2223 ndk::ScopedAStatus ret =
2224 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2225 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
2226 ASSERT_EQ(numRequestProcessed, 0u);
2227
2228 ret = mSession->close();
2229 mSession = nullptr;
2230 ASSERT_TRUE(ret.isOk());
2231 }
2232}
2233
2234// Verify camera offline session behavior
2235TEST_P(CameraAidlTest, switchToOffline) {
2236 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2237 AvailableStream threshold = {kMaxStillWidth, kMaxStillHeight,
2238 static_cast<int32_t>(PixelFormat::BLOB)};
2239 int64_t bufferId = 1;
2240 int32_t frameNumber = 1;
2241 CameraMetadata settings;
2242
2243 for (const auto& name : cameraDeviceNames) {
2244 CameraMetadata meta;
2245 {
2246 std::shared_ptr<ICameraDevice> unusedDevice;
2247 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2248 &unusedDevice);
2249 camera_metadata_t* staticMetaBuffer = clone_camera_metadata(
2250 reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
2251 ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta(
2252 staticMetaBuffer);
2253
2254 if (isOfflineSessionSupported(staticMetaBuffer) != Status::OK) {
2255 ndk::ScopedAStatus ret = mSession->close();
2256 mSession = nullptr;
2257 ASSERT_TRUE(ret.isOk());
2258 continue;
2259 }
2260 ndk::ScopedAStatus ret = mSession->close();
2261 mSession = nullptr;
2262 ASSERT_TRUE(ret.isOk());
2263 }
2264
2265 bool supportsPartialResults = false;
2266 int32_t partialResultCount = 0;
2267 Stream stream;
2268 std::vector<HalStream> halStreams;
2269 std::shared_ptr<DeviceCb> cb;
2270 int32_t jpegBufferSize;
2271 bool useHalBufManager;
2272 configureOfflineStillStream(name, mProvider, &threshold, &mSession /*out*/, &stream /*out*/,
2273 &halStreams /*out*/, &supportsPartialResults /*out*/,
2274 &partialResultCount /*out*/, &cb /*out*/,
2275 &jpegBufferSize /*out*/, &useHalBufManager /*out*/);
2276
2277 auto ret = mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE,
2278 &settings);
2279 ASSERT_TRUE(ret.isOk());
2280
2281 ::aidl::android::hardware::common::fmq::MQDescriptor<
2282 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2283 descriptor;
2284
2285 ndk::ScopedAStatus resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2286 ASSERT_TRUE(resultQueueRet.isOk());
2287 std::shared_ptr<ResultMetadataQueue> resultQueue =
2288 std::make_shared<ResultMetadataQueue>(descriptor);
2289 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2290 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
2291 resultQueue = nullptr;
2292 // Don't use the queue onwards.
2293 }
2294
2295 ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
2296
2297 std::vector<buffer_handle_t> buffers(kBurstFrameCount);
2298 std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount);
2299 std::vector<CameraMetadata> requestSettings(kBurstFrameCount);
2300
2301 std::vector<CaptureRequest> requests(kBurstFrameCount);
2302
2303 HalStream halStream = halStreams[0];
2304 for (uint32_t i = 0; i < kBurstFrameCount; i++) {
2305 CaptureRequest& request = requests[i];
2306 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2307 outputBuffers.resize(1);
2308 StreamBuffer& outputBuffer = outputBuffers[0];
2309
2310 std::unique_lock<std::mutex> l(mLock);
2311 if (useHalBufManager) {
2312 outputBuffer = {halStream.id, 0, NativeHandle(), BufferStatus::OK, NativeHandle(),
2313 NativeHandle()};
2314 } else {
2315 // jpeg buffer (w,h) = (blobLen, 1)
2316 allocateGraphicBuffer(jpegBufferSize, /*height*/ 1,
2317 android_convertGralloc1To0Usage(
2318 static_cast<uint64_t>(halStream.producerUsage),
2319 static_cast<uint64_t>(halStream.consumerUsage)),
2320 halStream.overrideFormat, &buffers[i]);
2321 outputBuffer = {halStream.id, bufferId + i, ::android::makeToAidl(buffers[i]),
2322 BufferStatus::OK, NativeHandle(), NativeHandle()};
2323 }
2324
2325 requestMeta.clear();
2326 requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));
2327
2328 camera_metadata_t* metaBuffer = requestMeta.release();
2329 uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2330 requestSettings[i].metadata = std::vector(
2331 rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2332 overrideRotateAndCrop(&requestSettings[i]);
2333
2334 request.frameNumber = frameNumber + i;
2335 request.fmqSettingsSize = 0;
2336 request.settings = requestSettings[i];
2337 request.inputBuffer = {/*streamId*/ -1,
2338 /*bufferId*/ 0, NativeHandle(),
2339 BufferStatus::ERROR, NativeHandle(),
2340 NativeHandle()};
2341
2342 inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2343 partialResultCount, resultQueue);
2344 mInflightMap[frameNumber + i] = inflightReqs[i];
2345 }
2346
2347 int32_t numRequestProcessed = 0;
2348 std::vector<BufferCache> cachesToRemove;
2349
2350 ndk::ScopedAStatus returnStatus =
2351 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2352 ASSERT_TRUE(returnStatus.isOk());
2353 ASSERT_EQ(numRequestProcessed, kBurstFrameCount);
2354
2355 std::vector<int32_t> offlineStreamIds = {halStream.id};
2356 CameraOfflineSessionInfo offlineSessionInfo;
2357 std::shared_ptr<ICameraOfflineSession> offlineSession;
2358 returnStatus =
2359 mSession->switchToOffline(offlineStreamIds, &offlineSessionInfo, &offlineSession);
2360
2361 if (!halStreams[0].supportOffline) {
2362 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
2363 returnStatus.getServiceSpecificError());
2364 ret = mSession->close();
2365 mSession = nullptr;
2366 ASSERT_TRUE(ret.isOk());
2367 continue;
2368 }
2369
2370 ASSERT_TRUE(returnStatus.isOk());
2371 // Hal might be unable to find any requests qualified for offline mode.
2372 if (offlineSession == nullptr) {
2373 ret = mSession->close();
2374 mSession = nullptr;
2375 ASSERT_TRUE(ret.isOk());
2376 continue;
2377 }
2378
2379 ASSERT_EQ(offlineSessionInfo.offlineStreams.size(), 1u);
2380 ASSERT_EQ(offlineSessionInfo.offlineStreams[0].id, halStream.id);
2381 ASSERT_NE(offlineSessionInfo.offlineRequests.size(), 0u);
2382
2383 // close device session to make sure offline session does not rely on it
2384 ret = mSession->close();
2385 mSession = nullptr;
2386 ASSERT_TRUE(ret.isOk());
2387
2388 ::aidl::android::hardware::common::fmq::MQDescriptor<
2389 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2390 offlineResultDescriptor;
2391
2392 auto offlineResultQueueRet =
2393 offlineSession->getCaptureResultMetadataQueue(&offlineResultDescriptor);
2394 std::shared_ptr<ResultMetadataQueue> offlineResultQueue =
2395 std::make_shared<ResultMetadataQueue>(descriptor);
2396 if (!offlineResultQueue->isValid() || offlineResultQueue->availableToWrite() <= 0) {
2397 ALOGE("%s: offline session returns empty result metadata fmq, not use it", __func__);
2398 offlineResultQueue = nullptr;
2399 // Don't use the queue onwards.
2400 }
2401 ASSERT_TRUE(offlineResultQueueRet.isOk());
2402
2403 updateInflightResultQueue(offlineResultQueue);
2404
2405 ret = offlineSession->setCallback(cb);
2406 ASSERT_TRUE(ret.isOk());
2407
2408 for (size_t i = 0; i < kBurstFrameCount; i++) {
2409 std::unique_lock<std::mutex> l(mLock);
2410 while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
2411 (!inflightReqs[i]->haveResultMetadata))) {
2412 auto timeout = std::chrono::system_clock::now() +
2413 std::chrono::seconds(kStreamBufferTimeoutSec);
2414 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2415 }
2416
2417 ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
2418 ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
2419 ASSERT_EQ(stream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
2420 ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
2421 }
2422
2423 ret = offlineSession->close();
2424 ASSERT_TRUE(ret.isOk());
2425 }
2426}
2427
2428// Check whether an invalid capture request with missing output buffers
2429// will be reported correctly.
2430TEST_P(CameraAidlTest, processCaptureRequestInvalidBuffer) {
2431 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2432 std::vector<AvailableStream> outputBlobStreams;
2433 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2434 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2435 int32_t frameNumber = 1;
2436 CameraMetadata settings;
2437
2438 for (const auto& name : cameraDeviceNames) {
2439 Stream previewStream;
2440 std::vector<HalStream> halStreams;
2441 std::shared_ptr<DeviceCb> cb;
2442 bool supportsPartialResults = false;
2443 bool useHalBufManager = false;
2444 int32_t partialResultCount = 0;
2445 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2446 &previewStream /*out*/, &halStreams /*out*/,
2447 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2448 &useHalBufManager /*out*/, &cb /*out*/);
2449
2450 RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
2451 ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings);
2452 ASSERT_TRUE(ret.isOk());
2453 overrideRotateAndCrop(&settings);
2454
2455 std::vector<CaptureRequest> requests(1);
2456 CaptureRequest& request = requests[0];
2457 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2458 outputBuffers.resize(1);
2459 // Empty output buffer
2460 outputBuffers[0] = {
2461 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2462
2463 request.inputBuffer = {
2464 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2465 request.frameNumber = frameNumber;
2466 request.fmqSettingsSize = 0;
2467 request.settings = settings;
2468
2469 // Output buffers are missing, we should fail here
2470 int32_t numRequestProcessed = 0;
2471 std::vector<BufferCache> cachesToRemove;
2472 ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2473 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
2474 ASSERT_EQ(numRequestProcessed, 0u);
2475
2476 ret = mSession->close();
2477 mSession = nullptr;
2478 ASSERT_TRUE(ret.isOk());
2479 }
2480}
2481
// Generate, trigger and flush a preview request
TEST_P(CameraAidlTest, flushPreviewRequest) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputPreviewStreams;
    // Upper bound on the preview resolution used to select a stream size.
    AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
                                        static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
    int64_t bufferId = 1;
    int32_t frameNumber = 1;
    CameraMetadata settings;

    for (const auto& name : cameraDeviceNames) {
        Stream previewStream;
        std::vector<HalStream> halStreams;
        std::shared_ptr<DeviceCb> cb;
        bool supportsPartialResults = false;
        bool useHalBufManager = false;
        int32_t partialResultCount = 0;

        // Opens a session on 'name' and configures a single preview stream.
        configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
                               &previewStream /*out*/, &halStreams /*out*/,
                               &supportsPartialResults /*out*/, &partialResultCount /*out*/,
                               &useHalBufManager /*out*/, &cb /*out*/);

        ASSERT_NE(mSession, nullptr);
        ASSERT_NE(cb, nullptr);
        ASSERT_FALSE(halStreams.empty());

        ::aidl::android::hardware::common::fmq::MQDescriptor<
                int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
                descriptor;

        // Optional fast message queue for result metadata; fall back to
        // regular callbacks (resultQueue == nullptr) if it is unusable.
        auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
        std::shared_ptr<ResultMetadataQueue> resultQueue =
                std::make_shared<ResultMetadataQueue>(descriptor);
        ASSERT_TRUE(resultQueueRet.isOk());
        if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
            ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
            resultQueue = nullptr;
            // Don't use the queue onwards.
        }

        // Track exactly one in-flight request expecting a single output buffer.
        std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
                1, false, supportsPartialResults, partialResultCount, resultQueue);
        RequestTemplate reqTemplate = RequestTemplate::PREVIEW;

        ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings);
        ASSERT_TRUE(ret.isOk());
        overrideRotateAndCrop(&settings);

        buffer_handle_t buffer_handle;
        std::vector<CaptureRequest> requests(1);
        CaptureRequest& request = requests[0];
        std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
        outputBuffers.resize(1);
        StreamBuffer& outputBuffer = outputBuffers[0];
        if (useHalBufManager) {
            // HAL buffer manager mode: the HAL requests buffers itself, so send
            // an empty buffer handle with bufferId 0.
            bufferId = 0;
            outputBuffer = {halStreams[0].id, bufferId, NativeHandle(),
                            BufferStatus::OK, NativeHandle(), NativeHandle()};
        } else {
            // Framework-managed buffers: allocate a gralloc buffer matching the
            // HAL stream's usage flags and override format.
            allocateGraphicBuffer(previewStream.width, previewStream.height,
                                  android_convertGralloc1To0Usage(
                                          static_cast<uint64_t>(halStreams[0].producerUsage),
                                          static_cast<uint64_t>(halStreams[0].consumerUsage)),
                                  halStreams[0].overrideFormat, &buffer_handle);
            outputBuffer = {halStreams[0].id, bufferId, ::android::makeToAidl(buffer_handle),
                            BufferStatus::OK, NativeHandle(), NativeHandle()};
        }

        request.frameNumber = frameNumber;
        request.fmqSettingsSize = 0;
        request.settings = settings;
        // streamId -1 marks "no input buffer".
        request.inputBuffer = {
                -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};

        // Register the in-flight request before submitting so callbacks
        // arriving on another thread can find it.
        {
            std::unique_lock<std::mutex> l(mLock);
            mInflightMap.clear();
            mInflightMap[frameNumber] = inflightReq;
        }

        int32_t numRequestProcessed = 0;
        std::vector<BufferCache> cachesToRemove;
        ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
        ASSERT_TRUE(ret.isOk());
        ASSERT_EQ(numRequestProcessed, 1u);

        // Flush before waiting for request to complete.
        ndk::ScopedAStatus returnStatus = mSession->flush();
        ASSERT_TRUE(returnStatus.isOk());

        {
            std::unique_lock<std::mutex> l(mLock);
            // Wait until either an error notification arrives or all buffers
            // plus the result metadata for the request have been delivered.
            while (!inflightReq->errorCodeValid &&
                   ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
                auto timeout = std::chrono::system_clock::now() +
                               std::chrono::seconds(kStreamBufferTimeoutSec);
                ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
            }

            if (!inflightReq->errorCodeValid) {
                // Request completed normally before the flush took effect.
                ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
                ASSERT_EQ(previewStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId);
            } else {
                // Request was flushed: only request/result/buffer errors are
                // acceptable outcomes; a device error fails the test.
                switch (inflightReq->errorCode) {
                    case ErrorCode::ERROR_REQUEST:
                    case ErrorCode::ERROR_RESULT:
                    case ErrorCode::ERROR_BUFFER:
                        // Expected
                        break;
                    case ErrorCode::ERROR_DEVICE:
                    default:
                        FAIL() << "Unexpected error:"
                               << static_cast<uint32_t>(inflightReq->errorCode);
                }
            }
        }

        if (useHalBufManager) {
            // Ensure the HAL returned every buffer it requested for the stream.
            verifyBuffersReturned(mSession, previewStream.id, cb);
        }

        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
2609
2610// Verify that camera flushes correctly without any pending requests.
2611TEST_P(CameraAidlTest, flushEmpty) {
2612 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2613 std::vector<AvailableStream> outputPreviewStreams;
2614 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2615 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2616
2617 for (const auto& name : cameraDeviceNames) {
2618 Stream previewStream;
2619 std::vector<HalStream> halStreams;
2620 std::shared_ptr<DeviceCb> cb;
2621 bool supportsPartialResults = false;
2622 bool useHalBufManager = false;
2623
2624 int32_t partialResultCount = 0;
2625 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2626 &previewStream /*out*/, &halStreams /*out*/,
2627 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2628 &useHalBufManager /*out*/, &cb /*out*/);
2629
2630 ndk::ScopedAStatus returnStatus = mSession->flush();
2631 ASSERT_TRUE(returnStatus.isOk());
2632
2633 {
2634 std::unique_lock<std::mutex> l(mLock);
2635 auto timeout = std::chrono::system_clock::now() +
2636 std::chrono::milliseconds(kEmptyFlushTimeoutMSec);
2637 ASSERT_EQ(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2638 }
2639
2640 ndk::ScopedAStatus ret = mSession->close();
2641 mSession = nullptr;
2642 ASSERT_TRUE(ret.isOk());
2643 }
2644}
2645
// Test camera provider notify method
TEST_P(CameraAidlTest, providerDeviceStateNotification) {
    // Notify a non-default device state, then restore NORMAL so subsequent
    // tests run with the provider in the default state.
    notifyDeviceState(ICameraProvider::DEVICE_STATE_BACK_COVERED);
    notifyDeviceState(ICameraProvider::DEVICE_STATE_NORMAL);
}
2651
// Verify that all supported stream formats and sizes can be configured
// successfully for injection camera.
TEST_P(CameraAidlTest, configureInjectionStreamsAvailableOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputStreams;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata metadata;

        std::shared_ptr<ICameraInjectionSession> injectionSession;
        std::shared_ptr<ICameraDevice> unusedDevice;
        openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
                                  &unusedDevice /*out*/);
        if (injectionSession == nullptr) {
            // Device does not support camera injection; skip it.
            continue;
        }

        // Non-owning view of the static characteristics held by 'metadata'.
        camera_metadata_t* staticMetaBuffer =
                reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
        CameraMetadata chars;
        chars.metadata = metadata.metadata;

        outputStreams.clear();
        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams));
        ASSERT_NE(0u, outputStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        int32_t streamConfigCounter = 0;
        // Configure each advertised size/format as a single-stream session;
        // every advertised combination must configure successfully.
        for (auto& it : outputStreams) {
            Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format));
            Stream stream = {streamId,
                             StreamType::OUTPUT,
                             it.width,
                             it.height,
                             static_cast<PixelFormat>(it.format),
                             static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                     GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                             dataspace,
                             StreamRotation::ROTATION_0,
                             std::string(),
                             jpegBufferSize,
                             0,
                             {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                             RequestAvailableDynamicRangeProfilesMap::
                                     ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

            std::vector<Stream> streams = {stream};
            StreamConfiguration config;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);

            config.streamConfigCounter = streamConfigCounter++;
            ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
            ASSERT_TRUE(s.isOk());
            streamId++;
        }

        // Close through the device session owned by the injection session.
        std::shared_ptr<ICameraDeviceSession> session;
        ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(session, nullptr);
        ret = session->close();
        ASSERT_TRUE(ret.isOk());
    }
}
2721
// Check for correct handling of invalid/incorrect configuration parameters for injection camera.
TEST_P(CameraAidlTest, configureInjectionStreamsInvalidOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputStreams;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata metadata;
        std::shared_ptr<ICameraInjectionSession> injectionSession;
        std::shared_ptr<ICameraDevice> unusedDevice;
        openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
                                  &unusedDevice);
        if (injectionSession == nullptr) {
            // Device does not support camera injection; skip it.
            continue;
        }

        // Non-owning view of the static characteristics held by 'metadata'.
        camera_metadata_t* staticMetaBuffer =
                reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
        std::shared_ptr<ICameraDeviceSession> session;
        ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(session, nullptr);

        CameraMetadata chars;
        chars.metadata = metadata.metadata;

        outputStreams.clear();
        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams));
        ASSERT_NE(0u, outputStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        // Case 1: zero width/height with an otherwise valid format.
        int32_t streamId = 0;
        Stream stream = {streamId++,
                         StreamType::OUTPUT,
                         0,
                         0,
                         static_cast<PixelFormat>(outputStreams[0].format),
                         static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                         Dataspace::UNKNOWN,
                         StreamRotation::ROTATION_0,
                         std::string(),
                         jpegBufferSize,
                         0,
                         {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                         RequestAvailableDynamicRangeProfilesMap::
                                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

        int32_t streamConfigCounter = 0;
        std::vector<Stream> streams = {stream};
        StreamConfiguration config;
        createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                  jpegBufferSize);

        // Zero-sized stream: either ILLEGAL_ARGUMENT or INTERNAL_ERROR is
        // accepted from the HAL here.
        config.streamConfigCounter = streamConfigCounter++;
        ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
        ASSERT_TRUE(
                (static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) == s.getServiceSpecificError()) ||
                (static_cast<int32_t>(Status::INTERNAL_ERROR) == s.getServiceSpecificError()));

        // Case 2: absurdly large width/height must be rejected.
        stream = {streamId++,
                  StreamType::OUTPUT,
                  INT32_MAX,
                  INT32_MAX,
                  static_cast<PixelFormat>(outputStreams[0].format),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  jpegBufferSize,
                  0,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                  jpegBufferSize);
        config.streamConfigCounter = streamConfigCounter++;
        s = injectionSession->configureInjectionStreams(config, chars);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());

        for (auto& it : outputStreams) {
            // Case 3: valid size with an invalid pixel format.
            stream = {streamId++,
                      StreamType::OUTPUT,
                      it.width,
                      it.height,
                      static_cast<PixelFormat>(INT32_MAX),
                      static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                              GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                      Dataspace::UNKNOWN,
                      StreamRotation::ROTATION_0,
                      std::string(),
                      jpegBufferSize,
                      0,
                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                      RequestAvailableDynamicRangeProfilesMap::
                              ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
            streams[0] = stream;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);
            config.streamConfigCounter = streamConfigCounter++;
            s = injectionSession->configureInjectionStreams(config, chars);
            ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());

            // Case 4: valid size/format with an invalid rotation value.
            stream = {streamId++,
                      StreamType::OUTPUT,
                      it.width,
                      it.height,
                      static_cast<PixelFormat>(it.format),
                      static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                              GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                      Dataspace::UNKNOWN,
                      static_cast<StreamRotation>(INT32_MAX),
                      std::string(),
                      jpegBufferSize,
                      0,
                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                      RequestAvailableDynamicRangeProfilesMap::
                              ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
            streams[0] = stream;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);
            config.streamConfigCounter = streamConfigCounter++;
            s = injectionSession->configureInjectionStreams(config, chars);
            ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
        }

        ret = session->close();
        ASSERT_TRUE(ret.isOk());
    }
}
2857
2858// Check whether session parameters are supported for injection camera. If Hal support for them
2859// exist, then try to configure a preview stream using them.
2860TEST_P(CameraAidlTest, configureInjectionStreamsWithSessionParameters) {
2861 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2862 std::vector<AvailableStream> outputPreviewStreams;
2863 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2864 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2865
2866 for (const auto& name : cameraDeviceNames) {
2867 CameraMetadata metadata;
2868 std::shared_ptr<ICameraInjectionSession> injectionSession;
2869 std::shared_ptr<ICameraDevice> unusedDevice;
2870 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2871 &unusedDevice /*out*/);
2872 if (injectionSession == nullptr) {
2873 continue;
2874 }
2875
2876 std::shared_ptr<ICameraDeviceSession> session;
2877 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2878 ASSERT_TRUE(ret.isOk());
2879 ASSERT_NE(session, nullptr);
2880
2881 camera_metadata_t* staticMetaBuffer =
2882 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2883 CameraMetadata chars;
2884 chars.metadata = metadata.metadata;
2885
2886 std::unordered_set<int32_t> availableSessionKeys;
2887 Status rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
2888 &availableSessionKeys);
2889 ASSERT_EQ(Status::OK, rc);
2890 if (availableSessionKeys.empty()) {
2891 ret = session->close();
2892 ASSERT_TRUE(ret.isOk());
2893 continue;
2894 }
2895
2896 android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings;
2897 android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams,
2898 modifiedSessionParams;
2899 constructFilteredSettings(session, availableSessionKeys, RequestTemplate::PREVIEW,
2900 &previewRequestSettings, &sessionParams);
2901 if (sessionParams.isEmpty()) {
2902 ret = session->close();
2903 ASSERT_TRUE(ret.isOk());
2904 continue;
2905 }
2906
2907 outputPreviewStreams.clear();
2908
2909 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams,
2910 &previewThreshold));
2911 ASSERT_NE(0u, outputPreviewStreams.size());
2912
2913 Stream previewStream = {
2914 0,
2915 StreamType::OUTPUT,
2916 outputPreviewStreams[0].width,
2917 outputPreviewStreams[0].height,
2918 static_cast<PixelFormat>(outputPreviewStreams[0].format),
2919 static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
2920 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2921 Dataspace::UNKNOWN,
2922 StreamRotation::ROTATION_0,
2923 std::string(),
2924 0,
2925 -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00002926 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2927 RequestAvailableDynamicRangeProfilesMap::
2928 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08002929 std::vector<Stream> streams = {previewStream};
2930 StreamConfiguration config;
2931 config.streams = streams;
2932 config.operationMode = StreamConfigurationMode::NORMAL_MODE;
2933
2934 modifiedSessionParams = sessionParams;
2935 camera_metadata_t* sessionParamsBuffer = sessionParams.release();
2936 uint8_t* rawSessionParamsBuffer = reinterpret_cast<uint8_t*>(sessionParamsBuffer);
2937 config.sessionParams.metadata =
2938 std::vector(rawSessionParamsBuffer,
2939 rawSessionParamsBuffer + get_camera_metadata_size(sessionParamsBuffer));
2940
2941 config.streamConfigCounter = 0;
2942 config.streamConfigCounter = 0;
2943 config.multiResolutionInputImage = false;
2944
2945 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
2946 ASSERT_TRUE(s.isOk());
2947
2948 sessionParams.acquire(sessionParamsBuffer);
2949 free_camera_metadata(staticMetaBuffer);
2950 ret = session->close();
2951 ASSERT_TRUE(ret.isOk());
2952 }
2953}
2954
// Verify that valid stream use cases can be configured successfully, and invalid use cases
// fail stream configuration.
TEST_P(CameraAidlTest, configureStreamsUseCases) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);

        // Non-owning view of the static characteristics held by 'meta'.
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
        // Check if camera support depth only
        if (isDepthOnly(staticMeta)) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        std::vector<AvailableStream> outputPreviewStreams;
        AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
                                            static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputPreviewStreams, &previewThreshold));
        ASSERT_NE(0u, outputPreviewStreams.size());

        // Combine valid and invalid stream use cases
        // (VIDEO_CALL + 1 is outside the defined use-case range).
        std::vector<int64_t> useCases(kMandatoryUseCases);
        useCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL + 1);

        // Collect the use cases the HAL advertises; fall back to DEFAULT when
        // the tag is absent or empty.
        std::vector<int64_t> supportedUseCases;
        camera_metadata_ro_entry entry;
        auto retcode = find_camera_metadata_ro_entry(
                staticMeta, ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES, &entry);
        if ((0 == retcode) && (entry.count > 0)) {
            supportedUseCases.insert(supportedUseCases.end(), entry.data.i64,
                                     entry.data.i64 + entry.count);
        } else {
            supportedUseCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
        }

        std::vector<Stream> streams(1);
        streams[0] = {0,
                      StreamType::OUTPUT,
                      outputPreviewStreams[0].width,
                      outputPreviewStreams[0].height,
                      static_cast<PixelFormat>(outputPreviewStreams[0].format),
                      static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
                              GRALLOC1_CONSUMER_USAGE_CPU_READ),
                      Dataspace::UNKNOWN,
                      StreamRotation::ROTATION_0,
                      std::string(),
                      0,
                      -1,
                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                      RequestAvailableDynamicRangeProfilesMap::
                              ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

        int32_t streamConfigCounter = 0;
        CameraMetadata req;
        StreamConfiguration config;
        RequestTemplate reqTemplate = RequestTemplate::STILL_CAPTURE;
        ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &req);
        ASSERT_TRUE(ret.isOk());
        config.sessionParams = req;

        // For every use case: isStreamCombinationSupported must agree with the
        // advertised list, and configureStreams must succeed exactly for the
        // supported ones.
        for (int64_t useCase : useCases) {
            bool useCaseSupported = std::find(supportedUseCases.begin(), supportedUseCases.end(),
                                              useCase) != supportedUseCases.end();

            streams[0].useCase = static_cast<
                    aidl::android::hardware::camera::metadata::ScalerAvailableStreamUseCases>(
                    useCase);
            config.streams = streams;
            config.operationMode = StreamConfigurationMode::NORMAL_MODE;
            config.streamConfigCounter = streamConfigCounter;
            config.multiResolutionInputImage = false;

            bool combSupported;
            ret = cameraDevice->isStreamCombinationSupported(config, &combSupported);
            if (static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED) ==
                ret.getServiceSpecificError()) {
                // Query API itself not implemented by this HAL; skip use case.
                continue;
            }

            ASSERT_TRUE(ret.isOk());
            ASSERT_EQ(combSupported, useCaseSupported);

            std::vector<HalStream> halStreams;
            ret = mSession->configureStreams(config, &halStreams);
            ALOGI("configureStreams returns status: %d", ret.getServiceSpecificError());
            if (useCaseSupported) {
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(1u, halStreams.size());
            } else {
                ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
                          ret.getServiceSpecificError());
            }
        }
        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
3061
3062GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(CameraAidlTest);
3063INSTANTIATE_TEST_SUITE_P(
3064 PerInstance, CameraAidlTest,
3065 testing::ValuesIn(android::getAidlHalInstanceNames(ICameraProvider::descriptor)),
3066 android::hardware::PrintInstanceNameToString);