blob: fe03732aff31ca317af3d19c58bee7844965bdb0 [file] [log] [blame]
Avichal Rakesh362242f2022-02-08 12:40:53 -08001/*
2 * Copyright (C) 2022 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include <aidl/Vintf.h>
18#include <aidl/android/hardware/camera/common/VendorTagSection.h>
19#include <aidl/android/hardware/camera/device/ICameraDevice.h>
20#include <aidlcommonsupport/NativeHandle.h>
21#include <camera_aidl_test.h>
22#include <cutils/properties.h>
23#include <device_cb.h>
24#include <empty_device_cb.h>
25#include <grallocusage/GrallocUsageConversion.h>
26#include <gtest/gtest.h>
27#include <hardware/gralloc.h>
28#include <hardware/gralloc1.h>
29#include <hidl/GtestPrinter.h>
30#include <hidl/HidlSupport.h>
31#include <torch_provider_cb.h>
32#include <list>
33
34using ::aidl::android::hardware::camera::common::CameraDeviceStatus;
35using ::aidl::android::hardware::camera::common::CameraResourceCost;
36using ::aidl::android::hardware::camera::common::TorchModeStatus;
37using ::aidl::android::hardware::camera::common::VendorTagSection;
38using ::aidl::android::hardware::camera::device::ICameraDevice;
Avichal Rakeshd3503a32022-02-25 06:23:14 +000039using ::aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap;
Avichal Rakesh362242f2022-02-08 12:40:53 -080040using ::aidl::android::hardware::camera::metadata::SensorPixelMode;
41using ::aidl::android::hardware::camera::provider::CameraIdAndStreamCombination;
Avichal Rakesh4bf91c72022-05-23 20:44:02 +000042using ::aidl::android::hardware::camera::provider::BnCameraProviderCallback;
Avichal Rakesh362242f2022-02-08 12:40:53 -080043
44using ::ndk::ScopedAStatus;
45
46namespace {
47const int32_t kBurstFrameCount = 10;
48const uint32_t kMaxStillWidth = 2048;
49const uint32_t kMaxStillHeight = 1536;
50
51const int64_t kEmptyFlushTimeoutMSec = 200;
52
Shuzhen Wang36efa712022-03-08 10:10:44 -080053const static std::vector<int64_t> kMandatoryUseCases = {
Avichal Rakesh362242f2022-02-08 12:40:53 -080054 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
55 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW,
56 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE,
57 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD,
58 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL,
59 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL};
60} // namespace
61
62TEST_P(CameraAidlTest, getCameraIdList) {
63 std::vector<std::string> idList;
64 ScopedAStatus ret = mProvider->getCameraIdList(&idList);
65 ASSERT_TRUE(ret.isOk());
66
67 for (size_t i = 0; i < idList.size(); i++) {
68 ALOGI("Camera Id[%zu] is %s", i, idList[i].c_str());
69 }
70}
71
72// Test if ICameraProvider::getVendorTags returns Status::OK
73TEST_P(CameraAidlTest, getVendorTags) {
74 std::vector<VendorTagSection> vendorTags;
75 ScopedAStatus ret = mProvider->getVendorTags(&vendorTags);
76
77 ASSERT_TRUE(ret.isOk());
78 for (size_t i = 0; i < vendorTags.size(); i++) {
79 ALOGI("Vendor tag section %zu name %s", i, vendorTags[i].sectionName.c_str());
80 for (auto& tag : vendorTags[i].tags) {
81 ALOGI("Vendor tag id %u name %s type %d", tag.tagId, tag.tagName.c_str(),
82 (int)tag.tagType);
83 }
84 }
85}
86
87// Test if ICameraProvider::setCallback returns Status::OK
88TEST_P(CameraAidlTest, setCallback) {
Avichal Rakesh4bf91c72022-05-23 20:44:02 +000089 struct ProviderCb : public BnCameraProviderCallback {
Avichal Rakesh362242f2022-02-08 12:40:53 -080090 ScopedAStatus cameraDeviceStatusChange(const std::string& cameraDeviceName,
91 CameraDeviceStatus newStatus) override {
92 ALOGI("camera device status callback name %s, status %d", cameraDeviceName.c_str(),
93 (int)newStatus);
94 return ScopedAStatus::ok();
95 }
96 ScopedAStatus torchModeStatusChange(const std::string& cameraDeviceName,
97 TorchModeStatus newStatus) override {
98 ALOGI("Torch mode status callback name %s, status %d", cameraDeviceName.c_str(),
99 (int)newStatus);
100 return ScopedAStatus::ok();
101 }
102 ScopedAStatus physicalCameraDeviceStatusChange(const std::string& cameraDeviceName,
103 const std::string& physicalCameraDeviceName,
104 CameraDeviceStatus newStatus) override {
105 ALOGI("physical camera device status callback name %s, physical camera name %s,"
106 " status %d",
107 cameraDeviceName.c_str(), physicalCameraDeviceName.c_str(), (int)newStatus);
108 return ScopedAStatus::ok();
109 }
110 };
111
Avichal Rakesh4bf91c72022-05-23 20:44:02 +0000112 std::shared_ptr<ProviderCb> cb = ndk::SharedRefBase::make<ProviderCb>();
Avichal Rakesh362242f2022-02-08 12:40:53 -0800113 ScopedAStatus ret = mProvider->setCallback(cb);
114 ASSERT_TRUE(ret.isOk());
115 ret = mProvider->setCallback(nullptr);
Avichal Rakesh4bf91c72022-05-23 20:44:02 +0000116 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
Avichal Rakesh362242f2022-02-08 12:40:53 -0800117}
118
119// Test if ICameraProvider::getCameraDeviceInterface returns Status::OK and non-null device
120TEST_P(CameraAidlTest, getCameraDeviceInterface) {
121 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
122
123 for (const auto& name : cameraDeviceNames) {
124 std::shared_ptr<ICameraDevice> cameraDevice;
125 ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &cameraDevice);
126 ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
127 ret.getServiceSpecificError());
128 ASSERT_TRUE(ret.isOk());
129 ASSERT_NE(cameraDevice, nullptr);
130 }
131}
132
133// Verify that the device resource cost can be retrieved and the values are
134// correct.
135TEST_P(CameraAidlTest, getResourceCost) {
136 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
137
138 for (const auto& deviceName : cameraDeviceNames) {
139 std::shared_ptr<ICameraDevice> cameraDevice;
140 ScopedAStatus ret = mProvider->getCameraDeviceInterface(deviceName, &cameraDevice);
141 ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
142 ret.getServiceSpecificError());
143 ASSERT_TRUE(ret.isOk());
144 ASSERT_NE(cameraDevice, nullptr);
145
146 CameraResourceCost resourceCost;
147 ret = cameraDevice->getResourceCost(&resourceCost);
148 ALOGI("getResourceCost returns: %d:%d", ret.getExceptionCode(),
149 ret.getServiceSpecificError());
150 ASSERT_TRUE(ret.isOk());
151
152 ALOGI(" Resource cost is %d", resourceCost.resourceCost);
153 ASSERT_LE(resourceCost.resourceCost, 100u);
154
155 for (const auto& name : resourceCost.conflictingDevices) {
156 ALOGI(" Conflicting device: %s", name.c_str());
157 }
158 }
159}
160
161TEST_P(CameraAidlTest, systemCameraTest) {
162 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
163 std::map<std::string, std::vector<SystemCameraKind>> hiddenPhysicalIdToLogicalMap;
164 for (const auto& name : cameraDeviceNames) {
165 std::shared_ptr<ICameraDevice> device;
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +0000166 ALOGI("systemCameraTest: Testing camera device %s", name.c_str());
Avichal Rakesh362242f2022-02-08 12:40:53 -0800167 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
168 ASSERT_TRUE(ret.isOk());
169 ASSERT_NE(device, nullptr);
170
171 CameraMetadata cameraCharacteristics;
172 ret = device->getCameraCharacteristics(&cameraCharacteristics);
173 ASSERT_TRUE(ret.isOk());
174
175 const camera_metadata_t* staticMeta =
176 reinterpret_cast<const camera_metadata_t*>(cameraCharacteristics.metadata.data());
177 Status rc = isLogicalMultiCamera(staticMeta);
178 if (rc == Status::OPERATION_NOT_SUPPORTED) {
179 return;
180 }
181
182 ASSERT_EQ(rc, Status::OK);
183 std::unordered_set<std::string> physicalIds;
184 ASSERT_EQ(getPhysicalCameraIds(staticMeta, &physicalIds), Status::OK);
185 SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
186 Status retStatus = getSystemCameraKind(staticMeta, &systemCameraKind);
187 ASSERT_EQ(retStatus, Status::OK);
188
189 for (auto physicalId : physicalIds) {
190 bool isPublicId = false;
191 for (auto& deviceName : cameraDeviceNames) {
192 std::string publicVersion, publicId;
193 ASSERT_TRUE(matchDeviceName(deviceName, mProviderType, &publicVersion, &publicId));
194 if (physicalId == publicId) {
195 isPublicId = true;
196 break;
197 }
198 }
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +0000199
Avichal Rakesh362242f2022-02-08 12:40:53 -0800200 // For hidden physical cameras, collect their associated logical cameras
201 // and store the system camera kind.
202 if (!isPublicId) {
203 auto it = hiddenPhysicalIdToLogicalMap.find(physicalId);
204 if (it == hiddenPhysicalIdToLogicalMap.end()) {
205 hiddenPhysicalIdToLogicalMap.insert(std::make_pair(
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +0000206 physicalId, std::vector<SystemCameraKind>({systemCameraKind})));
Avichal Rakesh362242f2022-02-08 12:40:53 -0800207 } else {
208 it->second.push_back(systemCameraKind);
209 }
210 }
211 }
212 }
213
214 // Check that the system camera kind of the logical cameras associated with
215 // each hidden physical camera is the same.
216 for (const auto& it : hiddenPhysicalIdToLogicalMap) {
217 SystemCameraKind neededSystemCameraKind = it.second.front();
218 for (auto foundSystemCamera : it.second) {
219 ASSERT_EQ(neededSystemCameraKind, foundSystemCamera);
220 }
221 }
222}
223
224// Verify that the static camera characteristics can be retrieved
225// successfully.
226TEST_P(CameraAidlTest, getCameraCharacteristics) {
227 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
228
229 for (const auto& name : cameraDeviceNames) {
230 std::shared_ptr<ICameraDevice> device;
231 ALOGI("getCameraCharacteristics: Testing camera device %s", name.c_str());
232 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
233 ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
234 ret.getServiceSpecificError());
235 ASSERT_TRUE(ret.isOk());
236 ASSERT_NE(device, nullptr);
237
238 CameraMetadata chars;
239 ret = device->getCameraCharacteristics(&chars);
240 ASSERT_TRUE(ret.isOk());
241 verifyCameraCharacteristics(chars);
242 verifyMonochromeCharacteristics(chars);
243 verifyRecommendedConfigs(chars);
244 verifyLogicalOrUltraHighResCameraMetadata(name, device, chars, cameraDeviceNames);
245
246 ASSERT_TRUE(ret.isOk());
247
248 // getPhysicalCameraCharacteristics will fail for publicly
249 // advertised camera IDs.
250 std::string version, cameraId;
251 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &cameraId));
252 CameraMetadata devChars;
253 ret = device->getPhysicalCameraCharacteristics(cameraId, &devChars);
254 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
255 ASSERT_EQ(0, devChars.metadata.size());
256 }
257}
258
259// Verify that the torch strength level can be set and retrieved successfully.
260TEST_P(CameraAidlTest, turnOnTorchWithStrengthLevel) {
261 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
262
263 std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this);
264 ndk::ScopedAStatus ret = mProvider->setCallback(cb);
265 ASSERT_TRUE(ret.isOk());
266
267 for (const auto& name : cameraDeviceNames) {
268 int32_t defaultLevel;
269 std::shared_ptr<ICameraDevice> device;
270 ALOGI("%s: Testing camera device %s", __FUNCTION__, name.c_str());
271
272 ret = mProvider->getCameraDeviceInterface(name, &device);
273 ASSERT_TRUE(ret.isOk());
274 ASSERT_NE(device, nullptr);
275
276 CameraMetadata chars;
277 ret = device->getCameraCharacteristics(&chars);
278 ASSERT_TRUE(ret.isOk());
279
280 const camera_metadata_t* staticMeta =
281 reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());
282 bool torchStrengthControlSupported = isTorchStrengthControlSupported(staticMeta);
283 camera_metadata_ro_entry entry;
284 int rc = find_camera_metadata_ro_entry(staticMeta,
285 ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL, &entry);
286 if (torchStrengthControlSupported) {
287 ASSERT_EQ(rc, 0);
288 ASSERT_GT(entry.count, 0);
289 defaultLevel = *entry.data.i32;
290 ALOGI("Default level is:%d", defaultLevel);
291 }
292
293 mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
294 ret = device->turnOnTorchWithStrengthLevel(2);
295 ALOGI("turnOnTorchWithStrengthLevel returns status: %d", ret.getServiceSpecificError());
296 // OPERATION_NOT_SUPPORTED check
297 if (!torchStrengthControlSupported) {
298 ALOGI("Torch strength control not supported.");
299 ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED),
300 ret.getServiceSpecificError());
301 } else {
302 {
303 ASSERT_TRUE(ret.isOk());
304 std::unique_lock<std::mutex> l(mTorchLock);
305 while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
306 auto timeout = std::chrono::system_clock::now() +
307 std::chrono::seconds(kTorchTimeoutSec);
308 ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
309 }
310 ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
311 mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
312 }
313 ALOGI("getTorchStrengthLevel: Testing");
314 int32_t strengthLevel;
315 ret = device->getTorchStrengthLevel(&strengthLevel);
316 ASSERT_TRUE(ret.isOk());
317 ALOGI("Torch strength level is : %d", strengthLevel);
318 ASSERT_EQ(strengthLevel, 2);
319
320 // Turn OFF the torch and verify torch strength level is reset to default level.
321 ALOGI("Testing torch strength level reset after turning the torch OFF.");
322 ret = device->setTorchMode(false);
323 ASSERT_TRUE(ret.isOk());
324 {
325 std::unique_lock<std::mutex> l(mTorchLock);
326 while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
327 auto timeout = std::chrono::system_clock::now() +
328 std::chrono::seconds(kTorchTimeoutSec);
329 ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
330 }
331 ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
332 }
333
334 ret = device->getTorchStrengthLevel(&strengthLevel);
335 ASSERT_TRUE(ret.isOk());
336 ALOGI("Torch strength level after turning OFF torch is : %d", strengthLevel);
337 ASSERT_EQ(strengthLevel, defaultLevel);
338 }
339 }
340}
341
342// In case it is supported verify that torch can be enabled.
343// Check for corresponding torch callbacks as well.
344TEST_P(CameraAidlTest, setTorchMode) {
345 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
346
347 std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this);
348 ndk::ScopedAStatus ret = mProvider->setCallback(cb);
349 ALOGI("setCallback returns status: %d", ret.getServiceSpecificError());
350 ASSERT_TRUE(ret.isOk());
351 ASSERT_NE(cb, nullptr);
352
353 for (const auto& name : cameraDeviceNames) {
354 std::shared_ptr<ICameraDevice> device;
355 ALOGI("setTorchMode: Testing camera device %s", name.c_str());
356 ret = mProvider->getCameraDeviceInterface(name, &device);
357 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
358 ret.getServiceSpecificError());
359 ASSERT_TRUE(ret.isOk());
360 ASSERT_NE(device, nullptr);
361
362 CameraMetadata metadata;
363 ret = device->getCameraCharacteristics(&metadata);
364 ALOGI("getCameraCharacteristics returns status:%d", ret.getServiceSpecificError());
365 ASSERT_TRUE(ret.isOk());
366 camera_metadata_t* staticMeta =
367 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
368 bool torchSupported = isTorchSupported(staticMeta);
369
370 mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
371 ret = device->setTorchMode(true);
372 ALOGI("setTorchMode returns status: %d", ret.getServiceSpecificError());
373 if (!torchSupported) {
374 ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED),
375 ret.getServiceSpecificError());
376 } else {
377 ASSERT_TRUE(ret.isOk());
378 {
379 std::unique_lock<std::mutex> l(mTorchLock);
380 while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
381 auto timeout = std::chrono::system_clock::now() +
382 std::chrono::seconds(kTorchTimeoutSec);
383 ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
384 }
385 ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
386 mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
387 }
388
389 ret = device->setTorchMode(false);
390 ASSERT_TRUE(ret.isOk());
391 {
392 std::unique_lock<std::mutex> l(mTorchLock);
393 while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
394 auto timeout = std::chrono::system_clock::now() +
395 std::chrono::seconds(kTorchTimeoutSec);
396 ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
397 }
398 ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
399 }
400 }
401 }
Avichal Rakesh362242f2022-02-08 12:40:53 -0800402}
403
404// Check dump functionality.
405TEST_P(CameraAidlTest, dump) {
406 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
407
408 for (const auto& name : cameraDeviceNames) {
409 std::shared_ptr<ICameraDevice> device;
410 ALOGI("dump: Testing camera device %s", name.c_str());
411
412 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
413 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
414 ret.getServiceSpecificError());
415 ASSERT_TRUE(ret.isOk());
416 ASSERT_NE(device, nullptr);
417
418 int raw_handle = open(kDumpOutput, O_RDWR);
419 ASSERT_GE(raw_handle, 0);
420
421 auto retStatus = device->dump(raw_handle, nullptr, 0);
422 ASSERT_EQ(retStatus, ::android::OK);
423 close(raw_handle);
424 }
425}
426
427// Open, dump, then close
428TEST_P(CameraAidlTest, openClose) {
429 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
430
431 for (const auto& name : cameraDeviceNames) {
432 std::shared_ptr<ICameraDevice> device;
433 ALOGI("openClose: Testing camera device %s", name.c_str());
434 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
435 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
436 ret.getServiceSpecificError());
437 ASSERT_TRUE(ret.isOk());
438 ASSERT_NE(device, nullptr);
439
440 std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
441
442 ret = device->open(cb, &mSession);
443 ASSERT_TRUE(ret.isOk());
444 ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
445 ret.getServiceSpecificError());
446 ASSERT_NE(mSession, nullptr);
447 int raw_handle = open(kDumpOutput, O_RDWR);
448 ASSERT_GE(raw_handle, 0);
449
450 auto retStatus = device->dump(raw_handle, nullptr, 0);
451 ASSERT_EQ(retStatus, ::android::OK);
452 close(raw_handle);
453
454 ret = mSession->close();
455 mSession = nullptr;
456 ASSERT_TRUE(ret.isOk());
457 // TODO: test all session API calls return INTERNAL_ERROR after close
458 // TODO: keep a wp copy here and verify session cannot be promoted out of this scope
459 }
460}
461
462// Check whether all common default request settings can be successfully
463// constructed.
464TEST_P(CameraAidlTest, constructDefaultRequestSettings) {
465 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
466
467 for (const auto& name : cameraDeviceNames) {
468 std::shared_ptr<ICameraDevice> device;
469 ALOGI("constructDefaultRequestSettings: Testing camera device %s", name.c_str());
470 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
471 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
472 ret.getServiceSpecificError());
473 ASSERT_TRUE(ret.isOk());
474 ASSERT_NE(device, nullptr);
475
476 std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
477 ret = device->open(cb, &mSession);
478 ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
479 ret.getServiceSpecificError());
480 ASSERT_TRUE(ret.isOk());
481 ASSERT_NE(mSession, nullptr);
482
483 for (int32_t t = (int32_t)RequestTemplate::PREVIEW; t <= (int32_t)RequestTemplate::MANUAL;
484 t++) {
485 RequestTemplate reqTemplate = (RequestTemplate)t;
486 CameraMetadata rawMetadata;
487 ret = mSession->constructDefaultRequestSettings(reqTemplate, &rawMetadata);
488 ALOGI("constructDefaultRequestSettings returns status:%d:%d", ret.getExceptionCode(),
489 ret.getServiceSpecificError());
Avichal Rakeshd3503a32022-02-25 06:23:14 +0000490
Avichal Rakesh362242f2022-02-08 12:40:53 -0800491 if (reqTemplate == RequestTemplate::ZERO_SHUTTER_LAG ||
492 reqTemplate == RequestTemplate::MANUAL) {
493 // optional templates
494 ASSERT_TRUE(ret.isOk() || static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
495 ret.getServiceSpecificError());
496 } else {
497 ASSERT_TRUE(ret.isOk());
498 }
499
500 if (ret.isOk()) {
501 const camera_metadata_t* metadata = (camera_metadata_t*)rawMetadata.metadata.data();
502 size_t expectedSize = rawMetadata.metadata.size();
503 int result = validate_camera_metadata_structure(metadata, &expectedSize);
504 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
505 verifyRequestTemplate(metadata, reqTemplate);
506 } else {
507 ASSERT_EQ(0u, rawMetadata.metadata.size());
508 }
509 }
510 ret = mSession->close();
511 mSession = nullptr;
512 ASSERT_TRUE(ret.isOk());
513 }
514}
515
516// Verify that all supported stream formats and sizes can be configured
517// successfully.
518TEST_P(CameraAidlTest, configureStreamsAvailableOutputs) {
519 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
520 std::vector<AvailableStream> outputStreams;
521
522 for (const auto& name : cameraDeviceNames) {
523 CameraMetadata meta;
524 std::shared_ptr<ICameraDevice> device;
525
526 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/, &device /*out*/);
527
528 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
529 outputStreams.clear();
530 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
531 ASSERT_NE(0u, outputStreams.size());
532
533 int32_t jpegBufferSize = 0;
534 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
535 ASSERT_NE(0u, jpegBufferSize);
536
537 int32_t streamId = 0;
538 int32_t streamConfigCounter = 0;
539 for (auto& it : outputStreams) {
540 Stream stream;
541 Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format));
542 stream.id = streamId;
543 stream.streamType = StreamType::OUTPUT;
544 stream.width = it.width;
545 stream.height = it.height;
546 stream.format = static_cast<PixelFormat>(it.format);
547 stream.dataSpace = dataspace;
548 stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
549 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
550 stream.rotation = StreamRotation::ROTATION_0;
Avichal Rakeshd3503a32022-02-25 06:23:14 +0000551 stream.dynamicRangeProfile = RequestAvailableDynamicRangeProfilesMap::
552 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
Avichal Rakesh362242f2022-02-08 12:40:53 -0800553
554 std::vector<Stream> streams = {stream};
555 StreamConfiguration config;
556 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
557 jpegBufferSize);
558
559 bool expectStreamCombQuery = (isLogicalMultiCamera(staticMeta) == Status::OK);
560 verifyStreamCombination(device, config, /*expectedStatus*/ true, expectStreamCombQuery);
561
562 config.streamConfigCounter = streamConfigCounter++;
563 std::vector<HalStream> halConfigs;
564 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
565 ASSERT_TRUE(ret.isOk());
566 ASSERT_EQ(halConfigs.size(), 1);
567 ASSERT_EQ(halConfigs[0].id, streamId);
568
569 streamId++;
570 }
571
572 ndk::ScopedAStatus ret = mSession->close();
573 mSession = nullptr;
574 ASSERT_TRUE(ret.isOk());
575 }
576}
577
578// Verify that mandatory concurrent streams and outputs are supported.
579TEST_P(CameraAidlTest, configureConcurrentStreamsAvailableOutputs) {
580 struct CameraTestInfo {
581 CameraMetadata staticMeta;
582 std::shared_ptr<ICameraDeviceSession> session;
583 std::shared_ptr<ICameraDevice> cameraDevice;
584 StreamConfiguration config;
585 };
586
587 std::map<std::string, std::string> idToNameMap = getCameraDeviceIdToNameMap(mProvider);
588 std::vector<ConcurrentCameraIdCombination> concurrentDeviceCombinations =
589 getConcurrentDeviceCombinations(mProvider);
590 std::vector<AvailableStream> outputStreams;
591 for (const auto& cameraDeviceIds : concurrentDeviceCombinations) {
592 std::vector<CameraIdAndStreamCombination> cameraIdsAndStreamCombinations;
593 std::vector<CameraTestInfo> cameraTestInfos;
594 size_t i = 0;
595 for (const auto& id : cameraDeviceIds.combination) {
596 CameraTestInfo cti;
597 auto it = idToNameMap.find(id);
598 ASSERT_TRUE(idToNameMap.end() != it);
599 std::string name = it->second;
600
601 openEmptyDeviceSession(name, mProvider, &cti.session /*out*/, &cti.staticMeta /*out*/,
602 &cti.cameraDevice /*out*/);
603
604 outputStreams.clear();
605 camera_metadata_t* staticMeta =
606 reinterpret_cast<camera_metadata_t*>(cti.staticMeta.metadata.data());
607 ASSERT_EQ(Status::OK, getMandatoryConcurrentStreams(staticMeta, &outputStreams));
608 ASSERT_NE(0u, outputStreams.size());
609
610 int32_t jpegBufferSize = 0;
611 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
612 ASSERT_NE(0u, jpegBufferSize);
613
614 int32_t streamId = 0;
615 std::vector<Stream> streams(outputStreams.size());
616 size_t j = 0;
617 for (const auto& s : outputStreams) {
618 Stream stream;
619 Dataspace dataspace = getDataspace(static_cast<PixelFormat>(s.format));
620 stream.id = streamId++;
621 stream.streamType = StreamType::OUTPUT;
622 stream.width = s.width;
623 stream.height = s.height;
624 stream.format = static_cast<PixelFormat>(s.format);
625 stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
626 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
627 stream.dataSpace = dataspace;
628 stream.rotation = StreamRotation::ROTATION_0;
629 stream.sensorPixelModesUsed = {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT};
Avichal Rakeshd3503a32022-02-25 06:23:14 +0000630 stream.dynamicRangeProfile = RequestAvailableDynamicRangeProfilesMap::
631 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
Avichal Rakesh362242f2022-02-08 12:40:53 -0800632 streams[j] = stream;
633 j++;
634 }
635
636 // Add the created stream configs to cameraIdsAndStreamCombinations
637 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &cti.config,
638 jpegBufferSize);
639
640 cti.config.streamConfigCounter = outputStreams.size();
641 CameraIdAndStreamCombination cameraIdAndStreamCombination;
642 cameraIdAndStreamCombination.cameraId = id;
643 cameraIdAndStreamCombination.streamConfiguration = cti.config;
644 cameraIdsAndStreamCombinations.push_back(cameraIdAndStreamCombination);
645 i++;
646 cameraTestInfos.push_back(cti);
647 }
648 // Now verify that concurrent streams are supported
649 bool combinationSupported;
650 ndk::ScopedAStatus ret = mProvider->isConcurrentStreamCombinationSupported(
651 cameraIdsAndStreamCombinations, &combinationSupported);
652 ASSERT_TRUE(ret.isOk());
653 ASSERT_EQ(combinationSupported, true);
654
655 // Test the stream can actually be configured
656 for (auto& cti : cameraTestInfos) {
657 if (cti.session != nullptr) {
658 camera_metadata_t* staticMeta =
659 reinterpret_cast<camera_metadata_t*>(cti.staticMeta.metadata.data());
660 bool expectStreamCombQuery = (isLogicalMultiCamera(staticMeta) == Status::OK);
661 verifyStreamCombination(cti.cameraDevice, cti.config, /*expectedStatus*/ true,
662 expectStreamCombQuery);
663 }
664
665 if (cti.session != nullptr) {
666 std::vector<HalStream> streamConfigs;
667 ret = cti.session->configureStreams(cti.config, &streamConfigs);
668 ASSERT_TRUE(ret.isOk());
669 ASSERT_EQ(cti.config.streams.size(), streamConfigs.size());
670 }
671 }
672
673 for (auto& cti : cameraTestInfos) {
674 ret = cti.session->close();
675 ASSERT_TRUE(ret.isOk());
676 }
677 }
678}
679
680// Check for correct handling of invalid/incorrect configuration parameters.
681TEST_P(CameraAidlTest, configureStreamsInvalidOutputs) {
682 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
683 std::vector<AvailableStream> outputStreams;
684
685 for (const auto& name : cameraDeviceNames) {
686 CameraMetadata meta;
687 std::shared_ptr<ICameraDevice> cameraDevice;
688
689 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
690 &cameraDevice /*out*/);
691 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
692 outputStreams.clear();
693
694 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
695 ASSERT_NE(0u, outputStreams.size());
696
697 int32_t jpegBufferSize = 0;
698 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
699 ASSERT_NE(0u, jpegBufferSize);
700
701 int32_t streamId = 0;
702 Stream stream = {streamId++,
703 StreamType::OUTPUT,
704 static_cast<uint32_t>(0),
705 static_cast<uint32_t>(0),
706 static_cast<PixelFormat>(outputStreams[0].format),
707 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
708 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
709 Dataspace::UNKNOWN,
710 StreamRotation::ROTATION_0,
711 std::string(),
712 jpegBufferSize,
713 -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +0000714 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
715 RequestAvailableDynamicRangeProfilesMap::
716 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -0800717 int32_t streamConfigCounter = 0;
718 std::vector<Stream> streams = {stream};
719 StreamConfiguration config;
720 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
721 jpegBufferSize);
722
723 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ false,
724 /*expectStreamCombQuery*/ false);
725
726 config.streamConfigCounter = streamConfigCounter++;
727 std::vector<HalStream> halConfigs;
728 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
729 ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
730 ret.getServiceSpecificError() ||
731 static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError());
732
733 stream = {streamId++,
734 StreamType::OUTPUT,
735 /*width*/ INT32_MAX,
736 /*height*/ INT32_MAX,
737 static_cast<PixelFormat>(outputStreams[0].format),
738 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
739 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
740 Dataspace::UNKNOWN,
741 StreamRotation::ROTATION_0,
742 std::string(),
743 jpegBufferSize,
744 -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +0000745 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
746 RequestAvailableDynamicRangeProfilesMap::
747 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -0800748
749 streams[0] = stream;
750 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
751 jpegBufferSize);
752
753 config.streamConfigCounter = streamConfigCounter++;
754 halConfigs.clear();
755 ret = mSession->configureStreams(config, &halConfigs);
756 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
757
758 for (auto& it : outputStreams) {
759 stream = {streamId++,
760 StreamType::OUTPUT,
761 it.width,
762 it.height,
763 static_cast<PixelFormat>(UINT32_MAX),
764 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
765 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
766 Dataspace::UNKNOWN,
767 StreamRotation::ROTATION_0,
768 std::string(),
769 jpegBufferSize,
770 -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +0000771 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
772 RequestAvailableDynamicRangeProfilesMap::
773 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -0800774
775 streams[0] = stream;
776 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
777 jpegBufferSize);
778 config.streamConfigCounter = streamConfigCounter++;
779 halConfigs.clear();
780 ret = mSession->configureStreams(config, &halConfigs);
781 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
782 ret.getServiceSpecificError());
783
784 stream = {streamId++,
785 StreamType::OUTPUT,
786 it.width,
787 it.height,
788 static_cast<PixelFormat>(it.format),
789 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
790 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
791 Dataspace::UNKNOWN,
792 static_cast<StreamRotation>(UINT32_MAX),
793 std::string(),
794 jpegBufferSize,
795 -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +0000796 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
797 RequestAvailableDynamicRangeProfilesMap::
798 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -0800799
800 streams[0] = stream;
801 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
802 jpegBufferSize);
803
804 config.streamConfigCounter = streamConfigCounter++;
805 halConfigs.clear();
806 ret = mSession->configureStreams(config, &halConfigs);
807 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
808 ret.getServiceSpecificError());
809 }
810
811 ret = mSession->close();
812 mSession = nullptr;
813 ASSERT_TRUE(ret.isOk());
814 }
815}
816
// Check whether all supported ZSL output stream combinations can be
// configured successfully.
//
// For every camera that advertises ZSL reprocessing, walks the static
// metadata's input/output format map and, for each (input, output) pair,
// configures a three-stream session: a ZSL output stream, the matching
// INPUT stream, and a regular output stream. Also verifies that monochrome
// cameras expose the mandatory PRIV->Y8 / Y8->Y8 / Y8->BLOB combinations.
TEST_P(CameraAidlTest, configureStreamsZSLInputOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> inputStreams;
    std::vector<AvailableZSLInputOutput> inputOutputMap;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        // Devices without ZSL support are skipped, not failed.
        Status rc = isZSLModeAvailable(staticMeta);
        if (Status::OPERATION_NOT_SUPPORTED == rc) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }
        ASSERT_EQ(Status::OK, rc);

        // ZSL input resolutions/formats mirror the available output streams.
        inputStreams.clear();
        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, inputStreams));
        ASSERT_NE(0u, inputStreams.size());

        inputOutputMap.clear();
        ASSERT_EQ(Status::OK, getZSLInputOutputMap(staticMeta, inputOutputMap));
        ASSERT_NE(0u, inputOutputMap.size());

        // Monochrome cameras that list a Y8 stream must support the
        // corresponding Y8 reprocessing combinations (checked at the end).
        bool supportMonoY8 = false;
        if (Status::OK == isMonochromeCamera(staticMeta)) {
            for (auto& it : inputStreams) {
                if (it.format == static_cast<uint32_t>(PixelFormat::Y8)) {
                    supportMonoY8 = true;
                    break;
                }
            }
        }

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        bool hasPrivToY8 = false, hasY8ToY8 = false, hasY8ToBlob = false;
        uint32_t streamConfigCounter = 0;
        for (auto& inputIter : inputOutputMap) {
            AvailableStream input;
            // Use the largest available size for the given input format.
            ASSERT_EQ(Status::OK, findLargestSize(inputStreams, inputIter.inputFormat, input));
            ASSERT_NE(0u, inputStreams.size());

            // Track which mandatory monochrome reprocess combinations appear.
            if (inputIter.inputFormat ==
                        static_cast<uint32_t>(PixelFormat::IMPLEMENTATION_DEFINED) &&
                inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
                hasPrivToY8 = true;
            } else if (inputIter.inputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
                if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::BLOB)) {
                    hasY8ToBlob = true;
                } else if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
                    hasY8ToY8 = true;
                }
            }
            AvailableStream outputThreshold = {INT32_MAX, INT32_MAX, inputIter.outputFormat};
            std::vector<AvailableStream> outputStreams;
            ASSERT_EQ(Status::OK,
                      getAvailableOutputStreams(staticMeta, outputStreams, &outputThreshold));
            for (auto& outputIter : outputStreams) {
                Dataspace outputDataSpace =
                        getDataspace(static_cast<PixelFormat>(outputIter.format));
                // ZSL output stream: same size/format as the input stream.
                Stream zslStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        input.width,
                        input.height,
                        static_cast<PixelFormat>(input.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC_USAGE_HW_CAMERA_ZSL),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                // Reprocess input stream fed from the ZSL output above.
                Stream inputStream = {
                        streamId++,
                        StreamType::INPUT,
                        input.width,
                        input.height,
                        static_cast<PixelFormat>(input.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(0),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                // Regular processed output in the mapped output format.
                Stream outputStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        outputIter.width,
                        outputIter.height,
                        static_cast<PixelFormat>(outputIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                        outputDataSpace,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

                std::vector<Stream> streams = {inputStream, zslStream, outputStream};

                StreamConfiguration config;
                createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                          jpegBufferSize);

                // The combination must be reported as supported before it is
                // actually configured.
                verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
                                        /*expectStreamCombQuery*/ false);

                config.streamConfigCounter = streamConfigCounter++;
                std::vector<HalStream> halConfigs;
                ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(3u, halConfigs.size());
            }
        }

        // Monochrome cameras with Y8 must include the mandatory reprocess
        // combinations for whichever ZSL flavors they advertise.
        if (supportMonoY8) {
            if (Status::OK == isZSLModeAvailable(staticMeta, PRIV_REPROCESS)) {
                ASSERT_TRUE(hasPrivToY8);
            }
            if (Status::OK == isZSLModeAvailable(staticMeta, YUV_REPROCESS)) {
                ASSERT_TRUE(hasY8ToY8);
                ASSERT_TRUE(hasY8ToBlob);
            }
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
969
970// Check whether session parameters are supported. If Hal support for them
971// exist, then try to configure a preview stream using them.
972TEST_P(CameraAidlTest, configureStreamsWithSessionParameters) {
973 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
974 std::vector<AvailableStream> outputPreviewStreams;
975 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
976 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
977
978 for (const auto& name : cameraDeviceNames) {
979 CameraMetadata meta;
980
981 std::shared_ptr<ICameraDevice> unusedCameraDevice;
982 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
983 &unusedCameraDevice /*out*/);
984 camera_metadata_t* staticMetaBuffer =
985 reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
986
987 std::unordered_set<int32_t> availableSessionKeys;
988 auto rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
989 &availableSessionKeys);
990 ASSERT_TRUE(Status::OK == rc);
991 if (availableSessionKeys.empty()) {
992 ndk::ScopedAStatus ret = mSession->close();
993 mSession = nullptr;
994 ASSERT_TRUE(ret.isOk());
995 continue;
996 }
997
998 android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings;
999 android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams,
1000 modifiedSessionParams;
1001 constructFilteredSettings(mSession, availableSessionKeys, RequestTemplate::PREVIEW,
1002 &previewRequestSettings, &sessionParams);
1003 if (sessionParams.isEmpty()) {
1004 ndk::ScopedAStatus ret = mSession->close();
1005 mSession = nullptr;
1006 ASSERT_TRUE(ret.isOk());
1007 continue;
1008 }
1009
1010 outputPreviewStreams.clear();
1011
1012 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams,
1013 &previewThreshold));
1014 ASSERT_NE(0u, outputPreviewStreams.size());
1015
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001016 Stream previewStream = {
1017 0,
1018 StreamType::OUTPUT,
1019 outputPreviewStreams[0].width,
1020 outputPreviewStreams[0].height,
1021 static_cast<PixelFormat>(outputPreviewStreams[0].format),
1022 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1023 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
1024 Dataspace::UNKNOWN,
1025 StreamRotation::ROTATION_0,
1026 std::string(),
1027 /*bufferSize*/ 0,
1028 /*groupId*/ -1,
1029 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1030 RequestAvailableDynamicRangeProfilesMap::
1031 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08001032
1033 std::vector<Stream> streams = {previewStream};
1034 StreamConfiguration config;
1035
1036 config.streams = streams;
1037 config.operationMode = StreamConfigurationMode::NORMAL_MODE;
1038 modifiedSessionParams = sessionParams;
1039 auto sessionParamsBuffer = sessionParams.release();
1040 std::vector<uint8_t> rawSessionParam =
1041 std::vector(reinterpret_cast<uint8_t*>(sessionParamsBuffer),
1042 reinterpret_cast<uint8_t*>(sessionParamsBuffer) +
1043 get_camera_metadata_size(sessionParamsBuffer));
1044
1045 config.sessionParams.metadata = rawSessionParam;
1046 config.streamConfigCounter = 0;
1047 config.streams = {previewStream};
1048 config.streamConfigCounter = 0;
1049 config.multiResolutionInputImage = false;
1050
1051 bool newSessionParamsAvailable = false;
1052 for (const auto& it : availableSessionKeys) {
1053 if (modifiedSessionParams.exists(it)) {
1054 modifiedSessionParams.erase(it);
1055 newSessionParamsAvailable = true;
1056 break;
1057 }
1058 }
1059 if (newSessionParamsAvailable) {
1060 auto modifiedSessionParamsBuffer = modifiedSessionParams.release();
1061 verifySessionReconfigurationQuery(mSession, sessionParamsBuffer,
1062 modifiedSessionParamsBuffer);
1063 modifiedSessionParams.acquire(modifiedSessionParamsBuffer);
1064 }
1065
1066 std::vector<HalStream> halConfigs;
1067 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1068 ASSERT_TRUE(ret.isOk());
1069 ASSERT_EQ(1u, halConfigs.size());
1070
1071 sessionParams.acquire(sessionParamsBuffer);
1072 ret = mSession->close();
1073 mSession = nullptr;
1074 ASSERT_TRUE(ret.isOk());
1075 }
1076}
1077
// Verify that all supported preview + still capture stream combinations
// can be configured successfully.
//
// For every (BLOB, preview) stream size pair advertised in the static
// metadata, configures a two-stream session and expects the HAL to accept
// it. Depth-only cameras are skipped since they have no preview/still
// combinations to test.
TEST_P(CameraAidlTest, configureStreamsPreviewStillOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputBlobStreams;
    std::vector<AvailableStream> outputPreviewStreams;
    AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
                                        static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
    AvailableStream blobThreshold = {INT32_MAX, INT32_MAX, static_cast<int32_t>(PixelFormat::BLOB)};

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;

        std::shared_ptr<ICameraDevice> cameraDevice;
        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);

        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        // Check if camera support depth only
        if (isDepthOnly(staticMeta)) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        outputBlobStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold));
        ASSERT_NE(0u, outputBlobStreams.size());

        outputPreviewStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputPreviewStreams, &previewThreshold));
        ASSERT_NE(0u, outputPreviewStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        uint32_t streamConfigCounter = 0;

        // Exercise the full cross product of still-capture and preview sizes.
        for (auto& blobIter : outputBlobStreams) {
            for (auto& previewIter : outputPreviewStreams) {
                // Preview stream rendered by the composer.
                Stream previewStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        previewIter.width,
                        previewIter.height,
                        static_cast<PixelFormat>(previewIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        /*bufferSize*/ 0,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                // JPEG still-capture stream consumed by the CPU.
                Stream blobStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        blobIter.width,
                        blobIter.height,
                        static_cast<PixelFormat>(blobIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_CPU_READ),
                        Dataspace::JFIF,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        /*bufferSize*/ 0,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                std::vector<Stream> streams = {previewStream, blobStream};
                StreamConfiguration config;

                createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                          jpegBufferSize);
                config.streamConfigCounter = streamConfigCounter++;
                // Combination must be reported supported before configuring.
                verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
                                        /*expectStreamCombQuery*/ false);

                std::vector<HalStream> halConfigs;
                ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(2u, halConfigs.size());
            }
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
1177
1178// In case constrained mode is supported, test whether it can be
1179// configured. Additionally check for common invalid inputs when
1180// using this mode.
1181TEST_P(CameraAidlTest, configureStreamsConstrainedOutputs) {
1182 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1183
1184 for (const auto& name : cameraDeviceNames) {
1185 CameraMetadata meta;
1186 std::shared_ptr<ICameraDevice> cameraDevice;
1187
1188 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
1189 &cameraDevice /*out*/);
1190 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1191
1192 Status rc = isConstrainedModeAvailable(staticMeta);
1193 if (Status::OPERATION_NOT_SUPPORTED == rc) {
1194 ndk::ScopedAStatus ret = mSession->close();
1195 mSession = nullptr;
1196 ASSERT_TRUE(ret.isOk());
1197 continue;
1198 }
1199 ASSERT_EQ(Status::OK, rc);
1200
1201 AvailableStream hfrStream;
1202 rc = pickConstrainedModeSize(staticMeta, hfrStream);
1203 ASSERT_EQ(Status::OK, rc);
1204
1205 int32_t streamId = 0;
1206 uint32_t streamConfigCounter = 0;
1207 Stream stream = {streamId,
1208 StreamType::OUTPUT,
1209 hfrStream.width,
1210 hfrStream.height,
1211 static_cast<PixelFormat>(hfrStream.format),
1212 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1213 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1214 Dataspace::UNKNOWN,
1215 StreamRotation::ROTATION_0,
1216 std::string(),
1217 /*bufferSize*/ 0,
1218 /*groupId*/ -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001219 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1220 RequestAvailableDynamicRangeProfilesMap::
1221 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08001222 std::vector<Stream> streams = {stream};
1223 StreamConfiguration config;
1224 createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
1225 &config);
1226
1227 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
1228 /*expectStreamCombQuery*/ false);
1229
1230 config.streamConfigCounter = streamConfigCounter++;
1231 std::vector<HalStream> halConfigs;
1232 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1233 ASSERT_TRUE(ret.isOk());
1234 ASSERT_EQ(1u, halConfigs.size());
1235 ASSERT_EQ(halConfigs[0].id, streamId);
1236
1237 stream = {streamId++,
1238 StreamType::OUTPUT,
1239 static_cast<uint32_t>(0),
1240 static_cast<uint32_t>(0),
1241 static_cast<PixelFormat>(hfrStream.format),
1242 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1243 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1244 Dataspace::UNKNOWN,
1245 StreamRotation::ROTATION_0,
1246 std::string(),
1247 /*bufferSize*/ 0,
1248 /*groupId*/ -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001249 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1250 RequestAvailableDynamicRangeProfilesMap::
1251 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08001252 streams[0] = stream;
1253 createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
1254 &config);
1255
1256 config.streamConfigCounter = streamConfigCounter++;
1257 std::vector<HalStream> halConfig;
1258 ret = mSession->configureStreams(config, &halConfig);
1259 ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
1260 ret.getServiceSpecificError() ||
1261 static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError());
1262
1263 stream = {streamId++,
1264 StreamType::OUTPUT,
1265 INT32_MAX,
1266 INT32_MAX,
1267 static_cast<PixelFormat>(hfrStream.format),
1268 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1269 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1270 Dataspace::UNKNOWN,
1271 StreamRotation::ROTATION_0,
1272 std::string(),
1273 /*bufferSize*/ 0,
1274 /*groupId*/ -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001275 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1276 RequestAvailableDynamicRangeProfilesMap::
1277 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08001278 streams[0] = stream;
1279 createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
1280 &config);
1281
1282 config.streamConfigCounter = streamConfigCounter++;
1283 halConfigs.clear();
1284 ret = mSession->configureStreams(config, &halConfigs);
1285 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
1286
1287 stream = {streamId++,
1288 StreamType::OUTPUT,
1289 hfrStream.width,
1290 hfrStream.height,
1291 static_cast<PixelFormat>(UINT32_MAX),
1292 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1293 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1294 Dataspace::UNKNOWN,
1295 StreamRotation::ROTATION_0,
1296 std::string(),
1297 /*bufferSize*/ 0,
1298 /*groupId*/ -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001299 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1300 RequestAvailableDynamicRangeProfilesMap::
1301 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08001302 streams[0] = stream;
1303 createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
1304 &config);
1305
1306 config.streamConfigCounter = streamConfigCounter++;
1307 halConfigs.clear();
1308 ret = mSession->configureStreams(config, &halConfigs);
1309 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
1310
1311 ret = mSession->close();
1312 mSession = nullptr;
1313 ASSERT_TRUE(ret.isOk());
1314 }
1315}
1316
// Verify that all supported video + snapshot stream combinations can
// be configured successfully.
//
// Mirrors configureStreamsPreviewStillOutputs but pairs a video-encoder
// stream with a JPEG BLOB stream, capped at the maximum video resolution.
// Depth-only cameras are skipped.
TEST_P(CameraAidlTest, configureStreamsVideoStillOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputBlobStreams;
    std::vector<AvailableStream> outputVideoStreams;
    AvailableStream videoThreshold = {kMaxVideoWidth, kMaxVideoHeight,
                                      static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
    AvailableStream blobThreshold = {kMaxVideoWidth, kMaxVideoHeight,
                                     static_cast<int32_t>(PixelFormat::BLOB)};

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);

        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        // Check if camera support depth only
        if (isDepthOnly(staticMeta)) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        outputBlobStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold));
        ASSERT_NE(0u, outputBlobStreams.size());

        outputVideoStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputVideoStreams, &videoThreshold));
        ASSERT_NE(0u, outputVideoStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        uint32_t streamConfigCounter = 0;
        // Exercise the full cross product of snapshot and video sizes.
        for (auto& blobIter : outputBlobStreams) {
            for (auto& videoIter : outputVideoStreams) {
                // Video stream consumed by the encoder.
                // NOTE(review): bufferSize is set to jpegBufferSize here as
                // well, unlike the preview tests which pass 0 — presumably
                // the field is only meaningful for BLOB streams; confirm.
                Stream videoStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        videoIter.width,
                        videoIter.height,
                        static_cast<PixelFormat>(videoIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                // JPEG snapshot stream consumed by the CPU.
                Stream blobStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        blobIter.width,
                        blobIter.height,
                        static_cast<PixelFormat>(blobIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_CPU_READ),
                        Dataspace::JFIF,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                std::vector<Stream> streams = {videoStream, blobStream};
                StreamConfiguration config;

                createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                          jpegBufferSize);
                // Combination must be reported supported before configuring.
                verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
                                        /*expectStreamCombQuery*/ false);

                config.streamConfigCounter = streamConfigCounter++;
                std::vector<HalStream> halConfigs;
                ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(2u, halConfigs.size());
            }
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
1416
1417// Generate and verify a camera capture request
1418TEST_P(CameraAidlTest, processCaptureRequestPreview) {
1419 // TODO(b/220897574): Failing with BUFFER_ERROR
1420 processCaptureRequestInternal(GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, RequestTemplate::PREVIEW,
1421 false /*secureOnlyCameras*/);
1422}
1423
1424// Generate and verify a secure camera capture request
1425TEST_P(CameraAidlTest, processSecureCaptureRequest) {
1426 processCaptureRequestInternal(GRALLOC1_PRODUCER_USAGE_PROTECTED, RequestTemplate::STILL_CAPTURE,
1427 true /*secureOnlyCameras*/);
1428}
1429
1430TEST_P(CameraAidlTest, processCaptureRequestPreviewStabilization) {
1431 std::unordered_map<std::string, nsecs_t> cameraDeviceToTimeLag;
1432 processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ false,
1433 cameraDeviceToTimeLag);
1434 processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ true,
1435 cameraDeviceToTimeLag);
1436}
1437
1438// Generate and verify a multi-camera capture request
1439TEST_P(CameraAidlTest, processMultiCaptureRequestPreview) {
1440 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1441 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
1442 static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
1443 int64_t bufferId = 1;
1444 uint32_t frameNumber = 1;
1445 std::vector<uint8_t> settings;
1446 std::vector<uint8_t> emptySettings;
1447 std::string invalidPhysicalId = "-1";
1448
1449 for (const auto& name : cameraDeviceNames) {
1450 std::string version, deviceId;
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +00001451 ALOGI("processMultiCaptureRequestPreview: Test device %s", name.c_str());
Avichal Rakesh362242f2022-02-08 12:40:53 -08001452 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1453 CameraMetadata metadata;
1454
1455 std::shared_ptr<ICameraDevice> unusedDevice;
1456 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &metadata /*out*/,
1457 &unusedDevice /*out*/);
1458
1459 camera_metadata_t* staticMeta =
1460 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
1461 Status rc = isLogicalMultiCamera(staticMeta);
1462 if (Status::OPERATION_NOT_SUPPORTED == rc) {
1463 ndk::ScopedAStatus ret = mSession->close();
1464 mSession = nullptr;
1465 ASSERT_TRUE(ret.isOk());
1466 continue;
1467 }
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +00001468 ASSERT_EQ(Status::OK, rc);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001469
1470 std::unordered_set<std::string> physicalIds;
1471 rc = getPhysicalCameraIds(staticMeta, &physicalIds);
1472 ASSERT_TRUE(Status::OK == rc);
1473 ASSERT_TRUE(physicalIds.size() > 1);
1474
1475 std::unordered_set<int32_t> physicalRequestKeyIDs;
1476 rc = getSupportedKeys(staticMeta, ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS,
1477 &physicalRequestKeyIDs);
1478 ASSERT_TRUE(Status::OK == rc);
1479 if (physicalRequestKeyIDs.empty()) {
1480 ndk::ScopedAStatus ret = mSession->close();
1481 mSession = nullptr;
1482 ASSERT_TRUE(ret.isOk());
1483 // The logical camera doesn't support any individual physical requests.
1484 continue;
1485 }
1486
1487 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultPreviewSettings;
1488 android::hardware::camera::common::V1_0::helper::CameraMetadata filteredSettings;
1489 constructFilteredSettings(mSession, physicalRequestKeyIDs, RequestTemplate::PREVIEW,
1490 &defaultPreviewSettings, &filteredSettings);
1491 if (filteredSettings.isEmpty()) {
1492 // No physical device settings in default request.
1493 ndk::ScopedAStatus ret = mSession->close();
1494 mSession = nullptr;
1495 ASSERT_TRUE(ret.isOk());
1496 continue;
1497 }
1498
1499 const camera_metadata_t* settingsBuffer = defaultPreviewSettings.getAndLock();
1500 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1501 settings.assign(rawSettingsBuffer,
1502 rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1503 CameraMetadata settingsMetadata = {settings};
1504 overrideRotateAndCrop(&settingsMetadata);
1505
1506 ndk::ScopedAStatus ret = mSession->close();
1507 mSession = nullptr;
1508 ASSERT_TRUE(ret.isOk());
1509
1510 // Leave only 2 physical devices in the id set.
1511 auto it = physicalIds.begin();
1512 std::string physicalDeviceId = *it;
1513 it++;
1514 physicalIds.erase(++it, physicalIds.end());
1515 ASSERT_EQ(physicalIds.size(), 2u);
1516
1517 std::vector<HalStream> halStreams;
1518 bool supportsPartialResults = false;
1519 bool useHalBufManager = false;
1520 int32_t partialResultCount = 0;
1521 Stream previewStream;
1522 std::shared_ptr<DeviceCb> cb;
1523
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +00001524 configurePreviewStreams(
1525 name, mProvider, &previewThreshold, physicalIds, &mSession, &previewStream,
1526 &halStreams /*out*/, &supportsPartialResults /*out*/, &partialResultCount /*out*/,
1527 &useHalBufManager /*out*/, &cb /*out*/, 0 /*streamConfigCounter*/, true);
1528 if (mSession == nullptr) {
1529 // stream combination not supported by HAL, skip test for device
1530 continue;
1531 }
Avichal Rakesh362242f2022-02-08 12:40:53 -08001532
1533 ::aidl::android::hardware::common::fmq::MQDescriptor<
1534 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1535 descriptor;
1536 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1537 ASSERT_TRUE(resultQueueRet.isOk());
1538 std::shared_ptr<ResultMetadataQueue> resultQueue =
1539 std::make_shared<ResultMetadataQueue>(descriptor);
1540 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1541 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
1542 resultQueue = nullptr;
1543 // Don't use the queue onwards.
1544 }
1545
1546 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1547 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1548 partialResultCount, physicalIds, resultQueue);
1549
1550 std::vector<CaptureRequest> requests(1);
1551 CaptureRequest& request = requests[0];
1552 request.frameNumber = frameNumber;
1553 request.fmqSettingsSize = 0;
Emilian Peev3d919f92022-04-20 13:50:59 -07001554 request.settings = settingsMetadata;
Avichal Rakesh362242f2022-02-08 12:40:53 -08001555
1556 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1557
1558 std::vector<buffer_handle_t> graphicBuffers;
1559 graphicBuffers.reserve(halStreams.size());
1560 outputBuffers.resize(halStreams.size());
1561 size_t k = 0;
1562 for (const auto& halStream : halStreams) {
1563 buffer_handle_t buffer_handle;
1564 if (useHalBufManager) {
1565 outputBuffers[k] = {halStream.id, /*bufferId*/ 0, NativeHandle(),
1566 BufferStatus::OK, NativeHandle(), NativeHandle()};
1567 } else {
1568 allocateGraphicBuffer(previewStream.width, previewStream.height,
1569 android_convertGralloc1To0Usage(
1570 static_cast<uint64_t>(halStream.producerUsage),
1571 static_cast<uint64_t>(halStream.consumerUsage)),
1572 halStream.overrideFormat, &buffer_handle);
1573 graphicBuffers.push_back(buffer_handle);
1574 outputBuffers[k] = {
1575 halStream.id, bufferId, ::android::makeToAidl(buffer_handle),
1576 BufferStatus::OK, NativeHandle(), NativeHandle()};
1577 bufferId++;
1578 }
1579 k++;
1580 }
1581
1582 std::vector<PhysicalCameraSetting> camSettings(1);
1583 const camera_metadata_t* filteredSettingsBuffer = filteredSettings.getAndLock();
1584 uint8_t* rawFilteredSettingsBuffer = (uint8_t*)filteredSettingsBuffer;
1585 camSettings[0].settings = {std::vector(
1586 rawFilteredSettingsBuffer,
1587 rawFilteredSettingsBuffer + get_camera_metadata_size(filteredSettingsBuffer))};
1588 overrideRotateAndCrop(&camSettings[0].settings);
1589 camSettings[0].fmqSettingsSize = 0;
1590 camSettings[0].physicalCameraId = physicalDeviceId;
1591
1592 request.inputBuffer = {
1593 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
1594 request.physicalCameraSettings = camSettings;
1595
1596 {
1597 std::unique_lock<std::mutex> l(mLock);
1598 mInflightMap.clear();
1599 mInflightMap[frameNumber] = inflightReq;
1600 }
1601
1602 int32_t numRequestProcessed = 0;
1603 std::vector<BufferCache> cachesToRemove;
1604 ndk::ScopedAStatus returnStatus =
1605 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1606 ASSERT_TRUE(returnStatus.isOk());
1607 ASSERT_EQ(numRequestProcessed, 1u);
1608
1609 {
1610 std::unique_lock<std::mutex> l(mLock);
1611 while (!inflightReq->errorCodeValid &&
1612 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1613 auto timeout = std::chrono::system_clock::now() +
1614 std::chrono::seconds(kStreamBufferTimeoutSec);
1615 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1616 }
1617
1618 ASSERT_FALSE(inflightReq->errorCodeValid);
1619 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1620
1621 request.frameNumber++;
1622 // Empty settings should be supported after the first call
1623 // for repeating requests.
1624 request.settings.metadata.clear();
1625 request.physicalCameraSettings[0].settings.metadata.clear();
1626 // The buffer has been registered to HAL by bufferId, so per
1627 // API contract we should send a null handle for this buffer
1628 request.outputBuffers[0].buffer = NativeHandle();
1629 mInflightMap.clear();
1630 inflightReq = std::make_shared<InFlightRequest>(
1631 static_cast<ssize_t>(physicalIds.size()), false, supportsPartialResults,
1632 partialResultCount, physicalIds, resultQueue);
1633 mInflightMap[request.frameNumber] = inflightReq;
1634 }
1635
1636 returnStatus =
1637 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1638 ASSERT_TRUE(returnStatus.isOk());
1639 ASSERT_EQ(numRequestProcessed, 1u);
1640
1641 {
1642 std::unique_lock<std::mutex> l(mLock);
1643 while (!inflightReq->errorCodeValid &&
1644 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1645 auto timeout = std::chrono::system_clock::now() +
1646 std::chrono::seconds(kStreamBufferTimeoutSec);
1647 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1648 }
1649
1650 ASSERT_FALSE(inflightReq->errorCodeValid);
1651 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1652 }
1653
1654 // Invalid physical camera id should fail process requests
1655 frameNumber++;
1656 camSettings[0].physicalCameraId = invalidPhysicalId;
1657 camSettings[0].settings.metadata = settings;
1658
1659 request.physicalCameraSettings = camSettings; // Invalid camera settings
1660 returnStatus =
1661 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1662 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
1663 returnStatus.getServiceSpecificError());
1664
1665 defaultPreviewSettings.unlock(settingsBuffer);
1666 filteredSettings.unlock(filteredSettingsBuffer);
1667
1668 if (useHalBufManager) {
1669 std::vector<int32_t> streamIds(halStreams.size());
1670 for (size_t i = 0; i < streamIds.size(); i++) {
1671 streamIds[i] = halStreams[i].id;
1672 }
1673 verifyBuffersReturned(mSession, streamIds, cb);
1674 }
1675
1676 ret = mSession->close();
1677 mSession = nullptr;
1678 ASSERT_TRUE(ret.isOk());
1679 }
1680}
1681
// Generate and verify an ultra high resolution capture request.
// For every camera that reports ultra-high-resolution support, configure a
// max-resolution stream per tested pixel format, submit one STILL_CAPTURE
// request with ANDROID_SENSOR_PIXEL_MODE = MAXIMUM_RESOLUTION, and verify the
// capture completes with output buffers and no error notification.
TEST_P(CameraAidlTest, processUltraHighResolutionRequest) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    int64_t bufferId = 1;     // Monotonically increasing id for test-allocated buffers.
    int32_t frameNumber = 1;  // Reused for each configured pixel format (map is cleared).
    CameraMetadata settings;  // Serialized capture settings, built once per camera below.

    for (const auto& name : cameraDeviceNames) {
        std::string version, deviceId;
        ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
        CameraMetadata meta;

        // Open a session only to fetch static metadata; the session is closed
        // again before the capture streams are configured.
        std::shared_ptr<ICameraDevice> unusedDevice;
        openEmptyDeviceSession(name, mProvider, &mSession, &meta, &unusedDevice);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
        if (!isUltraHighResolution(staticMeta)) {
            // Device does not advertise ultra-high-resolution capture; skip it.
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }
        CameraMetadata req;
        android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
        ndk::ScopedAStatus ret =
                mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE, &req);
        ASSERT_TRUE(ret.isOk());

        // Validate the raw default-settings buffer before interpreting it.
        const camera_metadata_t* metadata =
                reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
        size_t expectedSize = req.metadata.size();
        int result = validate_camera_metadata_structure(metadata, &expectedSize);
        ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));

        size_t entryCount = get_camera_metadata_entry_count(metadata);
        ASSERT_GT(entryCount, 0u);
        defaultSettings = metadata;
        // Ask for the sensor's maximum-resolution pixel mode in this request.
        uint8_t sensorPixelMode =
                static_cast<uint8_t>(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
        ASSERT_EQ(::android::OK,
                  defaultSettings.update(ANDROID_SENSOR_PIXEL_MODE, &sensorPixelMode, 1));

        // Serialize the edited settings into the AIDL CameraMetadata parcelable.
        const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
        uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
        settings.metadata = std::vector(
                rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
        overrideRotateAndCrop(&settings);

        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());

        std::vector<HalStream> halStreams;
        bool supportsPartialResults = false;
        bool useHalBufManager = false;
        int32_t partialResultCount = 0;
        Stream previewStream;
        std::shared_ptr<DeviceCb> cb;

        // Exercise both a YUV and a RAW max-resolution stream configuration.
        std::list<PixelFormat> pixelFormats = {PixelFormat::YCBCR_420_888, PixelFormat::RAW16};
        for (PixelFormat format : pixelFormats) {
            configureStreams(name, mProvider, format, &mSession, &previewStream, &halStreams,
                             &supportsPartialResults, &partialResultCount, &useHalBufManager, &cb,
                             0, /*maxResolution*/ true);
            ASSERT_NE(mSession, nullptr);

            ::aidl::android::hardware::common::fmq::MQDescriptor<
                    int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
                    descriptor;
            auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
            ASSERT_TRUE(resultQueueRet.isOk());

            std::shared_ptr<ResultMetadataQueue> resultQueue =
                    std::make_shared<ResultMetadataQueue>(descriptor);
            if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
                ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
                resultQueue = nullptr;
                // Don't use the queue onwards.
            }

            std::vector<buffer_handle_t> graphicBuffers;
            graphicBuffers.reserve(halStreams.size());
            // Tracks the request until every buffer and all result metadata arrive.
            std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
                    static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
                    partialResultCount, std::unordered_set<std::string>(), resultQueue);

            std::vector<CaptureRequest> requests(1);
            CaptureRequest& request = requests[0];
            std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
            outputBuffers.resize(halStreams.size());

            size_t k = 0;
            for (const auto& halStream : halStreams) {
                buffer_handle_t buffer_handle;
                if (useHalBufManager) {
                    // HAL buffer manager allocates; send an empty handle with bufferId 0.
                    outputBuffers[k] = {halStream.id, 0,
                                        NativeHandle(), BufferStatus::OK,
                                        NativeHandle(), NativeHandle()};
                } else {
                    allocateGraphicBuffer(previewStream.width, previewStream.height,
                                          android_convertGralloc1To0Usage(
                                                  static_cast<uint64_t>(halStream.producerUsage),
                                                  static_cast<uint64_t>(halStream.consumerUsage)),
                                          halStream.overrideFormat, &buffer_handle);
                    graphicBuffers.push_back(buffer_handle);
                    outputBuffers[k] = {
                            halStream.id, bufferId, ::android::makeToAidl(buffer_handle),
                            BufferStatus::OK, NativeHandle(), NativeHandle()};
                    bufferId++;
                }
                k++;
            }

            // Still capture: no input buffer (streamId -1 marks it unused).
            request.inputBuffer = {
                    -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
            request.frameNumber = frameNumber;
            request.fmqSettingsSize = 0;  // Settings travel in the parcelable, not the FMQ.
            request.settings = settings;
            request.inputWidth = 0;
            request.inputHeight = 0;

            // Register the in-flight request before submitting it, under mLock,
            // so the device callback can find it.
            {
                std::unique_lock<std::mutex> l(mLock);
                mInflightMap.clear();
                mInflightMap[frameNumber] = inflightReq;
            }

            int32_t numRequestProcessed = 0;
            std::vector<BufferCache> cachesToRemove;
            ndk::ScopedAStatus returnStatus =
                    mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
            ASSERT_TRUE(returnStatus.isOk());
            ASSERT_EQ(numRequestProcessed, 1u);

            {
                // Wait (with per-iteration timeout) until all buffers and result
                // metadata for the request have been delivered, or an error occurs.
                std::unique_lock<std::mutex> l(mLock);
                while (!inflightReq->errorCodeValid &&
                       ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kStreamBufferTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
                }

                ASSERT_FALSE(inflightReq->errorCodeValid);
                ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
            }
            if (useHalBufManager) {
                // Verify the HAL hands back all buffers it requested for these streams.
                std::vector<int32_t> streamIds(halStreams.size());
                for (size_t i = 0; i < streamIds.size(); i++) {
                    streamIds[i] = halStreams[i].id;
                }
                verifyBuffersReturned(mSession, streamIds, cb);
            }

            ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
        }
    }
}
1841
// Generate and verify 10-bit dynamic range request.
// For every camera capable of 10-bit dynamic range output, configure a stream
// for each advertised dynamic range profile, submit one still capture, and
// verify the result buffers plus the profile-specific metadata
// (via verify10BitMetadata).
TEST_P(CameraAidlTest, process10BitDynamicRangeRequest) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    int64_t bufferId = 1;     // Monotonically increasing id for test-allocated buffers.
    int32_t frameNumber = 1;  // Reused per profile; inflight map is cleared each time.
    CameraMetadata settings;  // Serialized capture settings, built once per camera.

    for (const auto& name : cameraDeviceNames) {
        std::string version, deviceId;
        ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> device;
        openEmptyDeviceSession(name, mProvider, &mSession, &meta, &device);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
        if (!is10BitDynamicRangeCapable(staticMeta)) {
            // Device cannot produce 10-bit dynamic range output; skip it.
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }
        // A 10-bit capable device must list at least one dynamic range profile.
        std::vector<RequestAvailableDynamicRangeProfilesMap> profileList;
        get10BitDynamicRangeProfiles(staticMeta, &profileList);
        ASSERT_FALSE(profileList.empty());

        CameraMetadata req;
        android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
        ndk::ScopedAStatus ret =
                mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE, &req);
        ASSERT_TRUE(ret.isOk());

        // Validate the raw default-settings buffer before interpreting it.
        const camera_metadata_t* metadata =
                reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
        size_t expectedSize = req.metadata.size();
        int result = validate_camera_metadata_structure(metadata, &expectedSize);
        ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));

        size_t entryCount = get_camera_metadata_entry_count(metadata);
        ASSERT_GT(entryCount, 0u);
        defaultSettings = metadata;

        // Serialize the default settings into the AIDL CameraMetadata parcelable.
        const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
        uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
        settings.metadata = std::vector(
                rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
        overrideRotateAndCrop(&settings);

        // Close the metadata-query session; streams are configured on a fresh one.
        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());

        std::vector<HalStream> halStreams;
        bool supportsPartialResults = false;
        bool useHalBufManager = false;
        int32_t partialResultCount = 0;
        Stream previewStream;
        std::shared_ptr<DeviceCb> cb;
        // Run one capture per supported dynamic range profile.
        for (const auto& profile : profileList) {
            configureStreams(name, mProvider, PixelFormat::IMPLEMENTATION_DEFINED, &mSession,
                             &previewStream, &halStreams, &supportsPartialResults,
                             &partialResultCount, &useHalBufManager, &cb, 0,
                             /*maxResolution*/ false, profile);
            ASSERT_NE(mSession, nullptr);

            ::aidl::android::hardware::common::fmq::MQDescriptor<
                    int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
                    descriptor;
            auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
            ASSERT_TRUE(resultQueueRet.isOk());

            std::shared_ptr<ResultMetadataQueue> resultQueue =
                    std::make_shared<ResultMetadataQueue>(descriptor);
            if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
                ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
                resultQueue = nullptr;
                // Don't use the queue onwards.
            }

            std::vector<buffer_handle_t> graphicBuffers;
            graphicBuffers.reserve(halStreams.size());

            // Tracks the request until every buffer and all result metadata arrive.
            std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
                    static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
                    partialResultCount, std::unordered_set<std::string>(), resultQueue);

            std::vector<CaptureRequest> requests(1);
            CaptureRequest& request = requests[0];
            std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
            outputBuffers.resize(halStreams.size());

            size_t k = 0;
            for (const auto& halStream : halStreams) {
                buffer_handle_t buffer_handle;
                if (useHalBufManager) {
                    // HAL buffer manager allocates; send an empty handle with bufferId 0.
                    outputBuffers[k] = {halStream.id, 0,
                                        NativeHandle(), BufferStatus::OK,
                                        NativeHandle(), NativeHandle()};
                } else {
                    allocateGraphicBuffer(previewStream.width, previewStream.height,
                                          android_convertGralloc1To0Usage(
                                                  static_cast<uint64_t>(halStream.producerUsage),
                                                  static_cast<uint64_t>(halStream.consumerUsage)),
                                          halStream.overrideFormat, &buffer_handle);

                    graphicBuffers.push_back(buffer_handle);
                    outputBuffers[k] = {
                            halStream.id, bufferId, android::makeToAidl(buffer_handle),
                            BufferStatus::OK, NativeHandle(), NativeHandle()};
                    bufferId++;
                }
                k++;
            }

            // Still capture: no input buffer (streamId -1 marks it unused).
            request.inputBuffer = {
                    -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
            request.frameNumber = frameNumber;
            request.fmqSettingsSize = 0;  // Settings travel in the parcelable, not the FMQ.
            request.settings = settings;
            request.inputWidth = 0;
            request.inputHeight = 0;

            // Register the in-flight request before submitting it, under mLock,
            // so the device callback can find it.
            {
                std::unique_lock<std::mutex> l(mLock);
                mInflightMap.clear();
                mInflightMap[frameNumber] = inflightReq;
            }

            int32_t numRequestProcessed = 0;
            std::vector<BufferCache> cachesToRemove;
            ndk::ScopedAStatus returnStatus =
                    mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
            ASSERT_TRUE(returnStatus.isOk());
            ASSERT_EQ(numRequestProcessed, 1u);

            {
                // Wait (with per-iteration timeout) until all buffers and result
                // metadata for the request have been delivered, or an error occurs.
                std::unique_lock<std::mutex> l(mLock);
                while (!inflightReq->errorCodeValid &&
                       ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kStreamBufferTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
                }

                ASSERT_FALSE(inflightReq->errorCodeValid);
                ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
                // Check the profile-specific (e.g. HDR) metadata on the result buffers.
                verify10BitMetadata(mHandleImporter, *inflightReq, profile);
            }
            if (useHalBufManager) {
                // Flush the streams and wait for the HAL to return all its buffers.
                std::vector<int32_t> streamIds(halStreams.size());
                for (size_t i = 0; i < streamIds.size(); i++) {
                    streamIds[i] = halStreams[i].id;
                }
                mSession->signalStreamFlush(streamIds, /*streamConfigCounter*/ 0);
                cb->waitForBuffersReturned();
            }

            ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
        }
    }
}
2003
2004// Generate and verify a burst containing alternating sensor sensitivity values
2005TEST_P(CameraAidlTest, processCaptureRequestBurstISO) {
2006 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2007 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2008 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2009 int64_t bufferId = 1;
2010 int32_t frameNumber = 1;
2011 float isoTol = .03f;
2012 CameraMetadata settings;
2013
2014 for (const auto& name : cameraDeviceNames) {
2015 CameraMetadata meta;
2016 settings.metadata.clear();
2017 std::shared_ptr<ICameraDevice> unusedDevice;
2018 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2019 &unusedDevice /*out*/);
2020 camera_metadata_t* staticMetaBuffer =
2021 clone_camera_metadata(reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
2022 ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta(
2023 staticMetaBuffer);
2024
2025 camera_metadata_entry_t hwLevel = staticMeta.find(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL);
2026 ASSERT_TRUE(0 < hwLevel.count);
2027 if (ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED == hwLevel.data.u8[0] ||
2028 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL == hwLevel.data.u8[0]) {
2029 // Limited/External devices can skip this test
2030 ndk::ScopedAStatus ret = mSession->close();
2031 mSession = nullptr;
2032 ASSERT_TRUE(ret.isOk());
2033 continue;
2034 }
2035
2036 camera_metadata_entry_t isoRange = staticMeta.find(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE);
2037 ASSERT_EQ(isoRange.count, 2u);
2038
2039 ndk::ScopedAStatus ret = mSession->close();
2040 mSession = nullptr;
2041 ASSERT_TRUE(ret.isOk());
2042
2043 bool supportsPartialResults = false;
2044 bool useHalBufManager = false;
2045 int32_t partialResultCount = 0;
2046 Stream previewStream;
2047 std::vector<HalStream> halStreams;
2048 std::shared_ptr<DeviceCb> cb;
2049 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2050 &previewStream /*out*/, &halStreams /*out*/,
2051 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2052 &useHalBufManager /*out*/, &cb /*out*/);
2053
2054 ::aidl::android::hardware::common::fmq::MQDescriptor<
2055 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2056 descriptor;
2057 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2058 std::shared_ptr<ResultMetadataQueue> resultQueue =
2059 std::make_shared<ResultMetadataQueue>(descriptor);
2060 ASSERT_TRUE(resultQueueRet.isOk());
2061 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2062 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
2063 resultQueue = nullptr;
2064 // Don't use the queue onwards.
2065 }
2066
2067 ret = mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &settings);
2068 ASSERT_TRUE(ret.isOk());
2069
2070 ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
2071 std::vector<CaptureRequest> requests(kBurstFrameCount);
2072 std::vector<buffer_handle_t> buffers(kBurstFrameCount);
2073 std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount);
2074 std::vector<int32_t> isoValues(kBurstFrameCount);
2075 std::vector<CameraMetadata> requestSettings(kBurstFrameCount);
2076
2077 for (int32_t i = 0; i < kBurstFrameCount; i++) {
2078 std::unique_lock<std::mutex> l(mLock);
2079 CaptureRequest& request = requests[i];
2080 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2081 outputBuffers.resize(1);
2082 StreamBuffer& outputBuffer = outputBuffers[0];
2083
2084 isoValues[i] = ((i % 2) == 0) ? isoRange.data.i32[0] : isoRange.data.i32[1];
2085 if (useHalBufManager) {
2086 outputBuffer = {halStreams[0].id, 0,
2087 NativeHandle(), BufferStatus::OK,
2088 NativeHandle(), NativeHandle()};
2089 } else {
2090 allocateGraphicBuffer(previewStream.width, previewStream.height,
2091 android_convertGralloc1To0Usage(
2092 static_cast<uint64_t>(halStreams[0].producerUsage),
2093 static_cast<uint64_t>(halStreams[0].consumerUsage)),
2094 halStreams[0].overrideFormat, &buffers[i]);
2095 outputBuffer = {halStreams[0].id, bufferId + i, ::android::makeToAidl(buffers[i]),
2096 BufferStatus::OK, NativeHandle(), NativeHandle()};
2097 }
2098
2099 requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));
2100
2101 // Disable all 3A routines
2102 uint8_t mode = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF);
2103 ASSERT_EQ(::android::OK, requestMeta.update(ANDROID_CONTROL_MODE, &mode, 1));
2104 ASSERT_EQ(::android::OK,
2105 requestMeta.update(ANDROID_SENSOR_SENSITIVITY, &isoValues[i], 1));
2106 camera_metadata_t* metaBuffer = requestMeta.release();
2107 uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2108 requestSettings[i].metadata = std::vector(
2109 rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2110 overrideRotateAndCrop(&(requestSettings[i]));
2111
2112 request.frameNumber = frameNumber + i;
2113 request.fmqSettingsSize = 0;
2114 request.settings = requestSettings[i];
2115 request.inputBuffer = {
2116 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2117
2118 inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2119 partialResultCount, resultQueue);
2120 mInflightMap[frameNumber + i] = inflightReqs[i];
2121 }
2122
2123 int32_t numRequestProcessed = 0;
2124 std::vector<BufferCache> cachesToRemove;
2125
2126 ndk::ScopedAStatus returnStatus =
2127 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2128 ASSERT_TRUE(returnStatus.isOk());
2129 ASSERT_EQ(numRequestProcessed, kBurstFrameCount);
2130
2131 for (size_t i = 0; i < kBurstFrameCount; i++) {
2132 std::unique_lock<std::mutex> l(mLock);
2133 while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
2134 (!inflightReqs[i]->haveResultMetadata))) {
2135 auto timeout = std::chrono::system_clock::now() +
2136 std::chrono::seconds(kStreamBufferTimeoutSec);
2137 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2138 }
2139
2140 ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
2141 ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
2142 ASSERT_EQ(previewStream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
2143 ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
2144 ASSERT_TRUE(inflightReqs[i]->collectedResult.exists(ANDROID_SENSOR_SENSITIVITY));
2145 camera_metadata_entry_t isoResult =
2146 inflightReqs[i]->collectedResult.find(ANDROID_SENSOR_SENSITIVITY);
2147 ASSERT_TRUE(std::abs(isoResult.data.i32[0] - isoValues[i]) <=
2148 std::round(isoValues[i] * isoTol));
2149 }
2150
2151 if (useHalBufManager) {
2152 verifyBuffersReturned(mSession, previewStream.id, cb);
2153 }
2154 ret = mSession->close();
2155 mSession = nullptr;
2156 ASSERT_TRUE(ret.isOk());
2157 }
2158}
2159
2160// Test whether an incorrect capture request with missing settings will
2161// be reported correctly.
2162TEST_P(CameraAidlTest, processCaptureRequestInvalidSinglePreview) {
2163 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2164 std::vector<AvailableStream> outputPreviewStreams;
2165 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2166 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2167 int64_t bufferId = 1;
2168 int32_t frameNumber = 1;
2169 CameraMetadata settings;
2170
2171 for (const auto& name : cameraDeviceNames) {
2172 Stream previewStream;
2173 std::vector<HalStream> halStreams;
2174 std::shared_ptr<DeviceCb> cb;
2175 bool supportsPartialResults = false;
2176 bool useHalBufManager = false;
2177 int32_t partialResultCount = 0;
2178 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2179 &previewStream /*out*/, &halStreams /*out*/,
2180 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2181 &useHalBufManager /*out*/, &cb /*out*/);
2182 ASSERT_NE(mSession, nullptr);
2183 ASSERT_FALSE(halStreams.empty());
2184
2185 buffer_handle_t buffer_handle = nullptr;
2186
2187 if (useHalBufManager) {
2188 bufferId = 0;
2189 } else {
2190 allocateGraphicBuffer(previewStream.width, previewStream.height,
2191 android_convertGralloc1To0Usage(
2192 static_cast<uint64_t>(halStreams[0].producerUsage),
2193 static_cast<uint64_t>(halStreams[0].consumerUsage)),
2194 halStreams[0].overrideFormat, &buffer_handle);
2195 }
2196
2197 std::vector<CaptureRequest> requests(1);
2198 CaptureRequest& request = requests[0];
2199 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2200 outputBuffers.resize(1);
2201 StreamBuffer& outputBuffer = outputBuffers[0];
2202
2203 outputBuffer = {
2204 halStreams[0].id,
2205 bufferId,
2206 buffer_handle == nullptr ? NativeHandle() : ::android::makeToAidl(buffer_handle),
2207 BufferStatus::OK,
2208 NativeHandle(),
2209 NativeHandle()};
2210
2211 request.inputBuffer = {
2212 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2213 request.frameNumber = frameNumber;
2214 request.fmqSettingsSize = 0;
2215 request.settings = settings;
2216
2217 // Settings were not correctly initialized, we should fail here
2218 int32_t numRequestProcessed = 0;
2219 std::vector<BufferCache> cachesToRemove;
2220 ndk::ScopedAStatus ret =
2221 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2222 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
2223 ASSERT_EQ(numRequestProcessed, 0u);
2224
2225 ret = mSession->close();
2226 mSession = nullptr;
2227 ASSERT_TRUE(ret.isOk());
2228 }
2229}
2230
2231// Verify camera offline session behavior
2232TEST_P(CameraAidlTest, switchToOffline) {
2233 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2234 AvailableStream threshold = {kMaxStillWidth, kMaxStillHeight,
2235 static_cast<int32_t>(PixelFormat::BLOB)};
2236 int64_t bufferId = 1;
2237 int32_t frameNumber = 1;
2238 CameraMetadata settings;
2239
2240 for (const auto& name : cameraDeviceNames) {
2241 CameraMetadata meta;
2242 {
2243 std::shared_ptr<ICameraDevice> unusedDevice;
2244 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2245 &unusedDevice);
2246 camera_metadata_t* staticMetaBuffer = clone_camera_metadata(
2247 reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
2248 ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta(
2249 staticMetaBuffer);
2250
2251 if (isOfflineSessionSupported(staticMetaBuffer) != Status::OK) {
2252 ndk::ScopedAStatus ret = mSession->close();
2253 mSession = nullptr;
2254 ASSERT_TRUE(ret.isOk());
2255 continue;
2256 }
2257 ndk::ScopedAStatus ret = mSession->close();
2258 mSession = nullptr;
2259 ASSERT_TRUE(ret.isOk());
2260 }
2261
2262 bool supportsPartialResults = false;
2263 int32_t partialResultCount = 0;
2264 Stream stream;
2265 std::vector<HalStream> halStreams;
2266 std::shared_ptr<DeviceCb> cb;
2267 int32_t jpegBufferSize;
2268 bool useHalBufManager;
2269 configureOfflineStillStream(name, mProvider, &threshold, &mSession /*out*/, &stream /*out*/,
2270 &halStreams /*out*/, &supportsPartialResults /*out*/,
2271 &partialResultCount /*out*/, &cb /*out*/,
2272 &jpegBufferSize /*out*/, &useHalBufManager /*out*/);
2273
2274 auto ret = mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE,
2275 &settings);
2276 ASSERT_TRUE(ret.isOk());
2277
2278 ::aidl::android::hardware::common::fmq::MQDescriptor<
2279 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2280 descriptor;
2281
2282 ndk::ScopedAStatus resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2283 ASSERT_TRUE(resultQueueRet.isOk());
2284 std::shared_ptr<ResultMetadataQueue> resultQueue =
2285 std::make_shared<ResultMetadataQueue>(descriptor);
2286 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2287 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
2288 resultQueue = nullptr;
2289 // Don't use the queue onwards.
2290 }
2291
2292 ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
2293
2294 std::vector<buffer_handle_t> buffers(kBurstFrameCount);
2295 std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount);
2296 std::vector<CameraMetadata> requestSettings(kBurstFrameCount);
2297
2298 std::vector<CaptureRequest> requests(kBurstFrameCount);
2299
2300 HalStream halStream = halStreams[0];
2301 for (uint32_t i = 0; i < kBurstFrameCount; i++) {
2302 CaptureRequest& request = requests[i];
2303 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2304 outputBuffers.resize(1);
2305 StreamBuffer& outputBuffer = outputBuffers[0];
2306
2307 std::unique_lock<std::mutex> l(mLock);
2308 if (useHalBufManager) {
2309 outputBuffer = {halStream.id, 0, NativeHandle(), BufferStatus::OK, NativeHandle(),
2310 NativeHandle()};
2311 } else {
2312 // jpeg buffer (w,h) = (blobLen, 1)
2313 allocateGraphicBuffer(jpegBufferSize, /*height*/ 1,
2314 android_convertGralloc1To0Usage(
2315 static_cast<uint64_t>(halStream.producerUsage),
2316 static_cast<uint64_t>(halStream.consumerUsage)),
2317 halStream.overrideFormat, &buffers[i]);
2318 outputBuffer = {halStream.id, bufferId + i, ::android::makeToAidl(buffers[i]),
2319 BufferStatus::OK, NativeHandle(), NativeHandle()};
2320 }
2321
2322 requestMeta.clear();
2323 requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));
2324
2325 camera_metadata_t* metaBuffer = requestMeta.release();
2326 uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2327 requestSettings[i].metadata = std::vector(
2328 rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2329 overrideRotateAndCrop(&requestSettings[i]);
2330
2331 request.frameNumber = frameNumber + i;
2332 request.fmqSettingsSize = 0;
2333 request.settings = requestSettings[i];
2334 request.inputBuffer = {/*streamId*/ -1,
2335 /*bufferId*/ 0, NativeHandle(),
2336 BufferStatus::ERROR, NativeHandle(),
2337 NativeHandle()};
2338
2339 inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2340 partialResultCount, resultQueue);
2341 mInflightMap[frameNumber + i] = inflightReqs[i];
2342 }
2343
2344 int32_t numRequestProcessed = 0;
2345 std::vector<BufferCache> cachesToRemove;
2346
2347 ndk::ScopedAStatus returnStatus =
2348 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2349 ASSERT_TRUE(returnStatus.isOk());
2350 ASSERT_EQ(numRequestProcessed, kBurstFrameCount);
2351
2352 std::vector<int32_t> offlineStreamIds = {halStream.id};
2353 CameraOfflineSessionInfo offlineSessionInfo;
2354 std::shared_ptr<ICameraOfflineSession> offlineSession;
2355 returnStatus =
2356 mSession->switchToOffline(offlineStreamIds, &offlineSessionInfo, &offlineSession);
2357
2358 if (!halStreams[0].supportOffline) {
2359 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
2360 returnStatus.getServiceSpecificError());
2361 ret = mSession->close();
2362 mSession = nullptr;
2363 ASSERT_TRUE(ret.isOk());
2364 continue;
2365 }
2366
2367 ASSERT_TRUE(returnStatus.isOk());
2368 // Hal might be unable to find any requests qualified for offline mode.
2369 if (offlineSession == nullptr) {
2370 ret = mSession->close();
2371 mSession = nullptr;
2372 ASSERT_TRUE(ret.isOk());
2373 continue;
2374 }
2375
2376 ASSERT_EQ(offlineSessionInfo.offlineStreams.size(), 1u);
2377 ASSERT_EQ(offlineSessionInfo.offlineStreams[0].id, halStream.id);
2378 ASSERT_NE(offlineSessionInfo.offlineRequests.size(), 0u);
2379
2380 // close device session to make sure offline session does not rely on it
2381 ret = mSession->close();
2382 mSession = nullptr;
2383 ASSERT_TRUE(ret.isOk());
2384
2385 ::aidl::android::hardware::common::fmq::MQDescriptor<
2386 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2387 offlineResultDescriptor;
2388
2389 auto offlineResultQueueRet =
2390 offlineSession->getCaptureResultMetadataQueue(&offlineResultDescriptor);
2391 std::shared_ptr<ResultMetadataQueue> offlineResultQueue =
2392 std::make_shared<ResultMetadataQueue>(descriptor);
2393 if (!offlineResultQueue->isValid() || offlineResultQueue->availableToWrite() <= 0) {
2394 ALOGE("%s: offline session returns empty result metadata fmq, not use it", __func__);
2395 offlineResultQueue = nullptr;
2396 // Don't use the queue onwards.
2397 }
2398 ASSERT_TRUE(offlineResultQueueRet.isOk());
2399
2400 updateInflightResultQueue(offlineResultQueue);
2401
2402 ret = offlineSession->setCallback(cb);
2403 ASSERT_TRUE(ret.isOk());
2404
2405 for (size_t i = 0; i < kBurstFrameCount; i++) {
2406 std::unique_lock<std::mutex> l(mLock);
2407 while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
2408 (!inflightReqs[i]->haveResultMetadata))) {
2409 auto timeout = std::chrono::system_clock::now() +
2410 std::chrono::seconds(kStreamBufferTimeoutSec);
2411 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2412 }
2413
2414 ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
2415 ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
2416 ASSERT_EQ(stream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
2417 ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
2418 }
2419
2420 ret = offlineSession->close();
2421 ASSERT_TRUE(ret.isOk());
2422 }
2423}
2424
2425// Check whether an invalid capture request with missing output buffers
2426// will be reported correctly.
2427TEST_P(CameraAidlTest, processCaptureRequestInvalidBuffer) {
2428 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2429 std::vector<AvailableStream> outputBlobStreams;
2430 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2431 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2432 int32_t frameNumber = 1;
2433 CameraMetadata settings;
2434
2435 for (const auto& name : cameraDeviceNames) {
2436 Stream previewStream;
2437 std::vector<HalStream> halStreams;
2438 std::shared_ptr<DeviceCb> cb;
2439 bool supportsPartialResults = false;
2440 bool useHalBufManager = false;
2441 int32_t partialResultCount = 0;
2442 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2443 &previewStream /*out*/, &halStreams /*out*/,
2444 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2445 &useHalBufManager /*out*/, &cb /*out*/);
2446
2447 RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
2448 ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings);
2449 ASSERT_TRUE(ret.isOk());
2450 overrideRotateAndCrop(&settings);
2451
2452 std::vector<CaptureRequest> requests(1);
2453 CaptureRequest& request = requests[0];
2454 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2455 outputBuffers.resize(1);
2456 // Empty output buffer
2457 outputBuffers[0] = {
2458 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2459
2460 request.inputBuffer = {
2461 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2462 request.frameNumber = frameNumber;
2463 request.fmqSettingsSize = 0;
2464 request.settings = settings;
2465
2466 // Output buffers are missing, we should fail here
2467 int32_t numRequestProcessed = 0;
2468 std::vector<BufferCache> cachesToRemove;
2469 ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2470 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
2471 ASSERT_EQ(numRequestProcessed, 0u);
2472
2473 ret = mSession->close();
2474 mSession = nullptr;
2475 ASSERT_TRUE(ret.isOk());
2476 }
2477}
2478
2479// Generate, trigger and flush a preview request
TEST_P(CameraAidlTest, flushPreviewRequest) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputPreviewStreams;
    AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
                                        static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
    int64_t bufferId = 1;
    int32_t frameNumber = 1;
    CameraMetadata settings;

    for (const auto& name : cameraDeviceNames) {
        Stream previewStream;
        std::vector<HalStream> halStreams;
        std::shared_ptr<DeviceCb> cb;
        bool supportsPartialResults = false;
        bool useHalBufManager = false;
        int32_t partialResultCount = 0;

        // Open the device and configure a single preview stream; the out
        // parameters describe the session the HAL created.
        configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
                               &previewStream /*out*/, &halStreams /*out*/,
                               &supportsPartialResults /*out*/, &partialResultCount /*out*/,
                               &useHalBufManager /*out*/, &cb /*out*/);

        ASSERT_NE(mSession, nullptr);
        ASSERT_NE(cb, nullptr);
        ASSERT_FALSE(halStreams.empty());

        ::aidl::android::hardware::common::fmq::MQDescriptor<
                int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
                descriptor;

        // Results may come via this fast message queue; fall back to the
        // callback path (resultQueue == nullptr) if the HAL's queue is unusable.
        auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
        std::shared_ptr<ResultMetadataQueue> resultQueue =
                std::make_shared<ResultMetadataQueue>(descriptor);
        ASSERT_TRUE(resultQueueRet.isOk());
        if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
            ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
            resultQueue = nullptr;
            // Don't use the queue onwards.
        }

        // Tracks one in-flight request: buffer count, partial-result
        // bookkeeping, and where result metadata will arrive.
        std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
                1, false, supportsPartialResults, partialResultCount, resultQueue);
        RequestTemplate reqTemplate = RequestTemplate::PREVIEW;

        ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings);
        ASSERT_TRUE(ret.isOk());
        overrideRotateAndCrop(&settings);

        buffer_handle_t buffer_handle;
        std::vector<CaptureRequest> requests(1);
        CaptureRequest& request = requests[0];
        std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
        outputBuffers.resize(1);
        StreamBuffer& outputBuffer = outputBuffers[0];
        if (useHalBufManager) {
            // HAL-managed buffers: send an empty buffer (id 0); the HAL
            // requests buffers itself via the callback.
            bufferId = 0;
            outputBuffer = {halStreams[0].id, bufferId, NativeHandle(),
                            BufferStatus::OK, NativeHandle(), NativeHandle()};
        } else {
            // Framework-managed buffers: allocate a gralloc buffer matching
            // the HAL stream's usage/format and hand it to the HAL.
            allocateGraphicBuffer(previewStream.width, previewStream.height,
                                  android_convertGralloc1To0Usage(
                                          static_cast<uint64_t>(halStreams[0].producerUsage),
                                          static_cast<uint64_t>(halStreams[0].consumerUsage)),
                                  halStreams[0].overrideFormat, &buffer_handle);
            outputBuffer = {halStreams[0].id, bufferId, ::android::makeToAidl(buffer_handle),
                            BufferStatus::OK, NativeHandle(), NativeHandle()};
        }

        request.frameNumber = frameNumber;
        request.fmqSettingsSize = 0;
        request.settings = settings;
        // No reprocess input: stream id -1 marks the input buffer unused.
        request.inputBuffer = {
                -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};

        {
            // Register the request before submitting so the device callback
            // can find it by frame number.
            std::unique_lock<std::mutex> l(mLock);
            mInflightMap.clear();
            mInflightMap[frameNumber] = inflightReq;
        }

        int32_t numRequestProcessed = 0;
        std::vector<BufferCache> cachesToRemove;
        ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
        ASSERT_TRUE(ret.isOk());
        ASSERT_EQ(numRequestProcessed, 1u);

        // Flush before waiting for request to complete.
        ndk::ScopedAStatus returnStatus = mSession->flush();
        ASSERT_TRUE(returnStatus.isOk());

        {
            std::unique_lock<std::mutex> l(mLock);
            // Wait (with timeout) until the request either errors out or has
            // delivered all buffers and its result metadata.
            while (!inflightReq->errorCodeValid &&
                   ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
                auto timeout = std::chrono::system_clock::now() +
                               std::chrono::seconds(kStreamBufferTimeoutSec);
                ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
            }

            // A flushed request may either complete normally or report a
            // request/result/buffer error; anything else is a failure.
            if (!inflightReq->errorCodeValid) {
                ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
                ASSERT_EQ(previewStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId);
            } else {
                switch (inflightReq->errorCode) {
                    case ErrorCode::ERROR_REQUEST:
                    case ErrorCode::ERROR_RESULT:
                    case ErrorCode::ERROR_BUFFER:
                        // Expected
                        break;
                    case ErrorCode::ERROR_DEVICE:
                    default:
                        FAIL() << "Unexpected error:"
                               << static_cast<uint32_t>(inflightReq->errorCode);
                }
            }
        }

        if (useHalBufManager) {
            // HAL-managed buffers must all be returned by the time flush is done.
            verifyBuffersReturned(mSession, previewStream.id, cb);
        }

        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
2606
2607// Verify that camera flushes correctly without any pending requests.
2608TEST_P(CameraAidlTest, flushEmpty) {
2609 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2610 std::vector<AvailableStream> outputPreviewStreams;
2611 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2612 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2613
2614 for (const auto& name : cameraDeviceNames) {
2615 Stream previewStream;
2616 std::vector<HalStream> halStreams;
2617 std::shared_ptr<DeviceCb> cb;
2618 bool supportsPartialResults = false;
2619 bool useHalBufManager = false;
2620
2621 int32_t partialResultCount = 0;
2622 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2623 &previewStream /*out*/, &halStreams /*out*/,
2624 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2625 &useHalBufManager /*out*/, &cb /*out*/);
2626
2627 ndk::ScopedAStatus returnStatus = mSession->flush();
2628 ASSERT_TRUE(returnStatus.isOk());
2629
2630 {
2631 std::unique_lock<std::mutex> l(mLock);
2632 auto timeout = std::chrono::system_clock::now() +
2633 std::chrono::milliseconds(kEmptyFlushTimeoutMSec);
2634 ASSERT_EQ(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2635 }
2636
2637 ndk::ScopedAStatus ret = mSession->close();
2638 mSession = nullptr;
2639 ASSERT_TRUE(ret.isOk());
2640 }
2641}
2642
2643// Test camera provider notify method
TEST_P(CameraAidlTest, providerDeviceStateNotification) {
    // Exercise the provider's device-state notification path with a
    // non-default state, then restore the default (NORMAL) state.
    notifyDeviceState(ICameraProvider::DEVICE_STATE_BACK_COVERED);
    notifyDeviceState(ICameraProvider::DEVICE_STATE_NORMAL);
}
2648
2649// Verify that all supported stream formats and sizes can be configured
2650// successfully for injection camera.
2651TEST_P(CameraAidlTest, configureInjectionStreamsAvailableOutputs) {
2652 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2653 std::vector<AvailableStream> outputStreams;
2654
2655 for (const auto& name : cameraDeviceNames) {
2656 CameraMetadata metadata;
2657
2658 std::shared_ptr<ICameraInjectionSession> injectionSession;
2659 std::shared_ptr<ICameraDevice> unusedDevice;
2660 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2661 &unusedDevice /*out*/);
2662 if (injectionSession == nullptr) {
2663 continue;
2664 }
2665
2666 camera_metadata_t* staticMetaBuffer =
2667 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2668 CameraMetadata chars;
2669 chars.metadata = metadata.metadata;
2670
2671 outputStreams.clear();
2672 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams));
2673 ASSERT_NE(0u, outputStreams.size());
2674
2675 int32_t jpegBufferSize = 0;
2676 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize));
2677 ASSERT_NE(0u, jpegBufferSize);
2678
2679 int32_t streamId = 0;
2680 int32_t streamConfigCounter = 0;
2681 for (auto& it : outputStreams) {
2682 Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format));
2683 Stream stream = {streamId,
2684 StreamType::OUTPUT,
2685 it.width,
2686 it.height,
2687 static_cast<PixelFormat>(it.format),
2688 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2689 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2690 dataspace,
2691 StreamRotation::ROTATION_0,
2692 std::string(),
2693 jpegBufferSize,
2694 0,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00002695 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2696 RequestAvailableDynamicRangeProfilesMap::
2697 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08002698
2699 std::vector<Stream> streams = {stream};
2700 StreamConfiguration config;
2701 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2702 jpegBufferSize);
2703
2704 config.streamConfigCounter = streamConfigCounter++;
2705 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
2706 ASSERT_TRUE(s.isOk());
2707 streamId++;
2708 }
2709
2710 std::shared_ptr<ICameraDeviceSession> session;
2711 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2712 ASSERT_TRUE(ret.isOk());
2713 ASSERT_NE(session, nullptr);
2714 ret = session->close();
2715 ASSERT_TRUE(ret.isOk());
2716 }
2717}
2718
2719// Check for correct handling of invalid/incorrect configuration parameters for injection camera.
2720TEST_P(CameraAidlTest, configureInjectionStreamsInvalidOutputs) {
2721 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2722 std::vector<AvailableStream> outputStreams;
2723
2724 for (const auto& name : cameraDeviceNames) {
2725 CameraMetadata metadata;
2726 std::shared_ptr<ICameraInjectionSession> injectionSession;
2727 std::shared_ptr<ICameraDevice> unusedDevice;
2728 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2729 &unusedDevice);
2730 if (injectionSession == nullptr) {
2731 continue;
2732 }
2733
2734 camera_metadata_t* staticMetaBuffer =
2735 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2736 std::shared_ptr<ICameraDeviceSession> session;
2737 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2738 ASSERT_TRUE(ret.isOk());
2739 ASSERT_NE(session, nullptr);
2740
2741 CameraMetadata chars;
2742 chars.metadata = metadata.metadata;
2743
2744 outputStreams.clear();
2745 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams));
2746 ASSERT_NE(0u, outputStreams.size());
2747
2748 int32_t jpegBufferSize = 0;
2749 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize));
2750 ASSERT_NE(0u, jpegBufferSize);
2751
2752 int32_t streamId = 0;
2753 Stream stream = {streamId++,
2754 StreamType::OUTPUT,
2755 0,
2756 0,
2757 static_cast<PixelFormat>(outputStreams[0].format),
2758 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2759 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2760 Dataspace::UNKNOWN,
2761 StreamRotation::ROTATION_0,
2762 std::string(),
2763 jpegBufferSize,
2764 0,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00002765 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2766 RequestAvailableDynamicRangeProfilesMap::
2767 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08002768
2769 int32_t streamConfigCounter = 0;
2770 std::vector<Stream> streams = {stream};
2771 StreamConfiguration config;
2772 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2773 jpegBufferSize);
2774
2775 config.streamConfigCounter = streamConfigCounter++;
2776 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
2777 ASSERT_TRUE(
2778 (static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) == s.getServiceSpecificError()) ||
2779 (static_cast<int32_t>(Status::INTERNAL_ERROR) == s.getServiceSpecificError()));
2780
2781 stream = {streamId++,
2782 StreamType::OUTPUT,
2783 INT32_MAX,
2784 INT32_MAX,
2785 static_cast<PixelFormat>(outputStreams[0].format),
2786 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2787 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2788 Dataspace::UNKNOWN,
2789 StreamRotation::ROTATION_0,
2790 std::string(),
2791 jpegBufferSize,
2792 0,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00002793 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2794 RequestAvailableDynamicRangeProfilesMap::
2795 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2796
Avichal Rakesh362242f2022-02-08 12:40:53 -08002797 streams[0] = stream;
2798 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2799 jpegBufferSize);
2800 config.streamConfigCounter = streamConfigCounter++;
2801 s = injectionSession->configureInjectionStreams(config, chars);
2802 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
2803
2804 for (auto& it : outputStreams) {
2805 stream = {streamId++,
2806 StreamType::OUTPUT,
2807 it.width,
2808 it.height,
2809 static_cast<PixelFormat>(INT32_MAX),
2810 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2811 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2812 Dataspace::UNKNOWN,
2813 StreamRotation::ROTATION_0,
2814 std::string(),
2815 jpegBufferSize,
2816 0,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00002817 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2818 RequestAvailableDynamicRangeProfilesMap::
2819 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08002820 streams[0] = stream;
2821 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2822 jpegBufferSize);
2823 config.streamConfigCounter = streamConfigCounter++;
2824 s = injectionSession->configureInjectionStreams(config, chars);
2825 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
2826
2827 stream = {streamId++,
2828 StreamType::OUTPUT,
2829 it.width,
2830 it.height,
2831 static_cast<PixelFormat>(it.format),
2832 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2833 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2834 Dataspace::UNKNOWN,
2835 static_cast<StreamRotation>(INT32_MAX),
2836 std::string(),
2837 jpegBufferSize,
2838 0,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00002839 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2840 RequestAvailableDynamicRangeProfilesMap::
2841 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08002842 streams[0] = stream;
2843 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2844 jpegBufferSize);
2845 config.streamConfigCounter = streamConfigCounter++;
2846 s = injectionSession->configureInjectionStreams(config, chars);
2847 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
2848 }
2849
2850 ret = session->close();
2851 ASSERT_TRUE(ret.isOk());
2852 }
2853}
2854
2855// Check whether session parameters are supported for injection camera. If Hal support for them
2856// exist, then try to configure a preview stream using them.
2857TEST_P(CameraAidlTest, configureInjectionStreamsWithSessionParameters) {
2858 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2859 std::vector<AvailableStream> outputPreviewStreams;
2860 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2861 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2862
2863 for (const auto& name : cameraDeviceNames) {
2864 CameraMetadata metadata;
2865 std::shared_ptr<ICameraInjectionSession> injectionSession;
2866 std::shared_ptr<ICameraDevice> unusedDevice;
2867 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2868 &unusedDevice /*out*/);
2869 if (injectionSession == nullptr) {
2870 continue;
2871 }
2872
2873 std::shared_ptr<ICameraDeviceSession> session;
2874 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2875 ASSERT_TRUE(ret.isOk());
2876 ASSERT_NE(session, nullptr);
2877
2878 camera_metadata_t* staticMetaBuffer =
2879 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2880 CameraMetadata chars;
2881 chars.metadata = metadata.metadata;
2882
2883 std::unordered_set<int32_t> availableSessionKeys;
2884 Status rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
2885 &availableSessionKeys);
2886 ASSERT_EQ(Status::OK, rc);
2887 if (availableSessionKeys.empty()) {
2888 ret = session->close();
2889 ASSERT_TRUE(ret.isOk());
2890 continue;
2891 }
2892
2893 android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings;
2894 android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams,
2895 modifiedSessionParams;
2896 constructFilteredSettings(session, availableSessionKeys, RequestTemplate::PREVIEW,
2897 &previewRequestSettings, &sessionParams);
2898 if (sessionParams.isEmpty()) {
2899 ret = session->close();
2900 ASSERT_TRUE(ret.isOk());
2901 continue;
2902 }
2903
2904 outputPreviewStreams.clear();
2905
2906 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams,
2907 &previewThreshold));
2908 ASSERT_NE(0u, outputPreviewStreams.size());
2909
2910 Stream previewStream = {
2911 0,
2912 StreamType::OUTPUT,
2913 outputPreviewStreams[0].width,
2914 outputPreviewStreams[0].height,
2915 static_cast<PixelFormat>(outputPreviewStreams[0].format),
2916 static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
2917 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2918 Dataspace::UNKNOWN,
2919 StreamRotation::ROTATION_0,
2920 std::string(),
2921 0,
2922 -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00002923 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2924 RequestAvailableDynamicRangeProfilesMap::
2925 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08002926 std::vector<Stream> streams = {previewStream};
2927 StreamConfiguration config;
2928 config.streams = streams;
2929 config.operationMode = StreamConfigurationMode::NORMAL_MODE;
2930
2931 modifiedSessionParams = sessionParams;
2932 camera_metadata_t* sessionParamsBuffer = sessionParams.release();
2933 uint8_t* rawSessionParamsBuffer = reinterpret_cast<uint8_t*>(sessionParamsBuffer);
2934 config.sessionParams.metadata =
2935 std::vector(rawSessionParamsBuffer,
2936 rawSessionParamsBuffer + get_camera_metadata_size(sessionParamsBuffer));
2937
2938 config.streamConfigCounter = 0;
2939 config.streamConfigCounter = 0;
2940 config.multiResolutionInputImage = false;
2941
2942 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
2943 ASSERT_TRUE(s.isOk());
2944
2945 sessionParams.acquire(sessionParamsBuffer);
2946 free_camera_metadata(staticMetaBuffer);
2947 ret = session->close();
2948 ASSERT_TRUE(ret.isOk());
2949 }
2950}
2951
2952// Verify that valid stream use cases can be configured successfully, and invalid use cases
2953// fail stream configuration.
2954TEST_P(CameraAidlTest, configureStreamsUseCases) {
2955 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2956
2957 for (const auto& name : cameraDeviceNames) {
2958 CameraMetadata meta;
2959 std::shared_ptr<ICameraDevice> cameraDevice;
2960
2961 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2962 &cameraDevice /*out*/);
2963
2964 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
2965 // Check if camera support depth only
2966 if (isDepthOnly(staticMeta)) {
2967 ndk::ScopedAStatus ret = mSession->close();
2968 mSession = nullptr;
2969 ASSERT_TRUE(ret.isOk());
2970 continue;
2971 }
2972
2973 std::vector<AvailableStream> outputPreviewStreams;
2974 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2975 static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
2976 ASSERT_EQ(Status::OK,
2977 getAvailableOutputStreams(staticMeta, outputPreviewStreams, &previewThreshold));
2978 ASSERT_NE(0u, outputPreviewStreams.size());
2979
2980 // Combine valid and invalid stream use cases
Shuzhen Wang36efa712022-03-08 10:10:44 -08002981 std::vector<int64_t> useCases(kMandatoryUseCases);
Avichal Rakesh362242f2022-02-08 12:40:53 -08002982 useCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL + 1);
2983
Shuzhen Wang36efa712022-03-08 10:10:44 -08002984 std::vector<int64_t> supportedUseCases;
Avichal Rakesh362242f2022-02-08 12:40:53 -08002985 camera_metadata_ro_entry entry;
2986 auto retcode = find_camera_metadata_ro_entry(
2987 staticMeta, ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES, &entry);
2988 if ((0 == retcode) && (entry.count > 0)) {
Avichal Rakeshe1685a72022-03-22 13:52:36 -07002989 supportedUseCases.insert(supportedUseCases.end(), entry.data.i64,
2990 entry.data.i64 + entry.count);
Avichal Rakesh362242f2022-02-08 12:40:53 -08002991 } else {
2992 supportedUseCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
2993 }
2994
2995 std::vector<Stream> streams(1);
Avichal Rakeshd3503a32022-02-25 06:23:14 +00002996 streams[0] = {0,
2997 StreamType::OUTPUT,
2998 outputPreviewStreams[0].width,
2999 outputPreviewStreams[0].height,
3000 static_cast<PixelFormat>(outputPreviewStreams[0].format),
3001 static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
3002 GRALLOC1_CONSUMER_USAGE_CPU_READ),
3003 Dataspace::UNKNOWN,
3004 StreamRotation::ROTATION_0,
3005 std::string(),
3006 0,
3007 -1,
3008 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
3009 RequestAvailableDynamicRangeProfilesMap::
3010 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08003011
3012 int32_t streamConfigCounter = 0;
3013 CameraMetadata req;
3014 StreamConfiguration config;
3015 RequestTemplate reqTemplate = RequestTemplate::STILL_CAPTURE;
3016 ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &req);
3017 ASSERT_TRUE(ret.isOk());
3018 config.sessionParams = req;
3019
Shuzhen Wang36efa712022-03-08 10:10:44 -08003020 for (int64_t useCase : useCases) {
Avichal Rakesh362242f2022-02-08 12:40:53 -08003021 bool useCaseSupported = std::find(supportedUseCases.begin(), supportedUseCases.end(),
3022 useCase) != supportedUseCases.end();
3023
3024 streams[0].useCase = static_cast<
3025 aidl::android::hardware::camera::metadata::ScalerAvailableStreamUseCases>(
3026 useCase);
3027 config.streams = streams;
3028 config.operationMode = StreamConfigurationMode::NORMAL_MODE;
3029 config.streamConfigCounter = streamConfigCounter;
3030 config.multiResolutionInputImage = false;
3031
3032 bool combSupported;
3033 ret = cameraDevice->isStreamCombinationSupported(config, &combSupported);
Avichal Rakeshe1685a72022-03-22 13:52:36 -07003034 if (static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED) ==
3035 ret.getServiceSpecificError()) {
3036 continue;
Avichal Rakesh362242f2022-02-08 12:40:53 -08003037 }
Avichal Rakeshe1685a72022-03-22 13:52:36 -07003038
Avichal Rakesh362242f2022-02-08 12:40:53 -08003039 ASSERT_TRUE(ret.isOk());
Avichal Rakeshe1685a72022-03-22 13:52:36 -07003040 ASSERT_EQ(combSupported, useCaseSupported);
Avichal Rakesh362242f2022-02-08 12:40:53 -08003041
3042 std::vector<HalStream> halStreams;
3043 ret = mSession->configureStreams(config, &halStreams);
3044 ALOGI("configureStreams returns status: %d", ret.getServiceSpecificError());
3045 if (useCaseSupported) {
3046 ASSERT_TRUE(ret.isOk());
3047 ASSERT_EQ(1u, halStreams.size());
3048 } else {
3049 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
3050 ret.getServiceSpecificError());
3051 }
3052 }
3053 ret = mSession->close();
3054 mSession = nullptr;
3055 ASSERT_TRUE(ret.isOk());
3056 }
3057}
3058
// Suppress gtest's error when no provider instances are registered on the
// device, in which case the parameterized suite is never instantiated.
GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(CameraAidlTest);
// Instantiate the suite once per registered AIDL ICameraProvider instance;
// each test's name is suffixed with the provider instance name.
INSTANTIATE_TEST_SUITE_P(
        PerInstance, CameraAidlTest,
        testing::ValuesIn(android::getAidlHalInstanceNames(ICameraProvider::descriptor)),
        android::hardware::PrintInstanceNameToString);