/*
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <aidl/Vintf.h>
#include <aidl/android/hardware/camera/common/VendorTagSection.h>
#include <aidl/android/hardware/camera/device/ICameraDevice.h>
#include <aidlcommonsupport/NativeHandle.h>
#include <camera_aidl_test.h>
#include <cutils/properties.h>
#include <device_cb.h>
#include <empty_device_cb.h>
#include <grallocusage/GrallocUsageConversion.h>
#include <gtest/gtest.h>
#include <hardware/gralloc.h>
#include <hardware/gralloc1.h>
#include <hidl/GtestPrinter.h>
#include <hidl/HidlSupport.h>
#include <torch_provider_cb.h>
#include <list>

using ::aidl::android::hardware::camera::common::CameraDeviceStatus;
using ::aidl::android::hardware::camera::common::CameraResourceCost;
using ::aidl::android::hardware::camera::common::TorchModeStatus;
using ::aidl::android::hardware::camera::common::VendorTagSection;
using ::aidl::android::hardware::camera::device::ICameraDevice;
using ::aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap;
using ::aidl::android::hardware::camera::metadata::SensorPixelMode;
using ::aidl::android::hardware::camera::provider::CameraIdAndStreamCombination;
using ::aidl::android::hardware::camera::provider::BnCameraProviderCallback;

using ::ndk::ScopedAStatus;

namespace {
const int32_t kBurstFrameCount = 10;
const uint32_t kMaxStillWidth = 2048;
const uint32_t kMaxStillHeight = 1536;

const int64_t kEmptyFlushTimeoutMSec = 200;

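// Stream use cases that a camera device advertising stream use case support is expected to list.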
const static std::vector<int64_t> kMandatoryUseCases = {
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL};
}  // namespace

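// Test if ICameraProvider::getCameraIdList returns Status::OK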
TEST_P(CameraAidlTest, getCameraIdList) {
    std::vector<std::string> idList;
    ScopedAStatus ret = mProvider->getCameraIdList(&idList);
    ASSERT_TRUE(ret.isOk());

    for (size_t i = 0; i < idList.size(); i++) {
        ALOGI("Camera Id[%zu] is %s", i, idList[i].c_str());
    }
}

// Test if ICameraProvider::getVendorTags returns Status::OK
TEST_P(CameraAidlTest, getVendorTags) {
    std::vector<VendorTagSection> vendorTags;
    ScopedAStatus ret = mProvider->getVendorTags(&vendorTags);

    ASSERT_TRUE(ret.isOk());
    for (size_t i = 0; i < vendorTags.size(); i++) {
        ALOGI("Vendor tag section %zu name %s", i, vendorTags[i].sectionName.c_str());
        for (auto& tag : vendorTags[i].tags) {
            ALOGI("Vendor tag id %u name %s type %d", tag.tagId, tag.tagName.c_str(),
                  (int)tag.tagType);
        }
    }
}

// Test if ICameraProvider::setCallback returns Status::OK
TEST_P(CameraAidlTest, setCallback) {
    struct ProviderCb : public BnCameraProviderCallback {
        ScopedAStatus cameraDeviceStatusChange(const std::string& cameraDeviceName,
                                               CameraDeviceStatus newStatus) override {
            ALOGI("camera device status callback name %s, status %d", cameraDeviceName.c_str(),
                  (int)newStatus);
            return ScopedAStatus::ok();
        }
        ScopedAStatus torchModeStatusChange(const std::string& cameraDeviceName,
                                            TorchModeStatus newStatus) override {
            ALOGI("Torch mode status callback name %s, status %d", cameraDeviceName.c_str(),
                  (int)newStatus);
            return ScopedAStatus::ok();
        }
        ScopedAStatus physicalCameraDeviceStatusChange(const std::string& cameraDeviceName,
                                                       const std::string& physicalCameraDeviceName,
                                                       CameraDeviceStatus newStatus) override {
            ALOGI("physical camera device status callback name %s, physical camera name %s,"
                  " status %d",
                  cameraDeviceName.c_str(), physicalCameraDeviceName.c_str(), (int)newStatus);
            return ScopedAStatus::ok();
        }
    };

    std::shared_ptr<ProviderCb> cb = ndk::SharedRefBase::make<ProviderCb>();
    ScopedAStatus ret = mProvider->setCallback(cb);
    ASSERT_TRUE(ret.isOk());
    ret = mProvider->setCallback(nullptr);
    ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
}

// Test if ICameraProvider::getCameraDeviceInterface returns Status::OK and non-null device
TEST_P(CameraAidlTest, getCameraDeviceInterface) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> cameraDevice;
        ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &cameraDevice);
        ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(cameraDevice, nullptr);
    }
}

// Verify that the device resource cost can be retrieved and the values are
// correct.
TEST_P(CameraAidlTest, getResourceCost) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& deviceName : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> cameraDevice;
        ScopedAStatus ret = mProvider->getCameraDeviceInterface(deviceName, &cameraDevice);
        ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(cameraDevice, nullptr);

        CameraResourceCost resourceCost;
        ret = cameraDevice->getResourceCost(&resourceCost);
        ALOGI("getResourceCost returns: %d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());

        ALOGI(" Resource cost is %d", resourceCost.resourceCost);
        ASSERT_LE(resourceCost.resourceCost, 100u);

        for (const auto& name : resourceCost.conflictingDevices) {
            ALOGI(" Conflicting device: %s", name.c_str());
        }
    }
}

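// Verify that hidden physical cameras are only associated with logical cameras
// that share the same system camera kind.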
TEST_P(CameraAidlTest, systemCameraTest) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::map<std::string, std::vector<SystemCameraKind>> hiddenPhysicalIdToLogicalMap;
    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("systemCameraTest: Testing camera device %s", name.c_str());
        ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        CameraMetadata cameraCharacteristics;
        ret = device->getCameraCharacteristics(&cameraCharacteristics);
        ASSERT_TRUE(ret.isOk());

        const camera_metadata_t* staticMeta =
                reinterpret_cast<const camera_metadata_t*>(cameraCharacteristics.metadata.data());
        Status rc = isLogicalMultiCamera(staticMeta);
        if (rc == Status::OPERATION_NOT_SUPPORTED) {
            return;
        }

        ASSERT_EQ(rc, Status::OK);
        std::unordered_set<std::string> physicalIds;
        ASSERT_EQ(getPhysicalCameraIds(staticMeta, &physicalIds), Status::OK);
        SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
        Status retStatus = getSystemCameraKind(staticMeta, &systemCameraKind);
        ASSERT_EQ(retStatus, Status::OK);

        for (auto physicalId : physicalIds) {
            bool isPublicId = false;
            for (auto& deviceName : cameraDeviceNames) {
                std::string publicVersion, publicId;
                ASSERT_TRUE(matchDeviceName(deviceName, mProviderType, &publicVersion, &publicId));
                if (physicalId == publicId) {
                    isPublicId = true;
                    break;
                }
            }

            // For hidden physical cameras, collect their associated logical cameras
            // and store the system camera kind.
            if (!isPublicId) {
                auto it = hiddenPhysicalIdToLogicalMap.find(physicalId);
                if (it == hiddenPhysicalIdToLogicalMap.end()) {
                    hiddenPhysicalIdToLogicalMap.insert(std::make_pair(
                            physicalId, std::vector<SystemCameraKind>({systemCameraKind})));
                } else {
                    it->second.push_back(systemCameraKind);
                }
            }
        }
    }

    // Check that the system camera kind of the logical cameras associated with
    // each hidden physical camera is the same.
    for (const auto& it : hiddenPhysicalIdToLogicalMap) {
        SystemCameraKind neededSystemCameraKind = it.second.front();
        for (auto foundSystemCamera : it.second) {
            ASSERT_EQ(neededSystemCameraKind, foundSystemCamera);
        }
    }
}

// Verify that the static camera characteristics can be retrieved
// successfully.
TEST_P(CameraAidlTest, getCameraCharacteristics) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("getCameraCharacteristics: Testing camera device %s", name.c_str());
        ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        CameraMetadata chars;
        ret = device->getCameraCharacteristics(&chars);
        ASSERT_TRUE(ret.isOk());
        verifyCameraCharacteristics(chars);
        verifyMonochromeCharacteristics(chars);
        verifyRecommendedConfigs(chars);
        verifyLogicalOrUltraHighResCameraMetadata(name, device, chars, cameraDeviceNames);

        ASSERT_TRUE(ret.isOk());

        // getPhysicalCameraCharacteristics will fail for publicly
        // advertised camera IDs.
        std::string version, cameraId;
        ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &cameraId));
        CameraMetadata devChars;
        ret = device->getPhysicalCameraCharacteristics(cameraId, &devChars);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
        ASSERT_EQ(0, devChars.metadata.size());
    }
}

// Verify that the torch strength level can be set and retrieved successfully.
TEST_P(CameraAidlTest, turnOnTorchWithStrengthLevel) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this);
    ndk::ScopedAStatus ret = mProvider->setCallback(cb);
    ASSERT_TRUE(ret.isOk());

    for (const auto& name : cameraDeviceNames) {
        int32_t defaultLevel;
        std::shared_ptr<ICameraDevice> device;
        ALOGI("%s: Testing camera device %s", __FUNCTION__, name.c_str());

        ret = mProvider->getCameraDeviceInterface(name, &device);
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        CameraMetadata chars;
        ret = device->getCameraCharacteristics(&chars);
        ASSERT_TRUE(ret.isOk());

        const camera_metadata_t* staticMeta =
                reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());
        bool torchStrengthControlSupported = isTorchStrengthControlSupported(staticMeta);
        camera_metadata_ro_entry entry;
        int rc = find_camera_metadata_ro_entry(staticMeta,
                                               ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL, &entry);
        if (torchStrengthControlSupported) {
            ASSERT_EQ(rc, 0);
            ASSERT_GT(entry.count, 0);
            defaultLevel = *entry.data.i32;
            ALOGI("Default level is:%d", defaultLevel);
        }

        mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
        ret = device->turnOnTorchWithStrengthLevel(2);
        ALOGI("turnOnTorchWithStrengthLevel returns status: %d", ret.getServiceSpecificError());
        // OPERATION_NOT_SUPPORTED check
        if (!torchStrengthControlSupported) {
            ALOGI("Torch strength control not supported.");
            ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED),
                      ret.getServiceSpecificError());
        } else {
            {
                ASSERT_TRUE(ret.isOk());
                std::unique_lock<std::mutex> l(mTorchLock);
                while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kTorchTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
                }
                ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
                mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
            }
            ALOGI("getTorchStrengthLevel: Testing");
            int32_t strengthLevel;
            ret = device->getTorchStrengthLevel(&strengthLevel);
            ASSERT_TRUE(ret.isOk());
            ALOGI("Torch strength level is : %d", strengthLevel);
            ASSERT_EQ(strengthLevel, 2);

            // Turn OFF the torch and verify torch strength level is reset to default level.
            ALOGI("Testing torch strength level reset after turning the torch OFF.");
            ret = device->setTorchMode(false);
            ASSERT_TRUE(ret.isOk());
            {
                std::unique_lock<std::mutex> l(mTorchLock);
                while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kTorchTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
                }
                ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
            }

            ret = device->getTorchStrengthLevel(&strengthLevel);
            ASSERT_TRUE(ret.isOk());
            ALOGI("Torch strength level after turning OFF torch is : %d", strengthLevel);
            ASSERT_EQ(strengthLevel, defaultLevel);
        }
    }
}

// In case it is supported verify that torch can be enabled.
// Check for corresponding torch callbacks as well.
TEST_P(CameraAidlTest, setTorchMode) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this);
    ndk::ScopedAStatus ret = mProvider->setCallback(cb);
    ALOGI("setCallback returns status: %d", ret.getServiceSpecificError());
    ASSERT_TRUE(ret.isOk());
    ASSERT_NE(cb, nullptr);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("setTorchMode: Testing camera device %s", name.c_str());
        ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        CameraMetadata metadata;
        ret = device->getCameraCharacteristics(&metadata);
        ALOGI("getCameraCharacteristics returns status:%d", ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        camera_metadata_t* staticMeta =
                reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
        bool torchSupported = isTorchSupported(staticMeta);

        mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
        ret = device->setTorchMode(true);
        ALOGI("setTorchMode returns status: %d", ret.getServiceSpecificError());
        if (!torchSupported) {
            ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED),
                      ret.getServiceSpecificError());
        } else {
            ASSERT_TRUE(ret.isOk());
            {
                std::unique_lock<std::mutex> l(mTorchLock);
                while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kTorchTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
                }
                ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
                mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
            }

            ret = device->setTorchMode(false);
            ASSERT_TRUE(ret.isOk());
            {
                std::unique_lock<std::mutex> l(mTorchLock);
                while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kTorchTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
                }
                ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
            }
        }
    }
}

// Check dump functionality.
TEST_P(CameraAidlTest, dump) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("dump: Testing camera device %s", name.c_str());

        ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        int raw_handle = open(kDumpOutput, O_RDWR);
        ASSERT_GE(raw_handle, 0);

        auto retStatus = device->dump(raw_handle, nullptr, 0);
        ASSERT_EQ(retStatus, ::android::OK);
        close(raw_handle);
    }
}

// Open, dump, then close
TEST_P(CameraAidlTest, openClose) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("openClose: Testing camera device %s", name.c_str());
        ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();

        ret = device->open(cb, &mSession);
        ASSERT_TRUE(ret.isOk());
        ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_NE(mSession, nullptr);
        int raw_handle = open(kDumpOutput, O_RDWR);
        ASSERT_GE(raw_handle, 0);

        auto retStatus = device->dump(raw_handle, nullptr, 0);
        ASSERT_EQ(retStatus, ::android::OK);
        close(raw_handle);

        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
        // TODO: test all session API calls return INTERNAL_ERROR after close
        // TODO: keep a wp copy here and verify session cannot be promoted out of this scope
    }
}

// Check whether all common default request settings can be successfully
// constructed.
TEST_P(CameraAidlTest, constructDefaultRequestSettings) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("constructDefaultRequestSettings: Testing camera device %s", name.c_str());
        ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
        ret = device->open(cb, &mSession);
        ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(mSession, nullptr);

        for (int32_t t = (int32_t)RequestTemplate::PREVIEW; t <= (int32_t)RequestTemplate::MANUAL;
             t++) {
            RequestTemplate reqTemplate = (RequestTemplate)t;
            CameraMetadata rawMetadata;
            ret = mSession->constructDefaultRequestSettings(reqTemplate, &rawMetadata);
            ALOGI("constructDefaultRequestSettings returns status:%d:%d", ret.getExceptionCode(),
                  ret.getServiceSpecificError());

            if (reqTemplate == RequestTemplate::ZERO_SHUTTER_LAG ||
                reqTemplate == RequestTemplate::MANUAL) {
                // optional templates
                ASSERT_TRUE(ret.isOk() || static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
                                                  ret.getServiceSpecificError());
            } else {
                ASSERT_TRUE(ret.isOk());
            }

            if (ret.isOk()) {
                const camera_metadata_t* metadata = (camera_metadata_t*)rawMetadata.metadata.data();
                size_t expectedSize = rawMetadata.metadata.size();
                int result = validate_camera_metadata_structure(metadata, &expectedSize);
                ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
                verifyRequestTemplate(metadata, reqTemplate);
            } else {
                ASSERT_EQ(0u, rawMetadata.metadata.size());
            }
        }
        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// Verify that all supported stream formats and sizes can be configured
// successfully.
TEST_P(CameraAidlTest, configureStreamsAvailableOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputStreams;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> device;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/, &device /*out*/);

        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
        outputStreams.clear();
        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
        ASSERT_NE(0u, outputStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        int32_t streamConfigCounter = 0;
        for (auto& it : outputStreams) {
            Stream stream;
            Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format));
            stream.id = streamId;
            stream.streamType = StreamType::OUTPUT;
            stream.width = it.width;
            stream.height = it.height;
            stream.format = static_cast<PixelFormat>(it.format);
            stream.dataSpace = dataspace;
            stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                    GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
            stream.rotation = StreamRotation::ROTATION_0;
            stream.dynamicRangeProfile = RequestAvailableDynamicRangeProfilesMap::
                    ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;

            std::vector<Stream> streams = {stream};
            StreamConfiguration config;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);

            bool expectStreamCombQuery = (isLogicalMultiCamera(staticMeta) == Status::OK);
            verifyStreamCombination(device, config, /*expectedStatus*/ true, expectStreamCombQuery);

            config.streamConfigCounter = streamConfigCounter++;
            std::vector<HalStream> halConfigs;
            ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
            ASSERT_TRUE(ret.isOk());
            ASSERT_EQ(halConfigs.size(), 1);
            ASSERT_EQ(halConfigs[0].id, streamId);

            streamId++;
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// Verify that mandatory concurrent streams and outputs are supported.
TEST_P(CameraAidlTest, configureConcurrentStreamsAvailableOutputs) {
    struct CameraTestInfo {
        CameraMetadata staticMeta;
        std::shared_ptr<ICameraDeviceSession> session;
        std::shared_ptr<ICameraDevice> cameraDevice;
        StreamConfiguration config;
    };

    std::map<std::string, std::string> idToNameMap = getCameraDeviceIdToNameMap(mProvider);
    std::vector<ConcurrentCameraIdCombination> concurrentDeviceCombinations =
            getConcurrentDeviceCombinations(mProvider);
    std::vector<AvailableStream> outputStreams;
    for (const auto& cameraDeviceIds : concurrentDeviceCombinations) {
        std::vector<CameraIdAndStreamCombination> cameraIdsAndStreamCombinations;
        std::vector<CameraTestInfo> cameraTestInfos;
        size_t i = 0;
        for (const auto& id : cameraDeviceIds.combination) {
            CameraTestInfo cti;
            auto it = idToNameMap.find(id);
            ASSERT_TRUE(idToNameMap.end() != it);
            std::string name = it->second;

            openEmptyDeviceSession(name, mProvider, &cti.session /*out*/, &cti.staticMeta /*out*/,
                                   &cti.cameraDevice /*out*/);

            outputStreams.clear();
            camera_metadata_t* staticMeta =
                    reinterpret_cast<camera_metadata_t*>(cti.staticMeta.metadata.data());
            ASSERT_EQ(Status::OK, getMandatoryConcurrentStreams(staticMeta, &outputStreams));
            ASSERT_NE(0u, outputStreams.size());

            int32_t jpegBufferSize = 0;
            ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
            ASSERT_NE(0u, jpegBufferSize);

            int32_t streamId = 0;
            std::vector<Stream> streams(outputStreams.size());
            size_t j = 0;
            for (const auto& s : outputStreams) {
                Stream stream;
                Dataspace dataspace = getDataspace(static_cast<PixelFormat>(s.format));
                stream.id = streamId++;
                stream.streamType = StreamType::OUTPUT;
                stream.width = s.width;
                stream.height = s.height;
                stream.format = static_cast<PixelFormat>(s.format);
                stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                        GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
                stream.dataSpace = dataspace;
                stream.rotation = StreamRotation::ROTATION_0;
                stream.sensorPixelModesUsed = {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT};
                stream.dynamicRangeProfile = RequestAvailableDynamicRangeProfilesMap::
                        ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
                streams[j] = stream;
                j++;
            }

            // Add the created stream configs to cameraIdsAndStreamCombinations
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &cti.config,
                                      jpegBufferSize);

            cti.config.streamConfigCounter = outputStreams.size();
            CameraIdAndStreamCombination cameraIdAndStreamCombination;
            cameraIdAndStreamCombination.cameraId = id;
            cameraIdAndStreamCombination.streamConfiguration = cti.config;
            cameraIdsAndStreamCombinations.push_back(cameraIdAndStreamCombination);
            i++;
            cameraTestInfos.push_back(cti);
        }
        // Now verify that concurrent streams are supported
        bool combinationSupported;
        ndk::ScopedAStatus ret = mProvider->isConcurrentStreamCombinationSupported(
                cameraIdsAndStreamCombinations, &combinationSupported);
        ASSERT_TRUE(ret.isOk());
        ASSERT_EQ(combinationSupported, true);

        // Test the stream can actually be configured
        for (auto& cti : cameraTestInfos) {
            if (cti.session != nullptr) {
                camera_metadata_t* staticMeta =
                        reinterpret_cast<camera_metadata_t*>(cti.staticMeta.metadata.data());
                bool expectStreamCombQuery = (isLogicalMultiCamera(staticMeta) == Status::OK);
                verifyStreamCombination(cti.cameraDevice, cti.config, /*expectedStatus*/ true,
                                        expectStreamCombQuery);
            }

            if (cti.session != nullptr) {
                std::vector<HalStream> streamConfigs;
                ret = cti.session->configureStreams(cti.config, &streamConfigs);
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(cti.config.streams.size(), streamConfigs.size());
            }
        }

        for (auto& cti : cameraTestInfos) {
            ret = cti.session->close();
            ASSERT_TRUE(ret.isOk());
        }
    }
}

// Check for correct handling of invalid/incorrect configuration parameters.
TEST_P(CameraAidlTest, configureStreamsInvalidOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputStreams;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
        outputStreams.clear();

        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
        ASSERT_NE(0u, outputStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        Stream stream = {streamId++,
                         StreamType::OUTPUT,
                         static_cast<uint32_t>(0),
                         static_cast<uint32_t>(0),
                         static_cast<PixelFormat>(outputStreams[0].format),
                         static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                         Dataspace::UNKNOWN,
                         StreamRotation::ROTATION_0,
                         std::string(),
                         jpegBufferSize,
                         -1,
                         {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                         RequestAvailableDynamicRangeProfilesMap::
                                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        int32_t streamConfigCounter = 0;
        std::vector<Stream> streams = {stream};
        StreamConfiguration config;
        createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                  jpegBufferSize);

        verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ false,
                                /*expectStreamCombQuery*/ false);

        config.streamConfigCounter = streamConfigCounter++;
        std::vector<HalStream> halConfigs;
        ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
                            ret.getServiceSpecificError() ||
                    static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError());

        stream = {streamId++,
                  StreamType::OUTPUT,
                  /*width*/ INT32_MAX,
                  /*height*/ INT32_MAX,
                  static_cast<PixelFormat>(outputStreams[0].format),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  jpegBufferSize,
                  -1,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                  jpegBufferSize);

        config.streamConfigCounter = streamConfigCounter++;
        halConfigs.clear();
        ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());

        for (auto& it : outputStreams) {
            stream = {streamId++,
                      StreamType::OUTPUT,
                      it.width,
                      it.height,
                      static_cast<PixelFormat>(UINT32_MAX),
                      static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                              GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                      Dataspace::UNKNOWN,
                      StreamRotation::ROTATION_0,
                      std::string(),
                      jpegBufferSize,
                      -1,
                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                      RequestAvailableDynamicRangeProfilesMap::
                              ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

            streams[0] = stream;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);
            config.streamConfigCounter = streamConfigCounter++;
            halConfigs.clear();
            ret = mSession->configureStreams(config, &halConfigs);
            ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
                      ret.getServiceSpecificError());

            stream = {streamId++,
                      StreamType::OUTPUT,
                      it.width,
                      it.height,
                      static_cast<PixelFormat>(it.format),
                      static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                              GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                      Dataspace::UNKNOWN,
                      static_cast<StreamRotation>(UINT32_MAX),
                      std::string(),
                      jpegBufferSize,
                      -1,
                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                      RequestAvailableDynamicRangeProfilesMap::
                              ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

            streams[0] = stream;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);

            config.streamConfigCounter = streamConfigCounter++;
            halConfigs.clear();
            ret = mSession->configureStreams(config, &halConfigs);
            ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
                      ret.getServiceSpecificError());
        }

        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// Check whether all supported ZSL output stream combinations can be
// configured successfully.
TEST_P(CameraAidlTest, configureStreamsZSLInputOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> inputStreams;
    std::vector<AvailableZSLInputOutput> inputOutputMap;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        Status rc = isZSLModeAvailable(staticMeta);
        if (Status::OPERATION_NOT_SUPPORTED == rc) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }
        ASSERT_EQ(Status::OK, rc);

        inputStreams.clear();
        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, inputStreams));
        ASSERT_NE(0u, inputStreams.size());

        inputOutputMap.clear();
        ASSERT_EQ(Status::OK, getZSLInputOutputMap(staticMeta, inputOutputMap));
        ASSERT_NE(0u, inputOutputMap.size());

        bool supportMonoY8 = false;
        if (Status::OK == isMonochromeCamera(staticMeta)) {
            for (auto& it : inputStreams) {
                if (it.format == static_cast<uint32_t>(PixelFormat::Y8)) {
                    supportMonoY8 = true;
                    break;
                }
            }
        }

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        bool hasPrivToY8 = false, hasY8ToY8 = false, hasY8ToBlob = false;
        uint32_t streamConfigCounter = 0;
        for (auto& inputIter : inputOutputMap) {
            AvailableStream input;
            ASSERT_EQ(Status::OK, findLargestSize(inputStreams, inputIter.inputFormat, input));
            ASSERT_NE(0u, inputStreams.size());

            if (inputIter.inputFormat ==
                        static_cast<uint32_t>(PixelFormat::IMPLEMENTATION_DEFINED) &&
                inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
                hasPrivToY8 = true;
            } else if (inputIter.inputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
                if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::BLOB)) {
                    hasY8ToBlob = true;
                } else if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
                    hasY8ToY8 = true;
                }
            }
            AvailableStream outputThreshold = {INT32_MAX, INT32_MAX, inputIter.outputFormat};
            std::vector<AvailableStream> outputStreams;
            ASSERT_EQ(Status::OK,
                      getAvailableOutputStreams(staticMeta, outputStreams, &outputThreshold));
            for (auto& outputIter : outputStreams) {
                Dataspace outputDataSpace =
                        getDataspace(static_cast<PixelFormat>(outputIter.format));
                Stream zslStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        input.width,
                        input.height,
                        static_cast<PixelFormat>(input.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC_USAGE_HW_CAMERA_ZSL),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                Stream inputStream = {
                        streamId++,
                        StreamType::INPUT,
                        input.width,
                        input.height,
                        static_cast<PixelFormat>(input.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(0),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                Stream outputStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        outputIter.width,
                        outputIter.height,
                        static_cast<PixelFormat>(outputIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                        outputDataSpace,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

                std::vector<Stream> streams = {inputStream, zslStream, outputStream};

                StreamConfiguration config;
                createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                          jpegBufferSize);

                verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
                                        /*expectStreamCombQuery*/ false);

                config.streamConfigCounter = streamConfigCounter++;
                std::vector<HalStream> halConfigs;
                ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(3u, halConfigs.size());
            }
        }

        if (supportMonoY8) {
            if (Status::OK == isZSLModeAvailable(staticMeta, PRIV_REPROCESS)) {
                ASSERT_TRUE(hasPrivToY8);
            }
            if (Status::OK == isZSLModeAvailable(staticMeta, YUV_REPROCESS)) {
                ASSERT_TRUE(hasY8ToY8);
                ASSERT_TRUE(hasY8ToBlob);
            }
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// Check whether session parameters are supported. If Hal support for them
// exist, then try to configure a preview stream using them.
TEST_P(CameraAidlTest, configureStreamsWithSessionParameters) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputPreviewStreams;
    AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
                                        static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;

        std::shared_ptr<ICameraDevice> unusedCameraDevice;
        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &unusedCameraDevice /*out*/);
        camera_metadata_t* staticMetaBuffer =
                reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        std::unordered_set<int32_t> availableSessionKeys;
        auto rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
                                   &availableSessionKeys);
        ASSERT_TRUE(Status::OK == rc);
        if (availableSessionKeys.empty()) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings;
        android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams,
                modifiedSessionParams;
        constructFilteredSettings(mSession, availableSessionKeys, RequestTemplate::PREVIEW,
                                  &previewRequestSettings, &sessionParams);
        if (sessionParams.isEmpty()) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        outputPreviewStreams.clear();

        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams,
                                                        &previewThreshold));
        ASSERT_NE(0u, outputPreviewStreams.size());

        Stream previewStream = {
                0,
                StreamType::OUTPUT,
                outputPreviewStreams[0].width,
                outputPreviewStreams[0].height,
                static_cast<PixelFormat>(outputPreviewStreams[0].format),
                static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                        GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                Dataspace::UNKNOWN,
                StreamRotation::ROTATION_0,
                std::string(),
                /*bufferSize*/ 0,
                /*groupId*/ -1,
                {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                RequestAvailableDynamicRangeProfilesMap::
                        ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

        std::vector<Stream> streams = {previewStream};
        StreamConfiguration config;

        config.streams = streams;
        config.operationMode = StreamConfigurationMode::NORMAL_MODE;
        modifiedSessionParams = sessionParams;
        auto sessionParamsBuffer = sessionParams.release();
        std::vector<uint8_t> rawSessionParam =
                std::vector(reinterpret_cast<uint8_t*>(sessionParamsBuffer),
                            reinterpret_cast<uint8_t*>(sessionParamsBuffer) +
                                    get_camera_metadata_size(sessionParamsBuffer));

        config.sessionParams.metadata = rawSessionParam;
        config.streamConfigCounter = 0;
        config.streams = {previewStream};
        config.streamConfigCounter = 0;
        config.multiResolutionInputImage = false;

        bool newSessionParamsAvailable = false;
        for (const auto& it : availableSessionKeys) {
            if (modifiedSessionParams.exists(it)) {
                modifiedSessionParams.erase(it);
                newSessionParamsAvailable = true;
                break;
            }
        }
        if (newSessionParamsAvailable) {
            auto modifiedSessionParamsBuffer = modifiedSessionParams.release();
            verifySessionReconfigurationQuery(mSession, sessionParamsBuffer,
                                              modifiedSessionParamsBuffer);
            modifiedSessionParams.acquire(modifiedSessionParamsBuffer);
        }

        std::vector<HalStream> halConfigs;
        ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_TRUE(ret.isOk());
        ASSERT_EQ(1u, halConfigs.size());

        sessionParams.acquire(sessionParamsBuffer);
        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// Verify that all supported preview + still capture stream combinations
// can be configured successfully.
TEST_P(CameraAidlTest, configureStreamsPreviewStillOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputBlobStreams;
    std::vector<AvailableStream> outputPreviewStreams;
    AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
                                        static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
    AvailableStream blobThreshold = {INT32_MAX, INT32_MAX, static_cast<int32_t>(PixelFormat::BLOB)};

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;

        std::shared_ptr<ICameraDevice> cameraDevice;
        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);

        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        // Check if camera support depth only
        if (isDepthOnly(staticMeta)) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        outputBlobStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold));
        ASSERT_NE(0u, outputBlobStreams.size());

        outputPreviewStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputPreviewStreams, &previewThreshold));
        ASSERT_NE(0u, outputPreviewStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        uint32_t streamConfigCounter = 0;

        for (auto& blobIter : outputBlobStreams) {
            for (auto& previewIter : outputPreviewStreams) {
                Stream previewStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        previewIter.width,
                        previewIter.height,
                        static_cast<PixelFormat>(previewIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        /*bufferSize*/ 0,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                Stream blobStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        blobIter.width,
                        blobIter.height,
                        static_cast<PixelFormat>(blobIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_CPU_READ),
                        Dataspace::JFIF,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        /*bufferSize*/ 0,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                std::vector<Stream> streams = {previewStream, blobStream};
                StreamConfiguration config;

                createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                          jpegBufferSize);
                config.streamConfigCounter = streamConfigCounter++;
                verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
                                        /*expectStreamCombQuery*/ false);

                std::vector<HalStream> halConfigs;
                ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(2u, halConfigs.size());
            }
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// In case constrained mode is supported, test whether it can be
// configured. Additionally check for common invalid inputs when
// using this mode.
TEST_P(CameraAidlTest, configureStreamsConstrainedOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        Status rc = isConstrainedModeAvailable(staticMeta);
        if (Status::OPERATION_NOT_SUPPORTED == rc) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }
        ASSERT_EQ(Status::OK, rc);

        AvailableStream hfrStream;
        rc = pickConstrainedModeSize(staticMeta, hfrStream);
        ASSERT_EQ(Status::OK, rc);

        int32_t streamId = 0;
        uint32_t streamConfigCounter = 0;
        Stream stream = {streamId,
                         StreamType::OUTPUT,
                         hfrStream.width,
                         hfrStream.height,
                         static_cast<PixelFormat>(hfrStream.format),
                         static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                         Dataspace::UNKNOWN,
                         StreamRotation::ROTATION_0,
                         std::string(),
                         /*bufferSize*/ 0,
                         /*groupId*/ -1,
                         {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                         RequestAvailableDynamicRangeProfilesMap::
                                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        std::vector<Stream> streams = {stream};
        StreamConfiguration config;
        createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
                                  &config);

        verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
                                /*expectStreamCombQuery*/ false);

        config.streamConfigCounter = streamConfigCounter++;
        std::vector<HalStream> halConfigs;
        ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_TRUE(ret.isOk());
        ASSERT_EQ(1u, halConfigs.size());
        ASSERT_EQ(halConfigs[0].id, streamId);

        stream = {streamId++,
                  StreamType::OUTPUT,
                  static_cast<uint32_t>(0),
                  static_cast<uint32_t>(0),
                  static_cast<PixelFormat>(hfrStream.format),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  /*bufferSize*/ 0,
                  /*groupId*/ -1,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
                                  &config);

        config.streamConfigCounter = streamConfigCounter++;
        std::vector<HalStream> halConfig;
        ret = mSession->configureStreams(config, &halConfig);
        ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
                            ret.getServiceSpecificError() ||
                    static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError());

        stream = {streamId++,
                  StreamType::OUTPUT,
                  INT32_MAX,
                  INT32_MAX,
                  static_cast<PixelFormat>(hfrStream.format),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  /*bufferSize*/ 0,
                  /*groupId*/ -1,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
                                  &config);

        config.streamConfigCounter = streamConfigCounter++;
        halConfigs.clear();
        ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());

        stream = {streamId++,
                  StreamType::OUTPUT,
                  hfrStream.width,
                  hfrStream.height,
                  static_cast<PixelFormat>(UINT32_MAX),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  /*bufferSize*/ 0,
                  /*groupId*/ -1,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
                                  &config);

        config.streamConfigCounter = streamConfigCounter++;
        halConfigs.clear();
        ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());

        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// Verify that all supported video + snapshot stream combinations can
// be configured successfully.
TEST_P(CameraAidlTest, configureStreamsVideoStillOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputBlobStreams;
    std::vector<AvailableStream> outputVideoStreams;
    AvailableStream videoThreshold = {kMaxVideoWidth, kMaxVideoHeight,
                                      static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
    AvailableStream blobThreshold = {kMaxVideoWidth, kMaxVideoHeight,
                                     static_cast<int32_t>(PixelFormat::BLOB)};

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);

        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        // Check if camera support depth only
        if (isDepthOnly(staticMeta)) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        outputBlobStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold));
        ASSERT_NE(0u, outputBlobStreams.size());

        outputVideoStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputVideoStreams, &videoThreshold));
        ASSERT_NE(0u, outputVideoStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        uint32_t streamConfigCounter = 0;
        for (auto& blobIter : outputBlobStreams) {
            for (auto& videoIter : outputVideoStreams) {
                Stream videoStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        videoIter.width,
                        videoIter.height,
                        static_cast<PixelFormat>(videoIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                Stream blobStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        blobIter.width,
                        blobIter.height,
                        static_cast<PixelFormat>(blobIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_CPU_READ),
                        Dataspace::JFIF,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                std::vector<Stream> streams = {videoStream, blobStream};
                StreamConfiguration config;

                createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                          jpegBufferSize);
                verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
                                        /*expectStreamCombQuery*/ false);

                config.streamConfigCounter = streamConfigCounter++;
                std::vector<HalStream> halConfigs;
                ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(2u, halConfigs.size());
            }
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

1417// Generate and verify a camera capture request
1418TEST_P(CameraAidlTest, processCaptureRequestPreview) {
1419 // TODO(b/220897574): Failing with BUFFER_ERROR
1420 processCaptureRequestInternal(GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, RequestTemplate::PREVIEW,
1421 false /*secureOnlyCameras*/);
1422}
1423
1424// Generate and verify a secure camera capture request
1425TEST_P(CameraAidlTest, processSecureCaptureRequest) {
1426 processCaptureRequestInternal(GRALLOC1_PRODUCER_USAGE_PROTECTED, RequestTemplate::STILL_CAPTURE,
1427 true /*secureOnlyCameras*/);
1428}
1429
1430TEST_P(CameraAidlTest, processCaptureRequestPreviewStabilization) {
1431 std::unordered_map<std::string, nsecs_t> cameraDeviceToTimeLag;
1432 processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ false,
1433 cameraDeviceToTimeLag);
1434 processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ true,
1435 cameraDeviceToTimeLag);
1436}
1437
1438// Generate and verify a multi-camera capture request
1439TEST_P(CameraAidlTest, processMultiCaptureRequestPreview) {
1440 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1441 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
1442 static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
1443 int64_t bufferId = 1;
1444 uint32_t frameNumber = 1;
1445 std::vector<uint8_t> settings;
1446 std::vector<uint8_t> emptySettings;
1447 std::string invalidPhysicalId = "-1";
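    // "-1" is deliberately not a valid physical camera id; it is used at the end of the loop to verify that invalid ids are rejected.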
1448
1449 for (const auto& name : cameraDeviceNames) {
1450 std::string version, deviceId;
1451        ALOGI("processMultiCaptureRequestPreview: Test device %s", name.c_str());
1452        ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1453 CameraMetadata metadata;
1454
1455 std::shared_ptr<ICameraDevice> unusedDevice;
1456 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &metadata /*out*/,
1457 &unusedDevice /*out*/);
1458
1459 camera_metadata_t* staticMeta =
1460 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
1461 Status rc = isLogicalMultiCamera(staticMeta);
1462 if (Status::OPERATION_NOT_SUPPORTED == rc) {
1463 ndk::ScopedAStatus ret = mSession->close();
1464 mSession = nullptr;
1465 ASSERT_TRUE(ret.isOk());
1466 continue;
1467 }
1468        ASSERT_EQ(Status::OK, rc);
1469
1470 std::unordered_set<std::string> physicalIds;
1471 rc = getPhysicalCameraIds(staticMeta, &physicalIds);
1472 ASSERT_TRUE(Status::OK == rc);
1473 ASSERT_TRUE(physicalIds.size() > 1);
1474
1475 std::unordered_set<int32_t> physicalRequestKeyIDs;
1476 rc = getSupportedKeys(staticMeta, ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS,
1477 &physicalRequestKeyIDs);
1478 ASSERT_TRUE(Status::OK == rc);
1479 if (physicalRequestKeyIDs.empty()) {
1480 ndk::ScopedAStatus ret = mSession->close();
1481 mSession = nullptr;
1482 ASSERT_TRUE(ret.isOk());
1483 // The logical camera doesn't support any individual physical requests.
1484 continue;
1485 }
1486
1487 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultPreviewSettings;
1488 android::hardware::camera::common::V1_0::helper::CameraMetadata filteredSettings;
1489 constructFilteredSettings(mSession, physicalRequestKeyIDs, RequestTemplate::PREVIEW,
1490 &defaultPreviewSettings, &filteredSettings);
1491 if (filteredSettings.isEmpty()) {
1492 // No physical device settings in default request.
1493 ndk::ScopedAStatus ret = mSession->close();
1494 mSession = nullptr;
1495 ASSERT_TRUE(ret.isOk());
1496 continue;
1497 }
1498
1499 const camera_metadata_t* settingsBuffer = defaultPreviewSettings.getAndLock();
1500 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1501 settings.assign(rawSettingsBuffer,
1502 rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1503 CameraMetadata settingsMetadata = {settings};
1504 overrideRotateAndCrop(&settingsMetadata);
1505
1506 ndk::ScopedAStatus ret = mSession->close();
1507 mSession = nullptr;
1508 ASSERT_TRUE(ret.isOk());
1509
1510 // Leave only 2 physical devices in the id set.
1511 auto it = physicalIds.begin();
1512 std::string physicalDeviceId = *it;
1513 it++;
1514 physicalIds.erase(++it, physicalIds.end());
1515 ASSERT_EQ(physicalIds.size(), 2u);
1516
1517 std::vector<HalStream> halStreams;
1518 bool supportsPartialResults = false;
1519 bool useHalBufManager = false;
1520 int32_t partialResultCount = 0;
1521 Stream previewStream;
1522 std::shared_ptr<DeviceCb> cb;
1523
1524        configurePreviewStreams(
1525 name, mProvider, &previewThreshold, physicalIds, &mSession, &previewStream,
1526 &halStreams /*out*/, &supportsPartialResults /*out*/, &partialResultCount /*out*/,
1527 &useHalBufManager /*out*/, &cb /*out*/, 0 /*streamConfigCounter*/, true);
1528 if (mSession == nullptr) {
1529 // stream combination not supported by HAL, skip test for device
1530 continue;
1531 }
1532
1533 ::aidl::android::hardware::common::fmq::MQDescriptor<
1534 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1535 descriptor;
1536 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1537 ASSERT_TRUE(resultQueueRet.isOk());
1538 std::shared_ptr<ResultMetadataQueue> resultQueue =
1539 std::make_shared<ResultMetadataQueue>(descriptor);
1540 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1541            ALOGE("%s: HAL returned an empty result metadata fmq, not using it", __func__);
1542 resultQueue = nullptr;
1543 // Don't use the queue onwards.
1544 }
1545
1546 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1547 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1548 partialResultCount, physicalIds, resultQueue);
1549
1550 std::vector<CaptureRequest> requests(1);
1551 CaptureRequest& request = requests[0];
1552 request.frameNumber = frameNumber;
1553 request.fmqSettingsSize = 0;
1554        request.settings = settingsMetadata;
1555
1556 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1557
1558 std::vector<buffer_handle_t> graphicBuffers;
1559 graphicBuffers.reserve(halStreams.size());
1560 outputBuffers.resize(halStreams.size());
1561 size_t k = 0;
1562 for (const auto& halStream : halStreams) {
1563 buffer_handle_t buffer_handle;
1564 if (useHalBufManager) {
1565 outputBuffers[k] = {halStream.id, /*bufferId*/ 0, NativeHandle(),
1566 BufferStatus::OK, NativeHandle(), NativeHandle()};
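                // With the HAL buffer manager enabled, a bufferId of 0 and a null handle let the HAL supply the output buffer itself.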
1567 } else {
1568 allocateGraphicBuffer(previewStream.width, previewStream.height,
1569 android_convertGralloc1To0Usage(
1570 static_cast<uint64_t>(halStream.producerUsage),
1571 static_cast<uint64_t>(halStream.consumerUsage)),
1572 halStream.overrideFormat, &buffer_handle);
1573 graphicBuffers.push_back(buffer_handle);
1574 outputBuffers[k] = {
1575 halStream.id, bufferId, ::android::makeToAidl(buffer_handle),
1576 BufferStatus::OK, NativeHandle(), NativeHandle()};
1577 bufferId++;
1578 }
1579 k++;
1580 }
1581
1582 std::vector<PhysicalCameraSetting> camSettings(1);
1583 const camera_metadata_t* filteredSettingsBuffer = filteredSettings.getAndLock();
1584 uint8_t* rawFilteredSettingsBuffer = (uint8_t*)filteredSettingsBuffer;
1585 camSettings[0].settings = {std::vector(
1586 rawFilteredSettingsBuffer,
1587 rawFilteredSettingsBuffer + get_camera_metadata_size(filteredSettingsBuffer))};
1588 overrideRotateAndCrop(&camSettings[0].settings);
1589 camSettings[0].fmqSettingsSize = 0;
1590 camSettings[0].physicalCameraId = physicalDeviceId;
1591
1592 request.inputBuffer = {
1593 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
1594 request.physicalCameraSettings = camSettings;
1595
1596 {
1597 std::unique_lock<std::mutex> l(mLock);
1598 mInflightMap.clear();
1599 mInflightMap[frameNumber] = inflightReq;
1600 }
1601
1602 int32_t numRequestProcessed = 0;
1603 std::vector<BufferCache> cachesToRemove;
1604 ndk::ScopedAStatus returnStatus =
1605 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1606 ASSERT_TRUE(returnStatus.isOk());
1607 ASSERT_EQ(numRequestProcessed, 1u);
1608
1609 {
1610 std::unique_lock<std::mutex> l(mLock);
1611 while (!inflightReq->errorCodeValid &&
1612 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1613 auto timeout = std::chrono::system_clock::now() +
1614 std::chrono::seconds(kStreamBufferTimeoutSec);
1615 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1616 }
1617
1618 ASSERT_FALSE(inflightReq->errorCodeValid);
1619 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1620
1621 request.frameNumber++;
1622 // Empty settings should be supported after the first call
1623 // for repeating requests.
1624 request.settings.metadata.clear();
1625 request.physicalCameraSettings[0].settings.metadata.clear();
1626 // The buffer has been registered to HAL by bufferId, so per
1627 // API contract we should send a null handle for this buffer
1628 request.outputBuffers[0].buffer = NativeHandle();
1629 mInflightMap.clear();
1630 inflightReq = std::make_shared<InFlightRequest>(
1631 static_cast<ssize_t>(physicalIds.size()), false, supportsPartialResults,
1632 partialResultCount, physicalIds, resultQueue);
1633 mInflightMap[request.frameNumber] = inflightReq;
1634 }
1635
1636 returnStatus =
1637 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1638 ASSERT_TRUE(returnStatus.isOk());
1639 ASSERT_EQ(numRequestProcessed, 1u);
1640
1641 {
1642 std::unique_lock<std::mutex> l(mLock);
1643 while (!inflightReq->errorCodeValid &&
1644 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1645 auto timeout = std::chrono::system_clock::now() +
1646 std::chrono::seconds(kStreamBufferTimeoutSec);
1647 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1648 }
1649
1650 ASSERT_FALSE(inflightReq->errorCodeValid);
1651 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1652 }
1653
1654        // An invalid physical camera id should cause processCaptureRequest to fail
1655 frameNumber++;
1656 camSettings[0].physicalCameraId = invalidPhysicalId;
1657 camSettings[0].settings.metadata = settings;
1658
1659 request.physicalCameraSettings = camSettings; // Invalid camera settings
1660 returnStatus =
1661 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1662 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
1663 returnStatus.getServiceSpecificError());
1664
1665 defaultPreviewSettings.unlock(settingsBuffer);
1666 filteredSettings.unlock(filteredSettingsBuffer);
1667
1668 if (useHalBufManager) {
1669 std::vector<int32_t> streamIds(halStreams.size());
1670 for (size_t i = 0; i < streamIds.size(); i++) {
1671 streamIds[i] = halStreams[i].id;
1672 }
1673 verifyBuffersReturned(mSession, streamIds, cb);
1674 }
1675
1676 ret = mSession->close();
1677 mSession = nullptr;
1678 ASSERT_TRUE(ret.isOk());
1679 }
1680}
1681
1682// Generate and verify an ultra high resolution capture request
1683TEST_P(CameraAidlTest, processUltraHighResolutionRequest) {
1684 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1685 int64_t bufferId = 1;
1686 int32_t frameNumber = 1;
1687 CameraMetadata settings;
1688
1689 for (const auto& name : cameraDeviceNames) {
1690 std::string version, deviceId;
1691 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1692 CameraMetadata meta;
1693
1694 std::shared_ptr<ICameraDevice> unusedDevice;
1695 openEmptyDeviceSession(name, mProvider, &mSession, &meta, &unusedDevice);
1696 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1697 if (!isUltraHighResolution(staticMeta)) {
1698 ndk::ScopedAStatus ret = mSession->close();
1699 mSession = nullptr;
1700 ASSERT_TRUE(ret.isOk());
1701 continue;
1702 }
1703 CameraMetadata req;
1704 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
1705 ndk::ScopedAStatus ret =
1706 mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE, &req);
1707 ASSERT_TRUE(ret.isOk());
1708
1709 const camera_metadata_t* metadata =
1710 reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
1711 size_t expectedSize = req.metadata.size();
1712 int result = validate_camera_metadata_structure(metadata, &expectedSize);
1713 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
1714
1715 size_t entryCount = get_camera_metadata_entry_count(metadata);
1716 ASSERT_GT(entryCount, 0u);
1717 defaultSettings = metadata;
1718 uint8_t sensorPixelMode =
1719 static_cast<uint8_t>(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
1720 ASSERT_EQ(::android::OK,
1721 defaultSettings.update(ANDROID_SENSOR_PIXEL_MODE, &sensorPixelMode, 1));
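        // MAXIMUM_RESOLUTION makes the HAL process this capture at the sensor's full pixel array size.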
1722
1723 const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
1724 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1725 settings.metadata = std::vector(
1726 rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1727 overrideRotateAndCrop(&settings);
1728
1729 ret = mSession->close();
1730 mSession = nullptr;
1731 ASSERT_TRUE(ret.isOk());
1732
1733 std::vector<HalStream> halStreams;
1734 bool supportsPartialResults = false;
1735 bool useHalBufManager = false;
1736 int32_t partialResultCount = 0;
1737 Stream previewStream;
1738 std::shared_ptr<DeviceCb> cb;
1739
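        // Exercise both a YUV and a RAW output at the sensor's maximum resolution.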
1740 std::list<PixelFormat> pixelFormats = {PixelFormat::YCBCR_420_888, PixelFormat::RAW16};
1741 for (PixelFormat format : pixelFormats) {
1742            previewStream.usage =
1743 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1744 GRALLOC1_CONSUMER_USAGE_CPU_READ);
1745 previewStream.dataSpace = Dataspace::UNKNOWN;
1746            configureStreams(name, mProvider, format, &mSession, &previewStream, &halStreams,
1747 &supportsPartialResults, &partialResultCount, &useHalBufManager, &cb,
1748 0, /*maxResolution*/ true);
1749 ASSERT_NE(mSession, nullptr);
1750
1751 ::aidl::android::hardware::common::fmq::MQDescriptor<
1752 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1753 descriptor;
1754 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1755 ASSERT_TRUE(resultQueueRet.isOk());
1756
1757 std::shared_ptr<ResultMetadataQueue> resultQueue =
1758 std::make_shared<ResultMetadataQueue>(descriptor);
1759 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1760                ALOGE("%s: HAL returned an empty result metadata fmq, not using it", __func__);
1761 resultQueue = nullptr;
1762 // Don't use the queue onwards.
1763 }
1764
1765 std::vector<buffer_handle_t> graphicBuffers;
1766 graphicBuffers.reserve(halStreams.size());
1767 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1768 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1769 partialResultCount, std::unordered_set<std::string>(), resultQueue);
1770
1771 std::vector<CaptureRequest> requests(1);
1772 CaptureRequest& request = requests[0];
1773 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1774 outputBuffers.resize(halStreams.size());
1775
1776 size_t k = 0;
1777 for (const auto& halStream : halStreams) {
1778 buffer_handle_t buffer_handle;
1779 if (useHalBufManager) {
1780 outputBuffers[k] = {halStream.id, 0,
1781 NativeHandle(), BufferStatus::OK,
1782 NativeHandle(), NativeHandle()};
1783 } else {
1784 allocateGraphicBuffer(previewStream.width, previewStream.height,
1785 android_convertGralloc1To0Usage(
1786 static_cast<uint64_t>(halStream.producerUsage),
1787 static_cast<uint64_t>(halStream.consumerUsage)),
1788 halStream.overrideFormat, &buffer_handle);
1789 graphicBuffers.push_back(buffer_handle);
1790 outputBuffers[k] = {
1791 halStream.id, bufferId, ::android::makeToAidl(buffer_handle),
1792 BufferStatus::OK, NativeHandle(), NativeHandle()};
1793 bufferId++;
1794 }
1795 k++;
1796 }
1797
1798 request.inputBuffer = {
1799 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
1800 request.frameNumber = frameNumber;
1801 request.fmqSettingsSize = 0;
1802 request.settings = settings;
1803 request.inputWidth = 0;
1804 request.inputHeight = 0;
1805
1806 {
1807 std::unique_lock<std::mutex> l(mLock);
1808 mInflightMap.clear();
1809 mInflightMap[frameNumber] = inflightReq;
1810 }
1811
1812 int32_t numRequestProcessed = 0;
1813 std::vector<BufferCache> cachesToRemove;
1814 ndk::ScopedAStatus returnStatus =
1815 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1816 ASSERT_TRUE(returnStatus.isOk());
1817 ASSERT_EQ(numRequestProcessed, 1u);
1818
1819 {
1820 std::unique_lock<std::mutex> l(mLock);
1821 while (!inflightReq->errorCodeValid &&
1822 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1823 auto timeout = std::chrono::system_clock::now() +
1824 std::chrono::seconds(kStreamBufferTimeoutSec);
1825 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1826 }
1827
1828 ASSERT_FALSE(inflightReq->errorCodeValid);
1829 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1830 }
1831 if (useHalBufManager) {
1832 std::vector<int32_t> streamIds(halStreams.size());
1833 for (size_t i = 0; i < streamIds.size(); i++) {
1834 streamIds[i] = halStreams[i].id;
1835 }
1836 verifyBuffersReturned(mSession, streamIds, cb);
1837 }
1838
1839 ret = mSession->close();
1840 mSession = nullptr;
1841 ASSERT_TRUE(ret.isOk());
1842 }
1843 }
1844}
1845
1846// Generate and verify 10-bit dynamic range request
1847TEST_P(CameraAidlTest, process10BitDynamicRangeRequest) {
1848 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1849    CameraMetadata settings;
1850
1851 for (const auto& name : cameraDeviceNames) {
1852 std::string version, deviceId;
1853 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1854 CameraMetadata meta;
1855 std::shared_ptr<ICameraDevice> device;
1856 openEmptyDeviceSession(name, mProvider, &mSession, &meta, &device);
1857 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1858 if (!is10BitDynamicRangeCapable(staticMeta)) {
1859 ndk::ScopedAStatus ret = mSession->close();
1860 mSession = nullptr;
1861 ASSERT_TRUE(ret.isOk());
1862 continue;
1863 }
1864        std::vector<RequestAvailableDynamicRangeProfilesMap> profileList;
1865        get10BitDynamicRangeProfiles(staticMeta, &profileList);
1866 ASSERT_FALSE(profileList.empty());
1867
1868 CameraMetadata req;
1869 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
1870 ndk::ScopedAStatus ret =
1871                mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &req);
1872        ASSERT_TRUE(ret.isOk());
1873
1874 const camera_metadata_t* metadata =
1875 reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
1876 size_t expectedSize = req.metadata.size();
1877 int result = validate_camera_metadata_structure(metadata, &expectedSize);
1878 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
1879
1880 size_t entryCount = get_camera_metadata_entry_count(metadata);
1881 ASSERT_GT(entryCount, 0u);
1882 defaultSettings = metadata;
1883
1884 const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
1885 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1886 settings.metadata = std::vector(
1887 rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1888 overrideRotateAndCrop(&settings);
1889
1890 ret = mSession->close();
1891 mSession = nullptr;
1892 ASSERT_TRUE(ret.isOk());
1893
1894 std::vector<HalStream> halStreams;
1895 bool supportsPartialResults = false;
1896 bool useHalBufManager = false;
1897 int32_t partialResultCount = 0;
1898 Stream previewStream;
1899 std::shared_ptr<DeviceCb> cb;
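        // Run one capture sequence per advertised 10-bit dynamic range profile.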
1900 for (const auto& profile : profileList) {
1901            previewStream.usage =
1902 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1903 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
1904 previewStream.dataSpace = getDataspace(PixelFormat::IMPLEMENTATION_DEFINED);
1905            configureStreams(name, mProvider, PixelFormat::IMPLEMENTATION_DEFINED, &mSession,
1906 &previewStream, &halStreams, &supportsPartialResults,
1907 &partialResultCount, &useHalBufManager, &cb, 0,
1908 /*maxResolution*/ false, profile);
1909 ASSERT_NE(mSession, nullptr);
1910
1911 ::aidl::android::hardware::common::fmq::MQDescriptor<
1912 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1913 descriptor;
1914 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1915 ASSERT_TRUE(resultQueueRet.isOk());
1916
1917 std::shared_ptr<ResultMetadataQueue> resultQueue =
1918 std::make_shared<ResultMetadataQueue>(descriptor);
1919 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1920                ALOGE("%s: HAL returned an empty result metadata fmq, not using it", __func__);
1921 resultQueue = nullptr;
1922 // Don't use the queue onwards.
1923 }
1924
1925            mInflightMap.clear();
1926            // Submit enough requests back to back to fill the HAL's in-flight queue
1927            std::vector<CaptureRequest> requests(halStreams[0].maxBuffers);
1928
1929            for (int32_t requestId = 0; requestId < requests.size(); requestId++) {
1930                std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1931 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1932 partialResultCount, std::unordered_set<std::string>(), resultQueue);
1933
1934                CaptureRequest& request = requests[requestId];
1935                std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1936 outputBuffers.resize(halStreams.size());
1937
1938                size_t k = 0;
1939 inflightReq->mOutstandingBufferIds.resize(halStreams.size());
1940 std::vector<buffer_handle_t> graphicBuffers;
1941 graphicBuffers.reserve(halStreams.size());
1942
1943                auto bufferId = requestId + 1; // Buffer id value 0 is not valid
1944                for (const auto& halStream : halStreams) {
1945 buffer_handle_t buffer_handle;
1946 if (useHalBufManager) {
1947 outputBuffers[k] = {halStream.id, 0,
1948 NativeHandle(), BufferStatus::OK,
1949 NativeHandle(), NativeHandle()};
1950 } else {
1951 auto usage = android_convertGralloc1To0Usage(
1952 static_cast<uint64_t>(halStream.producerUsage),
1953 static_cast<uint64_t>(halStream.consumerUsage));
1954 allocateGraphicBuffer(previewStream.width, previewStream.height, usage,
1955 halStream.overrideFormat, &buffer_handle);
1956
1957 inflightReq->mOutstandingBufferIds[halStream.id][bufferId] = buffer_handle;
1958 graphicBuffers.push_back(buffer_handle);
1959 outputBuffers[k] = {halStream.id, bufferId,
1960 android::makeToAidl(buffer_handle), BufferStatus::OK, NativeHandle(),
1961 NativeHandle()};
1962                    }
1963                    k++;
1964                }
1965
1966                request.inputBuffer = {
1967 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
1968                request.frameNumber = bufferId;
1969                request.fmqSettingsSize = 0;
1970 request.settings = settings;
1971 request.inputWidth = 0;
1972 request.inputHeight = 0;
1973
1974                {
1975                    std::unique_lock<std::mutex> l(mLock);
1976                    mInflightMap[bufferId] = inflightReq;
1977                }
1978
1979            }
1980
1981 int32_t numRequestProcessed = 0;
1982 std::vector<BufferCache> cachesToRemove;
1983 ndk::ScopedAStatus returnStatus =
1984                    mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1985            ASSERT_TRUE(returnStatus.isOk());
1986            ASSERT_EQ(numRequestProcessed, requests.size());
1987
1988            returnStatus = mSession->repeatingRequestEnd(requests.size() - 1,
1989 std::vector<int32_t> {halStreams[0].id});
1990 ASSERT_TRUE(returnStatus.isOk());
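            // The repeatingRequestEnd() call above tells the HAL the last frame number of this burst
            // so it does not expect further repeating requests on the stream.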
1991
1992            // Frame numbers and buffer ids are kept consistent. A buffer id of 0 marks a
1993            // buffer that is not present/available, so both buffer ids and frame numbers
1994            // begin with 1.
1995 for (int32_t frameNumber = 1; frameNumber <= requests.size(); frameNumber++) {
1996                const auto& inflightReq = mInflightMap[frameNumber];
1997                std::unique_lock<std::mutex> l(mLock);
1998 while (!inflightReq->errorCodeValid &&
1999 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2000 auto timeout = std::chrono::system_clock::now() +
2001 std::chrono::seconds(kStreamBufferTimeoutSec);
2002 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2003 }
2004
2005 ASSERT_FALSE(inflightReq->errorCodeValid);
2006 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2007 verify10BitMetadata(mHandleImporter, *inflightReq, profile);
2008 }
2009
2010            if (useHalBufManager) {
2011 std::vector<int32_t> streamIds(halStreams.size());
2012 for (size_t i = 0; i < streamIds.size(); i++) {
2013 streamIds[i] = halStreams[i].id;
2014 }
2015 mSession->signalStreamFlush(streamIds, /*streamConfigCounter*/ 0);
2016 cb->waitForBuffersReturned();
2017 }
2018
2019 ret = mSession->close();
2020 mSession = nullptr;
2021 ASSERT_TRUE(ret.isOk());
2022 }
2023 }
2024}
2025
2026// Generate and verify a burst containing alternating sensor sensitivity values
2027TEST_P(CameraAidlTest, processCaptureRequestBurstISO) {
2028 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2029 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2030 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2031 int64_t bufferId = 1;
2032 int32_t frameNumber = 1;
2033 float isoTol = .03f;
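    // Allow up to 3% difference between the requested and reported sensitivity.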
2034 CameraMetadata settings;
2035
2036 for (const auto& name : cameraDeviceNames) {
2037 CameraMetadata meta;
2038 settings.metadata.clear();
2039 std::shared_ptr<ICameraDevice> unusedDevice;
2040 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2041 &unusedDevice /*out*/);
2042 camera_metadata_t* staticMetaBuffer =
2043 clone_camera_metadata(reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
2044 ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta(
2045 staticMetaBuffer);
2046
2047 camera_metadata_entry_t hwLevel = staticMeta.find(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL);
2048 ASSERT_TRUE(0 < hwLevel.count);
2049 if (ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED == hwLevel.data.u8[0] ||
2050 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL == hwLevel.data.u8[0]) {
2051 // Limited/External devices can skip this test
2052 ndk::ScopedAStatus ret = mSession->close();
2053 mSession = nullptr;
2054 ASSERT_TRUE(ret.isOk());
2055 continue;
2056 }
2057
2058 camera_metadata_entry_t isoRange = staticMeta.find(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE);
2059 ASSERT_EQ(isoRange.count, 2u);
2060
2061 ndk::ScopedAStatus ret = mSession->close();
2062 mSession = nullptr;
2063 ASSERT_TRUE(ret.isOk());
2064
2065 bool supportsPartialResults = false;
2066 bool useHalBufManager = false;
2067 int32_t partialResultCount = 0;
2068 Stream previewStream;
2069 std::vector<HalStream> halStreams;
2070 std::shared_ptr<DeviceCb> cb;
2071 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2072 &previewStream /*out*/, &halStreams /*out*/,
2073 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2074 &useHalBufManager /*out*/, &cb /*out*/);
2075
2076 ::aidl::android::hardware::common::fmq::MQDescriptor<
2077 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2078 descriptor;
2079 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2080 std::shared_ptr<ResultMetadataQueue> resultQueue =
2081 std::make_shared<ResultMetadataQueue>(descriptor);
2082 ASSERT_TRUE(resultQueueRet.isOk());
2083 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2084            ALOGE("%s: HAL returned an empty result metadata fmq, not using it", __func__);
2085 resultQueue = nullptr;
2086 // Don't use the queue onwards.
2087 }
2088
2089 ret = mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &settings);
2090 ASSERT_TRUE(ret.isOk());
2091
2092 ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
2093 std::vector<CaptureRequest> requests(kBurstFrameCount);
2094 std::vector<buffer_handle_t> buffers(kBurstFrameCount);
2095 std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount);
2096 std::vector<int32_t> isoValues(kBurstFrameCount);
2097 std::vector<CameraMetadata> requestSettings(kBurstFrameCount);
2098
2099 for (int32_t i = 0; i < kBurstFrameCount; i++) {
2100 std::unique_lock<std::mutex> l(mLock);
2101 CaptureRequest& request = requests[i];
2102 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2103 outputBuffers.resize(1);
2104 StreamBuffer& outputBuffer = outputBuffers[0];
2105
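            // Alternate between the minimum and maximum supported sensitivity on successive frames.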
2106 isoValues[i] = ((i % 2) == 0) ? isoRange.data.i32[0] : isoRange.data.i32[1];
2107 if (useHalBufManager) {
2108 outputBuffer = {halStreams[0].id, 0,
2109 NativeHandle(), BufferStatus::OK,
2110 NativeHandle(), NativeHandle()};
2111 } else {
2112 allocateGraphicBuffer(previewStream.width, previewStream.height,
2113 android_convertGralloc1To0Usage(
2114 static_cast<uint64_t>(halStreams[0].producerUsage),
2115 static_cast<uint64_t>(halStreams[0].consumerUsage)),
2116 halStreams[0].overrideFormat, &buffers[i]);
2117 outputBuffer = {halStreams[0].id, bufferId + i, ::android::makeToAidl(buffers[i]),
2118 BufferStatus::OK, NativeHandle(), NativeHandle()};
2119 }
2120
2121 requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));
2122
2123 // Disable all 3A routines
2124 uint8_t mode = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF);
2125 ASSERT_EQ(::android::OK, requestMeta.update(ANDROID_CONTROL_MODE, &mode, 1));
2126 ASSERT_EQ(::android::OK,
2127 requestMeta.update(ANDROID_SENSOR_SENSITIVITY, &isoValues[i], 1));
2128 camera_metadata_t* metaBuffer = requestMeta.release();
2129 uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2130 requestSettings[i].metadata = std::vector(
2131 rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2132 overrideRotateAndCrop(&(requestSettings[i]));
2133
2134 request.frameNumber = frameNumber + i;
2135 request.fmqSettingsSize = 0;
2136 request.settings = requestSettings[i];
2137 request.inputBuffer = {
2138 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2139
2140 inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2141 partialResultCount, resultQueue);
2142 mInflightMap[frameNumber + i] = inflightReqs[i];
2143 }
2144
2145 int32_t numRequestProcessed = 0;
2146 std::vector<BufferCache> cachesToRemove;
2147
2148 ndk::ScopedAStatus returnStatus =
2149 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2150 ASSERT_TRUE(returnStatus.isOk());
2151 ASSERT_EQ(numRequestProcessed, kBurstFrameCount);
2152
2153 for (size_t i = 0; i < kBurstFrameCount; i++) {
2154 std::unique_lock<std::mutex> l(mLock);
2155 while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
2156 (!inflightReqs[i]->haveResultMetadata))) {
2157 auto timeout = std::chrono::system_clock::now() +
2158 std::chrono::seconds(kStreamBufferTimeoutSec);
2159 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2160 }
2161
2162 ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
2163 ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
2164 ASSERT_EQ(previewStream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
2165 ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
2166 ASSERT_TRUE(inflightReqs[i]->collectedResult.exists(ANDROID_SENSOR_SENSITIVITY));
2167 camera_metadata_entry_t isoResult =
2168 inflightReqs[i]->collectedResult.find(ANDROID_SENSOR_SENSITIVITY);
2169 ASSERT_TRUE(std::abs(isoResult.data.i32[0] - isoValues[i]) <=
2170 std::round(isoValues[i] * isoTol));
2171 }
2172
2173 if (useHalBufManager) {
2174 verifyBuffersReturned(mSession, previewStream.id, cb);
2175 }
2176 ret = mSession->close();
2177 mSession = nullptr;
2178 ASSERT_TRUE(ret.isOk());
2179 }
2180}
2181
2182// Test whether an incorrect capture request with missing settings will
2183// be reported correctly.
2184TEST_P(CameraAidlTest, processCaptureRequestInvalidSinglePreview) {
2185 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2186 std::vector<AvailableStream> outputPreviewStreams;
2187 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2188 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2189 int64_t bufferId = 1;
2190 int32_t frameNumber = 1;
2191 CameraMetadata settings;
2192
2193 for (const auto& name : cameraDeviceNames) {
2194 Stream previewStream;
2195 std::vector<HalStream> halStreams;
2196 std::shared_ptr<DeviceCb> cb;
2197 bool supportsPartialResults = false;
2198 bool useHalBufManager = false;
2199 int32_t partialResultCount = 0;
2200 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2201 &previewStream /*out*/, &halStreams /*out*/,
2202 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2203 &useHalBufManager /*out*/, &cb /*out*/);
2204 ASSERT_NE(mSession, nullptr);
2205 ASSERT_FALSE(halStreams.empty());
2206
2207 buffer_handle_t buffer_handle = nullptr;
2208
2209 if (useHalBufManager) {
2210 bufferId = 0;
2211 } else {
2212 allocateGraphicBuffer(previewStream.width, previewStream.height,
2213 android_convertGralloc1To0Usage(
2214 static_cast<uint64_t>(halStreams[0].producerUsage),
2215 static_cast<uint64_t>(halStreams[0].consumerUsage)),
2216 halStreams[0].overrideFormat, &buffer_handle);
2217 }
2218
2219 std::vector<CaptureRequest> requests(1);
2220 CaptureRequest& request = requests[0];
2221 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2222 outputBuffers.resize(1);
2223 StreamBuffer& outputBuffer = outputBuffers[0];
2224
2225 outputBuffer = {
2226 halStreams[0].id,
2227 bufferId,
2228 buffer_handle == nullptr ? NativeHandle() : ::android::makeToAidl(buffer_handle),
2229 BufferStatus::OK,
2230 NativeHandle(),
2231 NativeHandle()};
2232
2233 request.inputBuffer = {
2234 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2235 request.frameNumber = frameNumber;
2236 request.fmqSettingsSize = 0;
2237 request.settings = settings;
2238
2239        // Settings were not correctly initialized; the request should fail here
2240 int32_t numRequestProcessed = 0;
2241 std::vector<BufferCache> cachesToRemove;
2242 ndk::ScopedAStatus ret =
2243 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2244 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
2245 ASSERT_EQ(numRequestProcessed, 0u);
2246
2247 ret = mSession->close();
2248 mSession = nullptr;
2249 ASSERT_TRUE(ret.isOk());
2250 }
2251}
2252
2253// Verify camera offline session behavior
2254TEST_P(CameraAidlTest, switchToOffline) {
2255 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2256 AvailableStream threshold = {kMaxStillWidth, kMaxStillHeight,
2257 static_cast<int32_t>(PixelFormat::BLOB)};
2258 int64_t bufferId = 1;
2259 int32_t frameNumber = 1;
2260 CameraMetadata settings;
2261
2262 for (const auto& name : cameraDeviceNames) {
2263 CameraMetadata meta;
2264 {
2265 std::shared_ptr<ICameraDevice> unusedDevice;
2266 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2267 &unusedDevice);
2268 camera_metadata_t* staticMetaBuffer = clone_camera_metadata(
2269 reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
2270 ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta(
2271 staticMetaBuffer);
2272
2273 if (isOfflineSessionSupported(staticMetaBuffer) != Status::OK) {
2274 ndk::ScopedAStatus ret = mSession->close();
2275 mSession = nullptr;
2276 ASSERT_TRUE(ret.isOk());
2277 continue;
2278 }
2279 ndk::ScopedAStatus ret = mSession->close();
2280 mSession = nullptr;
2281 ASSERT_TRUE(ret.isOk());
2282 }
2283
2284 bool supportsPartialResults = false;
2285 int32_t partialResultCount = 0;
2286 Stream stream;
2287 std::vector<HalStream> halStreams;
2288 std::shared_ptr<DeviceCb> cb;
2289 int32_t jpegBufferSize;
2290 bool useHalBufManager;
2291 configureOfflineStillStream(name, mProvider, &threshold, &mSession /*out*/, &stream /*out*/,
2292 &halStreams /*out*/, &supportsPartialResults /*out*/,
2293 &partialResultCount /*out*/, &cb /*out*/,
2294 &jpegBufferSize /*out*/, &useHalBufManager /*out*/);
2295
2296 auto ret = mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE,
2297 &settings);
2298 ASSERT_TRUE(ret.isOk());
2299
2300 ::aidl::android::hardware::common::fmq::MQDescriptor<
2301 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2302 descriptor;
2303
2304 ndk::ScopedAStatus resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2305 ASSERT_TRUE(resultQueueRet.isOk());
2306 std::shared_ptr<ResultMetadataQueue> resultQueue =
2307 std::make_shared<ResultMetadataQueue>(descriptor);
2308 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2309            ALOGE("%s: HAL returned an empty result metadata fmq, not using it", __func__);
2310 resultQueue = nullptr;
2311 // Don't use the queue onwards.
2312 }
2313
2314 ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
2315
2316 std::vector<buffer_handle_t> buffers(kBurstFrameCount);
2317 std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount);
2318 std::vector<CameraMetadata> requestSettings(kBurstFrameCount);
2319
2320 std::vector<CaptureRequest> requests(kBurstFrameCount);
2321
2322 HalStream halStream = halStreams[0];
2323 for (uint32_t i = 0; i < kBurstFrameCount; i++) {
2324 CaptureRequest& request = requests[i];
2325 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2326 outputBuffers.resize(1);
2327 StreamBuffer& outputBuffer = outputBuffers[0];
2328
2329 std::unique_lock<std::mutex> l(mLock);
2330 if (useHalBufManager) {
2331 outputBuffer = {halStream.id, 0, NativeHandle(), BufferStatus::OK, NativeHandle(),
2332 NativeHandle()};
2333 } else {
2334 // jpeg buffer (w,h) = (blobLen, 1)
2335 allocateGraphicBuffer(jpegBufferSize, /*height*/ 1,
2336 android_convertGralloc1To0Usage(
2337 static_cast<uint64_t>(halStream.producerUsage),
2338 static_cast<uint64_t>(halStream.consumerUsage)),
2339 halStream.overrideFormat, &buffers[i]);
2340 outputBuffer = {halStream.id, bufferId + i, ::android::makeToAidl(buffers[i]),
2341 BufferStatus::OK, NativeHandle(), NativeHandle()};
2342 }
2343
2344 requestMeta.clear();
2345 requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));
2346
2347 camera_metadata_t* metaBuffer = requestMeta.release();
2348 uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2349 requestSettings[i].metadata = std::vector(
2350 rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2351 overrideRotateAndCrop(&requestSettings[i]);
2352
2353 request.frameNumber = frameNumber + i;
2354 request.fmqSettingsSize = 0;
2355 request.settings = requestSettings[i];
2356 request.inputBuffer = {/*streamId*/ -1,
2357 /*bufferId*/ 0, NativeHandle(),
2358 BufferStatus::ERROR, NativeHandle(),
2359 NativeHandle()};
2360
2361 inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2362 partialResultCount, resultQueue);
2363 mInflightMap[frameNumber + i] = inflightReqs[i];
2364 }
2365
2366 int32_t numRequestProcessed = 0;
2367 std::vector<BufferCache> cachesToRemove;
2368
2369 ndk::ScopedAStatus returnStatus =
2370 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2371 ASSERT_TRUE(returnStatus.isOk());
2372 ASSERT_EQ(numRequestProcessed, kBurstFrameCount);
2373
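        // Move only the still capture (JPEG) stream into the offline session.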
2374 std::vector<int32_t> offlineStreamIds = {halStream.id};
2375 CameraOfflineSessionInfo offlineSessionInfo;
2376 std::shared_ptr<ICameraOfflineSession> offlineSession;
2377 returnStatus =
2378 mSession->switchToOffline(offlineStreamIds, &offlineSessionInfo, &offlineSession);
2379
2380 if (!halStreams[0].supportOffline) {
2381 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
2382 returnStatus.getServiceSpecificError());
2383 ret = mSession->close();
2384 mSession = nullptr;
2385 ASSERT_TRUE(ret.isOk());
2386 continue;
2387 }
2388
2389 ASSERT_TRUE(returnStatus.isOk());
2390        // The HAL might be unable to find any requests that qualify for offline mode.
2391 if (offlineSession == nullptr) {
2392 ret = mSession->close();
2393 mSession = nullptr;
2394 ASSERT_TRUE(ret.isOk());
2395 continue;
2396 }
2397
2398 ASSERT_EQ(offlineSessionInfo.offlineStreams.size(), 1u);
2399 ASSERT_EQ(offlineSessionInfo.offlineStreams[0].id, halStream.id);
2400 ASSERT_NE(offlineSessionInfo.offlineRequests.size(), 0u);
2401
2402        // Close the device session to make sure the offline session does not rely on it
2403 ret = mSession->close();
2404 mSession = nullptr;
2405 ASSERT_TRUE(ret.isOk());
2406
2407 ::aidl::android::hardware::common::fmq::MQDescriptor<
2408 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2409 offlineResultDescriptor;
2410
2411 auto offlineResultQueueRet =
2412 offlineSession->getCaptureResultMetadataQueue(&offlineResultDescriptor);
2413 std::shared_ptr<ResultMetadataQueue> offlineResultQueue =
2414 std::make_shared<ResultMetadataQueue>(descriptor);
2415 if (!offlineResultQueue->isValid() || offlineResultQueue->availableToWrite() <= 0) {
2416            ALOGE("%s: offline session returned an empty result metadata fmq, not using it", __func__);
2417 offlineResultQueue = nullptr;
2418 // Don't use the queue onwards.
2419 }
2420 ASSERT_TRUE(offlineResultQueueRet.isOk());
2421
2422 updateInflightResultQueue(offlineResultQueue);
2423
2424 ret = offlineSession->setCallback(cb);
2425 ASSERT_TRUE(ret.isOk());
2426
2427 for (size_t i = 0; i < kBurstFrameCount; i++) {
2428 std::unique_lock<std::mutex> l(mLock);
2429 while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
2430 (!inflightReqs[i]->haveResultMetadata))) {
2431 auto timeout = std::chrono::system_clock::now() +
2432 std::chrono::seconds(kStreamBufferTimeoutSec);
2433 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2434 }
2435
2436 ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
2437 ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
2438 ASSERT_EQ(stream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
2439 ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
2440 }
2441
2442 ret = offlineSession->close();
2443 ASSERT_TRUE(ret.isOk());
2444 }
2445}
2446
2447// Check whether an invalid capture request with missing output buffers
2448// will be reported correctly.
2449TEST_P(CameraAidlTest, processCaptureRequestInvalidBuffer) {
2450 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2451 std::vector<AvailableStream> outputBlobStreams;
2452 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2453 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2454 int32_t frameNumber = 1;
2455 CameraMetadata settings;
2456
2457 for (const auto& name : cameraDeviceNames) {
2458 Stream previewStream;
2459 std::vector<HalStream> halStreams;
2460 std::shared_ptr<DeviceCb> cb;
2461 bool supportsPartialResults = false;
2462 bool useHalBufManager = false;
2463 int32_t partialResultCount = 0;
2464 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2465 &previewStream /*out*/, &halStreams /*out*/,
2466 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2467 &useHalBufManager /*out*/, &cb /*out*/);
2468
2469 RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
2470 ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings);
2471 ASSERT_TRUE(ret.isOk());
2472 overrideRotateAndCrop(&settings);
2473
2474 std::vector<CaptureRequest> requests(1);
2475 CaptureRequest& request = requests[0];
2476 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2477 outputBuffers.resize(1);
2478 // Empty output buffer
2479 outputBuffers[0] = {
2480 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2481
2482 request.inputBuffer = {
2483 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2484 request.frameNumber = frameNumber;
2485 request.fmqSettingsSize = 0;
2486 request.settings = settings;
2487
2488        // Output buffers are missing; the request should fail here
2489 int32_t numRequestProcessed = 0;
2490 std::vector<BufferCache> cachesToRemove;
2491 ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2492 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
2493 ASSERT_EQ(numRequestProcessed, 0u);
2494
2495 ret = mSession->close();
2496 mSession = nullptr;
2497 ASSERT_TRUE(ret.isOk());
2498 }
2499}
2500
2501// Generate, trigger and flush a preview request
2502TEST_P(CameraAidlTest, flushPreviewRequest) {
2503 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2504 std::vector<AvailableStream> outputPreviewStreams;
2505 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2506 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2507 int64_t bufferId = 1;
2508 int32_t frameNumber = 1;
2509 CameraMetadata settings;
2510
2511 for (const auto& name : cameraDeviceNames) {
2512 Stream previewStream;
2513 std::vector<HalStream> halStreams;
2514 std::shared_ptr<DeviceCb> cb;
2515 bool supportsPartialResults = false;
2516 bool useHalBufManager = false;
2517 int32_t partialResultCount = 0;
2518
2519 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2520 &previewStream /*out*/, &halStreams /*out*/,
2521 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2522 &useHalBufManager /*out*/, &cb /*out*/);
2523
2524 ASSERT_NE(mSession, nullptr);
2525 ASSERT_NE(cb, nullptr);
2526 ASSERT_FALSE(halStreams.empty());
2527
2528 ::aidl::android::hardware::common::fmq::MQDescriptor<
2529 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2530 descriptor;
2531
2532 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2533 std::shared_ptr<ResultMetadataQueue> resultQueue =
2534 std::make_shared<ResultMetadataQueue>(descriptor);
2535 ASSERT_TRUE(resultQueueRet.isOk());
2536 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2537            ALOGE("%s: HAL returned an empty result metadata fmq, not using it", __func__);
2538 resultQueue = nullptr;
2539 // Don't use the queue onwards.
2540 }
2541
2542 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
2543 1, false, supportsPartialResults, partialResultCount, resultQueue);
2544 RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
2545
2546 ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings);
2547 ASSERT_TRUE(ret.isOk());
2548 overrideRotateAndCrop(&settings);
2549
2550 buffer_handle_t buffer_handle;
2551 std::vector<CaptureRequest> requests(1);
2552 CaptureRequest& request = requests[0];
2553 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2554 outputBuffers.resize(1);
2555 StreamBuffer& outputBuffer = outputBuffers[0];
2556 if (useHalBufManager) {
2557 bufferId = 0;
2558 outputBuffer = {halStreams[0].id, bufferId, NativeHandle(),
2559 BufferStatus::OK, NativeHandle(), NativeHandle()};
2560 } else {
2561 allocateGraphicBuffer(previewStream.width, previewStream.height,
2562 android_convertGralloc1To0Usage(
2563 static_cast<uint64_t>(halStreams[0].producerUsage),
2564 static_cast<uint64_t>(halStreams[0].consumerUsage)),
2565 halStreams[0].overrideFormat, &buffer_handle);
2566 outputBuffer = {halStreams[0].id, bufferId, ::android::makeToAidl(buffer_handle),
2567 BufferStatus::OK, NativeHandle(), NativeHandle()};
2568 }
2569
2570 request.frameNumber = frameNumber;
2571 request.fmqSettingsSize = 0;
2572 request.settings = settings;
2573 request.inputBuffer = {
2574 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2575
2576 {
2577 std::unique_lock<std::mutex> l(mLock);
2578 mInflightMap.clear();
2579 mInflightMap[frameNumber] = inflightReq;
2580 }
2581
2582 int32_t numRequestProcessed = 0;
2583 std::vector<BufferCache> cachesToRemove;
2584 ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2585 ASSERT_TRUE(ret.isOk());
2586 ASSERT_EQ(numRequestProcessed, 1u);
2587
2588 // Flush before waiting for request to complete.
2589 ndk::ScopedAStatus returnStatus = mSession->flush();
2590 ASSERT_TRUE(returnStatus.isOk());
2591
2592 {
2593 std::unique_lock<std::mutex> l(mLock);
2594 while (!inflightReq->errorCodeValid &&
2595 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2596 auto timeout = std::chrono::system_clock::now() +
2597 std::chrono::seconds(kStreamBufferTimeoutSec);
2598 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2599 }
2600
2601 if (!inflightReq->errorCodeValid) {
2602 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2603 ASSERT_EQ(previewStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId);
2604 } else {
2605 switch (inflightReq->errorCode) {
2606 case ErrorCode::ERROR_REQUEST:
2607 case ErrorCode::ERROR_RESULT:
2608 case ErrorCode::ERROR_BUFFER:
2609 // Expected
2610 break;
2611 case ErrorCode::ERROR_DEVICE:
2612 default:
2613 FAIL() << "Unexpected error:"
2614 << static_cast<uint32_t>(inflightReq->errorCode);
2615 }
2616 }
2617 }
2618
2619 if (useHalBufManager) {
2620 verifyBuffersReturned(mSession, previewStream.id, cb);
2621 }
2622
2623 ret = mSession->close();
2624 mSession = nullptr;
2625 ASSERT_TRUE(ret.isOk());
2626 }
2627}
2628
2629// Verify that camera flushes correctly without any pending requests.
2630TEST_P(CameraAidlTest, flushEmpty) {
2631 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2632 std::vector<AvailableStream> outputPreviewStreams;
2633 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2634 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2635
2636 for (const auto& name : cameraDeviceNames) {
2637 Stream previewStream;
2638 std::vector<HalStream> halStreams;
2639 std::shared_ptr<DeviceCb> cb;
2640 bool supportsPartialResults = false;
2641 bool useHalBufManager = false;
2642
2643 int32_t partialResultCount = 0;
2644 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2645 &previewStream /*out*/, &halStreams /*out*/,
2646 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2647 &useHalBufManager /*out*/, &cb /*out*/);
2648
2649 ndk::ScopedAStatus returnStatus = mSession->flush();
2650 ASSERT_TRUE(returnStatus.isOk());
2651
2652 {
2653 std::unique_lock<std::mutex> l(mLock);
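            // With no requests in flight, no callbacks should arrive; the wait is expected to time out.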
2654 auto timeout = std::chrono::system_clock::now() +
2655 std::chrono::milliseconds(kEmptyFlushTimeoutMSec);
2656 ASSERT_EQ(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2657 }
2658
2659 ndk::ScopedAStatus ret = mSession->close();
2660 mSession = nullptr;
2661 ASSERT_TRUE(ret.isOk());
2662 }
2663}
2664
2665// Test camera provider notify method
2666TEST_P(CameraAidlTest, providerDeviceStateNotification) {
2667 notifyDeviceState(ICameraProvider::DEVICE_STATE_BACK_COVERED);
2668 notifyDeviceState(ICameraProvider::DEVICE_STATE_NORMAL);
2669}
2670
2671// Verify that all supported stream formats and sizes can be configured
2672// successfully for injection camera.
2673TEST_P(CameraAidlTest, configureInjectionStreamsAvailableOutputs) {
2674 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2675 std::vector<AvailableStream> outputStreams;
2676
2677 for (const auto& name : cameraDeviceNames) {
2678 CameraMetadata metadata;
2679
2680 std::shared_ptr<ICameraInjectionSession> injectionSession;
2681 std::shared_ptr<ICameraDevice> unusedDevice;
2682 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2683 &unusedDevice /*out*/);
2684 if (injectionSession == nullptr) {
2685 continue;
2686 }
2687
2688 camera_metadata_t* staticMetaBuffer =
2689 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2690 CameraMetadata chars;
2691 chars.metadata = metadata.metadata;
2692
2693 outputStreams.clear();
2694 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams));
2695 ASSERT_NE(0u, outputStreams.size());
2696
2697 int32_t jpegBufferSize = 0;
2698 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize));
2699 ASSERT_NE(0u, jpegBufferSize);
2700
2701 int32_t streamId = 0;
2702 int32_t streamConfigCounter = 0;
2703 for (auto& it : outputStreams) {
2704 Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format));
2705 Stream stream = {streamId,
2706 StreamType::OUTPUT,
2707 it.width,
2708 it.height,
2709 static_cast<PixelFormat>(it.format),
2710 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2711 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2712 dataspace,
2713 StreamRotation::ROTATION_0,
2714 std::string(),
2715 jpegBufferSize,
2716 0,
2717                             {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2718 RequestAvailableDynamicRangeProfilesMap::
2719 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2720
2721 std::vector<Stream> streams = {stream};
2722 StreamConfiguration config;
2723 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2724 jpegBufferSize);
2725
2726 config.streamConfigCounter = streamConfigCounter++;
2727 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
2728 ASSERT_TRUE(s.isOk());
2729 streamId++;
2730 }
2731
2732 std::shared_ptr<ICameraDeviceSession> session;
2733 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2734 ASSERT_TRUE(ret.isOk());
2735 ASSERT_NE(session, nullptr);
2736 ret = session->close();
2737 ASSERT_TRUE(ret.isOk());
2738 }
2739}
2740
// Check for correct handling of invalid/incorrect configuration parameters for the injection
// camera.
TEST_P(CameraAidlTest, configureInjectionStreamsInvalidOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputStreams;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata metadata;
        std::shared_ptr<ICameraInjectionSession> injectionSession;
        std::shared_ptr<ICameraDevice> unusedDevice;
        openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
                                  &unusedDevice);
        if (injectionSession == nullptr) {
            continue;
        }

        camera_metadata_t* staticMetaBuffer =
                reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
        std::shared_ptr<ICameraDeviceSession> session;
        ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(session, nullptr);

        CameraMetadata chars;
        chars.metadata = metadata.metadata;

        outputStreams.clear();
        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams));
        ASSERT_NE(0u, outputStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
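        // A stream with a 0x0 resolution must be rejected (ILLEGAL_ARGUMENT or INTERNAL_ERROR).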
        Stream stream = {streamId++,
                         StreamType::OUTPUT,
                         0,
                         0,
                         static_cast<PixelFormat>(outputStreams[0].format),
                         static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                         Dataspace::UNKNOWN,
                         StreamRotation::ROTATION_0,
                         std::string(),
                         jpegBufferSize,
                         0,
                         {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                         RequestAvailableDynamicRangeProfilesMap::
                                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

        int32_t streamConfigCounter = 0;
        std::vector<Stream> streams = {stream};
        StreamConfiguration config;
        createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                  jpegBufferSize);

        config.streamConfigCounter = streamConfigCounter++;
        ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
        ASSERT_TRUE(
                (static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) == s.getServiceSpecificError()) ||
                (static_cast<int32_t>(Status::INTERNAL_ERROR) == s.getServiceSpecificError()));

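        // A stream with an unreasonably large (INT32_MAX x INT32_MAX) resolution must be rejected.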
        stream = {streamId++,
                  StreamType::OUTPUT,
                  INT32_MAX,
                  INT32_MAX,
                  static_cast<PixelFormat>(outputStreams[0].format),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  jpegBufferSize,
                  0,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                  jpegBufferSize);
        config.streamConfigCounter = streamConfigCounter++;
        s = injectionSession->configureInjectionStreams(config, chars);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());

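        // For every supported output size, an invalid pixel format and an invalid stream
        // rotation must each be rejected with ILLEGAL_ARGUMENT.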
        for (auto& it : outputStreams) {
            stream = {streamId++,
                      StreamType::OUTPUT,
                      it.width,
                      it.height,
                      static_cast<PixelFormat>(INT32_MAX),
                      static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                              GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                      Dataspace::UNKNOWN,
                      StreamRotation::ROTATION_0,
                      std::string(),
                      jpegBufferSize,
                      0,
                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                      RequestAvailableDynamicRangeProfilesMap::
                              ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
            streams[0] = stream;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);
            config.streamConfigCounter = streamConfigCounter++;
            s = injectionSession->configureInjectionStreams(config, chars);
            ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());

            stream = {streamId++,
                      StreamType::OUTPUT,
                      it.width,
                      it.height,
                      static_cast<PixelFormat>(it.format),
                      static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                              GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                      Dataspace::UNKNOWN,
                      static_cast<StreamRotation>(INT32_MAX),
                      std::string(),
                      jpegBufferSize,
                      0,
                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                      RequestAvailableDynamicRangeProfilesMap::
                              ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
            streams[0] = stream;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);
            config.streamConfigCounter = streamConfigCounter++;
            s = injectionSession->configureInjectionStreams(config, chars);
            ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
        }

        ret = session->close();
        ASSERT_TRUE(ret.isOk());
    }
}

// Check whether session parameters are supported for the injection camera. If the HAL supports
// them, try to configure a preview stream using those parameters.
TEST_P(CameraAidlTest, configureInjectionStreamsWithSessionParameters) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputPreviewStreams;
    AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
                                        static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata metadata;
        std::shared_ptr<ICameraInjectionSession> injectionSession;
        std::shared_ptr<ICameraDevice> unusedDevice;
        openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
                                  &unusedDevice /*out*/);
        if (injectionSession == nullptr) {
            continue;
        }

        std::shared_ptr<ICameraDeviceSession> session;
        ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(session, nullptr);

        camera_metadata_t* staticMetaBuffer =
                reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
        CameraMetadata chars;
        chars.metadata = metadata.metadata;

        std::unordered_set<int32_t> availableSessionKeys;
        Status rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
                                     &availableSessionKeys);
        ASSERT_EQ(Status::OK, rc);
        if (availableSessionKeys.empty()) {
            ret = session->close();
            ASSERT_TRUE(ret.isOk());
            continue;
        }

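        // Construct a PREVIEW template request and keep only the tags listed in
        // ANDROID_REQUEST_AVAILABLE_SESSION_KEYS; devices that report no session
        // parameters for the template are skipped.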
        android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings;
        android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams,
                modifiedSessionParams;
        constructFilteredSettings(session, availableSessionKeys, RequestTemplate::PREVIEW,
                                  &previewRequestSettings, &sessionParams);
        if (sessionParams.isEmpty()) {
            ret = session->close();
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        outputPreviewStreams.clear();

        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams,
                                                        &previewThreshold));
        ASSERT_NE(0u, outputPreviewStreams.size());

        Stream previewStream = {
                0,
                StreamType::OUTPUT,
                outputPreviewStreams[0].width,
                outputPreviewStreams[0].height,
                static_cast<PixelFormat>(outputPreviewStreams[0].format),
                static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
                        GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                Dataspace::UNKNOWN,
                StreamRotation::ROTATION_0,
                std::string(),
                0,
                -1,
                {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                RequestAvailableDynamicRangeProfilesMap::
                        ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        std::vector<Stream> streams = {previewStream};
        StreamConfiguration config;
        config.streams = streams;
        config.operationMode = StreamConfigurationMode::NORMAL_MODE;

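        // Attach the filtered session parameters to the stream configuration as a raw
        // camera_metadata buffer, and hand the buffer back to 'sessionParams' after the call.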
        modifiedSessionParams = sessionParams;
        camera_metadata_t* sessionParamsBuffer = sessionParams.release();
        uint8_t* rawSessionParamsBuffer = reinterpret_cast<uint8_t*>(sessionParamsBuffer);
        config.sessionParams.metadata =
                std::vector(rawSessionParamsBuffer,
                            rawSessionParamsBuffer + get_camera_metadata_size(sessionParamsBuffer));

        config.streamConfigCounter = 0;
        config.multiResolutionInputImage = false;

        ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
        ASSERT_TRUE(s.isOk());

        sessionParams.acquire(sessionParamsBuffer);
        ret = session->close();
        ASSERT_TRUE(ret.isOk());
    }
}

// Verify that valid stream use cases can be configured successfully, and that invalid use cases
// fail stream configuration.
TEST_P(CameraAidlTest, configureStreamsUseCases) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);

        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
        // Skip cameras that support only depth output
        if (isDepthOnly(staticMeta)) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        std::vector<AvailableStream> outputPreviewStreams;
        AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
                                            static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputPreviewStreams, &previewThreshold));
        ASSERT_NE(0u, outputPreviewStreams.size());

        // Combine the mandatory stream use cases with one known-invalid use case
        std::vector<int64_t> useCases(kMandatoryUseCases);
        useCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL + 1);

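        // Read the use cases advertised in ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES; when the
        // tag is absent, treat DEFAULT as the only supported use case.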
        std::vector<int64_t> supportedUseCases;
        camera_metadata_ro_entry entry;
        auto retcode = find_camera_metadata_ro_entry(
                staticMeta, ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES, &entry);
        if ((0 == retcode) && (entry.count > 0)) {
            supportedUseCases.insert(supportedUseCases.end(), entry.data.i64,
                                     entry.data.i64 + entry.count);
        } else {
            supportedUseCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
        }

        std::vector<Stream> streams(1);
        streams[0] = {0,
                      StreamType::OUTPUT,
                      outputPreviewStreams[0].width,
                      outputPreviewStreams[0].height,
                      static_cast<PixelFormat>(outputPreviewStreams[0].format),
                      static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
                              GRALLOC1_CONSUMER_USAGE_CPU_READ),
                      Dataspace::UNKNOWN,
                      StreamRotation::ROTATION_0,
                      std::string(),
                      0,
                      -1,
                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                      RequestAvailableDynamicRangeProfilesMap::
                              ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

        int32_t streamConfigCounter = 0;
        CameraMetadata req;
        StreamConfiguration config;
        RequestTemplate reqTemplate = RequestTemplate::STILL_CAPTURE;
        ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &req);
        ASSERT_TRUE(ret.isOk());
        config.sessionParams = req;

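        // For every candidate use case: isStreamCombinationSupported() must agree with the
        // static metadata, and configureStreams() must succeed exactly for the supported ones.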
        for (int64_t useCase : useCases) {
            bool useCaseSupported = std::find(supportedUseCases.begin(), supportedUseCases.end(),
                                              useCase) != supportedUseCases.end();

            streams[0].useCase = static_cast<
                    aidl::android::hardware::camera::metadata::ScalerAvailableStreamUseCases>(
                    useCase);
            config.streams = streams;
            config.operationMode = StreamConfigurationMode::NORMAL_MODE;
            config.streamConfigCounter = streamConfigCounter;
            config.multiResolutionInputImage = false;

            bool combSupported;
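            // If the HAL reports OPERATION_NOT_SUPPORTED for isStreamCombinationSupported(),
            // skip the checks for this use case.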
            ret = cameraDevice->isStreamCombinationSupported(config, &combSupported);
            if (static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED) ==
                ret.getServiceSpecificError()) {
                continue;
            }

            ASSERT_TRUE(ret.isOk());
            ASSERT_EQ(combSupported, useCaseSupported);

            std::vector<HalStream> halStreams;
            ret = mSession->configureStreams(config, &halStreams);
            ALOGI("configureStreams returns status: %d", ret.getServiceSpecificError());
            if (useCaseSupported) {
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(1u, halStreams.size());
            } else {
                ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
                          ret.getServiceSpecificError());
            }
        }
        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(CameraAidlTest);
INSTANTIATE_TEST_SUITE_P(
        PerInstance, CameraAidlTest,
        testing::ValuesIn(android::getAidlHalInstanceNames(ICameraProvider::descriptor)),
        android::hardware::PrintInstanceNameToString);