/*
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <aidl/Vintf.h>
#include <aidl/android/hardware/camera/common/VendorTagSection.h>
#include <aidl/android/hardware/camera/device/ICameraDevice.h>
#include <aidlcommonsupport/NativeHandle.h>
#include <camera_aidl_test.h>
#include <cutils/properties.h>
#include <device_cb.h>
#include <empty_device_cb.h>
#include <grallocusage/GrallocUsageConversion.h>
#include <gtest/gtest.h>
#include <hardware/gralloc.h>
#include <hardware/gralloc1.h>
#include <hidl/GtestPrinter.h>
#include <hidl/HidlSupport.h>
#include <torch_provider_cb.h>
#include <list>

using ::aidl::android::hardware::camera::common::CameraDeviceStatus;
using ::aidl::android::hardware::camera::common::CameraResourceCost;
using ::aidl::android::hardware::camera::common::TorchModeStatus;
using ::aidl::android::hardware::camera::common::VendorTagSection;
using ::aidl::android::hardware::camera::device::ICameraDevice;
using ::aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap;
using ::aidl::android::hardware::camera::metadata::SensorPixelMode;
using ::aidl::android::hardware::camera::provider::CameraIdAndStreamCombination;
using ::aidl::android::hardware::camera::provider::BnCameraProviderCallback;

using ::ndk::ScopedAStatus;

namespace {
const int32_t kBurstFrameCount = 10;
const uint32_t kMaxStillWidth = 2048;
const uint32_t kMaxStillHeight = 1536;

const int64_t kEmptyFlushTimeoutMSec = 200;

const static std::vector<int64_t> kMandatoryUseCases = {
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL};
}  // namespace

TEST_P(CameraAidlTest, getCameraIdList) {
    std::vector<std::string> idList;
    ScopedAStatus ret = mProvider->getCameraIdList(&idList);
    ASSERT_TRUE(ret.isOk());

    for (size_t i = 0; i < idList.size(); i++) {
        ALOGI("Camera Id[%zu] is %s", i, idList[i].c_str());
    }
}

// Test if ICameraProvider::getVendorTags returns Status::OK
TEST_P(CameraAidlTest, getVendorTags) {
    std::vector<VendorTagSection> vendorTags;
    ScopedAStatus ret = mProvider->getVendorTags(&vendorTags);

    ASSERT_TRUE(ret.isOk());
    for (size_t i = 0; i < vendorTags.size(); i++) {
        ALOGI("Vendor tag section %zu name %s", i, vendorTags[i].sectionName.c_str());
        for (auto& tag : vendorTags[i].tags) {
            ALOGI("Vendor tag id %u name %s type %d", tag.tagId, tag.tagName.c_str(),
                  (int)tag.tagType);
        }
    }
}

// Test if ICameraProvider::setCallback returns Status::OK
TEST_P(CameraAidlTest, setCallback) {
    struct ProviderCb : public BnCameraProviderCallback {
        ScopedAStatus cameraDeviceStatusChange(const std::string& cameraDeviceName,
                                               CameraDeviceStatus newStatus) override {
            ALOGI("camera device status callback name %s, status %d", cameraDeviceName.c_str(),
                  (int)newStatus);
            return ScopedAStatus::ok();
        }
        ScopedAStatus torchModeStatusChange(const std::string& cameraDeviceName,
                                            TorchModeStatus newStatus) override {
            ALOGI("Torch mode status callback name %s, status %d", cameraDeviceName.c_str(),
                  (int)newStatus);
            return ScopedAStatus::ok();
        }
        ScopedAStatus physicalCameraDeviceStatusChange(const std::string& cameraDeviceName,
                                                       const std::string& physicalCameraDeviceName,
                                                       CameraDeviceStatus newStatus) override {
            ALOGI("physical camera device status callback name %s, physical camera name %s,"
                  " status %d",
                  cameraDeviceName.c_str(), physicalCameraDeviceName.c_str(), (int)newStatus);
            return ScopedAStatus::ok();
        }
    };

    std::shared_ptr<ProviderCb> cb = ndk::SharedRefBase::make<ProviderCb>();
    ScopedAStatus ret = mProvider->setCallback(cb);
    ASSERT_TRUE(ret.isOk());
    ret = mProvider->setCallback(nullptr);
    ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
}

// Test if ICameraProvider::getCameraDeviceInterface returns Status::OK and non-null device
TEST_P(CameraAidlTest, getCameraDeviceInterface) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> cameraDevice;
        ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &cameraDevice);
        ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(cameraDevice, nullptr);
    }
}

// Verify that the device resource cost can be retrieved and the values are
// correct.
TEST_P(CameraAidlTest, getResourceCost) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& deviceName : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> cameraDevice;
        ScopedAStatus ret = mProvider->getCameraDeviceInterface(deviceName, &cameraDevice);
        ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(cameraDevice, nullptr);

        CameraResourceCost resourceCost;
        ret = cameraDevice->getResourceCost(&resourceCost);
        ALOGI("getResourceCost returns: %d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());

        ALOGI(" Resource cost is %d", resourceCost.resourceCost);
        ASSERT_LE(resourceCost.resourceCost, 100u);

        for (const auto& name : resourceCost.conflictingDevices) {
            ALOGI(" Conflicting device: %s", name.c_str());
        }
    }
}

TEST_P(CameraAidlTest, systemCameraTest) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::map<std::string, std::vector<SystemCameraKind>> hiddenPhysicalIdToLogicalMap;
    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("systemCameraTest: Testing camera device %s", name.c_str());
        ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        CameraMetadata cameraCharacteristics;
        ret = device->getCameraCharacteristics(&cameraCharacteristics);
        ASSERT_TRUE(ret.isOk());

        const camera_metadata_t* staticMeta =
                reinterpret_cast<const camera_metadata_t*>(cameraCharacteristics.metadata.data());
        Status rc = isLogicalMultiCamera(staticMeta);
        if (rc == Status::OPERATION_NOT_SUPPORTED) {
            return;
        }

        ASSERT_EQ(rc, Status::OK);
        std::unordered_set<std::string> physicalIds;
        ASSERT_EQ(getPhysicalCameraIds(staticMeta, &physicalIds), Status::OK);
        SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
        Status retStatus = getSystemCameraKind(staticMeta, &systemCameraKind);
        ASSERT_EQ(retStatus, Status::OK);

        for (auto physicalId : physicalIds) {
            bool isPublicId = false;
            for (auto& deviceName : cameraDeviceNames) {
                std::string publicVersion, publicId;
                ASSERT_TRUE(matchDeviceName(deviceName, mProviderType, &publicVersion, &publicId));
                if (physicalId == publicId) {
                    isPublicId = true;
                    break;
                }
            }

            // For hidden physical cameras, collect their associated logical cameras
            // and store the system camera kind.
            if (!isPublicId) {
                auto it = hiddenPhysicalIdToLogicalMap.find(physicalId);
                if (it == hiddenPhysicalIdToLogicalMap.end()) {
                    hiddenPhysicalIdToLogicalMap.insert(std::make_pair(
                            physicalId, std::vector<SystemCameraKind>({systemCameraKind})));
                } else {
                    it->second.push_back(systemCameraKind);
                }
            }
        }
    }

    // Check that the system camera kind of the logical cameras associated with
    // each hidden physical camera is the same.
    for (const auto& it : hiddenPhysicalIdToLogicalMap) {
        SystemCameraKind neededSystemCameraKind = it.second.front();
        for (auto foundSystemCamera : it.second) {
            ASSERT_EQ(neededSystemCameraKind, foundSystemCamera);
        }
    }
}

// Verify that the static camera characteristics can be retrieved
// successfully.
TEST_P(CameraAidlTest, getCameraCharacteristics) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("getCameraCharacteristics: Testing camera device %s", name.c_str());
        ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        CameraMetadata chars;
        ret = device->getCameraCharacteristics(&chars);
        ASSERT_TRUE(ret.isOk());
        verifyCameraCharacteristics(chars);
        verifyMonochromeCharacteristics(chars);
        verifyRecommendedConfigs(chars);
        verifyLogicalOrUltraHighResCameraMetadata(name, device, chars, cameraDeviceNames);

        ASSERT_TRUE(ret.isOk());

        // getPhysicalCameraCharacteristics will fail for publicly
        // advertised camera IDs.
        std::string version, cameraId;
        ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &cameraId));
        CameraMetadata devChars;
        ret = device->getPhysicalCameraCharacteristics(cameraId, &devChars);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
        ASSERT_EQ(0, devChars.metadata.size());
    }
}

// Verify that the torch strength level can be set and retrieved successfully.
TEST_P(CameraAidlTest, turnOnTorchWithStrengthLevel) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this);
    ndk::ScopedAStatus ret = mProvider->setCallback(cb);
    ASSERT_TRUE(ret.isOk());

    for (const auto& name : cameraDeviceNames) {
        int32_t defaultLevel;
        std::shared_ptr<ICameraDevice> device;
        ALOGI("%s: Testing camera device %s", __FUNCTION__, name.c_str());

        ret = mProvider->getCameraDeviceInterface(name, &device);
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        CameraMetadata chars;
        ret = device->getCameraCharacteristics(&chars);
        ASSERT_TRUE(ret.isOk());

        const camera_metadata_t* staticMeta =
                reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());
        bool torchStrengthControlSupported = isTorchStrengthControlSupported(staticMeta);
        camera_metadata_ro_entry entry;
        int rc = find_camera_metadata_ro_entry(staticMeta,
                                               ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL, &entry);
        if (torchStrengthControlSupported) {
            ASSERT_EQ(rc, 0);
            ASSERT_GT(entry.count, 0);
            defaultLevel = *entry.data.i32;
            ALOGI("Default level is:%d", defaultLevel);
        }

        mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
        ret = device->turnOnTorchWithStrengthLevel(2);
        ALOGI("turnOnTorchWithStrengthLevel returns status: %d", ret.getServiceSpecificError());
        // OPERATION_NOT_SUPPORTED check
        if (!torchStrengthControlSupported) {
            ALOGI("Torch strength control not supported.");
            ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED),
                      ret.getServiceSpecificError());
        } else {
            {
                ASSERT_TRUE(ret.isOk());
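                // Block here until the registered TorchProviderCb reports the torch as
                // AVAILABLE_ON, or fail if no status update arrives within kTorchTimeoutSec.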
                std::unique_lock<std::mutex> l(mTorchLock);
                while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kTorchTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
                }
                ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
                mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
            }
            ALOGI("getTorchStrengthLevel: Testing");
            int32_t strengthLevel;
            ret = device->getTorchStrengthLevel(&strengthLevel);
            ASSERT_TRUE(ret.isOk());
            ALOGI("Torch strength level is : %d", strengthLevel);
            ASSERT_EQ(strengthLevel, 2);

            // Turn OFF the torch and verify torch strength level is reset to default level.
            ALOGI("Testing torch strength level reset after turning the torch OFF.");
            ret = device->setTorchMode(false);
            ASSERT_TRUE(ret.isOk());
            {
                std::unique_lock<std::mutex> l(mTorchLock);
                while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kTorchTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
                }
                ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
            }

            ret = device->getTorchStrengthLevel(&strengthLevel);
            ASSERT_TRUE(ret.isOk());
            ALOGI("Torch strength level after turning OFF torch is : %d", strengthLevel);
            ASSERT_EQ(strengthLevel, defaultLevel);
        }
    }
}

// In case it is supported, verify that the torch can be enabled.
// Check for corresponding torch callbacks as well.
TEST_P(CameraAidlTest, setTorchMode) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this);
    ndk::ScopedAStatus ret = mProvider->setCallback(cb);
    ALOGI("setCallback returns status: %d", ret.getServiceSpecificError());
    ASSERT_TRUE(ret.isOk());
    ASSERT_NE(cb, nullptr);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("setTorchMode: Testing camera device %s", name.c_str());
        ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        CameraMetadata metadata;
        ret = device->getCameraCharacteristics(&metadata);
        ALOGI("getCameraCharacteristics returns status:%d", ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        camera_metadata_t* staticMeta =
                reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
        bool torchSupported = isTorchSupported(staticMeta);

        mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
        ret = device->setTorchMode(true);
        ALOGI("setTorchMode returns status: %d", ret.getServiceSpecificError());
        if (!torchSupported) {
            ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED),
                      ret.getServiceSpecificError());
        } else {
            ASSERT_TRUE(ret.isOk());
            {
                std::unique_lock<std::mutex> l(mTorchLock);
                while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kTorchTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
                }
                ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
                mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
            }

            ret = device->setTorchMode(false);
            ASSERT_TRUE(ret.isOk());
            {
                std::unique_lock<std::mutex> l(mTorchLock);
                while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kTorchTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
                }
                ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
            }
        }
    }
}

// Check dump functionality.
TEST_P(CameraAidlTest, dump) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("dump: Testing camera device %s", name.c_str());

        ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        int raw_handle = open(kDumpOutput, O_RDWR);
        ASSERT_GE(raw_handle, 0);

        auto retStatus = device->dump(raw_handle, nullptr, 0);
        ASSERT_EQ(retStatus, ::android::OK);
        close(raw_handle);
    }
}

// Open, dump, then close
TEST_P(CameraAidlTest, openClose) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("openClose: Testing camera device %s", name.c_str());
        ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();

        ret = device->open(cb, &mSession);
        ASSERT_TRUE(ret.isOk());
        ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_NE(mSession, nullptr);
        int raw_handle = open(kDumpOutput, O_RDWR);
        ASSERT_GE(raw_handle, 0);

        auto retStatus = device->dump(raw_handle, nullptr, 0);
        ASSERT_EQ(retStatus, ::android::OK);
        close(raw_handle);

        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
        // TODO: test all session API calls return INTERNAL_ERROR after close
        // TODO: keep a wp copy here and verify session cannot be promoted out of this scope
    }
}

// Check whether all common default request settings can be successfully
// constructed.
TEST_P(CameraAidlTest, constructDefaultRequestSettings) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("constructDefaultRequestSettings: Testing camera device %s", name.c_str());
        ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
        ret = device->open(cb, &mSession);
        ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(mSession, nullptr);

        for (int32_t t = (int32_t)RequestTemplate::PREVIEW; t <= (int32_t)RequestTemplate::MANUAL;
             t++) {
            RequestTemplate reqTemplate = (RequestTemplate)t;
            CameraMetadata rawMetadata;
            ret = mSession->constructDefaultRequestSettings(reqTemplate, &rawMetadata);
            ALOGI("constructDefaultRequestSettings returns status:%d:%d", ret.getExceptionCode(),
                  ret.getServiceSpecificError());

            if (reqTemplate == RequestTemplate::ZERO_SHUTTER_LAG ||
                reqTemplate == RequestTemplate::MANUAL) {
                // optional templates
                ASSERT_TRUE(ret.isOk() || static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
                                                  ret.getServiceSpecificError());
            } else {
                ASSERT_TRUE(ret.isOk());
            }

            if (ret.isOk()) {
                const camera_metadata_t* metadata = (camera_metadata_t*)rawMetadata.metadata.data();
                size_t expectedSize = rawMetadata.metadata.size();
                int result = validate_camera_metadata_structure(metadata, &expectedSize);
                ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
                verifyRequestTemplate(metadata, reqTemplate);
            } else {
                ASSERT_EQ(0u, rawMetadata.metadata.size());
            }
        }
        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// Verify that all supported stream formats and sizes can be configured
// successfully.
TEST_P(CameraAidlTest, configureStreamsAvailableOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputStreams;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> device;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/, &device /*out*/);

        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
        outputStreams.clear();
        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
        ASSERT_NE(0u, outputStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);
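
        // For every advertised output size/format, build a single HWC-consumed output stream
        // with the STANDARD dynamic range profile and verify that the HAL both reports the
        // combination as supported and accepts it in configureStreams().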
        int32_t streamId = 0;
        int32_t streamConfigCounter = 0;
        for (auto& it : outputStreams) {
            Stream stream;
            Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format));
            stream.id = streamId;
            stream.streamType = StreamType::OUTPUT;
            stream.width = it.width;
            stream.height = it.height;
            stream.format = static_cast<PixelFormat>(it.format);
            stream.dataSpace = dataspace;
            stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                    GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
            stream.rotation = StreamRotation::ROTATION_0;
            stream.dynamicRangeProfile = RequestAvailableDynamicRangeProfilesMap::
                    ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;

            std::vector<Stream> streams = {stream};
            StreamConfiguration config;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);

            bool expectStreamCombQuery = (isLogicalMultiCamera(staticMeta) == Status::OK);
            verifyStreamCombination(device, config, /*expectedStatus*/ true, expectStreamCombQuery);

            config.streamConfigCounter = streamConfigCounter++;
            std::vector<HalStream> halConfigs;
            ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
            ASSERT_TRUE(ret.isOk());
            ASSERT_EQ(halConfigs.size(), 1);
            ASSERT_EQ(halConfigs[0].id, streamId);

            streamId++;
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// Verify that mandatory concurrent streams and outputs are supported.
TEST_P(CameraAidlTest, configureConcurrentStreamsAvailableOutputs) {
    struct CameraTestInfo {
        CameraMetadata staticMeta;
        std::shared_ptr<ICameraDeviceSession> session;
        std::shared_ptr<ICameraDevice> cameraDevice;
        StreamConfiguration config;
    };

    std::map<std::string, std::string> idToNameMap = getCameraDeviceIdToNameMap(mProvider);
    std::vector<ConcurrentCameraIdCombination> concurrentDeviceCombinations =
            getConcurrentDeviceCombinations(mProvider);
    std::vector<AvailableStream> outputStreams;
    for (const auto& cameraDeviceIds : concurrentDeviceCombinations) {
        std::vector<CameraIdAndStreamCombination> cameraIdsAndStreamCombinations;
        std::vector<CameraTestInfo> cameraTestInfos;
        size_t i = 0;
        for (const auto& id : cameraDeviceIds.combination) {
            CameraTestInfo cti;
            auto it = idToNameMap.find(id);
            ASSERT_TRUE(idToNameMap.end() != it);
            std::string name = it->second;

            openEmptyDeviceSession(name, mProvider, &cti.session /*out*/, &cti.staticMeta /*out*/,
                                   &cti.cameraDevice /*out*/);

            outputStreams.clear();
            camera_metadata_t* staticMeta =
                    reinterpret_cast<camera_metadata_t*>(cti.staticMeta.metadata.data());
            ASSERT_EQ(Status::OK, getMandatoryConcurrentStreams(staticMeta, &outputStreams));
            ASSERT_NE(0u, outputStreams.size());

            int32_t jpegBufferSize = 0;
            ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
            ASSERT_NE(0u, jpegBufferSize);

            int32_t streamId = 0;
            std::vector<Stream> streams(outputStreams.size());
            size_t j = 0;
            for (const auto& s : outputStreams) {
                Stream stream;
                Dataspace dataspace = getDataspace(static_cast<PixelFormat>(s.format));
                stream.id = streamId++;
                stream.streamType = StreamType::OUTPUT;
                stream.width = s.width;
                stream.height = s.height;
                stream.format = static_cast<PixelFormat>(s.format);
                stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                        GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
                stream.dataSpace = dataspace;
                stream.rotation = StreamRotation::ROTATION_0;
                stream.sensorPixelModesUsed = {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT};
                stream.dynamicRangeProfile = RequestAvailableDynamicRangeProfilesMap::
                        ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
                streams[j] = stream;
                j++;
            }

            // Add the created stream configs to cameraIdsAndStreamCombinations
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &cti.config,
                                      jpegBufferSize);

            cti.config.streamConfigCounter = outputStreams.size();
            CameraIdAndStreamCombination cameraIdAndStreamCombination;
            cameraIdAndStreamCombination.cameraId = id;
            cameraIdAndStreamCombination.streamConfiguration = cti.config;
            cameraIdsAndStreamCombinations.push_back(cameraIdAndStreamCombination);
            i++;
            cameraTestInfos.push_back(cti);
        }
        // Now verify that concurrent streams are supported
        bool combinationSupported;
        ndk::ScopedAStatus ret = mProvider->isConcurrentStreamCombinationSupported(
                cameraIdsAndStreamCombinations, &combinationSupported);
        ASSERT_TRUE(ret.isOk());
        ASSERT_EQ(combinationSupported, true);

        // Test the stream can actually be configured
        for (auto& cti : cameraTestInfos) {
            if (cti.session != nullptr) {
                camera_metadata_t* staticMeta =
                        reinterpret_cast<camera_metadata_t*>(cti.staticMeta.metadata.data());
                bool expectStreamCombQuery = (isLogicalMultiCamera(staticMeta) == Status::OK);
                verifyStreamCombination(cti.cameraDevice, cti.config, /*expectedStatus*/ true,
                                        expectStreamCombQuery);
            }

            if (cti.session != nullptr) {
                std::vector<HalStream> streamConfigs;
                ret = cti.session->configureStreams(cti.config, &streamConfigs);
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(cti.config.streams.size(), streamConfigs.size());
            }
        }

        for (auto& cti : cameraTestInfos) {
            ret = cti.session->close();
            ASSERT_TRUE(ret.isOk());
        }
    }
}

// Check for correct handling of invalid/incorrect configuration parameters.
TEST_P(CameraAidlTest, configureStreamsInvalidOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputStreams;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
        outputStreams.clear();

        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
        ASSERT_NE(0u, outputStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        Stream stream = {streamId++,
                         StreamType::OUTPUT,
                         static_cast<uint32_t>(0),
                         static_cast<uint32_t>(0),
                         static_cast<PixelFormat>(outputStreams[0].format),
                         static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                         Dataspace::UNKNOWN,
                         StreamRotation::ROTATION_0,
                         std::string(),
                         jpegBufferSize,
                         -1,
                         {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                         RequestAvailableDynamicRangeProfilesMap::
                                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        int32_t streamConfigCounter = 0;
        std::vector<Stream> streams = {stream};
        StreamConfiguration config;
        createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                  jpegBufferSize);

        verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ false,
                                /*expectStreamCombQuery*/ false);

        config.streamConfigCounter = streamConfigCounter++;
        std::vector<HalStream> halConfigs;
        ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
                            ret.getServiceSpecificError() ||
                    static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError());

        stream = {streamId++,
                  StreamType::OUTPUT,
                  /*width*/ INT32_MAX,
                  /*height*/ INT32_MAX,
                  static_cast<PixelFormat>(outputStreams[0].format),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  jpegBufferSize,
                  -1,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                  jpegBufferSize);

        config.streamConfigCounter = streamConfigCounter++;
        halConfigs.clear();
        ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());

        for (auto& it : outputStreams) {
            stream = {streamId++,
                      StreamType::OUTPUT,
                      it.width,
                      it.height,
                      static_cast<PixelFormat>(UINT32_MAX),
                      static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                              GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                      Dataspace::UNKNOWN,
                      StreamRotation::ROTATION_0,
                      std::string(),
                      jpegBufferSize,
                      -1,
                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                      RequestAvailableDynamicRangeProfilesMap::
                              ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

            streams[0] = stream;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);
            config.streamConfigCounter = streamConfigCounter++;
            halConfigs.clear();
            ret = mSession->configureStreams(config, &halConfigs);
            ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
                      ret.getServiceSpecificError());

            stream = {streamId++,
                      StreamType::OUTPUT,
                      it.width,
                      it.height,
                      static_cast<PixelFormat>(it.format),
                      static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                              GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                      Dataspace::UNKNOWN,
                      static_cast<StreamRotation>(UINT32_MAX),
                      std::string(),
                      jpegBufferSize,
                      -1,
                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                      RequestAvailableDynamicRangeProfilesMap::
                              ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

            streams[0] = stream;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);

            config.streamConfigCounter = streamConfigCounter++;
            halConfigs.clear();
            ret = mSession->configureStreams(config, &halConfigs);
            ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
                      ret.getServiceSpecificError());
        }

        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// Check whether all supported ZSL output stream combinations can be
// configured successfully.
TEST_P(CameraAidlTest, configureStreamsZSLInputOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> inputStreams;
    std::vector<AvailableZSLInputOutput> inputOutputMap;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        Status rc = isZSLModeAvailable(staticMeta);
        if (Status::OPERATION_NOT_SUPPORTED == rc) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }
        ASSERT_EQ(Status::OK, rc);

        inputStreams.clear();
        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, inputStreams));
        ASSERT_NE(0u, inputStreams.size());

        inputOutputMap.clear();
        ASSERT_EQ(Status::OK, getZSLInputOutputMap(staticMeta, inputOutputMap));
        ASSERT_NE(0u, inputOutputMap.size());

        bool supportMonoY8 = false;
        if (Status::OK == isMonochromeCamera(staticMeta)) {
            for (auto& it : inputStreams) {
                if (it.format == static_cast<uint32_t>(PixelFormat::Y8)) {
                    supportMonoY8 = true;
                    break;
                }
            }
        }

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        bool hasPrivToY8 = false, hasY8ToY8 = false, hasY8ToBlob = false;
        uint32_t streamConfigCounter = 0;
        for (auto& inputIter : inputOutputMap) {
            AvailableStream input;
            ASSERT_EQ(Status::OK, findLargestSize(inputStreams, inputIter.inputFormat, input));
            ASSERT_NE(0u, inputStreams.size());

            if (inputIter.inputFormat ==
                        static_cast<uint32_t>(PixelFormat::IMPLEMENTATION_DEFINED) &&
                inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
                hasPrivToY8 = true;
            } else if (inputIter.inputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
                if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::BLOB)) {
                    hasY8ToBlob = true;
                } else if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
                    hasY8ToY8 = true;
                }
            }
            AvailableStream outputThreshold = {INT32_MAX, INT32_MAX, inputIter.outputFormat};
            std::vector<AvailableStream> outputStreams;
            ASSERT_EQ(Status::OK,
                      getAvailableOutputStreams(staticMeta, outputStreams, &outputThreshold));
            for (auto& outputIter : outputStreams) {
                Dataspace outputDataSpace =
                        getDataspace(static_cast<PixelFormat>(outputIter.format));
                Stream zslStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        input.width,
                        input.height,
                        static_cast<PixelFormat>(input.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC_USAGE_HW_CAMERA_ZSL),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                Stream inputStream = {
                        streamId++,
                        StreamType::INPUT,
                        input.width,
                        input.height,
                        static_cast<PixelFormat>(input.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(0),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                Stream outputStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        outputIter.width,
                        outputIter.height,
                        static_cast<PixelFormat>(outputIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                        outputDataSpace,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

                std::vector<Stream> streams = {inputStream, zslStream, outputStream};

                StreamConfiguration config;
                createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                          jpegBufferSize);

                verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
                                        /*expectStreamCombQuery*/ false);

                config.streamConfigCounter = streamConfigCounter++;
                std::vector<HalStream> halConfigs;
                ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(3u, halConfigs.size());
            }
        }
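
        // For monochrome cameras that advertise Y8 outputs, the ZSL input/output map is
        // expected to contain the corresponding reprocessing paths checked below.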
        if (supportMonoY8) {
            if (Status::OK == isZSLModeAvailable(staticMeta, PRIV_REPROCESS)) {
                ASSERT_TRUE(hasPrivToY8);
            }
            if (Status::OK == isZSLModeAvailable(staticMeta, YUV_REPROCESS)) {
                ASSERT_TRUE(hasY8ToY8);
                ASSERT_TRUE(hasY8ToBlob);
            }
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// Check whether session parameters are supported. If HAL support for them
// exists, then try to configure a preview stream using them.
TEST_P(CameraAidlTest, configureStreamsWithSessionParameters) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputPreviewStreams;
    AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
                                        static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;

        std::shared_ptr<ICameraDevice> unusedCameraDevice;
        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &unusedCameraDevice /*out*/);
        camera_metadata_t* staticMetaBuffer =
                reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        std::unordered_set<int32_t> availableSessionKeys;
        auto rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
                                   &availableSessionKeys);
        ASSERT_TRUE(Status::OK == rc);
        if (availableSessionKeys.empty()) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings;
        android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams,
                modifiedSessionParams;
        constructFilteredSettings(mSession, availableSessionKeys, RequestTemplate::PREVIEW,
                                  &previewRequestSettings, &sessionParams);
        if (sessionParams.isEmpty()) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        outputPreviewStreams.clear();

        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams,
                                                        &previewThreshold));
        ASSERT_NE(0u, outputPreviewStreams.size());

        Stream previewStream = {
                0,
                StreamType::OUTPUT,
                outputPreviewStreams[0].width,
                outputPreviewStreams[0].height,
                static_cast<PixelFormat>(outputPreviewStreams[0].format),
                static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                        GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                Dataspace::UNKNOWN,
                StreamRotation::ROTATION_0,
                std::string(),
                /*bufferSize*/ 0,
                /*groupId*/ -1,
                {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                RequestAvailableDynamicRangeProfilesMap::
                        ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

        std::vector<Stream> streams = {previewStream};
        StreamConfiguration config;

        config.streams = streams;
        config.operationMode = StreamConfigurationMode::NORMAL_MODE;
        modifiedSessionParams = sessionParams;
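        // Serialize the helper CameraMetadata into the raw byte vector carried by
        // StreamConfiguration.sessionParams before handing it to configureStreams().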
        auto sessionParamsBuffer = sessionParams.release();
        std::vector<uint8_t> rawSessionParam =
                std::vector(reinterpret_cast<uint8_t*>(sessionParamsBuffer),
                            reinterpret_cast<uint8_t*>(sessionParamsBuffer) +
                                    get_camera_metadata_size(sessionParamsBuffer));

        config.sessionParams.metadata = rawSessionParam;
        config.streamConfigCounter = 0;
        config.streams = {previewStream};
        config.streamConfigCounter = 0;
        config.multiResolutionInputImage = false;

        bool newSessionParamsAvailable = false;
        for (const auto& it : availableSessionKeys) {
            if (modifiedSessionParams.exists(it)) {
                modifiedSessionParams.erase(it);
                newSessionParamsAvailable = true;
                break;
            }
        }
        if (newSessionParamsAvailable) {
            auto modifiedSessionParamsBuffer = modifiedSessionParams.release();
            verifySessionReconfigurationQuery(mSession, sessionParamsBuffer,
                                              modifiedSessionParamsBuffer);
            modifiedSessionParams.acquire(modifiedSessionParamsBuffer);
        }

        std::vector<HalStream> halConfigs;
        ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_TRUE(ret.isOk());
        ASSERT_EQ(1u, halConfigs.size());

        sessionParams.acquire(sessionParamsBuffer);
        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// Verify that all supported preview + still capture stream combinations
// can be configured successfully.
TEST_P(CameraAidlTest, configureStreamsPreviewStillOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputBlobStreams;
    std::vector<AvailableStream> outputPreviewStreams;
    AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
                                        static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
    AvailableStream blobThreshold = {INT32_MAX, INT32_MAX, static_cast<int32_t>(PixelFormat::BLOB)};

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;

        std::shared_ptr<ICameraDevice> cameraDevice;
        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);

        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        // Check if the camera supports depth only
        if (isDepthOnly(staticMeta)) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        outputBlobStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold));
        ASSERT_NE(0u, outputBlobStreams.size());

        outputPreviewStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputPreviewStreams, &previewThreshold));
        ASSERT_NE(0u, outputPreviewStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        uint32_t streamConfigCounter = 0;

        for (auto& blobIter : outputBlobStreams) {
            for (auto& previewIter : outputPreviewStreams) {
                Stream previewStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        previewIter.width,
                        previewIter.height,
                        static_cast<PixelFormat>(previewIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        /*bufferSize*/ 0,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                Stream blobStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        blobIter.width,
                        blobIter.height,
                        static_cast<PixelFormat>(blobIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_CPU_READ),
                        Dataspace::JFIF,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        /*bufferSize*/ 0,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                std::vector<Stream> streams = {previewStream, blobStream};
                StreamConfiguration config;

                createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                          jpegBufferSize);
                config.streamConfigCounter = streamConfigCounter++;
                verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
                                        /*expectStreamCombQuery*/ false);

                std::vector<HalStream> halConfigs;
                ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(2u, halConfigs.size());
            }
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// In case constrained mode is supported, test whether it can be
// configured. Additionally check for common invalid inputs when
// using this mode.
TEST_P(CameraAidlTest, configureStreamsConstrainedOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        Status rc = isConstrainedModeAvailable(staticMeta);
        if (Status::OPERATION_NOT_SUPPORTED == rc) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }
        ASSERT_EQ(Status::OK, rc);

        AvailableStream hfrStream;
        rc = pickConstrainedModeSize(staticMeta, hfrStream);
        ASSERT_EQ(Status::OK, rc);

        int32_t streamId = 0;
        uint32_t streamConfigCounter = 0;
        Stream stream = {streamId,
                         StreamType::OUTPUT,
                         hfrStream.width,
                         hfrStream.height,
                         static_cast<PixelFormat>(hfrStream.format),
                         static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                         Dataspace::UNKNOWN,
                         StreamRotation::ROTATION_0,
                         std::string(),
                         /*bufferSize*/ 0,
                         /*groupId*/ -1,
                         {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                         RequestAvailableDynamicRangeProfilesMap::
                                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        std::vector<Stream> streams = {stream};
        StreamConfiguration config;
        createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
                                  &config);

        verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
                                /*expectStreamCombQuery*/ false);

        config.streamConfigCounter = streamConfigCounter++;
        std::vector<HalStream> halConfigs;
        ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_TRUE(ret.isOk());
        ASSERT_EQ(1u, halConfigs.size());
        ASSERT_EQ(halConfigs[0].id, streamId);

        stream = {streamId++,
                  StreamType::OUTPUT,
                  static_cast<uint32_t>(0),
                  static_cast<uint32_t>(0),
                  static_cast<PixelFormat>(hfrStream.format),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  /*bufferSize*/ 0,
                  /*groupId*/ -1,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
                                  &config);

        config.streamConfigCounter = streamConfigCounter++;
        std::vector<HalStream> halConfig;
        ret = mSession->configureStreams(config, &halConfig);
        ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
                            ret.getServiceSpecificError() ||
                    static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError());

        stream = {streamId++,
                  StreamType::OUTPUT,
                  INT32_MAX,
                  INT32_MAX,
                  static_cast<PixelFormat>(hfrStream.format),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  /*bufferSize*/ 0,
                  /*groupId*/ -1,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
                                  &config);

        config.streamConfigCounter = streamConfigCounter++;
        halConfigs.clear();
        ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());

        stream = {streamId++,
                  StreamType::OUTPUT,
                  hfrStream.width,
                  hfrStream.height,
                  static_cast<PixelFormat>(UINT32_MAX),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  /*bufferSize*/ 0,
                  /*groupId*/ -1,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
                                  &config);

        config.streamConfigCounter = streamConfigCounter++;
        halConfigs.clear();
        ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());

        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

// Verify that all supported video + snapshot stream combinations can
// be configured successfully.
TEST_P(CameraAidlTest, configureStreamsVideoStillOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputBlobStreams;
    std::vector<AvailableStream> outputVideoStreams;
    AvailableStream videoThreshold = {kMaxVideoWidth, kMaxVideoHeight,
                                      static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
    AvailableStream blobThreshold = {kMaxVideoWidth, kMaxVideoHeight,
                                     static_cast<int32_t>(PixelFormat::BLOB)};

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);

        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        // Check if the camera supports depth only
        if (isDepthOnly(staticMeta)) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        outputBlobStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold));
        ASSERT_NE(0u, outputBlobStreams.size());

        outputVideoStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputVideoStreams, &videoThreshold));
        ASSERT_NE(0u, outputVideoStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        uint32_t streamConfigCounter = 0;
        for (auto& blobIter : outputBlobStreams) {
            for (auto& videoIter : outputVideoStreams) {
                Stream videoStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        videoIter.width,
                        videoIter.height,
                        static_cast<PixelFormat>(videoIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                Stream blobStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        blobIter.width,
                        blobIter.height,
                        static_cast<PixelFormat>(blobIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_CPU_READ),
                        Dataspace::JFIF,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                std::vector<Stream> streams = {videoStream, blobStream};
                StreamConfiguration config;

                createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                          jpegBufferSize);
                verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
                                        /*expectStreamCombQuery*/ false);

                config.streamConfigCounter = streamConfigCounter++;
                std::vector<HalStream> halConfigs;
                ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(2u, halConfigs.size());
            }
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
1416
1417// Generate and verify a camera capture request
1418TEST_P(CameraAidlTest, processCaptureRequestPreview) {
1419 // TODO(b/220897574): Failing with BUFFER_ERROR
1420 processCaptureRequestInternal(GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, RequestTemplate::PREVIEW,
1421 false /*secureOnlyCameras*/);
1422}
1423
1424// Generate and verify a secure camera capture request
1425TEST_P(CameraAidlTest, processSecureCaptureRequest) {
1426 processCaptureRequestInternal(GRALLOC1_PRODUCER_USAGE_PROTECTED, RequestTemplate::STILL_CAPTURE,
1427 true /*secureOnlyCameras*/);
1428}
1429
1430TEST_P(CameraAidlTest, processCaptureRequestPreviewStabilization) {
1431 std::unordered_map<std::string, nsecs_t> cameraDeviceToTimeLag;
1432 processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ false,
1433 cameraDeviceToTimeLag);
1434 processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ true,
1435 cameraDeviceToTimeLag);
1436}
1437
1438// Generate and verify a multi-camera capture request
1439TEST_P(CameraAidlTest, processMultiCaptureRequestPreview) {
1440 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1441 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
1442 static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
1443 int64_t bufferId = 1;
1444 uint32_t frameNumber = 1;
1445 std::vector<uint8_t> settings;
1446 std::vector<uint8_t> emptySettings;
1447 std::string invalidPhysicalId = "-1";
1448
1449 for (const auto& name : cameraDeviceNames) {
1450 std::string version, deviceId;
1451         ALOGI("processMultiCaptureRequestPreview: Test device %s", name.c_str());
1452         ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1453 CameraMetadata metadata;
1454
1455 std::shared_ptr<ICameraDevice> unusedDevice;
1456 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &metadata /*out*/,
1457 &unusedDevice /*out*/);
1458
1459 camera_metadata_t* staticMeta =
1460 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
1461 Status rc = isLogicalMultiCamera(staticMeta);
1462 if (Status::OPERATION_NOT_SUPPORTED == rc) {
1463 ndk::ScopedAStatus ret = mSession->close();
1464 mSession = nullptr;
1465 ASSERT_TRUE(ret.isOk());
1466 continue;
1467 }
1468         ASSERT_EQ(Status::OK, rc);
1469
1470 std::unordered_set<std::string> physicalIds;
1471 rc = getPhysicalCameraIds(staticMeta, &physicalIds);
1472 ASSERT_TRUE(Status::OK == rc);
1473 ASSERT_TRUE(physicalIds.size() > 1);
1474
1475 std::unordered_set<int32_t> physicalRequestKeyIDs;
1476 rc = getSupportedKeys(staticMeta, ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS,
1477 &physicalRequestKeyIDs);
1478 ASSERT_TRUE(Status::OK == rc);
1479 if (physicalRequestKeyIDs.empty()) {
1480 ndk::ScopedAStatus ret = mSession->close();
1481 mSession = nullptr;
1482 ASSERT_TRUE(ret.isOk());
1483 // The logical camera doesn't support any individual physical requests.
1484 continue;
1485 }
1486
1487 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultPreviewSettings;
1488 android::hardware::camera::common::V1_0::helper::CameraMetadata filteredSettings;
1489 constructFilteredSettings(mSession, physicalRequestKeyIDs, RequestTemplate::PREVIEW,
1490 &defaultPreviewSettings, &filteredSettings);
1491 if (filteredSettings.isEmpty()) {
1492 // No physical device settings in default request.
1493 ndk::ScopedAStatus ret = mSession->close();
1494 mSession = nullptr;
1495 ASSERT_TRUE(ret.isOk());
1496 continue;
1497 }
1498
1499 const camera_metadata_t* settingsBuffer = defaultPreviewSettings.getAndLock();
1500 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1501 settings.assign(rawSettingsBuffer,
1502 rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1503 CameraMetadata settingsMetadata = {settings};
1504 overrideRotateAndCrop(&settingsMetadata);
1505
1506 ndk::ScopedAStatus ret = mSession->close();
1507 mSession = nullptr;
1508 ASSERT_TRUE(ret.isOk());
1509
1510 // Leave only 2 physical devices in the id set.
1511 auto it = physicalIds.begin();
1512 std::string physicalDeviceId = *it;
1513 it++;
1514 physicalIds.erase(++it, physicalIds.end());
1515 ASSERT_EQ(physicalIds.size(), 2u);
1516
1517 std::vector<HalStream> halStreams;
1518 bool supportsPartialResults = false;
1519 bool useHalBufManager = false;
1520 int32_t partialResultCount = 0;
1521 Stream previewStream;
1522 std::shared_ptr<DeviceCb> cb;
1523
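        // Reopen the device and configure a preview stream that explicitly lists the two
        // remaining physical camera ids, so results can be tracked per physical sub-camera.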
1524         configurePreviewStreams(
1525 name, mProvider, &previewThreshold, physicalIds, &mSession, &previewStream,
1526 &halStreams /*out*/, &supportsPartialResults /*out*/, &partialResultCount /*out*/,
1527 &useHalBufManager /*out*/, &cb /*out*/, 0 /*streamConfigCounter*/, true);
1528 if (mSession == nullptr) {
1529 // stream combination not supported by HAL, skip test for device
1530 continue;
1531 }
1532
1533 ::aidl::android::hardware::common::fmq::MQDescriptor<
1534 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1535 descriptor;
1536 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1537 ASSERT_TRUE(resultQueueRet.isOk());
1538 std::shared_ptr<ResultMetadataQueue> resultQueue =
1539 std::make_shared<ResultMetadataQueue>(descriptor);
1540 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1541 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
1542 resultQueue = nullptr;
1543 // Don't use the queue onwards.
1544 }
1545
1546 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1547 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1548 partialResultCount, physicalIds, resultQueue);
1549
1550 std::vector<CaptureRequest> requests(1);
1551 CaptureRequest& request = requests[0];
1552 request.frameNumber = frameNumber;
1553 request.fmqSettingsSize = 0;
1554         request.settings = settingsMetadata;
1555
1556 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1557
1558 std::vector<buffer_handle_t> graphicBuffers;
1559 graphicBuffers.reserve(halStreams.size());
1560 outputBuffers.resize(halStreams.size());
1561 size_t k = 0;
1562 for (const auto& halStream : halStreams) {
1563 buffer_handle_t buffer_handle;
1564 if (useHalBufManager) {
1565 outputBuffers[k] = {halStream.id, /*bufferId*/ 0, NativeHandle(),
1566 BufferStatus::OK, NativeHandle(), NativeHandle()};
1567 } else {
1568 allocateGraphicBuffer(previewStream.width, previewStream.height,
1569 android_convertGralloc1To0Usage(
1570 static_cast<uint64_t>(halStream.producerUsage),
1571 static_cast<uint64_t>(halStream.consumerUsage)),
1572 halStream.overrideFormat, &buffer_handle);
1573 graphicBuffers.push_back(buffer_handle);
1574 outputBuffers[k] = {
1575 halStream.id, bufferId, ::android::makeToAidl(buffer_handle),
1576 BufferStatus::OK, NativeHandle(), NativeHandle()};
1577 bufferId++;
1578 }
1579 k++;
1580 }
1581
1582 std::vector<PhysicalCameraSetting> camSettings(1);
1583 const camera_metadata_t* filteredSettingsBuffer = filteredSettings.getAndLock();
1584 uint8_t* rawFilteredSettingsBuffer = (uint8_t*)filteredSettingsBuffer;
1585 camSettings[0].settings = {std::vector(
1586 rawFilteredSettingsBuffer,
1587 rawFilteredSettingsBuffer + get_camera_metadata_size(filteredSettingsBuffer))};
1588 overrideRotateAndCrop(&camSettings[0].settings);
1589 camSettings[0].fmqSettingsSize = 0;
1590 camSettings[0].physicalCameraId = physicalDeviceId;
1591
1592 request.inputBuffer = {
1593 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
1594 request.physicalCameraSettings = camSettings;
1595
1596 {
1597 std::unique_lock<std::mutex> l(mLock);
1598 mInflightMap.clear();
1599 mInflightMap[frameNumber] = inflightReq;
1600 }
1601
1602 int32_t numRequestProcessed = 0;
1603 std::vector<BufferCache> cachesToRemove;
1604 ndk::ScopedAStatus returnStatus =
1605 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1606 ASSERT_TRUE(returnStatus.isOk());
1607 ASSERT_EQ(numRequestProcessed, 1u);
1608
1609 {
1610 std::unique_lock<std::mutex> l(mLock);
1611 while (!inflightReq->errorCodeValid &&
1612 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1613 auto timeout = std::chrono::system_clock::now() +
1614 std::chrono::seconds(kStreamBufferTimeoutSec);
1615 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1616 }
1617
1618 ASSERT_FALSE(inflightReq->errorCodeValid);
1619 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1620
1621 request.frameNumber++;
1622 // Empty settings should be supported after the first call
1623 // for repeating requests.
1624 request.settings.metadata.clear();
1625 request.physicalCameraSettings[0].settings.metadata.clear();
1626 // The buffer has been registered to HAL by bufferId, so per
1627 // API contract we should send a null handle for this buffer
1628 request.outputBuffers[0].buffer = NativeHandle();
1629 mInflightMap.clear();
1630 inflightReq = std::make_shared<InFlightRequest>(
1631 static_cast<ssize_t>(physicalIds.size()), false, supportsPartialResults,
1632 partialResultCount, physicalIds, resultQueue);
1633 mInflightMap[request.frameNumber] = inflightReq;
1634 }
1635
1636 returnStatus =
1637 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1638 ASSERT_TRUE(returnStatus.isOk());
1639 ASSERT_EQ(numRequestProcessed, 1u);
1640
1641 {
1642 std::unique_lock<std::mutex> l(mLock);
1643 while (!inflightReq->errorCodeValid &&
1644 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1645 auto timeout = std::chrono::system_clock::now() +
1646 std::chrono::seconds(kStreamBufferTimeoutSec);
1647 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1648 }
1649
1650 ASSERT_FALSE(inflightReq->errorCodeValid);
1651 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1652 }
1653
1654 // Invalid physical camera id should fail process requests
1655 frameNumber++;
1656 camSettings[0].physicalCameraId = invalidPhysicalId;
1657 camSettings[0].settings.metadata = settings;
1658
1659 request.physicalCameraSettings = camSettings; // Invalid camera settings
1660 returnStatus =
1661 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1662 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
1663 returnStatus.getServiceSpecificError());
1664
1665 defaultPreviewSettings.unlock(settingsBuffer);
1666 filteredSettings.unlock(filteredSettingsBuffer);
1667
1668 if (useHalBufManager) {
1669 std::vector<int32_t> streamIds(halStreams.size());
1670 for (size_t i = 0; i < streamIds.size(); i++) {
1671 streamIds[i] = halStreams[i].id;
1672 }
1673 verifyBuffersReturned(mSession, streamIds, cb);
1674 }
1675
1676 ret = mSession->close();
1677 mSession = nullptr;
1678 ASSERT_TRUE(ret.isOk());
1679 }
1680}
1681
1682// Generate and verify an ultra high resolution capture request
1683TEST_P(CameraAidlTest, processUltraHighResolutionRequest) {
1684 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1685 int64_t bufferId = 1;
1686 int32_t frameNumber = 1;
1687 CameraMetadata settings;
1688
1689 for (const auto& name : cameraDeviceNames) {
1690 std::string version, deviceId;
1691 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1692 CameraMetadata meta;
1693
1694 std::shared_ptr<ICameraDevice> unusedDevice;
1695 openEmptyDeviceSession(name, mProvider, &mSession, &meta, &unusedDevice);
1696 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1697 if (!isUltraHighResolution(staticMeta)) {
1698 ndk::ScopedAStatus ret = mSession->close();
1699 mSession = nullptr;
1700 ASSERT_TRUE(ret.isOk());
1701 continue;
1702 }
1703 CameraMetadata req;
1704 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
1705 ndk::ScopedAStatus ret =
1706 mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE, &req);
1707 ASSERT_TRUE(ret.isOk());
1708
1709 const camera_metadata_t* metadata =
1710 reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
1711 size_t expectedSize = req.metadata.size();
1712 int result = validate_camera_metadata_structure(metadata, &expectedSize);
1713 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
1714
1715 size_t entryCount = get_camera_metadata_entry_count(metadata);
1716 ASSERT_GT(entryCount, 0u);
1717 defaultSettings = metadata;
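        // Request the full-resolution readout; the DEFAULT pixel mode would typically use
        // the binned sensor mode on ultra-high-resolution sensors.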
1718 uint8_t sensorPixelMode =
1719 static_cast<uint8_t>(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
1720 ASSERT_EQ(::android::OK,
1721 defaultSettings.update(ANDROID_SENSOR_PIXEL_MODE, &sensorPixelMode, 1));
1722
1723 const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
1724 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1725 settings.metadata = std::vector(
1726 rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1727 overrideRotateAndCrop(&settings);
1728
1729 ret = mSession->close();
1730 mSession = nullptr;
1731 ASSERT_TRUE(ret.isOk());
1732
1733 std::vector<HalStream> halStreams;
1734 bool supportsPartialResults = false;
1735 bool useHalBufManager = false;
1736 int32_t partialResultCount = 0;
1737 Stream previewStream;
1738 std::shared_ptr<DeviceCb> cb;
1739
1740 std::list<PixelFormat> pixelFormats = {PixelFormat::YCBCR_420_888, PixelFormat::RAW16};
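        // Exercise both a YUV and a RAW16 max-resolution stream; each format is configured
        // and captured independently below.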
1741 for (PixelFormat format : pixelFormats) {
1742             previewStream.usage =
1743 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1744 GRALLOC1_CONSUMER_USAGE_CPU_READ);
1745 previewStream.dataSpace = Dataspace::UNKNOWN;
1746             configureStreams(name, mProvider, format, &mSession, &previewStream, &halStreams,
1747 &supportsPartialResults, &partialResultCount, &useHalBufManager, &cb,
1748 0, /*maxResolution*/ true);
1749 ASSERT_NE(mSession, nullptr);
1750
1751 ::aidl::android::hardware::common::fmq::MQDescriptor<
1752 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1753 descriptor;
1754 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1755 ASSERT_TRUE(resultQueueRet.isOk());
1756
1757 std::shared_ptr<ResultMetadataQueue> resultQueue =
1758 std::make_shared<ResultMetadataQueue>(descriptor);
1759 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1760 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
1761 resultQueue = nullptr;
1762 // Don't use the queue onwards.
1763 }
1764
1765 std::vector<buffer_handle_t> graphicBuffers;
1766 graphicBuffers.reserve(halStreams.size());
1767 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1768 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1769 partialResultCount, std::unordered_set<std::string>(), resultQueue);
1770
1771 std::vector<CaptureRequest> requests(1);
1772 CaptureRequest& request = requests[0];
1773 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1774 outputBuffers.resize(halStreams.size());
1775
1776 size_t k = 0;
1777 for (const auto& halStream : halStreams) {
1778 buffer_handle_t buffer_handle;
1779 if (useHalBufManager) {
1780 outputBuffers[k] = {halStream.id, 0,
1781 NativeHandle(), BufferStatus::OK,
1782 NativeHandle(), NativeHandle()};
1783 } else {
1784 allocateGraphicBuffer(previewStream.width, previewStream.height,
1785 android_convertGralloc1To0Usage(
1786 static_cast<uint64_t>(halStream.producerUsage),
1787 static_cast<uint64_t>(halStream.consumerUsage)),
1788 halStream.overrideFormat, &buffer_handle);
1789 graphicBuffers.push_back(buffer_handle);
1790 outputBuffers[k] = {
1791 halStream.id, bufferId, ::android::makeToAidl(buffer_handle),
1792 BufferStatus::OK, NativeHandle(), NativeHandle()};
1793 bufferId++;
1794 }
1795 k++;
1796 }
1797
1798 request.inputBuffer = {
1799 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
1800 request.frameNumber = frameNumber;
1801 request.fmqSettingsSize = 0;
1802 request.settings = settings;
1803 request.inputWidth = 0;
1804 request.inputHeight = 0;
1805
1806 {
1807 std::unique_lock<std::mutex> l(mLock);
1808 mInflightMap.clear();
1809 mInflightMap[frameNumber] = inflightReq;
1810 }
1811
1812 int32_t numRequestProcessed = 0;
1813 std::vector<BufferCache> cachesToRemove;
1814 ndk::ScopedAStatus returnStatus =
1815 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1816 ASSERT_TRUE(returnStatus.isOk());
1817 ASSERT_EQ(numRequestProcessed, 1u);
1818
1819 {
1820 std::unique_lock<std::mutex> l(mLock);
1821 while (!inflightReq->errorCodeValid &&
1822 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1823 auto timeout = std::chrono::system_clock::now() +
1824 std::chrono::seconds(kStreamBufferTimeoutSec);
1825 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1826 }
1827
1828 ASSERT_FALSE(inflightReq->errorCodeValid);
1829 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1830 }
1831 if (useHalBufManager) {
1832 std::vector<int32_t> streamIds(halStreams.size());
1833 for (size_t i = 0; i < streamIds.size(); i++) {
1834 streamIds[i] = halStreams[i].id;
1835 }
1836 verifyBuffersReturned(mSession, streamIds, cb);
1837 }
1838
1839 ret = mSession->close();
1840 mSession = nullptr;
1841 ASSERT_TRUE(ret.isOk());
1842 }
1843 }
1844}
1845
1846 // Generate and verify a 10-bit dynamic range capture request
1847TEST_P(CameraAidlTest, process10BitDynamicRangeRequest) {
1848 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1849 int64_t bufferId = 1;
1850     CameraMetadata settings;
1851
1852 for (const auto& name : cameraDeviceNames) {
1853 std::string version, deviceId;
1854 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1855 CameraMetadata meta;
1856 std::shared_ptr<ICameraDevice> device;
1857 openEmptyDeviceSession(name, mProvider, &mSession, &meta, &device);
1858 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1859 if (!is10BitDynamicRangeCapable(staticMeta)) {
1860 ndk::ScopedAStatus ret = mSession->close();
1861 mSession = nullptr;
1862 ASSERT_TRUE(ret.isOk());
1863 continue;
1864 }
1865         std::vector<RequestAvailableDynamicRangeProfilesMap> profileList;
1866         get10BitDynamicRangeProfiles(staticMeta, &profileList);
1867 ASSERT_FALSE(profileList.empty());
1868
1869 CameraMetadata req;
1870 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
1871 ndk::ScopedAStatus ret =
1872                 mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &req);
1873         ASSERT_TRUE(ret.isOk());
1874
1875 const camera_metadata_t* metadata =
1876 reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
1877 size_t expectedSize = req.metadata.size();
1878 int result = validate_camera_metadata_structure(metadata, &expectedSize);
1879 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
1880
1881 size_t entryCount = get_camera_metadata_entry_count(metadata);
1882 ASSERT_GT(entryCount, 0u);
1883 defaultSettings = metadata;
1884
1885 const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
1886 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1887 settings.metadata = std::vector(
1888 rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1889 overrideRotateAndCrop(&settings);
1890
1891 ret = mSession->close();
1892 mSession = nullptr;
1893 ASSERT_TRUE(ret.isOk());
1894
1895 std::vector<HalStream> halStreams;
1896 bool supportsPartialResults = false;
1897 bool useHalBufManager = false;
1898 int32_t partialResultCount = 0;
1899 Stream previewStream;
1900 std::shared_ptr<DeviceCb> cb;
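        // For each advertised 10-bit dynamic range profile (e.g. HLG10, HDR10), configure a
        // 10-bit capable preview stream and submit a short repeating burst below.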
1901 for (const auto& profile : profileList) {
1902             previewStream.usage =
1903 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1904 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
1905 previewStream.dataSpace = getDataspace(PixelFormat::IMPLEMENTATION_DEFINED);
1906             configureStreams(name, mProvider, PixelFormat::IMPLEMENTATION_DEFINED, &mSession,
1907 &previewStream, &halStreams, &supportsPartialResults,
1908 &partialResultCount, &useHalBufManager, &cb, 0,
1909 /*maxResolution*/ false, profile);
1910 ASSERT_NE(mSession, nullptr);
1911
1912 ::aidl::android::hardware::common::fmq::MQDescriptor<
1913 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1914 descriptor;
1915 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1916 ASSERT_TRUE(resultQueueRet.isOk());
1917
1918 std::shared_ptr<ResultMetadataQueue> resultQueue =
1919 std::make_shared<ResultMetadataQueue>(descriptor);
1920 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1921 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
1922 resultQueue = nullptr;
1923 // Don't use the queue onwards.
1924 }
1925
1926             mInflightMap.clear();
1927             // Submit enough requests to fill the HAL's in-flight queue
1928 std::vector<CaptureRequest> requests(halStreams[0].maxBuffers);
1929
1930             for (int32_t frameNumber = 0; frameNumber < requests.size(); frameNumber++) {
1931 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1932 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1933 partialResultCount, std::unordered_set<std::string>(), resultQueue);
1934
1935                 CaptureRequest& request = requests[frameNumber];
1936 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1937 outputBuffers.resize(halStreams.size());
1938
1939                 size_t k = 0;
1940 inflightReq->mOutstandingBufferIds.resize(halStreams.size());
1941 std::vector<buffer_handle_t> graphicBuffers;
1942 graphicBuffers.reserve(halStreams.size());
1943
1944                 for (const auto& halStream : halStreams) {
1945 buffer_handle_t buffer_handle;
1946 if (useHalBufManager) {
1947 outputBuffers[k] = {halStream.id, 0,
1948 NativeHandle(), BufferStatus::OK,
1949 NativeHandle(), NativeHandle()};
1950 } else {
1951 auto usage = android_convertGralloc1To0Usage(
1952 static_cast<uint64_t>(halStream.producerUsage),
1953 static_cast<uint64_t>(halStream.consumerUsage));
1954 allocateGraphicBuffer(previewStream.width, previewStream.height, usage,
1955 halStream.overrideFormat, &buffer_handle);
1956
1957 inflightReq->mOutstandingBufferIds[halStream.id][bufferId] = buffer_handle;
1958 graphicBuffers.push_back(buffer_handle);
1959 outputBuffers[k] = {halStream.id, bufferId,
1960 android::makeToAidl(buffer_handle), BufferStatus::OK, NativeHandle(),
1961 NativeHandle()};
1962 bufferId++;
1963 }
1964 k++;
1965                 }
1966
1967                 request.inputBuffer = {
1968 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
1969 request.frameNumber = frameNumber;
1970 request.fmqSettingsSize = 0;
1971 request.settings = settings;
1972 request.inputWidth = 0;
1973 request.inputHeight = 0;
1974
1975                 {
1976 std::unique_lock<std::mutex> l(mLock);
1977 mInflightMap[frameNumber] = inflightReq;
1978 }
1979
1980             }
1981
1982 int32_t numRequestProcessed = 0;
1983 std::vector<BufferCache> cachesToRemove;
1984 ndk::ScopedAStatus returnStatus =
1985                     mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1986             ASSERT_TRUE(returnStatus.isOk());
1987             ASSERT_EQ(numRequestProcessed, requests.size());
1988
1989             returnStatus = mSession->repeatingRequestEnd(requests.size() - 1,
1990 std::vector<int32_t> {halStreams[0].id});
1991 ASSERT_TRUE(returnStatus.isOk());
1992
1993 for (int32_t frameNumber = 0; frameNumber < requests.size(); frameNumber++) {
1994 const auto& inflightReq = mInflightMap[frameNumber];
1995                 std::unique_lock<std::mutex> l(mLock);
1996 while (!inflightReq->errorCodeValid &&
1997 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1998 auto timeout = std::chrono::system_clock::now() +
1999 std::chrono::seconds(kStreamBufferTimeoutSec);
2000 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2001 }
2002
2003 ASSERT_FALSE(inflightReq->errorCodeValid);
2004 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2005 verify10BitMetadata(mHandleImporter, *inflightReq, profile);
2006 }
2007
2008             if (useHalBufManager) {
2009 std::vector<int32_t> streamIds(halStreams.size());
2010 for (size_t i = 0; i < streamIds.size(); i++) {
2011 streamIds[i] = halStreams[i].id;
2012 }
2013 mSession->signalStreamFlush(streamIds, /*streamConfigCounter*/ 0);
2014 cb->waitForBuffersReturned();
2015 }
2016
2017 ret = mSession->close();
2018 mSession = nullptr;
2019 ASSERT_TRUE(ret.isOk());
2020 }
2021 }
2022}
2023
2024// Generate and verify a burst containing alternating sensor sensitivity values
2025TEST_P(CameraAidlTest, processCaptureRequestBurstISO) {
2026 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2027 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2028 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2029 int64_t bufferId = 1;
2030 int32_t frameNumber = 1;
2031 float isoTol = .03f;
2032 CameraMetadata settings;
2033
2034 for (const auto& name : cameraDeviceNames) {
2035 CameraMetadata meta;
2036 settings.metadata.clear();
2037 std::shared_ptr<ICameraDevice> unusedDevice;
2038 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2039 &unusedDevice /*out*/);
2040 camera_metadata_t* staticMetaBuffer =
2041 clone_camera_metadata(reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
2042 ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta(
2043 staticMetaBuffer);
2044
2045 camera_metadata_entry_t hwLevel = staticMeta.find(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL);
2046 ASSERT_TRUE(0 < hwLevel.count);
2047 if (ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED == hwLevel.data.u8[0] ||
2048 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL == hwLevel.data.u8[0]) {
2049 // Limited/External devices can skip this test
2050 ndk::ScopedAStatus ret = mSession->close();
2051 mSession = nullptr;
2052 ASSERT_TRUE(ret.isOk());
2053 continue;
2054 }
2055
2056 camera_metadata_entry_t isoRange = staticMeta.find(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE);
2057 ASSERT_EQ(isoRange.count, 2u);
2058
2059 ndk::ScopedAStatus ret = mSession->close();
2060 mSession = nullptr;
2061 ASSERT_TRUE(ret.isOk());
2062
2063 bool supportsPartialResults = false;
2064 bool useHalBufManager = false;
2065 int32_t partialResultCount = 0;
2066 Stream previewStream;
2067 std::vector<HalStream> halStreams;
2068 std::shared_ptr<DeviceCb> cb;
2069 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2070 &previewStream /*out*/, &halStreams /*out*/,
2071 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2072 &useHalBufManager /*out*/, &cb /*out*/);
2073
2074 ::aidl::android::hardware::common::fmq::MQDescriptor<
2075 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2076 descriptor;
2077 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2078 std::shared_ptr<ResultMetadataQueue> resultQueue =
2079 std::make_shared<ResultMetadataQueue>(descriptor);
2080 ASSERT_TRUE(resultQueueRet.isOk());
2081 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2082 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
2083 resultQueue = nullptr;
2084 // Don't use the queue onwards.
2085 }
2086
2087 ret = mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &settings);
2088 ASSERT_TRUE(ret.isOk());
2089
2090 ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
2091 std::vector<CaptureRequest> requests(kBurstFrameCount);
2092 std::vector<buffer_handle_t> buffers(kBurstFrameCount);
2093 std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount);
2094 std::vector<int32_t> isoValues(kBurstFrameCount);
2095 std::vector<CameraMetadata> requestSettings(kBurstFrameCount);
2096
2097 for (int32_t i = 0; i < kBurstFrameCount; i++) {
2098 std::unique_lock<std::mutex> l(mLock);
2099 CaptureRequest& request = requests[i];
2100 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2101 outputBuffers.resize(1);
2102 StreamBuffer& outputBuffer = outputBuffers[0];
2103
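            // Alternate between the minimum and maximum advertised sensitivity so the
            // result metadata check further down can detect requests that were not applied.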
2104 isoValues[i] = ((i % 2) == 0) ? isoRange.data.i32[0] : isoRange.data.i32[1];
2105 if (useHalBufManager) {
2106 outputBuffer = {halStreams[0].id, 0,
2107 NativeHandle(), BufferStatus::OK,
2108 NativeHandle(), NativeHandle()};
2109 } else {
2110 allocateGraphicBuffer(previewStream.width, previewStream.height,
2111 android_convertGralloc1To0Usage(
2112 static_cast<uint64_t>(halStreams[0].producerUsage),
2113 static_cast<uint64_t>(halStreams[0].consumerUsage)),
2114 halStreams[0].overrideFormat, &buffers[i]);
2115 outputBuffer = {halStreams[0].id, bufferId + i, ::android::makeToAidl(buffers[i]),
2116 BufferStatus::OK, NativeHandle(), NativeHandle()};
2117 }
2118
2119 requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));
2120
2121 // Disable all 3A routines
2122 uint8_t mode = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF);
2123 ASSERT_EQ(::android::OK, requestMeta.update(ANDROID_CONTROL_MODE, &mode, 1));
2124 ASSERT_EQ(::android::OK,
2125 requestMeta.update(ANDROID_SENSOR_SENSITIVITY, &isoValues[i], 1));
2126 camera_metadata_t* metaBuffer = requestMeta.release();
2127 uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2128 requestSettings[i].metadata = std::vector(
2129 rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2130 overrideRotateAndCrop(&(requestSettings[i]));
2131
2132 request.frameNumber = frameNumber + i;
2133 request.fmqSettingsSize = 0;
2134 request.settings = requestSettings[i];
2135 request.inputBuffer = {
2136 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2137
2138 inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2139 partialResultCount, resultQueue);
2140 mInflightMap[frameNumber + i] = inflightReqs[i];
2141 }
2142
2143 int32_t numRequestProcessed = 0;
2144 std::vector<BufferCache> cachesToRemove;
2145
2146 ndk::ScopedAStatus returnStatus =
2147 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2148 ASSERT_TRUE(returnStatus.isOk());
2149 ASSERT_EQ(numRequestProcessed, kBurstFrameCount);
2150
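        // Wait for every burst result and confirm the reported sensitivity matches the
        // requested value within isoTol (3%).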
2151 for (size_t i = 0; i < kBurstFrameCount; i++) {
2152 std::unique_lock<std::mutex> l(mLock);
2153 while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
2154 (!inflightReqs[i]->haveResultMetadata))) {
2155 auto timeout = std::chrono::system_clock::now() +
2156 std::chrono::seconds(kStreamBufferTimeoutSec);
2157 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2158 }
2159
2160 ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
2161 ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
2162 ASSERT_EQ(previewStream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
2163 ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
2164 ASSERT_TRUE(inflightReqs[i]->collectedResult.exists(ANDROID_SENSOR_SENSITIVITY));
2165 camera_metadata_entry_t isoResult =
2166 inflightReqs[i]->collectedResult.find(ANDROID_SENSOR_SENSITIVITY);
2167 ASSERT_TRUE(std::abs(isoResult.data.i32[0] - isoValues[i]) <=
2168 std::round(isoValues[i] * isoTol));
2169 }
2170
2171 if (useHalBufManager) {
2172 verifyBuffersReturned(mSession, previewStream.id, cb);
2173 }
2174 ret = mSession->close();
2175 mSession = nullptr;
2176 ASSERT_TRUE(ret.isOk());
2177 }
2178}
2179
2180// Test whether an incorrect capture request with missing settings will
2181// be reported correctly.
2182TEST_P(CameraAidlTest, processCaptureRequestInvalidSinglePreview) {
2183 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2184 std::vector<AvailableStream> outputPreviewStreams;
2185 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2186 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2187 int64_t bufferId = 1;
2188 int32_t frameNumber = 1;
2189 CameraMetadata settings;
2190
2191 for (const auto& name : cameraDeviceNames) {
2192 Stream previewStream;
2193 std::vector<HalStream> halStreams;
2194 std::shared_ptr<DeviceCb> cb;
2195 bool supportsPartialResults = false;
2196 bool useHalBufManager = false;
2197 int32_t partialResultCount = 0;
2198 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2199 &previewStream /*out*/, &halStreams /*out*/,
2200 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2201 &useHalBufManager /*out*/, &cb /*out*/);
2202 ASSERT_NE(mSession, nullptr);
2203 ASSERT_FALSE(halStreams.empty());
2204
2205 buffer_handle_t buffer_handle = nullptr;
2206
2207 if (useHalBufManager) {
2208 bufferId = 0;
2209 } else {
2210 allocateGraphicBuffer(previewStream.width, previewStream.height,
2211 android_convertGralloc1To0Usage(
2212 static_cast<uint64_t>(halStreams[0].producerUsage),
2213 static_cast<uint64_t>(halStreams[0].consumerUsage)),
2214 halStreams[0].overrideFormat, &buffer_handle);
2215 }
2216
2217 std::vector<CaptureRequest> requests(1);
2218 CaptureRequest& request = requests[0];
2219 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2220 outputBuffers.resize(1);
2221 StreamBuffer& outputBuffer = outputBuffers[0];
2222
2223 outputBuffer = {
2224 halStreams[0].id,
2225 bufferId,
2226 buffer_handle == nullptr ? NativeHandle() : ::android::makeToAidl(buffer_handle),
2227 BufferStatus::OK,
2228 NativeHandle(),
2229 NativeHandle()};
2230
2231 request.inputBuffer = {
2232 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2233 request.frameNumber = frameNumber;
2234 request.fmqSettingsSize = 0;
2235 request.settings = settings;
2236
2237         // Settings were not initialized, so this request should fail
2238 int32_t numRequestProcessed = 0;
2239 std::vector<BufferCache> cachesToRemove;
2240 ndk::ScopedAStatus ret =
2241 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2242 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
2243 ASSERT_EQ(numRequestProcessed, 0u);
2244
2245 ret = mSession->close();
2246 mSession = nullptr;
2247 ASSERT_TRUE(ret.isOk());
2248 }
2249}
2250
2251// Verify camera offline session behavior
2252TEST_P(CameraAidlTest, switchToOffline) {
2253 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2254 AvailableStream threshold = {kMaxStillWidth, kMaxStillHeight,
2255 static_cast<int32_t>(PixelFormat::BLOB)};
2256 int64_t bufferId = 1;
2257 int32_t frameNumber = 1;
2258 CameraMetadata settings;
2259
2260 for (const auto& name : cameraDeviceNames) {
2261 CameraMetadata meta;
2262 {
2263 std::shared_ptr<ICameraDevice> unusedDevice;
2264 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2265 &unusedDevice);
2266 camera_metadata_t* staticMetaBuffer = clone_camera_metadata(
2267 reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
2268 ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta(
2269 staticMetaBuffer);
2270
2271 if (isOfflineSessionSupported(staticMetaBuffer) != Status::OK) {
2272 ndk::ScopedAStatus ret = mSession->close();
2273 mSession = nullptr;
2274 ASSERT_TRUE(ret.isOk());
2275 continue;
2276 }
2277 ndk::ScopedAStatus ret = mSession->close();
2278 mSession = nullptr;
2279 ASSERT_TRUE(ret.isOk());
2280 }
2281
2282 bool supportsPartialResults = false;
2283 int32_t partialResultCount = 0;
2284 Stream stream;
2285 std::vector<HalStream> halStreams;
2286 std::shared_ptr<DeviceCb> cb;
2287 int32_t jpegBufferSize;
2288 bool useHalBufManager;
2289 configureOfflineStillStream(name, mProvider, &threshold, &mSession /*out*/, &stream /*out*/,
2290 &halStreams /*out*/, &supportsPartialResults /*out*/,
2291 &partialResultCount /*out*/, &cb /*out*/,
2292 &jpegBufferSize /*out*/, &useHalBufManager /*out*/);
2293
2294 auto ret = mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE,
2295 &settings);
2296 ASSERT_TRUE(ret.isOk());
2297
2298 ::aidl::android::hardware::common::fmq::MQDescriptor<
2299 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2300 descriptor;
2301
2302 ndk::ScopedAStatus resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2303 ASSERT_TRUE(resultQueueRet.isOk());
2304 std::shared_ptr<ResultMetadataQueue> resultQueue =
2305 std::make_shared<ResultMetadataQueue>(descriptor);
2306 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2307 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
2308 resultQueue = nullptr;
2309 // Don't use the queue onwards.
2310 }
2311
2312 ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
2313
2314 std::vector<buffer_handle_t> buffers(kBurstFrameCount);
2315 std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount);
2316 std::vector<CameraMetadata> requestSettings(kBurstFrameCount);
2317
2318 std::vector<CaptureRequest> requests(kBurstFrameCount);
2319
2320 HalStream halStream = halStreams[0];
2321 for (uint32_t i = 0; i < kBurstFrameCount; i++) {
2322 CaptureRequest& request = requests[i];
2323 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2324 outputBuffers.resize(1);
2325 StreamBuffer& outputBuffer = outputBuffers[0];
2326
2327 std::unique_lock<std::mutex> l(mLock);
2328 if (useHalBufManager) {
2329 outputBuffer = {halStream.id, 0, NativeHandle(), BufferStatus::OK, NativeHandle(),
2330 NativeHandle()};
2331 } else {
2332 // jpeg buffer (w,h) = (blobLen, 1)
2333 allocateGraphicBuffer(jpegBufferSize, /*height*/ 1,
2334 android_convertGralloc1To0Usage(
2335 static_cast<uint64_t>(halStream.producerUsage),
2336 static_cast<uint64_t>(halStream.consumerUsage)),
2337 halStream.overrideFormat, &buffers[i]);
2338 outputBuffer = {halStream.id, bufferId + i, ::android::makeToAidl(buffers[i]),
2339 BufferStatus::OK, NativeHandle(), NativeHandle()};
2340 }
2341
2342 requestMeta.clear();
2343 requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));
2344
2345 camera_metadata_t* metaBuffer = requestMeta.release();
2346 uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2347 requestSettings[i].metadata = std::vector(
2348 rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2349 overrideRotateAndCrop(&requestSettings[i]);
2350
2351 request.frameNumber = frameNumber + i;
2352 request.fmqSettingsSize = 0;
2353 request.settings = requestSettings[i];
2354 request.inputBuffer = {/*streamId*/ -1,
2355 /*bufferId*/ 0, NativeHandle(),
2356 BufferStatus::ERROR, NativeHandle(),
2357 NativeHandle()};
2358
2359 inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2360 partialResultCount, resultQueue);
2361 mInflightMap[frameNumber + i] = inflightReqs[i];
2362 }
2363
2364 int32_t numRequestProcessed = 0;
2365 std::vector<BufferCache> cachesToRemove;
2366
2367 ndk::ScopedAStatus returnStatus =
2368 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2369 ASSERT_TRUE(returnStatus.isOk());
2370 ASSERT_EQ(numRequestProcessed, kBurstFrameCount);
2371
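        // Hand the still-capture stream and its pending requests over to an offline
        // session; the original device session can then be closed.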
2372 std::vector<int32_t> offlineStreamIds = {halStream.id};
2373 CameraOfflineSessionInfo offlineSessionInfo;
2374 std::shared_ptr<ICameraOfflineSession> offlineSession;
2375 returnStatus =
2376 mSession->switchToOffline(offlineStreamIds, &offlineSessionInfo, &offlineSession);
2377
2378 if (!halStreams[0].supportOffline) {
2379 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
2380 returnStatus.getServiceSpecificError());
2381 ret = mSession->close();
2382 mSession = nullptr;
2383 ASSERT_TRUE(ret.isOk());
2384 continue;
2385 }
2386
2387 ASSERT_TRUE(returnStatus.isOk());
2388         // The HAL might not find any requests that qualify for offline mode.
2389 if (offlineSession == nullptr) {
2390 ret = mSession->close();
2391 mSession = nullptr;
2392 ASSERT_TRUE(ret.isOk());
2393 continue;
2394 }
2395
2396 ASSERT_EQ(offlineSessionInfo.offlineStreams.size(), 1u);
2397 ASSERT_EQ(offlineSessionInfo.offlineStreams[0].id, halStream.id);
2398 ASSERT_NE(offlineSessionInfo.offlineRequests.size(), 0u);
2399
2400 // close device session to make sure offline session does not rely on it
2401 ret = mSession->close();
2402 mSession = nullptr;
2403 ASSERT_TRUE(ret.isOk());
2404
2405 ::aidl::android::hardware::common::fmq::MQDescriptor<
2406 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2407 offlineResultDescriptor;
2408
2409 auto offlineResultQueueRet =
2410 offlineSession->getCaptureResultMetadataQueue(&offlineResultDescriptor);
2411 std::shared_ptr<ResultMetadataQueue> offlineResultQueue =
2412                 std::make_shared<ResultMetadataQueue>(offlineResultDescriptor);
2413 if (!offlineResultQueue->isValid() || offlineResultQueue->availableToWrite() <= 0) {
2414 ALOGE("%s: offline session returns empty result metadata fmq, not use it", __func__);
2415 offlineResultQueue = nullptr;
2416 // Don't use the queue onwards.
2417 }
2418 ASSERT_TRUE(offlineResultQueueRet.isOk());
2419
2420 updateInflightResultQueue(offlineResultQueue);
2421
2422 ret = offlineSession->setCallback(cb);
2423 ASSERT_TRUE(ret.isOk());
2424
2425 for (size_t i = 0; i < kBurstFrameCount; i++) {
2426 std::unique_lock<std::mutex> l(mLock);
2427 while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
2428 (!inflightReqs[i]->haveResultMetadata))) {
2429 auto timeout = std::chrono::system_clock::now() +
2430 std::chrono::seconds(kStreamBufferTimeoutSec);
2431 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2432 }
2433
2434 ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
2435 ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
2436 ASSERT_EQ(stream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
2437 ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
2438 }
2439
2440 ret = offlineSession->close();
2441 ASSERT_TRUE(ret.isOk());
2442 }
2443}
2444
2445// Check whether an invalid capture request with missing output buffers
2446// will be reported correctly.
2447TEST_P(CameraAidlTest, processCaptureRequestInvalidBuffer) {
2448 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2449 std::vector<AvailableStream> outputBlobStreams;
2450 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2451 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2452 int32_t frameNumber = 1;
2453 CameraMetadata settings;
2454
2455 for (const auto& name : cameraDeviceNames) {
2456 Stream previewStream;
2457 std::vector<HalStream> halStreams;
2458 std::shared_ptr<DeviceCb> cb;
2459 bool supportsPartialResults = false;
2460 bool useHalBufManager = false;
2461 int32_t partialResultCount = 0;
2462 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2463 &previewStream /*out*/, &halStreams /*out*/,
2464 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2465 &useHalBufManager /*out*/, &cb /*out*/);
2466
2467 RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
2468 ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings);
2469 ASSERT_TRUE(ret.isOk());
2470 overrideRotateAndCrop(&settings);
2471
2472 std::vector<CaptureRequest> requests(1);
2473 CaptureRequest& request = requests[0];
2474 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2475 outputBuffers.resize(1);
2476 // Empty output buffer
2477 outputBuffers[0] = {
2478 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2479
2480 request.inputBuffer = {
2481 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2482 request.frameNumber = frameNumber;
2483 request.fmqSettingsSize = 0;
2484 request.settings = settings;
2485
2486         // Output buffers are missing, so this request should fail
2487 int32_t numRequestProcessed = 0;
2488 std::vector<BufferCache> cachesToRemove;
2489 ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2490 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
2491 ASSERT_EQ(numRequestProcessed, 0u);
2492
2493 ret = mSession->close();
2494 mSession = nullptr;
2495 ASSERT_TRUE(ret.isOk());
2496 }
2497}
2498
2499// Generate, trigger and flush a preview request
2500TEST_P(CameraAidlTest, flushPreviewRequest) {
2501 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2502 std::vector<AvailableStream> outputPreviewStreams;
2503 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2504 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2505 int64_t bufferId = 1;
2506 int32_t frameNumber = 1;
2507 CameraMetadata settings;
2508
2509 for (const auto& name : cameraDeviceNames) {
2510 Stream previewStream;
2511 std::vector<HalStream> halStreams;
2512 std::shared_ptr<DeviceCb> cb;
2513 bool supportsPartialResults = false;
2514 bool useHalBufManager = false;
2515 int32_t partialResultCount = 0;
2516
2517 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2518 &previewStream /*out*/, &halStreams /*out*/,
2519 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2520 &useHalBufManager /*out*/, &cb /*out*/);
2521
2522 ASSERT_NE(mSession, nullptr);
2523 ASSERT_NE(cb, nullptr);
2524 ASSERT_FALSE(halStreams.empty());
2525
2526 ::aidl::android::hardware::common::fmq::MQDescriptor<
2527 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2528 descriptor;
2529
2530 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2531 std::shared_ptr<ResultMetadataQueue> resultQueue =
2532 std::make_shared<ResultMetadataQueue>(descriptor);
2533 ASSERT_TRUE(resultQueueRet.isOk());
2534 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2535 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
2536 resultQueue = nullptr;
2537 // Don't use the queue onwards.
2538 }
2539
2540 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
2541 1, false, supportsPartialResults, partialResultCount, resultQueue);
2542 RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
2543
2544 ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings);
2545 ASSERT_TRUE(ret.isOk());
2546 overrideRotateAndCrop(&settings);
2547
2548 buffer_handle_t buffer_handle;
2549 std::vector<CaptureRequest> requests(1);
2550 CaptureRequest& request = requests[0];
2551 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2552 outputBuffers.resize(1);
2553 StreamBuffer& outputBuffer = outputBuffers[0];
2554 if (useHalBufManager) {
2555 bufferId = 0;
2556 outputBuffer = {halStreams[0].id, bufferId, NativeHandle(),
2557 BufferStatus::OK, NativeHandle(), NativeHandle()};
2558 } else {
2559 allocateGraphicBuffer(previewStream.width, previewStream.height,
2560 android_convertGralloc1To0Usage(
2561 static_cast<uint64_t>(halStreams[0].producerUsage),
2562 static_cast<uint64_t>(halStreams[0].consumerUsage)),
2563 halStreams[0].overrideFormat, &buffer_handle);
2564 outputBuffer = {halStreams[0].id, bufferId, ::android::makeToAidl(buffer_handle),
2565 BufferStatus::OK, NativeHandle(), NativeHandle()};
2566 }
2567
2568 request.frameNumber = frameNumber;
2569 request.fmqSettingsSize = 0;
2570 request.settings = settings;
2571 request.inputBuffer = {
2572 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2573
2574 {
2575 std::unique_lock<std::mutex> l(mLock);
2576 mInflightMap.clear();
2577 mInflightMap[frameNumber] = inflightReq;
2578 }
2579
2580 int32_t numRequestProcessed = 0;
2581 std::vector<BufferCache> cachesToRemove;
2582 ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2583 ASSERT_TRUE(ret.isOk());
2584 ASSERT_EQ(numRequestProcessed, 1u);
2585
2586 // Flush before waiting for request to complete.
2587 ndk::ScopedAStatus returnStatus = mSession->flush();
2588 ASSERT_TRUE(returnStatus.isOk());
2589
2590 {
2591 std::unique_lock<std::mutex> l(mLock);
2592 while (!inflightReq->errorCodeValid &&
2593 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2594 auto timeout = std::chrono::system_clock::now() +
2595 std::chrono::seconds(kStreamBufferTimeoutSec);
2596 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2597 }
2598
2599 if (!inflightReq->errorCodeValid) {
2600 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2601 ASSERT_EQ(previewStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId);
2602 } else {
2603 switch (inflightReq->errorCode) {
2604 case ErrorCode::ERROR_REQUEST:
2605 case ErrorCode::ERROR_RESULT:
2606 case ErrorCode::ERROR_BUFFER:
2607 // Expected
2608 break;
2609 case ErrorCode::ERROR_DEVICE:
2610 default:
2611 FAIL() << "Unexpected error:"
2612 << static_cast<uint32_t>(inflightReq->errorCode);
2613 }
2614 }
2615 }
2616
2617 if (useHalBufManager) {
2618 verifyBuffersReturned(mSession, previewStream.id, cb);
2619 }
2620
2621 ret = mSession->close();
2622 mSession = nullptr;
2623 ASSERT_TRUE(ret.isOk());
2624 }
2625}
2626
2627// Verify that camera flushes correctly without any pending requests.
2628TEST_P(CameraAidlTest, flushEmpty) {
2629 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2630 std::vector<AvailableStream> outputPreviewStreams;
2631 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2632 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2633
2634 for (const auto& name : cameraDeviceNames) {
2635 Stream previewStream;
2636 std::vector<HalStream> halStreams;
2637 std::shared_ptr<DeviceCb> cb;
2638 bool supportsPartialResults = false;
2639 bool useHalBufManager = false;
2640
2641 int32_t partialResultCount = 0;
2642 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2643 &previewStream /*out*/, &halStreams /*out*/,
2644 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2645 &useHalBufManager /*out*/, &cb /*out*/);
2646
2647 ndk::ScopedAStatus returnStatus = mSession->flush();
2648 ASSERT_TRUE(returnStatus.isOk());
2649
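        // No requests are pending, so flush() should not trigger any result callbacks;
        // the wait below is expected to time out.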
2650 {
2651 std::unique_lock<std::mutex> l(mLock);
2652 auto timeout = std::chrono::system_clock::now() +
2653 std::chrono::milliseconds(kEmptyFlushTimeoutMSec);
2654 ASSERT_EQ(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2655 }
2656
2657 ndk::ScopedAStatus ret = mSession->close();
2658 mSession = nullptr;
2659 ASSERT_TRUE(ret.isOk());
2660 }
2661}
2662
2663// Test camera provider notify method
2664TEST_P(CameraAidlTest, providerDeviceStateNotification) {
2665 notifyDeviceState(ICameraProvider::DEVICE_STATE_BACK_COVERED);
2666 notifyDeviceState(ICameraProvider::DEVICE_STATE_NORMAL);
2667}
2668
2669// Verify that all supported stream formats and sizes can be configured
2670// successfully for injection camera.
2671TEST_P(CameraAidlTest, configureInjectionStreamsAvailableOutputs) {
2672 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2673 std::vector<AvailableStream> outputStreams;
2674
2675 for (const auto& name : cameraDeviceNames) {
2676 CameraMetadata metadata;
2677
2678 std::shared_ptr<ICameraInjectionSession> injectionSession;
2679 std::shared_ptr<ICameraDevice> unusedDevice;
2680 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2681 &unusedDevice /*out*/);
2682 if (injectionSession == nullptr) {
2683 continue;
2684 }
2685
2686 camera_metadata_t* staticMetaBuffer =
2687 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2688 CameraMetadata chars;
2689 chars.metadata = metadata.metadata;
2690
2691 outputStreams.clear();
2692 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams));
2693 ASSERT_NE(0u, outputStreams.size());
2694
2695 int32_t jpegBufferSize = 0;
2696 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize));
2697 ASSERT_NE(0u, jpegBufferSize);
2698
2699 int32_t streamId = 0;
2700 int32_t streamConfigCounter = 0;
2701 for (auto& it : outputStreams) {
2702 Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format));
2703 Stream stream = {streamId,
2704 StreamType::OUTPUT,
2705 it.width,
2706 it.height,
2707 static_cast<PixelFormat>(it.format),
2708 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2709 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2710 dataspace,
2711 StreamRotation::ROTATION_0,
2712 std::string(),
2713 jpegBufferSize,
2714 0,
2715                              {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2716 RequestAvailableDynamicRangeProfilesMap::
2717 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2718
2719 std::vector<Stream> streams = {stream};
2720 StreamConfiguration config;
2721 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2722 jpegBufferSize);
2723
2724 config.streamConfigCounter = streamConfigCounter++;
2725 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
2726 ASSERT_TRUE(s.isOk());
2727 streamId++;
2728 }
2729
2730 std::shared_ptr<ICameraDeviceSession> session;
2731 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2732 ASSERT_TRUE(ret.isOk());
2733 ASSERT_NE(session, nullptr);
2734 ret = session->close();
2735 ASSERT_TRUE(ret.isOk());
2736 }
2737}
2738
2739// Check for correct handling of invalid/incorrect configuration parameters for injection camera.
TEST_P(CameraAidlTest, configureInjectionStreamsInvalidOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputStreams;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata metadata;
        std::shared_ptr<ICameraInjectionSession> injectionSession;
        std::shared_ptr<ICameraDevice> unusedDevice;
        openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
                                  &unusedDevice);
        if (injectionSession == nullptr) {
            continue;
        }

        camera_metadata_t* staticMetaBuffer =
                reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
        std::shared_ptr<ICameraDeviceSession> session;
        ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(session, nullptr);

        CameraMetadata chars;
        chars.metadata = metadata.metadata;

        outputStreams.clear();
        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams));
        ASSERT_NE(0u, outputStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

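        // A stream with a 0x0 resolution is invalid; expect ILLEGAL_ARGUMENT or INTERNAL_ERROR.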
        int32_t streamId = 0;
        Stream stream = {streamId++,
                         StreamType::OUTPUT,
                         0,
                         0,
                         static_cast<PixelFormat>(outputStreams[0].format),
                         static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                         Dataspace::UNKNOWN,
                         StreamRotation::ROTATION_0,
                         std::string(),
                         jpegBufferSize,
                         0,
                         {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                         RequestAvailableDynamicRangeProfilesMap::
                                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

        int32_t streamConfigCounter = 0;
        std::vector<Stream> streams = {stream};
        StreamConfiguration config;
        createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                  jpegBufferSize);

        config.streamConfigCounter = streamConfigCounter++;
        ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
        ASSERT_TRUE(
                (static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) == s.getServiceSpecificError()) ||
                (static_cast<int32_t>(Status::INTERNAL_ERROR) == s.getServiceSpecificError()));

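        // An INT32_MAX x INT32_MAX resolution must be rejected with ILLEGAL_ARGUMENT.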
        stream = {streamId++,
                  StreamType::OUTPUT,
                  INT32_MAX,
                  INT32_MAX,
                  static_cast<PixelFormat>(outputStreams[0].format),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  jpegBufferSize,
                  0,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                  jpegBufferSize);
        config.streamConfigCounter = streamConfigCounter++;
        s = injectionSession->configureInjectionStreams(config, chars);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());

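        // For each supported size, an invalid pixel format and an invalid rotation must also be
        // rejected with ILLEGAL_ARGUMENT.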
        for (auto& it : outputStreams) {
            stream = {streamId++,
                      StreamType::OUTPUT,
                      it.width,
                      it.height,
                      static_cast<PixelFormat>(INT32_MAX),
                      static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                              GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                      Dataspace::UNKNOWN,
                      StreamRotation::ROTATION_0,
                      std::string(),
                      jpegBufferSize,
                      0,
                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                      RequestAvailableDynamicRangeProfilesMap::
                              ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
            streams[0] = stream;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);
            config.streamConfigCounter = streamConfigCounter++;
            s = injectionSession->configureInjectionStreams(config, chars);
            ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());

            stream = {streamId++,
                      StreamType::OUTPUT,
                      it.width,
                      it.height,
                      static_cast<PixelFormat>(it.format),
                      static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                              GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                      Dataspace::UNKNOWN,
                      static_cast<StreamRotation>(INT32_MAX),
                      std::string(),
                      jpegBufferSize,
                      0,
                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                      RequestAvailableDynamicRangeProfilesMap::
                              ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
            streams[0] = stream;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);
            config.streamConfigCounter = streamConfigCounter++;
            s = injectionSession->configureInjectionStreams(config, chars);
            ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
        }

        ret = session->close();
        ASSERT_TRUE(ret.isOk());
    }
}

// Check whether session parameters are supported for an injection camera. If the HAL supports
// them, try to configure a preview stream using them.
TEST_P(CameraAidlTest, configureInjectionStreamsWithSessionParameters) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputPreviewStreams;
    AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
                                        static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata metadata;
        std::shared_ptr<ICameraInjectionSession> injectionSession;
        std::shared_ptr<ICameraDevice> unusedDevice;
        openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
                                  &unusedDevice /*out*/);
        if (injectionSession == nullptr) {
            continue;
        }

        std::shared_ptr<ICameraDeviceSession> session;
        ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(session, nullptr);

        camera_metadata_t* staticMetaBuffer =
                reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
        CameraMetadata chars;
        chars.metadata = metadata.metadata;

        std::unordered_set<int32_t> availableSessionKeys;
        Status rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
                                     &availableSessionKeys);
        ASSERT_EQ(Status::OK, rc);
        if (availableSessionKeys.empty()) {
            ret = session->close();
            ASSERT_TRUE(ret.isOk());
            continue;
        }

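        // Derive session parameters from the default PREVIEW template, filtered to the keys the
        // HAL advertises as session keys.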
        android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings;
        android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams,
                modifiedSessionParams;
        constructFilteredSettings(session, availableSessionKeys, RequestTemplate::PREVIEW,
                                  &previewRequestSettings, &sessionParams);
        if (sessionParams.isEmpty()) {
            ret = session->close();
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        outputPreviewStreams.clear();

        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams,
                                                        &previewThreshold));
        ASSERT_NE(0u, outputPreviewStreams.size());

        Stream previewStream = {
                0,
                StreamType::OUTPUT,
                outputPreviewStreams[0].width,
                outputPreviewStreams[0].height,
                static_cast<PixelFormat>(outputPreviewStreams[0].format),
                static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
                        GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                Dataspace::UNKNOWN,
                StreamRotation::ROTATION_0,
                std::string(),
                0,
                -1,
                {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                RequestAvailableDynamicRangeProfilesMap::
                        ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        std::vector<Stream> streams = {previewStream};
        StreamConfiguration config;
        config.streams = streams;
        config.operationMode = StreamConfigurationMode::NORMAL_MODE;

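        // Pass the session parameters as raw metadata; ownership of the buffer is reclaimed
        // below via acquire().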
        modifiedSessionParams = sessionParams;
        camera_metadata_t* sessionParamsBuffer = sessionParams.release();
        uint8_t* rawSessionParamsBuffer = reinterpret_cast<uint8_t*>(sessionParamsBuffer);
        config.sessionParams.metadata =
                std::vector(rawSessionParamsBuffer,
                            rawSessionParamsBuffer + get_camera_metadata_size(sessionParamsBuffer));

        config.streamConfigCounter = 0;
        config.multiResolutionInputImage = false;

        ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
        ASSERT_TRUE(s.isOk());

        sessionParams.acquire(sessionParamsBuffer);
        free_camera_metadata(staticMetaBuffer);
        ret = session->close();
        ASSERT_TRUE(ret.isOk());
    }
}

// Verify that valid stream use cases can be configured successfully, and that invalid use cases
// fail stream configuration.
TEST_P(CameraAidlTest, configureStreamsUseCases) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);

        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
        // Check if the camera supports depth only
        if (isDepthOnly(staticMeta)) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        std::vector<AvailableStream> outputPreviewStreams;
        AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
                                            static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputPreviewStreams, &previewThreshold));
        ASSERT_NE(0u, outputPreviewStreams.size());

        // Combine valid and invalid stream use cases
        std::vector<int64_t> useCases(kMandatoryUseCases);
        useCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL + 1);

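        // Use cases advertised via ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES; fall back to
        // DEFAULT when the tag is absent.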
        std::vector<int64_t> supportedUseCases;
        camera_metadata_ro_entry entry;
        auto retcode = find_camera_metadata_ro_entry(
                staticMeta, ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES, &entry);
        if ((0 == retcode) && (entry.count > 0)) {
            supportedUseCases.insert(supportedUseCases.end(), entry.data.i64,
                                     entry.data.i64 + entry.count);
        } else {
            supportedUseCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
        }

        std::vector<Stream> streams(1);
        streams[0] = {0,
                      StreamType::OUTPUT,
                      outputPreviewStreams[0].width,
                      outputPreviewStreams[0].height,
                      static_cast<PixelFormat>(outputPreviewStreams[0].format),
                      static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
                              GRALLOC1_CONSUMER_USAGE_CPU_READ),
                      Dataspace::UNKNOWN,
                      StreamRotation::ROTATION_0,
                      std::string(),
                      0,
                      -1,
                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                      RequestAvailableDynamicRangeProfilesMap::
                              ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

        int32_t streamConfigCounter = 0;
        CameraMetadata req;
        StreamConfiguration config;
        RequestTemplate reqTemplate = RequestTemplate::STILL_CAPTURE;
        ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &req);
        ASSERT_TRUE(ret.isOk());
        config.sessionParams = req;

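        // For each use case, isStreamCombinationSupported and configureStreams must agree with
        // the advertised support.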
        for (int64_t useCase : useCases) {
            bool useCaseSupported = std::find(supportedUseCases.begin(), supportedUseCases.end(),
                                              useCase) != supportedUseCases.end();

            streams[0].useCase = static_cast<
                    aidl::android::hardware::camera::metadata::ScalerAvailableStreamUseCases>(
                    useCase);
            config.streams = streams;
            config.operationMode = StreamConfigurationMode::NORMAL_MODE;
            config.streamConfigCounter = streamConfigCounter;
            config.multiResolutionInputImage = false;

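            // Skip the cross-check when the device reports OPERATION_NOT_SUPPORTED for
            // isStreamCombinationSupported.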
            bool combSupported;
            ret = cameraDevice->isStreamCombinationSupported(config, &combSupported);
            if (static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED) ==
                ret.getServiceSpecificError()) {
                continue;
            }

            ASSERT_TRUE(ret.isOk());
            ASSERT_EQ(combSupported, useCaseSupported);

            std::vector<HalStream> halStreams;
            ret = mSession->configureStreams(config, &halStreams);
            ALOGI("configureStreams returns status: %d", ret.getServiceSpecificError());
            if (useCaseSupported) {
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(1u, halStreams.size());
            } else {
                ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
                          ret.getServiceSpecificError());
            }
        }
        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(CameraAidlTest);
INSTANTIATE_TEST_SUITE_P(
        PerInstance, CameraAidlTest,
        testing::ValuesIn(android::getAidlHalInstanceNames(ICameraProvider::descriptor)),
        android::hardware::PrintInstanceNameToString);