1/*
2 * Copyright (C) 2022 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include <aidl/Vintf.h>
18#include <aidl/android/hardware/camera/common/VendorTagSection.h>
19#include <aidl/android/hardware/camera/device/ICameraDevice.h>
20#include <aidlcommonsupport/NativeHandle.h>
21#include <camera_aidl_test.h>
22#include <cutils/properties.h>
23#include <device_cb.h>
24#include <empty_device_cb.h>
25#include <grallocusage/GrallocUsageConversion.h>
26#include <gtest/gtest.h>
27#include <hardware/gralloc.h>
28#include <hardware/gralloc1.h>
29#include <hidl/GtestPrinter.h>
30#include <hidl/HidlSupport.h>
31#include <torch_provider_cb.h>
32#include <list>
33
34using ::aidl::android::hardware::camera::common::CameraDeviceStatus;
35using ::aidl::android::hardware::camera::common::CameraResourceCost;
36using ::aidl::android::hardware::camera::common::TorchModeStatus;
37using ::aidl::android::hardware::camera::common::VendorTagSection;
38using ::aidl::android::hardware::camera::device::ICameraDevice;
39using ::aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap;
40using ::aidl::android::hardware::camera::metadata::SensorPixelMode;
41using ::aidl::android::hardware::camera::provider::CameraIdAndStreamCombination;
42using ::aidl::android::hardware::camera::provider::BnCameraProviderCallback;
43
44using ::ndk::ScopedAStatus;
45
46namespace {
47const int32_t kBurstFrameCount = 10;
48const uint32_t kMaxStillWidth = 2048;
49const uint32_t kMaxStillHeight = 1536;
50
51const int64_t kEmptyFlushTimeoutMSec = 200;
52
53const static std::vector<int64_t> kMandatoryUseCases = {
54 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
55 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW,
56 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE,
57 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD,
58 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL,
59 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL};
60} // namespace
61
62TEST_P(CameraAidlTest, getCameraIdList) {
63 std::vector<std::string> idList;
64 ScopedAStatus ret = mProvider->getCameraIdList(&idList);
65 ASSERT_TRUE(ret.isOk());
66
67 for (size_t i = 0; i < idList.size(); i++) {
68 ALOGI("Camera Id[%zu] is %s", i, idList[i].c_str());
69 }
70}
71
72// Test if ICameraProvider::getVendorTags returns Status::OK
73TEST_P(CameraAidlTest, getVendorTags) {
74 std::vector<VendorTagSection> vendorTags;
75 ScopedAStatus ret = mProvider->getVendorTags(&vendorTags);
76
77 ASSERT_TRUE(ret.isOk());
78 for (size_t i = 0; i < vendorTags.size(); i++) {
79 ALOGI("Vendor tag section %zu name %s", i, vendorTags[i].sectionName.c_str());
80 for (auto& tag : vendorTags[i].tags) {
81 ALOGI("Vendor tag id %u name %s type %d", tag.tagId, tag.tagName.c_str(),
82 (int)tag.tagType);
83 }
84 }
85}
86
87// Test if ICameraProvider::setCallback returns Status::OK
88TEST_P(CameraAidlTest, setCallback) {
89 struct ProviderCb : public BnCameraProviderCallback {
90 ScopedAStatus cameraDeviceStatusChange(const std::string& cameraDeviceName,
91 CameraDeviceStatus newStatus) override {
92 ALOGI("camera device status callback name %s, status %d", cameraDeviceName.c_str(),
93 (int)newStatus);
94 return ScopedAStatus::ok();
95 }
96 ScopedAStatus torchModeStatusChange(const std::string& cameraDeviceName,
97 TorchModeStatus newStatus) override {
98 ALOGI("Torch mode status callback name %s, status %d", cameraDeviceName.c_str(),
99 (int)newStatus);
100 return ScopedAStatus::ok();
101 }
102 ScopedAStatus physicalCameraDeviceStatusChange(const std::string& cameraDeviceName,
103 const std::string& physicalCameraDeviceName,
104 CameraDeviceStatus newStatus) override {
105 ALOGI("physical camera device status callback name %s, physical camera name %s,"
106 " status %d",
107 cameraDeviceName.c_str(), physicalCameraDeviceName.c_str(), (int)newStatus);
108 return ScopedAStatus::ok();
109 }
110 };
111
112 std::shared_ptr<ProviderCb> cb = ndk::SharedRefBase::make<ProviderCb>();
113 ScopedAStatus ret = mProvider->setCallback(cb);
114 ASSERT_TRUE(ret.isOk());
115 ret = mProvider->setCallback(nullptr);
116 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
117}
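// Editor's note: illustrative sketch only, not part of the VTS. A provider
// callback that records the latest status per camera, so a test could assert
// on received callbacks instead of only logging them. It relies on
// BnCameraProviderCallback and the status enums used above; std::map and
// std::mutex are assumed to be available through camera_aidl_test.h.
struct RecordingProviderCb : public BnCameraProviderCallback {
    ScopedAStatus cameraDeviceStatusChange(const std::string& cameraDeviceName,
                                           CameraDeviceStatus newStatus) override {
        std::lock_guard<std::mutex> l(mLock);
        mDeviceStatus[cameraDeviceName] = newStatus;
        return ScopedAStatus::ok();
    }
    ScopedAStatus torchModeStatusChange(const std::string& cameraDeviceName,
                                        TorchModeStatus newStatus) override {
        std::lock_guard<std::mutex> l(mLock);
        mTorchModeStatus[cameraDeviceName] = newStatus;
        return ScopedAStatus::ok();
    }
    ScopedAStatus physicalCameraDeviceStatusChange(const std::string&, const std::string&,
                                                   CameraDeviceStatus) override {
        return ScopedAStatus::ok();
    }

    std::mutex mLock;
    std::map<std::string, CameraDeviceStatus> mDeviceStatus;
    std::map<std::string, TorchModeStatus> mTorchModeStatus;
};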
118
119// Test if ICameraProvider::getCameraDeviceInterface returns Status::OK and non-null device
120TEST_P(CameraAidlTest, getCameraDeviceInterface) {
121 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
122
123 for (const auto& name : cameraDeviceNames) {
124 std::shared_ptr<ICameraDevice> cameraDevice;
125 ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &cameraDevice);
126 ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
127 ret.getServiceSpecificError());
128 ASSERT_TRUE(ret.isOk());
129 ASSERT_NE(cameraDevice, nullptr);
130 }
131}
132
133// Verify that the device resource cost can be retrieved and the values are
134// correct.
135TEST_P(CameraAidlTest, getResourceCost) {
136 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
137
138 for (const auto& deviceName : cameraDeviceNames) {
139 std::shared_ptr<ICameraDevice> cameraDevice;
140 ScopedAStatus ret = mProvider->getCameraDeviceInterface(deviceName, &cameraDevice);
141 ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
142 ret.getServiceSpecificError());
143 ASSERT_TRUE(ret.isOk());
144 ASSERT_NE(cameraDevice, nullptr);
145
146 CameraResourceCost resourceCost;
147 ret = cameraDevice->getResourceCost(&resourceCost);
148 ALOGI("getResourceCost returns: %d:%d", ret.getExceptionCode(),
149 ret.getServiceSpecificError());
150 ASSERT_TRUE(ret.isOk());
151
152 ALOGI(" Resource cost is %d", resourceCost.resourceCost);
153 ASSERT_LE(resourceCost.resourceCost, 100u);
154
155 for (const auto& name : resourceCost.conflictingDevices) {
156 ALOGI(" Conflicting device: %s", name.c_str());
157 }
158 }
159}
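// Editor's note: illustrative sketch of how resourceCost is meant to be used.
// Each device advertises a cost out of a shared budget of 100; a set of
// devices is only expected to be openable concurrently if the summed cost
// stays within that budget (conflictingDevices lists IDs that can never be
// combined regardless of cost). The helper below is hypothetical.
[[maybe_unused]] static bool fitsResourceBudget(const std::vector<CameraResourceCost>& costs) {
    int32_t total = 0;
    for (const auto& cost : costs) {
        total += cost.resourceCost;
    }
    return total <= 100;
}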
160
161TEST_P(CameraAidlTest, systemCameraTest) {
162 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
163 std::map<std::string, std::vector<SystemCameraKind>> hiddenPhysicalIdToLogicalMap;
164 for (const auto& name : cameraDeviceNames) {
165 std::shared_ptr<ICameraDevice> device;
166 ALOGI("systemCameraTest: Testing camera device %s", name.c_str());
167 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
168 ASSERT_TRUE(ret.isOk());
169 ASSERT_NE(device, nullptr);
170
171 CameraMetadata cameraCharacteristics;
172 ret = device->getCameraCharacteristics(&cameraCharacteristics);
173 ASSERT_TRUE(ret.isOk());
174
175 const camera_metadata_t* staticMeta =
176 reinterpret_cast<const camera_metadata_t*>(cameraCharacteristics.metadata.data());
177 Status rc = isLogicalMultiCamera(staticMeta);
178 if (rc == Status::OPERATION_NOT_SUPPORTED) {
179 return;
180 }
181
182 ASSERT_EQ(rc, Status::OK);
183 std::unordered_set<std::string> physicalIds;
184 ASSERT_EQ(getPhysicalCameraIds(staticMeta, &physicalIds), Status::OK);
185 SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
186 Status retStatus = getSystemCameraKind(staticMeta, &systemCameraKind);
187 ASSERT_EQ(retStatus, Status::OK);
188
189 for (auto physicalId : physicalIds) {
190 bool isPublicId = false;
191 for (auto& deviceName : cameraDeviceNames) {
192 std::string publicVersion, publicId;
193 ASSERT_TRUE(matchDeviceName(deviceName, mProviderType, &publicVersion, &publicId));
194 if (physicalId == publicId) {
195 isPublicId = true;
196 break;
197 }
198 }
199
200 // For hidden physical cameras, collect their associated logical cameras
201 // and store the system camera kind.
202 if (!isPublicId) {
203 auto it = hiddenPhysicalIdToLogicalMap.find(physicalId);
204 if (it == hiddenPhysicalIdToLogicalMap.end()) {
205 hiddenPhysicalIdToLogicalMap.insert(std::make_pair(
206 physicalId, std::vector<SystemCameraKind>({systemCameraKind})));
207 } else {
208 it->second.push_back(systemCameraKind);
209 }
210 }
211 }
212 }
213
214 // Check that the system camera kind of the logical cameras associated with
215 // each hidden physical camera is the same.
216 for (const auto& it : hiddenPhysicalIdToLogicalMap) {
217 SystemCameraKind neededSystemCameraKind = it.second.front();
218 for (auto foundSystemCamera : it.second) {
219 ASSERT_EQ(neededSystemCameraKind, foundSystemCamera);
220 }
221 }
222}
223
224// Verify that the static camera characteristics can be retrieved
225// successfully.
226TEST_P(CameraAidlTest, getCameraCharacteristics) {
227 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
228
229 for (const auto& name : cameraDeviceNames) {
230 std::shared_ptr<ICameraDevice> device;
231 ALOGI("getCameraCharacteristics: Testing camera device %s", name.c_str());
232 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
233 ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
234 ret.getServiceSpecificError());
235 ASSERT_TRUE(ret.isOk());
236 ASSERT_NE(device, nullptr);
237
238 CameraMetadata chars;
239 ret = device->getCameraCharacteristics(&chars);
240 ASSERT_TRUE(ret.isOk());
241 verifyCameraCharacteristics(chars);
242 verifyMonochromeCharacteristics(chars);
243 verifyRecommendedConfigs(chars);
244 verifyLogicalOrUltraHighResCameraMetadata(name, device, chars, cameraDeviceNames);
245
246 ASSERT_TRUE(ret.isOk());
247
248 // getPhysicalCameraCharacteristics will fail for publicly
249 // advertised camera IDs.
250 std::string version, cameraId;
251 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &cameraId));
252 CameraMetadata devChars;
253 ret = device->getPhysicalCameraCharacteristics(cameraId, &devChars);
254 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
255 ASSERT_EQ(0, devChars.metadata.size());
256 }
257}
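// Editor's note: illustrative sketch showing how a single entry can be read
// out of the returned characteristics with the same camera_metadata C API the
// tests below use. ANDROID_SENSOR_ORIENTATION is just an example tag.
[[maybe_unused]] static bool readSensorOrientation(const CameraMetadata& chars,
                                                   int32_t* orientation /*out*/) {
    const camera_metadata_t* staticMeta =
            reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());
    camera_metadata_ro_entry entry;
    int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_SENSOR_ORIENTATION, &entry);
    if (rc != 0 || entry.count == 0) {
        return false;
    }
    *orientation = entry.data.i32[0];
    return true;
}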
258
259// Verify that the torch strength level can be set and retrieved successfully.
260TEST_P(CameraAidlTest, turnOnTorchWithStrengthLevel) {
261 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
262
263 std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this);
264 ndk::ScopedAStatus ret = mProvider->setCallback(cb);
265 ASSERT_TRUE(ret.isOk());
266
267 for (const auto& name : cameraDeviceNames) {
268 int32_t defaultLevel;
269 std::shared_ptr<ICameraDevice> device;
270 ALOGI("%s: Testing camera device %s", __FUNCTION__, name.c_str());
271
272 ret = mProvider->getCameraDeviceInterface(name, &device);
273 ASSERT_TRUE(ret.isOk());
274 ASSERT_NE(device, nullptr);
275
276 CameraMetadata chars;
277 ret = device->getCameraCharacteristics(&chars);
278 ASSERT_TRUE(ret.isOk());
279
280 const camera_metadata_t* staticMeta =
281 reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());
282 bool torchStrengthControlSupported = isTorchStrengthControlSupported(staticMeta);
283 camera_metadata_ro_entry entry;
284 int rc = find_camera_metadata_ro_entry(staticMeta,
285 ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL, &entry);
286 if (torchStrengthControlSupported) {
287 ASSERT_EQ(rc, 0);
288 ASSERT_GT(entry.count, 0);
289 defaultLevel = *entry.data.i32;
290 ALOGI("Default level is:%d", defaultLevel);
291 }
292
293 mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
294 ret = device->turnOnTorchWithStrengthLevel(2);
295 ALOGI("turnOnTorchWithStrengthLevel returns status: %d", ret.getServiceSpecificError());
296 // OPERATION_NOT_SUPPORTED check
297 if (!torchStrengthControlSupported) {
298 ALOGI("Torch strength control not supported.");
299 ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED),
300 ret.getServiceSpecificError());
301 } else {
302 {
303 ASSERT_TRUE(ret.isOk());
304 std::unique_lock<std::mutex> l(mTorchLock);
305 while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
306 auto timeout = std::chrono::system_clock::now() +
307 std::chrono::seconds(kTorchTimeoutSec);
308 ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
309 }
310 ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
311 mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
312 }
313 ALOGI("getTorchStrengthLevel: Testing");
314 int32_t strengthLevel;
315 ret = device->getTorchStrengthLevel(&strengthLevel);
316 ASSERT_TRUE(ret.isOk());
317 ALOGI("Torch strength level is : %d", strengthLevel);
318 ASSERT_EQ(strengthLevel, 2);
319
320 // Turn OFF the torch and verify torch strength level is reset to default level.
321 ALOGI("Testing torch strength level reset after turning the torch OFF.");
322 ret = device->setTorchMode(false);
323 ASSERT_TRUE(ret.isOk());
324 {
325 std::unique_lock<std::mutex> l(mTorchLock);
326 while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
327 auto timeout = std::chrono::system_clock::now() +
328 std::chrono::seconds(kTorchTimeoutSec);
329 ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
330 }
331 ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
332 }
333
334 ret = device->getTorchStrengthLevel(&strengthLevel);
335 ASSERT_TRUE(ret.isOk());
336 ALOGI("Torch strength level after turning OFF torch is : %d", strengthLevel);
337 ASSERT_EQ(strengthLevel, defaultLevel);
338 }
339 }
340}
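// Editor's note: illustrative sketch that factors out the condition-variable
// wait repeated in the two torch tests. The arguments mirror the
// mTorchLock/mTorchCond/mTorchStatus members of CameraAidlTest; the helper is
// hypothetical and assumes <condition_variable> is available via the test
// header.
[[maybe_unused]] static TorchModeStatus waitForTorchState(std::mutex& lock,
                                                          std::condition_variable& cond,
                                                          TorchModeStatus& status,
                                                          int64_t timeoutSec) {
    std::unique_lock<std::mutex> l(lock);
    auto deadline = std::chrono::system_clock::now() + std::chrono::seconds(timeoutSec);
    while (TorchModeStatus::NOT_AVAILABLE == status) {
        if (std::cv_status::timeout == cond.wait_until(l, deadline)) {
            break;  // Timed out; return the stale status and let the caller assert on it.
        }
    }
    return status;
}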
341
342// If torch is supported, verify that it can be enabled.
343// Check for the corresponding torch status callbacks as well.
344TEST_P(CameraAidlTest, setTorchMode) {
345 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
346
347 std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this);
348 ndk::ScopedAStatus ret = mProvider->setCallback(cb);
349 ALOGI("setCallback returns status: %d", ret.getServiceSpecificError());
350 ASSERT_TRUE(ret.isOk());
351 ASSERT_NE(cb, nullptr);
352
353 for (const auto& name : cameraDeviceNames) {
354 std::shared_ptr<ICameraDevice> device;
355 ALOGI("setTorchMode: Testing camera device %s", name.c_str());
356 ret = mProvider->getCameraDeviceInterface(name, &device);
357 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
358 ret.getServiceSpecificError());
359 ASSERT_TRUE(ret.isOk());
360 ASSERT_NE(device, nullptr);
361
362 CameraMetadata metadata;
363 ret = device->getCameraCharacteristics(&metadata);
364 ALOGI("getCameraCharacteristics returns status:%d", ret.getServiceSpecificError());
365 ASSERT_TRUE(ret.isOk());
366 camera_metadata_t* staticMeta =
367 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
368 bool torchSupported = isTorchSupported(staticMeta);
369
370 mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
371 ret = device->setTorchMode(true);
372 ALOGI("setTorchMode returns status: %d", ret.getServiceSpecificError());
373 if (!torchSupported) {
374 ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED),
375 ret.getServiceSpecificError());
376 } else {
377 ASSERT_TRUE(ret.isOk());
378 {
379 std::unique_lock<std::mutex> l(mTorchLock);
380 while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
381 auto timeout = std::chrono::system_clock::now() +
382 std::chrono::seconds(kTorchTimeoutSec);
383 ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
384 }
385 ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
386 mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
387 }
388
389 ret = device->setTorchMode(false);
390 ASSERT_TRUE(ret.isOk());
391 {
392 std::unique_lock<std::mutex> l(mTorchLock);
393 while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
394 auto timeout = std::chrono::system_clock::now() +
395 std::chrono::seconds(kTorchTimeoutSec);
396 ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
397 }
398 ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
399 }
400 }
401 }
402}
403
404// Check dump functionality.
405TEST_P(CameraAidlTest, dump) {
406 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
407
408 for (const auto& name : cameraDeviceNames) {
409 std::shared_ptr<ICameraDevice> device;
410 ALOGI("dump: Testing camera device %s", name.c_str());
411
412 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
413 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
414 ret.getServiceSpecificError());
415 ASSERT_TRUE(ret.isOk());
416 ASSERT_NE(device, nullptr);
417
418 int raw_handle = open(kDumpOutput, O_RDWR);
419 ASSERT_GE(raw_handle, 0);
420
421 auto retStatus = device->dump(raw_handle, nullptr, 0);
422 ASSERT_EQ(retStatus, ::android::OK);
423 close(raw_handle);
424 }
425}
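// Editor's note: hypothetical variation, not used by the VTS. Instead of
// writing to kDumpOutput, the dump output could be captured through a pipe for
// inspection. This assumes <unistd.h> is already pulled in by the headers
// above and that the dump fits within the pipe buffer, so treat it as a
// sketch rather than a drop-in helper.
[[maybe_unused]] static std::string dumpToString(const std::shared_ptr<ICameraDevice>& device) {
    int fds[2];
    if (pipe(fds) != 0) {
        return std::string();
    }
    (void)device->dump(fds[1], nullptr, 0);
    close(fds[1]);

    std::string out;
    char buf[1024];
    ssize_t n;
    while ((n = read(fds[0], buf, sizeof(buf))) > 0) {
        out.append(buf, static_cast<size_t>(n));
    }
    close(fds[0]);
    return out;
}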
426
427// Open, dump, then close
428TEST_P(CameraAidlTest, openClose) {
429 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
430
431 for (const auto& name : cameraDeviceNames) {
432 std::shared_ptr<ICameraDevice> device;
433 ALOGI("openClose: Testing camera device %s", name.c_str());
434 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
435 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
436 ret.getServiceSpecificError());
437 ASSERT_TRUE(ret.isOk());
438 ASSERT_NE(device, nullptr);
439
440 std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
441
442 ret = device->open(cb, &mSession);
443 ASSERT_TRUE(ret.isOk());
444 ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
445 ret.getServiceSpecificError());
446 ASSERT_NE(mSession, nullptr);
447 int raw_handle = open(kDumpOutput, O_RDWR);
448 ASSERT_GE(raw_handle, 0);
449
450 auto retStatus = device->dump(raw_handle, nullptr, 0);
451 ASSERT_EQ(retStatus, ::android::OK);
452 close(raw_handle);
453
454 ret = mSession->close();
455 mSession = nullptr;
456 ASSERT_TRUE(ret.isOk());
457 // TODO: test all session API calls return INTERNAL_ERROR after close
458 // TODO: keep a wp copy here and verify session cannot be promoted out of this scope
459 }
460}
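// Editor's note: illustrative sketch for the TODO above. Keeping a weak
// reference to the session lets a test verify that no strong references to
// the local proxy survive close(); lock() returns nullptr once the last
// shared_ptr owner is gone. Hypothetical helper, not wired into the test.
[[maybe_unused]] static bool sessionReleased(const std::weak_ptr<ICameraDeviceSession>& session) {
    return session.lock() == nullptr;
}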
461
462// Check whether all common default request settings can be successfully
463// constructed.
464TEST_P(CameraAidlTest, constructDefaultRequestSettings) {
465 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
466
467 for (const auto& name : cameraDeviceNames) {
468 std::shared_ptr<ICameraDevice> device;
469 ALOGI("constructDefaultRequestSettings: Testing camera device %s", name.c_str());
470 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
471 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
472 ret.getServiceSpecificError());
473 ASSERT_TRUE(ret.isOk());
474 ASSERT_NE(device, nullptr);
475
476 std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
477 ret = device->open(cb, &mSession);
478 ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
479 ret.getServiceSpecificError());
480 ASSERT_TRUE(ret.isOk());
481 ASSERT_NE(mSession, nullptr);
482
483 for (int32_t t = (int32_t)RequestTemplate::PREVIEW; t <= (int32_t)RequestTemplate::MANUAL;
484 t++) {
485 RequestTemplate reqTemplate = (RequestTemplate)t;
486 CameraMetadata rawMetadata;
487 ret = mSession->constructDefaultRequestSettings(reqTemplate, &rawMetadata);
488 ALOGI("constructDefaultRequestSettings returns status:%d:%d", ret.getExceptionCode(),
489 ret.getServiceSpecificError());
490
491 if (reqTemplate == RequestTemplate::ZERO_SHUTTER_LAG ||
492 reqTemplate == RequestTemplate::MANUAL) {
493 // optional templates
494 ASSERT_TRUE(ret.isOk() || static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
495 ret.getServiceSpecificError());
496 } else {
497 ASSERT_TRUE(ret.isOk());
498 }
499
500 if (ret.isOk()) {
501 const camera_metadata_t* metadata = (camera_metadata_t*)rawMetadata.metadata.data();
502 size_t expectedSize = rawMetadata.metadata.size();
503 int result = validate_camera_metadata_structure(metadata, &expectedSize);
504 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
505 verifyRequestTemplate(metadata, reqTemplate);
506 } else {
507 ASSERT_EQ(0u, rawMetadata.metadata.size());
508 }
509 }
510 ret = mSession->close();
511 mSession = nullptr;
512 ASSERT_TRUE(ret.isOk());
513 }
514}
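// Editor's note: illustrative sketch that walks every entry of a constructed
// default request with the camera_metadata C API used for validation above.
// Purely a debugging aid, not required by the test.
[[maybe_unused]] static void logRequestSettings(const camera_metadata_t* metadata) {
    size_t entryCount = get_camera_metadata_entry_count(metadata);
    for (size_t i = 0; i < entryCount; i++) {
        camera_metadata_ro_entry_t entry;
        if (get_camera_metadata_ro_entry(metadata, i, &entry) == 0) {
            ALOGI("Request tag 0x%x: %zu values of type %d", entry.tag, entry.count, entry.type);
        }
    }
}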
515
516// Verify that all supported stream formats and sizes can be configured
517// successfully.
518TEST_P(CameraAidlTest, configureStreamsAvailableOutputs) {
519 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
520 std::vector<AvailableStream> outputStreams;
521
522 for (const auto& name : cameraDeviceNames) {
523 CameraMetadata meta;
524 std::shared_ptr<ICameraDevice> device;
525
526 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/, &device /*out*/);
527
528 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
529 outputStreams.clear();
530 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
531 ASSERT_NE(0u, outputStreams.size());
532
533 int32_t jpegBufferSize = 0;
534 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
535 ASSERT_NE(0u, jpegBufferSize);
536
537 int32_t streamId = 0;
538 int32_t streamConfigCounter = 0;
539 for (auto& it : outputStreams) {
540 Stream stream;
541 Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format));
542 stream.id = streamId;
543 stream.streamType = StreamType::OUTPUT;
544 stream.width = it.width;
545 stream.height = it.height;
546 stream.format = static_cast<PixelFormat>(it.format);
547 stream.dataSpace = dataspace;
548 stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
549 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
550 stream.rotation = StreamRotation::ROTATION_0;
551 stream.dynamicRangeProfile = RequestAvailableDynamicRangeProfilesMap::
552 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
553
554 std::vector<Stream> streams = {stream};
555 StreamConfiguration config;
556 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
557 jpegBufferSize);
558
559 bool expectStreamCombQuery = (isLogicalMultiCamera(staticMeta) == Status::OK);
560 verifyStreamCombination(device, config, /*expectedStatus*/ true, expectStreamCombQuery);
561
562 config.streamConfigCounter = streamConfigCounter++;
563 std::vector<HalStream> halConfigs;
564 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
565 ASSERT_TRUE(ret.isOk());
566 ASSERT_EQ(halConfigs.size(), 1);
567 ASSERT_EQ(halConfigs[0].id, streamId);
568
569 streamId++;
570 }
571
572 ndk::ScopedAStatus ret = mSession->close();
573 mSession = nullptr;
574 ASSERT_TRUE(ret.isOk());
575 }
576}
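// Editor's note: illustrative sketch of the Stream initialization that the
// configuration tests below repeat many times, using the same defaults
// (OUTPUT, HWCOMPOSER usage, ROTATION_0, DEFAULT sensor pixel mode, STANDARD
// dynamic range). The bufferSize/groupId field names are assumed to match the
// aggregate order used elsewhere in this file; the helper itself is
// hypothetical and the tests keep spelling the fields out.
[[maybe_unused]] static Stream makeDefaultOutputStream(int32_t id, const AvailableStream& src,
                                                       Dataspace dataSpace, int32_t bufferSize) {
    Stream stream;
    stream.id = id;
    stream.streamType = StreamType::OUTPUT;
    stream.width = src.width;
    stream.height = src.height;
    stream.format = static_cast<PixelFormat>(src.format);
    stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
            GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
    stream.dataSpace = dataSpace;
    stream.rotation = StreamRotation::ROTATION_0;
    stream.bufferSize = bufferSize;
    stream.groupId = -1;
    stream.sensorPixelModesUsed = {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT};
    stream.dynamicRangeProfile = RequestAvailableDynamicRangeProfilesMap::
            ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
    return stream;
}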
577
578// Verify that mandatory concurrent streams and outputs are supported.
579TEST_P(CameraAidlTest, configureConcurrentStreamsAvailableOutputs) {
580 struct CameraTestInfo {
581 CameraMetadata staticMeta;
582 std::shared_ptr<ICameraDeviceSession> session;
583 std::shared_ptr<ICameraDevice> cameraDevice;
584 StreamConfiguration config;
585 };
586
587 std::map<std::string, std::string> idToNameMap = getCameraDeviceIdToNameMap(mProvider);
588 std::vector<ConcurrentCameraIdCombination> concurrentDeviceCombinations =
589 getConcurrentDeviceCombinations(mProvider);
590 std::vector<AvailableStream> outputStreams;
591 for (const auto& cameraDeviceIds : concurrentDeviceCombinations) {
592 std::vector<CameraIdAndStreamCombination> cameraIdsAndStreamCombinations;
593 std::vector<CameraTestInfo> cameraTestInfos;
594 size_t i = 0;
595 for (const auto& id : cameraDeviceIds.combination) {
596 CameraTestInfo cti;
597 auto it = idToNameMap.find(id);
598 ASSERT_TRUE(idToNameMap.end() != it);
599 std::string name = it->second;
600
601 openEmptyDeviceSession(name, mProvider, &cti.session /*out*/, &cti.staticMeta /*out*/,
602 &cti.cameraDevice /*out*/);
603
604 outputStreams.clear();
605 camera_metadata_t* staticMeta =
606 reinterpret_cast<camera_metadata_t*>(cti.staticMeta.metadata.data());
607 ASSERT_EQ(Status::OK, getMandatoryConcurrentStreams(staticMeta, &outputStreams));
608 ASSERT_NE(0u, outputStreams.size());
609
610 int32_t jpegBufferSize = 0;
611 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
612 ASSERT_NE(0u, jpegBufferSize);
613
614 int32_t streamId = 0;
615 std::vector<Stream> streams(outputStreams.size());
616 size_t j = 0;
617 for (const auto& s : outputStreams) {
618 Stream stream;
619 Dataspace dataspace = getDataspace(static_cast<PixelFormat>(s.format));
620 stream.id = streamId++;
621 stream.streamType = StreamType::OUTPUT;
622 stream.width = s.width;
623 stream.height = s.height;
624 stream.format = static_cast<PixelFormat>(s.format);
625 stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
626 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
627 stream.dataSpace = dataspace;
628 stream.rotation = StreamRotation::ROTATION_0;
629 stream.sensorPixelModesUsed = {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT};
630 stream.dynamicRangeProfile = RequestAvailableDynamicRangeProfilesMap::
631 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
632 streams[j] = stream;
633 j++;
634 }
635
636 // Add the created stream configs to cameraIdsAndStreamCombinations
637 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &cti.config,
638 jpegBufferSize);
639
640 cti.config.streamConfigCounter = outputStreams.size();
641 CameraIdAndStreamCombination cameraIdAndStreamCombination;
642 cameraIdAndStreamCombination.cameraId = id;
643 cameraIdAndStreamCombination.streamConfiguration = cti.config;
644 cameraIdsAndStreamCombinations.push_back(cameraIdAndStreamCombination);
645 i++;
646 cameraTestInfos.push_back(cti);
647 }
648 // Now verify that concurrent streams are supported
649 bool combinationSupported;
650 ndk::ScopedAStatus ret = mProvider->isConcurrentStreamCombinationSupported(
651 cameraIdsAndStreamCombinations, &combinationSupported);
652 ASSERT_TRUE(ret.isOk());
653 ASSERT_EQ(combinationSupported, true);
654
655 // Test that the streams can actually be configured
656 for (auto& cti : cameraTestInfos) {
657 if (cti.session != nullptr) {
658 camera_metadata_t* staticMeta =
659 reinterpret_cast<camera_metadata_t*>(cti.staticMeta.metadata.data());
660 bool expectStreamCombQuery = (isLogicalMultiCamera(staticMeta) == Status::OK);
661 verifyStreamCombination(cti.cameraDevice, cti.config, /*expectedStatus*/ true,
662 expectStreamCombQuery);
663 }
664
665 if (cti.session != nullptr) {
666 std::vector<HalStream> streamConfigs;
667 ret = cti.session->configureStreams(cti.config, &streamConfigs);
668 ASSERT_TRUE(ret.isOk());
669 ASSERT_EQ(cti.config.streams.size(), streamConfigs.size());
670 }
671 }
672
673 for (auto& cti : cameraTestInfos) {
674 ret = cti.session->close();
675 ASSERT_TRUE(ret.isOk());
676 }
677 }
678}
679
680// Check for correct handling of invalid/incorrect configuration parameters.
681TEST_P(CameraAidlTest, configureStreamsInvalidOutputs) {
682 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
683 std::vector<AvailableStream> outputStreams;
684
685 for (const auto& name : cameraDeviceNames) {
686 CameraMetadata meta;
687 std::shared_ptr<ICameraDevice> cameraDevice;
688
689 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
690 &cameraDevice /*out*/);
691 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
692 outputStreams.clear();
693
694 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
695 ASSERT_NE(0u, outputStreams.size());
696
697 int32_t jpegBufferSize = 0;
698 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
699 ASSERT_NE(0u, jpegBufferSize);
700
701 int32_t streamId = 0;
702 Stream stream = {streamId++,
703 StreamType::OUTPUT,
704 static_cast<uint32_t>(0),
705 static_cast<uint32_t>(0),
706 static_cast<PixelFormat>(outputStreams[0].format),
707 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
708 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
709 Dataspace::UNKNOWN,
710 StreamRotation::ROTATION_0,
711 std::string(),
712 jpegBufferSize,
713 -1,
714 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
715 RequestAvailableDynamicRangeProfilesMap::
716 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
717 int32_t streamConfigCounter = 0;
718 std::vector<Stream> streams = {stream};
719 StreamConfiguration config;
720 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
721 jpegBufferSize);
722
723 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ false,
724 /*expectStreamCombQuery*/ false);
725
726 config.streamConfigCounter = streamConfigCounter++;
727 std::vector<HalStream> halConfigs;
728 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
729 ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
730 ret.getServiceSpecificError() ||
731 static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError());
732
733 stream = {streamId++,
734 StreamType::OUTPUT,
735 /*width*/ INT32_MAX,
736 /*height*/ INT32_MAX,
737 static_cast<PixelFormat>(outputStreams[0].format),
738 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
739 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
740 Dataspace::UNKNOWN,
741 StreamRotation::ROTATION_0,
742 std::string(),
743 jpegBufferSize,
744 -1,
745 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
746 RequestAvailableDynamicRangeProfilesMap::
747 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
748
749 streams[0] = stream;
750 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
751 jpegBufferSize);
752
753 config.streamConfigCounter = streamConfigCounter++;
754 halConfigs.clear();
755 ret = mSession->configureStreams(config, &halConfigs);
756 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
757
758 for (auto& it : outputStreams) {
759 stream = {streamId++,
760 StreamType::OUTPUT,
761 it.width,
762 it.height,
763 static_cast<PixelFormat>(UINT32_MAX),
764 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
765 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
766 Dataspace::UNKNOWN,
767 StreamRotation::ROTATION_0,
768 std::string(),
769 jpegBufferSize,
770 -1,
771 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
772 RequestAvailableDynamicRangeProfilesMap::
773 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
774
775 streams[0] = stream;
776 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
777 jpegBufferSize);
778 config.streamConfigCounter = streamConfigCounter++;
779 halConfigs.clear();
780 ret = mSession->configureStreams(config, &halConfigs);
781 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
782 ret.getServiceSpecificError());
783
784 stream = {streamId++,
785 StreamType::OUTPUT,
786 it.width,
787 it.height,
788 static_cast<PixelFormat>(it.format),
789 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
790 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
791 Dataspace::UNKNOWN,
792 static_cast<StreamRotation>(UINT32_MAX),
793 std::string(),
794 jpegBufferSize,
795 -1,
796 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
797 RequestAvailableDynamicRangeProfilesMap::
798 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
799
800 streams[0] = stream;
801 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
802 jpegBufferSize);
803
804 config.streamConfigCounter = streamConfigCounter++;
805 halConfigs.clear();
806 ret = mSession->configureStreams(config, &halConfigs);
807 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
808 ret.getServiceSpecificError());
809 }
810
811 ret = mSession->close();
812 mSession = nullptr;
813 ASSERT_TRUE(ret.isOk());
814 }
815}
816
817// Check whether all supported ZSL output stream combinations can be
818// configured successfully.
819TEST_P(CameraAidlTest, configureStreamsZSLInputOutputs) {
820 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
821 std::vector<AvailableStream> inputStreams;
822 std::vector<AvailableZSLInputOutput> inputOutputMap;
823
824 for (const auto& name : cameraDeviceNames) {
825 CameraMetadata meta;
826 std::shared_ptr<ICameraDevice> cameraDevice;
827
828 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
829 &cameraDevice /*out*/);
830 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
831
832 Status rc = isZSLModeAvailable(staticMeta);
833 if (Status::OPERATION_NOT_SUPPORTED == rc) {
834 ndk::ScopedAStatus ret = mSession->close();
835 mSession = nullptr;
836 ASSERT_TRUE(ret.isOk());
837 continue;
838 }
839 ASSERT_EQ(Status::OK, rc);
840
841 inputStreams.clear();
842 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, inputStreams));
843 ASSERT_NE(0u, inputStreams.size());
844
845 inputOutputMap.clear();
846 ASSERT_EQ(Status::OK, getZSLInputOutputMap(staticMeta, inputOutputMap));
847 ASSERT_NE(0u, inputOutputMap.size());
848
849 bool supportMonoY8 = false;
850 if (Status::OK == isMonochromeCamera(staticMeta)) {
851 for (auto& it : inputStreams) {
852 if (it.format == static_cast<uint32_t>(PixelFormat::Y8)) {
853 supportMonoY8 = true;
854 break;
855 }
856 }
857 }
858
859 int32_t jpegBufferSize = 0;
860 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
861 ASSERT_NE(0u, jpegBufferSize);
862
863 int32_t streamId = 0;
864 bool hasPrivToY8 = false, hasY8ToY8 = false, hasY8ToBlob = false;
865 uint32_t streamConfigCounter = 0;
866 for (auto& inputIter : inputOutputMap) {
867 AvailableStream input;
868 ASSERT_EQ(Status::OK, findLargestSize(inputStreams, inputIter.inputFormat, input));
869 ASSERT_NE(0u, inputStreams.size());
870
871 if (inputIter.inputFormat ==
872 static_cast<uint32_t>(PixelFormat::IMPLEMENTATION_DEFINED) &&
873 inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
874 hasPrivToY8 = true;
875 } else if (inputIter.inputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
876 if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::BLOB)) {
877 hasY8ToBlob = true;
878 } else if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
879 hasY8ToY8 = true;
880 }
881 }
882 AvailableStream outputThreshold = {INT32_MAX, INT32_MAX, inputIter.outputFormat};
883 std::vector<AvailableStream> outputStreams;
884 ASSERT_EQ(Status::OK,
885 getAvailableOutputStreams(staticMeta, outputStreams, &outputThreshold));
886 for (auto& outputIter : outputStreams) {
887 Dataspace outputDataSpace =
888 getDataspace(static_cast<PixelFormat>(outputIter.format));
889 Stream zslStream = {
890 streamId++,
891 StreamType::OUTPUT,
892 input.width,
893 input.height,
894 static_cast<PixelFormat>(input.format),
895 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
896 GRALLOC_USAGE_HW_CAMERA_ZSL),
897 Dataspace::UNKNOWN,
898 StreamRotation::ROTATION_0,
899 std::string(),
900 jpegBufferSize,
901 -1,
902 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
903 RequestAvailableDynamicRangeProfilesMap::
904 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
905 Stream inputStream = {
906 streamId++,
907 StreamType::INPUT,
908 input.width,
909 input.height,
910 static_cast<PixelFormat>(input.format),
911 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(0),
912 Dataspace::UNKNOWN,
913 StreamRotation::ROTATION_0,
914 std::string(),
915 jpegBufferSize,
916 -1,
917 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
918 RequestAvailableDynamicRangeProfilesMap::
919 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
920 Stream outputStream = {
921 streamId++,
922 StreamType::OUTPUT,
923 outputIter.width,
924 outputIter.height,
925 static_cast<PixelFormat>(outputIter.format),
926 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
927 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
928 outputDataSpace,
929 StreamRotation::ROTATION_0,
930 std::string(),
931 jpegBufferSize,
932 -1,
933 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
934 RequestAvailableDynamicRangeProfilesMap::
935 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
936
937 std::vector<Stream> streams = {inputStream, zslStream, outputStream};
938
939 StreamConfiguration config;
940 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
941 jpegBufferSize);
942
943 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
944 /*expectStreamCombQuery*/ false);
945
946 config.streamConfigCounter = streamConfigCounter++;
947 std::vector<HalStream> halConfigs;
948 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
949 ASSERT_TRUE(ret.isOk());
950 ASSERT_EQ(3u, halConfigs.size());
951 }
952 }
953
954 if (supportMonoY8) {
955 if (Status::OK == isZSLModeAvailable(staticMeta, PRIV_REPROCESS)) {
956 ASSERT_TRUE(hasPrivToY8);
957 }
958 if (Status::OK == isZSLModeAvailable(staticMeta, YUV_REPROCESS)) {
959 ASSERT_TRUE(hasY8ToY8);
960 ASSERT_TRUE(hasY8ToBlob);
961 }
962 }
963
964 ndk::ScopedAStatus ret = mSession->close();
965 mSession = nullptr;
966 ASSERT_TRUE(ret.isOk());
967 }
968}
969
970// Check whether session parameters are supported. If the HAL supports them,
971// try to configure a preview stream using them.
972TEST_P(CameraAidlTest, configureStreamsWithSessionParameters) {
973 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
974 std::vector<AvailableStream> outputPreviewStreams;
975 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
976 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
977
978 for (const auto& name : cameraDeviceNames) {
979 CameraMetadata meta;
980
981 std::shared_ptr<ICameraDevice> unusedCameraDevice;
982 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
983 &unusedCameraDevice /*out*/);
984 camera_metadata_t* staticMetaBuffer =
985 reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
986
987 std::unordered_set<int32_t> availableSessionKeys;
988 auto rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
989 &availableSessionKeys);
990 ASSERT_TRUE(Status::OK == rc);
991 if (availableSessionKeys.empty()) {
992 ndk::ScopedAStatus ret = mSession->close();
993 mSession = nullptr;
994 ASSERT_TRUE(ret.isOk());
995 continue;
996 }
997
998 android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings;
999 android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams,
1000 modifiedSessionParams;
1001 constructFilteredSettings(mSession, availableSessionKeys, RequestTemplate::PREVIEW,
1002 &previewRequestSettings, &sessionParams);
1003 if (sessionParams.isEmpty()) {
1004 ndk::ScopedAStatus ret = mSession->close();
1005 mSession = nullptr;
1006 ASSERT_TRUE(ret.isOk());
1007 continue;
1008 }
1009
1010 outputPreviewStreams.clear();
1011
1012 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams,
1013 &previewThreshold));
1014 ASSERT_NE(0u, outputPreviewStreams.size());
1015
1016 Stream previewStream = {
1017 0,
1018 StreamType::OUTPUT,
1019 outputPreviewStreams[0].width,
1020 outputPreviewStreams[0].height,
1021 static_cast<PixelFormat>(outputPreviewStreams[0].format),
1022 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1023 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
1024 Dataspace::UNKNOWN,
1025 StreamRotation::ROTATION_0,
1026 std::string(),
1027 /*bufferSize*/ 0,
1028 /*groupId*/ -1,
1029 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1030 RequestAvailableDynamicRangeProfilesMap::
1031 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1032
1033 std::vector<Stream> streams = {previewStream};
1034 StreamConfiguration config;
1035
1036 config.streams = streams;
1037 config.operationMode = StreamConfigurationMode::NORMAL_MODE;
1038 modifiedSessionParams = sessionParams;
1039 auto sessionParamsBuffer = sessionParams.release();
1040 std::vector<uint8_t> rawSessionParam =
1041 std::vector(reinterpret_cast<uint8_t*>(sessionParamsBuffer),
1042 reinterpret_cast<uint8_t*>(sessionParamsBuffer) +
1043 get_camera_metadata_size(sessionParamsBuffer));
1044
1045 config.sessionParams.metadata = rawSessionParam;
1046 config.streamConfigCounter = 0;
1047 config.streams = {previewStream};
1048 config.streamConfigCounter = 0;
1049 config.multiResolutionInputImage = false;
1050
1051 bool newSessionParamsAvailable = false;
1052 for (const auto& it : availableSessionKeys) {
1053 if (modifiedSessionParams.exists(it)) {
1054 modifiedSessionParams.erase(it);
1055 newSessionParamsAvailable = true;
1056 break;
1057 }
1058 }
1059 if (newSessionParamsAvailable) {
1060 auto modifiedSessionParamsBuffer = modifiedSessionParams.release();
1061 verifySessionReconfigurationQuery(mSession, sessionParamsBuffer,
1062 modifiedSessionParamsBuffer);
1063 modifiedSessionParams.acquire(modifiedSessionParamsBuffer);
1064 }
1065
1066 std::vector<HalStream> halConfigs;
1067 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1068 ASSERT_TRUE(ret.isOk());
1069 ASSERT_EQ(1u, halConfigs.size());
1070
1071 sessionParams.acquire(sessionParamsBuffer);
1072 ret = mSession->close();
1073 mSession = nullptr;
1074 ASSERT_TRUE(ret.isOk());
1075 }
1076}
1077
1078// Verify that all supported preview + still capture stream combinations
1079// can be configured successfully.
1080TEST_P(CameraAidlTest, configureStreamsPreviewStillOutputs) {
1081 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1082 std::vector<AvailableStream> outputBlobStreams;
1083 std::vector<AvailableStream> outputPreviewStreams;
1084 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
1085 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
1086 AvailableStream blobThreshold = {INT32_MAX, INT32_MAX, static_cast<int32_t>(PixelFormat::BLOB)};
1087
1088 for (const auto& name : cameraDeviceNames) {
1089 CameraMetadata meta;
1090
1091 std::shared_ptr<ICameraDevice> cameraDevice;
1092 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
1093 &cameraDevice /*out*/);
1094
1095 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1096
1097 // Check if the camera supports depth output only
1098 if (isDepthOnly(staticMeta)) {
1099 ndk::ScopedAStatus ret = mSession->close();
1100 mSession = nullptr;
1101 ASSERT_TRUE(ret.isOk());
1102 continue;
1103 }
1104
1105 outputBlobStreams.clear();
1106 ASSERT_EQ(Status::OK,
1107 getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold));
1108 ASSERT_NE(0u, outputBlobStreams.size());
1109
1110 outputPreviewStreams.clear();
1111 ASSERT_EQ(Status::OK,
1112 getAvailableOutputStreams(staticMeta, outputPreviewStreams, &previewThreshold));
1113 ASSERT_NE(0u, outputPreviewStreams.size());
1114
1115 int32_t jpegBufferSize = 0;
1116 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
1117 ASSERT_NE(0u, jpegBufferSize);
1118
1119 int32_t streamId = 0;
1120 uint32_t streamConfigCounter = 0;
1121
1122 for (auto& blobIter : outputBlobStreams) {
1123 for (auto& previewIter : outputPreviewStreams) {
1124 Stream previewStream = {
1125 streamId++,
1126 StreamType::OUTPUT,
1127 previewIter.width,
1128 previewIter.height,
1129 static_cast<PixelFormat>(previewIter.format),
1130 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1131 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
1132 Dataspace::UNKNOWN,
1133 StreamRotation::ROTATION_0,
1134 std::string(),
1135 /*bufferSize*/ 0,
1136 /*groupId*/ -1,
1137 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1138 RequestAvailableDynamicRangeProfilesMap::
1139 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1140 Stream blobStream = {
1141 streamId++,
1142 StreamType::OUTPUT,
1143 blobIter.width,
1144 blobIter.height,
1145 static_cast<PixelFormat>(blobIter.format),
1146 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1147 GRALLOC1_CONSUMER_USAGE_CPU_READ),
1148 Dataspace::JFIF,
1149 StreamRotation::ROTATION_0,
1150 std::string(),
1151 /*bufferSize*/ 0,
1152 /*groupId*/ -1,
1153 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1154 RequestAvailableDynamicRangeProfilesMap::
1155 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1156 std::vector<Stream> streams = {previewStream, blobStream};
1157 StreamConfiguration config;
1158
1159 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
1160 jpegBufferSize);
1161 config.streamConfigCounter = streamConfigCounter++;
1162 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
1163 /*expectStreamCombQuery*/ false);
1164
1165 std::vector<HalStream> halConfigs;
1166 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1167 ASSERT_TRUE(ret.isOk());
1168 ASSERT_EQ(2u, halConfigs.size());
1169 }
1170 }
1171
1172 ndk::ScopedAStatus ret = mSession->close();
1173 mSession = nullptr;
1174 ASSERT_TRUE(ret.isOk());
1175 }
1176}
1177
1178// If constrained high-speed mode is supported, test whether it can be
1179// configured. Additionally, check the handling of common invalid inputs
1180// when using this mode.
1181TEST_P(CameraAidlTest, configureStreamsConstrainedOutputs) {
1182 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1183
1184 for (const auto& name : cameraDeviceNames) {
1185 CameraMetadata meta;
1186 std::shared_ptr<ICameraDevice> cameraDevice;
1187
1188 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
1189 &cameraDevice /*out*/);
1190 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1191
1192 Status rc = isConstrainedModeAvailable(staticMeta);
1193 if (Status::OPERATION_NOT_SUPPORTED == rc) {
1194 ndk::ScopedAStatus ret = mSession->close();
1195 mSession = nullptr;
1196 ASSERT_TRUE(ret.isOk());
1197 continue;
1198 }
1199 ASSERT_EQ(Status::OK, rc);
1200
1201 AvailableStream hfrStream;
1202 rc = pickConstrainedModeSize(staticMeta, hfrStream);
1203 ASSERT_EQ(Status::OK, rc);
1204
1205 int32_t streamId = 0;
1206 uint32_t streamConfigCounter = 0;
1207 Stream stream = {streamId,
1208 StreamType::OUTPUT,
1209 hfrStream.width,
1210 hfrStream.height,
1211 static_cast<PixelFormat>(hfrStream.format),
1212 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1213 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1214 Dataspace::UNKNOWN,
1215 StreamRotation::ROTATION_0,
1216 std::string(),
1217 /*bufferSize*/ 0,
1218 /*groupId*/ -1,
1219 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1220 RequestAvailableDynamicRangeProfilesMap::
1221 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1222 std::vector<Stream> streams = {stream};
1223 StreamConfiguration config;
1224 createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
1225 &config);
1226
1227 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
1228 /*expectStreamCombQuery*/ false);
1229
1230 config.streamConfigCounter = streamConfigCounter++;
1231 std::vector<HalStream> halConfigs;
1232 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1233 ASSERT_TRUE(ret.isOk());
1234 ASSERT_EQ(1u, halConfigs.size());
1235 ASSERT_EQ(halConfigs[0].id, streamId);
1236
1237 stream = {streamId++,
1238 StreamType::OUTPUT,
1239 static_cast<uint32_t>(0),
1240 static_cast<uint32_t>(0),
1241 static_cast<PixelFormat>(hfrStream.format),
1242 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1243 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1244 Dataspace::UNKNOWN,
1245 StreamRotation::ROTATION_0,
1246 std::string(),
1247 /*bufferSize*/ 0,
1248 /*groupId*/ -1,
1249 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1250 RequestAvailableDynamicRangeProfilesMap::
1251 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1252 streams[0] = stream;
1253 createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
1254 &config);
1255
1256 config.streamConfigCounter = streamConfigCounter++;
1257 std::vector<HalStream> halConfig;
1258 ret = mSession->configureStreams(config, &halConfig);
1259 ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
1260 ret.getServiceSpecificError() ||
1261 static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError());
1262
1263 stream = {streamId++,
1264 StreamType::OUTPUT,
1265 INT32_MAX,
1266 INT32_MAX,
1267 static_cast<PixelFormat>(hfrStream.format),
1268 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1269 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1270 Dataspace::UNKNOWN,
1271 StreamRotation::ROTATION_0,
1272 std::string(),
1273 /*bufferSize*/ 0,
1274 /*groupId*/ -1,
1275 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1276 RequestAvailableDynamicRangeProfilesMap::
1277 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1278 streams[0] = stream;
1279 createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
1280 &config);
1281
1282 config.streamConfigCounter = streamConfigCounter++;
1283 halConfigs.clear();
1284 ret = mSession->configureStreams(config, &halConfigs);
1285 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
1286
1287 stream = {streamId++,
1288 StreamType::OUTPUT,
1289 hfrStream.width,
1290 hfrStream.height,
1291 static_cast<PixelFormat>(UINT32_MAX),
1292 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1293 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1294 Dataspace::UNKNOWN,
1295 StreamRotation::ROTATION_0,
1296 std::string(),
1297 /*bufferSize*/ 0,
1298 /*groupId*/ -1,
1299 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1300 RequestAvailableDynamicRangeProfilesMap::
1301 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1302 streams[0] = stream;
1303 createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
1304 &config);
1305
1306 config.streamConfigCounter = streamConfigCounter++;
1307 halConfigs.clear();
1308 ret = mSession->configureStreams(config, &halConfigs);
1309 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
1310
1311 ret = mSession->close();
1312 mSession = nullptr;
1313 ASSERT_TRUE(ret.isOk());
1314 }
1315}
1316
1317// Verify that all supported video + snapshot stream combinations can
1318// be configured successfully.
1319TEST_P(CameraAidlTest, configureStreamsVideoStillOutputs) {
1320 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1321 std::vector<AvailableStream> outputBlobStreams;
1322 std::vector<AvailableStream> outputVideoStreams;
1323 AvailableStream videoThreshold = {kMaxVideoWidth, kMaxVideoHeight,
1324 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
1325 AvailableStream blobThreshold = {kMaxVideoWidth, kMaxVideoHeight,
1326 static_cast<int32_t>(PixelFormat::BLOB)};
1327
1328 for (const auto& name : cameraDeviceNames) {
1329 CameraMetadata meta;
1330 std::shared_ptr<ICameraDevice> cameraDevice;
1331
1332 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
1333 &cameraDevice /*out*/);
1334
1335 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1336
1337 // Check if the camera supports depth output only
1338 if (isDepthOnly(staticMeta)) {
1339 ndk::ScopedAStatus ret = mSession->close();
1340 mSession = nullptr;
1341 ASSERT_TRUE(ret.isOk());
1342 continue;
1343 }
1344
1345 outputBlobStreams.clear();
1346 ASSERT_EQ(Status::OK,
1347 getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold));
1348 ASSERT_NE(0u, outputBlobStreams.size());
1349
1350 outputVideoStreams.clear();
1351 ASSERT_EQ(Status::OK,
1352 getAvailableOutputStreams(staticMeta, outputVideoStreams, &videoThreshold));
1353 ASSERT_NE(0u, outputVideoStreams.size());
1354
1355 int32_t jpegBufferSize = 0;
1356 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
1357 ASSERT_NE(0u, jpegBufferSize);
1358
1359 int32_t streamId = 0;
1360 uint32_t streamConfigCounter = 0;
1361 for (auto& blobIter : outputBlobStreams) {
1362 for (auto& videoIter : outputVideoStreams) {
1363 Stream videoStream = {
1364 streamId++,
1365 StreamType::OUTPUT,
1366 videoIter.width,
1367 videoIter.height,
1368 static_cast<PixelFormat>(videoIter.format),
1369 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1370 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1371 Dataspace::UNKNOWN,
1372 StreamRotation::ROTATION_0,
1373 std::string(),
1374 jpegBufferSize,
1375 /*groupId*/ -1,
1376 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1377 RequestAvailableDynamicRangeProfilesMap::
1378 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1379 Stream blobStream = {
1380 streamId++,
1381 StreamType::OUTPUT,
1382 blobIter.width,
1383 blobIter.height,
1384 static_cast<PixelFormat>(blobIter.format),
1385 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1386 GRALLOC1_CONSUMER_USAGE_CPU_READ),
1387 Dataspace::JFIF,
1388 StreamRotation::ROTATION_0,
1389 std::string(),
1390 jpegBufferSize,
1391 /*groupId*/ -1,
1392 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1393 RequestAvailableDynamicRangeProfilesMap::
1394 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1395 std::vector<Stream> streams = {videoStream, blobStream};
1396 StreamConfiguration config;
1397
1398 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
1399 jpegBufferSize);
1400 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
1401 /*expectStreamCombQuery*/ false);
1402
1403 config.streamConfigCounter = streamConfigCounter++;
1404 std::vector<HalStream> halConfigs;
1405 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1406 ASSERT_TRUE(ret.isOk());
1407 ASSERT_EQ(2u, halConfigs.size());
1408 }
1409 }
1410
1411 ndk::ScopedAStatus ret = mSession->close();
1412 mSession = nullptr;
1413 ASSERT_TRUE(ret.isOk());
1414 }
1415}
1416
1417// Generate and verify a camera capture request
1418TEST_P(CameraAidlTest, processCaptureRequestPreview) {
1419 // TODO(b/220897574): Failing with BUFFER_ERROR
1420 processCaptureRequestInternal(GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, RequestTemplate::PREVIEW,
1421 false /*secureOnlyCameras*/);
1422}
1423
1424// Generate and verify a secure camera capture request
1425TEST_P(CameraAidlTest, processSecureCaptureRequest) {
1426 processCaptureRequestInternal(GRALLOC1_PRODUCER_USAGE_PROTECTED, RequestTemplate::STILL_CAPTURE,
1427 true /*secureOnlyCameras*/);
1428}
1429
1430TEST_P(CameraAidlTest, processCaptureRequestPreviewStabilization) {
1431 std::unordered_map<std::string, nsecs_t> cameraDeviceToTimeLag;
1432 processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ false,
1433 cameraDeviceToTimeLag);
1434 processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ true,
1435 cameraDeviceToTimeLag);
1436}
1437
1438// Generate and verify a multi-camera capture request
1439TEST_P(CameraAidlTest, processMultiCaptureRequestPreview) {
1440 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1441 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
1442 static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
1443 int64_t bufferId = 1;
1444 uint32_t frameNumber = 1;
1445 std::vector<uint8_t> settings;
1446 std::vector<uint8_t> emptySettings;
1447 std::string invalidPhysicalId = "-1";
1448
1449 for (const auto& name : cameraDeviceNames) {
1450 std::string version, deviceId;
1451         ALOGI("processMultiCaptureRequestPreview: Test device %s", name.c_str());
1452         ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1453 CameraMetadata metadata;
1454
1455 std::shared_ptr<ICameraDevice> unusedDevice;
1456 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &metadata /*out*/,
1457 &unusedDevice /*out*/);
1458
1459 camera_metadata_t* staticMeta =
1460 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
1461 Status rc = isLogicalMultiCamera(staticMeta);
1462 if (Status::OPERATION_NOT_SUPPORTED == rc) {
1463 ndk::ScopedAStatus ret = mSession->close();
1464 mSession = nullptr;
1465 ASSERT_TRUE(ret.isOk());
1466 continue;
1467 }
1468         ASSERT_EQ(Status::OK, rc);
1469
1470 std::unordered_set<std::string> physicalIds;
1471 rc = getPhysicalCameraIds(staticMeta, &physicalIds);
1472 ASSERT_TRUE(Status::OK == rc);
1473 ASSERT_TRUE(physicalIds.size() > 1);
1474
1475 std::unordered_set<int32_t> physicalRequestKeyIDs;
1476 rc = getSupportedKeys(staticMeta, ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS,
1477 &physicalRequestKeyIDs);
1478 ASSERT_TRUE(Status::OK == rc);
1479 if (physicalRequestKeyIDs.empty()) {
1480 ndk::ScopedAStatus ret = mSession->close();
1481 mSession = nullptr;
1482 ASSERT_TRUE(ret.isOk());
1483 // The logical camera doesn't support any individual physical requests.
1484 continue;
1485 }
1486
1487 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultPreviewSettings;
1488 android::hardware::camera::common::V1_0::helper::CameraMetadata filteredSettings;
1489 constructFilteredSettings(mSession, physicalRequestKeyIDs, RequestTemplate::PREVIEW,
1490 &defaultPreviewSettings, &filteredSettings);
1491 if (filteredSettings.isEmpty()) {
1492 // No physical device settings in default request.
1493 ndk::ScopedAStatus ret = mSession->close();
1494 mSession = nullptr;
1495 ASSERT_TRUE(ret.isOk());
1496 continue;
1497 }
1498
1499 const camera_metadata_t* settingsBuffer = defaultPreviewSettings.getAndLock();
1500 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1501 settings.assign(rawSettingsBuffer,
1502 rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1503 CameraMetadata settingsMetadata = {settings};
1504 overrideRotateAndCrop(&settingsMetadata);
1505
1506 ndk::ScopedAStatus ret = mSession->close();
1507 mSession = nullptr;
1508 ASSERT_TRUE(ret.isOk());
1509
1510 // Leave only 2 physical devices in the id set.
1511 auto it = physicalIds.begin();
1512 std::string physicalDeviceId = *it;
1513 it++;
1514 physicalIds.erase(++it, physicalIds.end());
1515 ASSERT_EQ(physicalIds.size(), 2u);
1516
1517 std::vector<HalStream> halStreams;
1518 bool supportsPartialResults = false;
1519 bool useHalBufManager = false;
1520 int32_t partialResultCount = 0;
1521 Stream previewStream;
1522 std::shared_ptr<DeviceCb> cb;
1523
1524         configurePreviewStreams(
1525 name, mProvider, &previewThreshold, physicalIds, &mSession, &previewStream,
1526 &halStreams /*out*/, &supportsPartialResults /*out*/, &partialResultCount /*out*/,
1527 &useHalBufManager /*out*/, &cb /*out*/, 0 /*streamConfigCounter*/, true);
1528 if (mSession == nullptr) {
1529 // stream combination not supported by HAL, skip test for device
1530 continue;
1531 }
1532
1533 ::aidl::android::hardware::common::fmq::MQDescriptor<
1534 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1535 descriptor;
1536 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1537 ASSERT_TRUE(resultQueueRet.isOk());
1538 std::shared_ptr<ResultMetadataQueue> resultQueue =
1539 std::make_shared<ResultMetadataQueue>(descriptor);
1540 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1541 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
1542 resultQueue = nullptr;
1543 // Don't use the queue onwards.
1544 }
1545
1546 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1547 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1548 partialResultCount, physicalIds, resultQueue);
1549
1550 std::vector<CaptureRequest> requests(1);
1551 CaptureRequest& request = requests[0];
1552 request.frameNumber = frameNumber;
1553 request.fmqSettingsSize = 0;
1554         request.settings = settingsMetadata;
1555
1556 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1557
1558 std::vector<buffer_handle_t> graphicBuffers;
1559 graphicBuffers.reserve(halStreams.size());
1560 outputBuffers.resize(halStreams.size());
1561 size_t k = 0;
1562 for (const auto& halStream : halStreams) {
1563 buffer_handle_t buffer_handle;
1564 if (useHalBufManager) {
1565 outputBuffers[k] = {halStream.id, /*bufferId*/ 0, NativeHandle(),
1566 BufferStatus::OK, NativeHandle(), NativeHandle()};
1567 } else {
1568 allocateGraphicBuffer(previewStream.width, previewStream.height,
1569 android_convertGralloc1To0Usage(
1570 static_cast<uint64_t>(halStream.producerUsage),
1571 static_cast<uint64_t>(halStream.consumerUsage)),
1572 halStream.overrideFormat, &buffer_handle);
1573 graphicBuffers.push_back(buffer_handle);
1574 outputBuffers[k] = {
1575 halStream.id, bufferId, ::android::makeToAidl(buffer_handle),
1576 BufferStatus::OK, NativeHandle(), NativeHandle()};
1577 bufferId++;
1578 }
1579 k++;
1580 }
1581
1582 std::vector<PhysicalCameraSetting> camSettings(1);
1583 const camera_metadata_t* filteredSettingsBuffer = filteredSettings.getAndLock();
1584 uint8_t* rawFilteredSettingsBuffer = (uint8_t*)filteredSettingsBuffer;
1585 camSettings[0].settings = {std::vector(
1586 rawFilteredSettingsBuffer,
1587 rawFilteredSettingsBuffer + get_camera_metadata_size(filteredSettingsBuffer))};
1588 overrideRotateAndCrop(&camSettings[0].settings);
1589 camSettings[0].fmqSettingsSize = 0;
1590 camSettings[0].physicalCameraId = physicalDeviceId;
1591
1592 request.inputBuffer = {
1593 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
1594 request.physicalCameraSettings = camSettings;
1595
1596 {
1597 std::unique_lock<std::mutex> l(mLock);
1598 mInflightMap.clear();
1599 mInflightMap[frameNumber] = inflightReq;
1600 }
1601
1602 int32_t numRequestProcessed = 0;
1603 std::vector<BufferCache> cachesToRemove;
1604 ndk::ScopedAStatus returnStatus =
1605 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1606 ASSERT_TRUE(returnStatus.isOk());
1607 ASSERT_EQ(numRequestProcessed, 1u);
1608
1609 {
1610 std::unique_lock<std::mutex> l(mLock);
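            // Block until the request either reports an error or returns all buffers and
            // result metadata; each wait must be signalled within kStreamBufferTimeoutSec.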
1611 while (!inflightReq->errorCodeValid &&
1612 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1613 auto timeout = std::chrono::system_clock::now() +
1614 std::chrono::seconds(kStreamBufferTimeoutSec);
1615 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1616 }
1617
1618 ASSERT_FALSE(inflightReq->errorCodeValid);
1619 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1620
1621 request.frameNumber++;
1622 // Empty settings should be supported after the first call
1623 // for repeating requests.
1624 request.settings.metadata.clear();
1625 request.physicalCameraSettings[0].settings.metadata.clear();
1626 // The buffer has been registered to HAL by bufferId, so per
1627 // API contract we should send a null handle for this buffer
1628 request.outputBuffers[0].buffer = NativeHandle();
1629 mInflightMap.clear();
1630 inflightReq = std::make_shared<InFlightRequest>(
1631 static_cast<ssize_t>(physicalIds.size()), false, supportsPartialResults,
1632 partialResultCount, physicalIds, resultQueue);
1633 mInflightMap[request.frameNumber] = inflightReq;
1634 }
1635
1636 returnStatus =
1637 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1638 ASSERT_TRUE(returnStatus.isOk());
1639 ASSERT_EQ(numRequestProcessed, 1u);
1640
1641 {
1642 std::unique_lock<std::mutex> l(mLock);
1643 while (!inflightReq->errorCodeValid &&
1644 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1645 auto timeout = std::chrono::system_clock::now() +
1646 std::chrono::seconds(kStreamBufferTimeoutSec);
1647 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1648 }
1649
1650 ASSERT_FALSE(inflightReq->errorCodeValid);
1651 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1652 }
1653
1654         // An invalid physical camera id should cause processCaptureRequest to fail
1655 frameNumber++;
1656 camSettings[0].physicalCameraId = invalidPhysicalId;
1657 camSettings[0].settings.metadata = settings;
1658
1659 request.physicalCameraSettings = camSettings; // Invalid camera settings
1660 returnStatus =
1661 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1662 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
1663 returnStatus.getServiceSpecificError());
1664
1665 defaultPreviewSettings.unlock(settingsBuffer);
1666 filteredSettings.unlock(filteredSettingsBuffer);
1667
1668 if (useHalBufManager) {
1669 std::vector<int32_t> streamIds(halStreams.size());
1670 for (size_t i = 0; i < streamIds.size(); i++) {
1671 streamIds[i] = halStreams[i].id;
1672 }
1673 verifyBuffersReturned(mSession, streamIds, cb);
1674 }
1675
1676 ret = mSession->close();
1677 mSession = nullptr;
1678 ASSERT_TRUE(ret.isOk());
1679 }
1680}
1681
1682// Generate and verify an ultra high resolution capture request
1683TEST_P(CameraAidlTest, processUltraHighResolutionRequest) {
1684 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1685 int64_t bufferId = 1;
1686 int32_t frameNumber = 1;
1687 CameraMetadata settings;
1688
1689 for (const auto& name : cameraDeviceNames) {
1690 std::string version, deviceId;
1691 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1692 CameraMetadata meta;
1693
1694 std::shared_ptr<ICameraDevice> unusedDevice;
1695 openEmptyDeviceSession(name, mProvider, &mSession, &meta, &unusedDevice);
1696 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1697 if (!isUltraHighResolution(staticMeta)) {
1698 ndk::ScopedAStatus ret = mSession->close();
1699 mSession = nullptr;
1700 ASSERT_TRUE(ret.isOk());
1701 continue;
1702 }
1703 CameraMetadata req;
1704 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
1705 ndk::ScopedAStatus ret =
1706 mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE, &req);
1707 ASSERT_TRUE(ret.isOk());
1708
1709 const camera_metadata_t* metadata =
1710 reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
1711 size_t expectedSize = req.metadata.size();
1712 int result = validate_camera_metadata_structure(metadata, &expectedSize);
1713 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
1714
1715 size_t entryCount = get_camera_metadata_entry_count(metadata);
1716 ASSERT_GT(entryCount, 0u);
1717 defaultSettings = metadata;
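        // Switch the request to maximum-resolution readout so the capture exercises the
        // ultra high resolution sensor pixel mode.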
1718 uint8_t sensorPixelMode =
1719 static_cast<uint8_t>(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
1720 ASSERT_EQ(::android::OK,
1721 defaultSettings.update(ANDROID_SENSOR_PIXEL_MODE, &sensorPixelMode, 1));
1722
1723 const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
1724 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1725 settings.metadata = std::vector(
1726 rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1727 overrideRotateAndCrop(&settings);
1728
1729 ret = mSession->close();
1730 mSession = nullptr;
1731 ASSERT_TRUE(ret.isOk());
1732
1733 std::vector<HalStream> halStreams;
1734 bool supportsPartialResults = false;
1735 bool useHalBufManager = false;
1736 int32_t partialResultCount = 0;
1737 Stream previewStream;
1738 std::shared_ptr<DeviceCb> cb;
1739
1740 std::list<PixelFormat> pixelFormats = {PixelFormat::YCBCR_420_888, PixelFormat::RAW16};
1741 for (PixelFormat format : pixelFormats) {
1742             previewStream.usage =
1743 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1744 GRALLOC1_CONSUMER_USAGE_CPU_READ);
1745 previewStream.dataSpace = Dataspace::UNKNOWN;
1746             configureStreams(name, mProvider, format, &mSession, &previewStream, &halStreams,
1747 &supportsPartialResults, &partialResultCount, &useHalBufManager, &cb,
1748 0, /*maxResolution*/ true);
1749 ASSERT_NE(mSession, nullptr);
1750
1751 ::aidl::android::hardware::common::fmq::MQDescriptor<
1752 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1753 descriptor;
1754 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1755 ASSERT_TRUE(resultQueueRet.isOk());
1756
1757 std::shared_ptr<ResultMetadataQueue> resultQueue =
1758 std::make_shared<ResultMetadataQueue>(descriptor);
1759 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1760 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
1761 resultQueue = nullptr;
1762 // Don't use the queue onwards.
1763 }
1764
1765 std::vector<buffer_handle_t> graphicBuffers;
1766 graphicBuffers.reserve(halStreams.size());
1767 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1768 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1769 partialResultCount, std::unordered_set<std::string>(), resultQueue);
1770
1771 std::vector<CaptureRequest> requests(1);
1772 CaptureRequest& request = requests[0];
1773 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1774 outputBuffers.resize(halStreams.size());
1775
1776 size_t k = 0;
1777 for (const auto& halStream : halStreams) {
1778 buffer_handle_t buffer_handle;
1779 if (useHalBufManager) {
1780 outputBuffers[k] = {halStream.id, 0,
1781 NativeHandle(), BufferStatus::OK,
1782 NativeHandle(), NativeHandle()};
1783 } else {
1784 allocateGraphicBuffer(previewStream.width, previewStream.height,
1785 android_convertGralloc1To0Usage(
1786 static_cast<uint64_t>(halStream.producerUsage),
1787 static_cast<uint64_t>(halStream.consumerUsage)),
1788 halStream.overrideFormat, &buffer_handle);
1789 graphicBuffers.push_back(buffer_handle);
1790 outputBuffers[k] = {
1791 halStream.id, bufferId, ::android::makeToAidl(buffer_handle),
1792 BufferStatus::OK, NativeHandle(), NativeHandle()};
1793 bufferId++;
1794 }
1795 k++;
1796 }
1797
1798 request.inputBuffer = {
1799 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
1800 request.frameNumber = frameNumber;
1801 request.fmqSettingsSize = 0;
1802 request.settings = settings;
1803 request.inputWidth = 0;
1804 request.inputHeight = 0;
1805
1806 {
1807 std::unique_lock<std::mutex> l(mLock);
1808 mInflightMap.clear();
1809 mInflightMap[frameNumber] = inflightReq;
1810 }
1811
1812 int32_t numRequestProcessed = 0;
1813 std::vector<BufferCache> cachesToRemove;
1814 ndk::ScopedAStatus returnStatus =
1815 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1816 ASSERT_TRUE(returnStatus.isOk());
1817 ASSERT_EQ(numRequestProcessed, 1u);
1818
1819 {
1820 std::unique_lock<std::mutex> l(mLock);
1821 while (!inflightReq->errorCodeValid &&
1822 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1823 auto timeout = std::chrono::system_clock::now() +
1824 std::chrono::seconds(kStreamBufferTimeoutSec);
1825 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1826 }
1827
1828 ASSERT_FALSE(inflightReq->errorCodeValid);
1829 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1830 }
1831 if (useHalBufManager) {
1832 std::vector<int32_t> streamIds(halStreams.size());
1833 for (size_t i = 0; i < streamIds.size(); i++) {
1834 streamIds[i] = halStreams[i].id;
1835 }
1836 verifyBuffersReturned(mSession, streamIds, cb);
1837 }
1838
1839 ret = mSession->close();
1840 mSession = nullptr;
1841 ASSERT_TRUE(ret.isOk());
1842 }
1843 }
1844}
1845
1846// Generate and verify 10-bit dynamic range request
1847TEST_P(CameraAidlTest, process10BitDynamicRangeRequest) {
1848 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1849     CameraMetadata settings;
1850
1851 for (const auto& name : cameraDeviceNames) {
1852 std::string version, deviceId;
1853 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1854 CameraMetadata meta;
1855 std::shared_ptr<ICameraDevice> device;
1856 openEmptyDeviceSession(name, mProvider, &mSession, &meta, &device);
1857 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1858 if (!is10BitDynamicRangeCapable(staticMeta)) {
1859 ndk::ScopedAStatus ret = mSession->close();
1860 mSession = nullptr;
1861 ASSERT_TRUE(ret.isOk());
1862 continue;
1863 }
1864         std::vector<RequestAvailableDynamicRangeProfilesMap> profileList;
1865         get10BitDynamicRangeProfiles(staticMeta, &profileList);
1866 ASSERT_FALSE(profileList.empty());
1867
1868 CameraMetadata req;
1869 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
1870 ndk::ScopedAStatus ret =
1871                 mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &req);
1872         ASSERT_TRUE(ret.isOk());
1873
1874 const camera_metadata_t* metadata =
1875 reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
1876 size_t expectedSize = req.metadata.size();
1877 int result = validate_camera_metadata_structure(metadata, &expectedSize);
1878 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
1879
1880 size_t entryCount = get_camera_metadata_entry_count(metadata);
1881 ASSERT_GT(entryCount, 0u);
1882 defaultSettings = metadata;
1883
1884 const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
1885 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1886 settings.metadata = std::vector(
1887 rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1888 overrideRotateAndCrop(&settings);
1889
1890 ret = mSession->close();
1891 mSession = nullptr;
1892 ASSERT_TRUE(ret.isOk());
1893
1894 std::vector<HalStream> halStreams;
1895 bool supportsPartialResults = false;
1896 bool useHalBufManager = false;
1897 int32_t partialResultCount = 0;
1898 Stream previewStream;
1899 std::shared_ptr<DeviceCb> cb;
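        // Run a capture burst once for every advertised 10-bit dynamic range profile.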
1900 for (const auto& profile : profileList) {
1901             previewStream.usage =
1902 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1903 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
1904 previewStream.dataSpace = getDataspace(PixelFormat::IMPLEMENTATION_DEFINED);
1905             configureStreams(name, mProvider, PixelFormat::IMPLEMENTATION_DEFINED, &mSession,
1906 &previewStream, &halStreams, &supportsPartialResults,
1907 &partialResultCount, &useHalBufManager, &cb, 0,
1908 /*maxResolution*/ false, profile);
1909 ASSERT_NE(mSession, nullptr);
1910
1911 ::aidl::android::hardware::common::fmq::MQDescriptor<
1912 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1913 descriptor;
1914 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1915 ASSERT_TRUE(resultQueueRet.isOk());
1916
1917 std::shared_ptr<ResultMetadataQueue> resultQueue =
1918 std::make_shared<ResultMetadataQueue>(descriptor);
1919 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1920 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
1921 resultQueue = nullptr;
1922 // Don't use the queue onwards.
1923 }
1924
1925             mInflightMap.clear();
1926             // Stream for as long as needed to fill the HAL inflight queue
1927             std::vector<CaptureRequest> requests(halStreams[0].maxBuffers);
1928
1929             for (int32_t requestId = 0; requestId < requests.size(); requestId++) {
1930                 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1931 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1932 partialResultCount, std::unordered_set<std::string>(), resultQueue);
1933
1934                 CaptureRequest& request = requests[requestId];
1935                 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1936 outputBuffers.resize(halStreams.size());
1937
1938                 size_t k = 0;
1939 inflightReq->mOutstandingBufferIds.resize(halStreams.size());
1940 std::vector<buffer_handle_t> graphicBuffers;
1941 graphicBuffers.reserve(halStreams.size());
1942
1943                 auto bufferId = requestId + 1; // Buffer id value 0 is not valid
1944                 for (const auto& halStream : halStreams) {
1945 buffer_handle_t buffer_handle;
1946 if (useHalBufManager) {
1947 outputBuffers[k] = {halStream.id, 0,
1948 NativeHandle(), BufferStatus::OK,
1949 NativeHandle(), NativeHandle()};
1950 } else {
1951 auto usage = android_convertGralloc1To0Usage(
1952 static_cast<uint64_t>(halStream.producerUsage),
1953 static_cast<uint64_t>(halStream.consumerUsage));
1954 allocateGraphicBuffer(previewStream.width, previewStream.height, usage,
1955 halStream.overrideFormat, &buffer_handle);
1956
1957 inflightReq->mOutstandingBufferIds[halStream.id][bufferId] = buffer_handle;
1958 graphicBuffers.push_back(buffer_handle);
1959 outputBuffers[k] = {halStream.id, bufferId,
1960 android::makeToAidl(buffer_handle), BufferStatus::OK, NativeHandle(),
1961 NativeHandle()};
1962                     }
1963 k++;
1964                 }
1965
1966                 request.inputBuffer = {
1967 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
1968                 request.frameNumber = bufferId;
1969                 request.fmqSettingsSize = 0;
1970 request.settings = settings;
1971 request.inputWidth = 0;
1972 request.inputHeight = 0;
1973
1974                 {
1975 std::unique_lock<std::mutex> l(mLock);
1976                     mInflightMap[bufferId] = inflightReq;
1977                 }
1978
1979             }
1980
1981 int32_t numRequestProcessed = 0;
1982 std::vector<BufferCache> cachesToRemove;
1983 ndk::ScopedAStatus returnStatus =
1984                     mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1985             ASSERT_TRUE(returnStatus.isOk());
1986             ASSERT_EQ(numRequestProcessed, requests.size());
1987
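            // Signal that the repeating burst has ended on the configured stream so the
            // HAL stops expecting further requests for it.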
1988             returnStatus = mSession->repeatingRequestEnd(requests.size() - 1,
1989 std::vector<int32_t> {halStreams[0].id});
1990 ASSERT_TRUE(returnStatus.isOk());
1991
1992             // Frame numbers and buffer ids are kept consistent. A buffer id of 0 indicates
1993             // a buffer that is not present/available, so buffer ids as well as frame
1994             // numbers begin with 1.
1995             for (int32_t frameNumber = 1; frameNumber <= requests.size(); frameNumber++) {
1996                 const auto& inflightReq = mInflightMap[frameNumber];
1997                 std::unique_lock<std::mutex> l(mLock);
1998 while (!inflightReq->errorCodeValid &&
1999 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2000 auto timeout = std::chrono::system_clock::now() +
2001 std::chrono::seconds(kStreamBufferTimeoutSec);
2002 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2003 }
2004
2005                 waitForReleaseFence(inflightReq->resultOutputBuffers);
2006
2007                 ASSERT_FALSE(inflightReq->errorCodeValid);
2008 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
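                // Confirm the returned buffers carry metadata consistent with the
                // requested 10-bit dynamic range profile.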
2009 verify10BitMetadata(mHandleImporter, *inflightReq, profile);
2010 }
2011
2012             if (useHalBufManager) {
2013 std::vector<int32_t> streamIds(halStreams.size());
2014 for (size_t i = 0; i < streamIds.size(); i++) {
2015 streamIds[i] = halStreams[i].id;
2016 }
2017 mSession->signalStreamFlush(streamIds, /*streamConfigCounter*/ 0);
2018 cb->waitForBuffersReturned();
2019 }
2020
2021 ret = mSession->close();
2022 mSession = nullptr;
2023 ASSERT_TRUE(ret.isOk());
2024 }
2025 }
2026}
2027
2028// Generate and verify a burst containing alternating sensor sensitivity values
2029TEST_P(CameraAidlTest, processCaptureRequestBurstISO) {
2030 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2031 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2032 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2033 int64_t bufferId = 1;
2034 int32_t frameNumber = 1;
2035 float isoTol = .03f;
2036 CameraMetadata settings;
2037
2038 for (const auto& name : cameraDeviceNames) {
2039 CameraMetadata meta;
2040 settings.metadata.clear();
2041 std::shared_ptr<ICameraDevice> unusedDevice;
2042 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2043 &unusedDevice /*out*/);
2044 camera_metadata_t* staticMetaBuffer =
2045 clone_camera_metadata(reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
2046 ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta(
2047 staticMetaBuffer);
2048
2049 camera_metadata_entry_t hwLevel = staticMeta.find(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL);
2050 ASSERT_TRUE(0 < hwLevel.count);
2051 if (ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED == hwLevel.data.u8[0] ||
2052 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL == hwLevel.data.u8[0]) {
2053 // Limited/External devices can skip this test
2054 ndk::ScopedAStatus ret = mSession->close();
2055 mSession = nullptr;
2056 ASSERT_TRUE(ret.isOk());
2057 continue;
2058 }
2059
2060 camera_metadata_entry_t isoRange = staticMeta.find(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE);
2061 ASSERT_EQ(isoRange.count, 2u);
2062
2063 ndk::ScopedAStatus ret = mSession->close();
2064 mSession = nullptr;
2065 ASSERT_TRUE(ret.isOk());
2066
2067 bool supportsPartialResults = false;
2068 bool useHalBufManager = false;
2069 int32_t partialResultCount = 0;
2070 Stream previewStream;
2071 std::vector<HalStream> halStreams;
2072 std::shared_ptr<DeviceCb> cb;
2073 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2074 &previewStream /*out*/, &halStreams /*out*/,
2075 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2076 &useHalBufManager /*out*/, &cb /*out*/);
2077
2078 ::aidl::android::hardware::common::fmq::MQDescriptor<
2079 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2080 descriptor;
2081 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2082 std::shared_ptr<ResultMetadataQueue> resultQueue =
2083 std::make_shared<ResultMetadataQueue>(descriptor);
2084 ASSERT_TRUE(resultQueueRet.isOk());
2085 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2086 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
2087 resultQueue = nullptr;
2088 // Don't use the queue onwards.
2089 }
2090
2091 ret = mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &settings);
2092 ASSERT_TRUE(ret.isOk());
2093
2094 ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
2095 std::vector<CaptureRequest> requests(kBurstFrameCount);
2096 std::vector<buffer_handle_t> buffers(kBurstFrameCount);
2097 std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount);
2098 std::vector<int32_t> isoValues(kBurstFrameCount);
2099 std::vector<CameraMetadata> requestSettings(kBurstFrameCount);
2100
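        // Build a burst that alternates between the minimum and maximum supported
        // sensitivity values, with 3A disabled so the requested ISO must be applied.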
2101 for (int32_t i = 0; i < kBurstFrameCount; i++) {
2102 std::unique_lock<std::mutex> l(mLock);
2103 CaptureRequest& request = requests[i];
2104 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2105 outputBuffers.resize(1);
2106 StreamBuffer& outputBuffer = outputBuffers[0];
2107
2108 isoValues[i] = ((i % 2) == 0) ? isoRange.data.i32[0] : isoRange.data.i32[1];
2109 if (useHalBufManager) {
2110 outputBuffer = {halStreams[0].id, 0,
2111 NativeHandle(), BufferStatus::OK,
2112 NativeHandle(), NativeHandle()};
2113 } else {
2114 allocateGraphicBuffer(previewStream.width, previewStream.height,
2115 android_convertGralloc1To0Usage(
2116 static_cast<uint64_t>(halStreams[0].producerUsage),
2117 static_cast<uint64_t>(halStreams[0].consumerUsage)),
2118 halStreams[0].overrideFormat, &buffers[i]);
2119 outputBuffer = {halStreams[0].id, bufferId + i, ::android::makeToAidl(buffers[i]),
2120 BufferStatus::OK, NativeHandle(), NativeHandle()};
2121 }
2122
2123 requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));
2124
2125 // Disable all 3A routines
2126 uint8_t mode = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF);
2127 ASSERT_EQ(::android::OK, requestMeta.update(ANDROID_CONTROL_MODE, &mode, 1));
2128 ASSERT_EQ(::android::OK,
2129 requestMeta.update(ANDROID_SENSOR_SENSITIVITY, &isoValues[i], 1));
2130 camera_metadata_t* metaBuffer = requestMeta.release();
2131 uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2132 requestSettings[i].metadata = std::vector(
2133 rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2134 overrideRotateAndCrop(&(requestSettings[i]));
2135
2136 request.frameNumber = frameNumber + i;
2137 request.fmqSettingsSize = 0;
2138 request.settings = requestSettings[i];
2139 request.inputBuffer = {
2140 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2141
2142 inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2143 partialResultCount, resultQueue);
2144 mInflightMap[frameNumber + i] = inflightReqs[i];
2145 }
2146
2147 int32_t numRequestProcessed = 0;
2148 std::vector<BufferCache> cachesToRemove;
2149
2150 ndk::ScopedAStatus returnStatus =
2151 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2152 ASSERT_TRUE(returnStatus.isOk());
2153 ASSERT_EQ(numRequestProcessed, kBurstFrameCount);
2154
2155 for (size_t i = 0; i < kBurstFrameCount; i++) {
2156 std::unique_lock<std::mutex> l(mLock);
2157 while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
2158 (!inflightReqs[i]->haveResultMetadata))) {
2159 auto timeout = std::chrono::system_clock::now() +
2160 std::chrono::seconds(kStreamBufferTimeoutSec);
2161 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2162 }
2163
2164 ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
2165 ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
2166 ASSERT_EQ(previewStream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
2167 ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
2168 ASSERT_TRUE(inflightReqs[i]->collectedResult.exists(ANDROID_SENSOR_SENSITIVITY));
2169 camera_metadata_entry_t isoResult =
2170 inflightReqs[i]->collectedResult.find(ANDROID_SENSOR_SENSITIVITY);
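            // The reported sensitivity must match the request within the isoTol (3%) margin.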
2171 ASSERT_TRUE(std::abs(isoResult.data.i32[0] - isoValues[i]) <=
2172 std::round(isoValues[i] * isoTol));
2173 }
2174
2175 if (useHalBufManager) {
2176 verifyBuffersReturned(mSession, previewStream.id, cb);
2177 }
2178 ret = mSession->close();
2179 mSession = nullptr;
2180 ASSERT_TRUE(ret.isOk());
2181 }
2182}
2183
2184// Test whether an incorrect capture request with missing settings will
2185// be reported correctly.
2186TEST_P(CameraAidlTest, processCaptureRequestInvalidSinglePreview) {
2187 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2188 std::vector<AvailableStream> outputPreviewStreams;
2189 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2190 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2191 int64_t bufferId = 1;
2192 int32_t frameNumber = 1;
2193 CameraMetadata settings;
2194
2195 for (const auto& name : cameraDeviceNames) {
2196 Stream previewStream;
2197 std::vector<HalStream> halStreams;
2198 std::shared_ptr<DeviceCb> cb;
2199 bool supportsPartialResults = false;
2200 bool useHalBufManager = false;
2201 int32_t partialResultCount = 0;
2202 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2203 &previewStream /*out*/, &halStreams /*out*/,
2204 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2205 &useHalBufManager /*out*/, &cb /*out*/);
2206 ASSERT_NE(mSession, nullptr);
2207 ASSERT_FALSE(halStreams.empty());
2208
2209 buffer_handle_t buffer_handle = nullptr;
2210
2211 if (useHalBufManager) {
2212 bufferId = 0;
2213 } else {
2214 allocateGraphicBuffer(previewStream.width, previewStream.height,
2215 android_convertGralloc1To0Usage(
2216 static_cast<uint64_t>(halStreams[0].producerUsage),
2217 static_cast<uint64_t>(halStreams[0].consumerUsage)),
2218 halStreams[0].overrideFormat, &buffer_handle);
2219 }
2220
2221 std::vector<CaptureRequest> requests(1);
2222 CaptureRequest& request = requests[0];
2223 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2224 outputBuffers.resize(1);
2225 StreamBuffer& outputBuffer = outputBuffers[0];
2226
2227 outputBuffer = {
2228 halStreams[0].id,
2229 bufferId,
2230 buffer_handle == nullptr ? NativeHandle() : ::android::makeToAidl(buffer_handle),
2231 BufferStatus::OK,
2232 NativeHandle(),
2233 NativeHandle()};
2234
2235 request.inputBuffer = {
2236 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2237 request.frameNumber = frameNumber;
2238 request.fmqSettingsSize = 0;
2239 request.settings = settings;
2240
2241         // Settings were not correctly initialized, so the request should fail here
2242 int32_t numRequestProcessed = 0;
2243 std::vector<BufferCache> cachesToRemove;
2244 ndk::ScopedAStatus ret =
2245 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2246 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
2247 ASSERT_EQ(numRequestProcessed, 0u);
2248
2249 ret = mSession->close();
2250 mSession = nullptr;
2251 ASSERT_TRUE(ret.isOk());
2252 }
2253}
2254
2255// Verify camera offline session behavior
2256TEST_P(CameraAidlTest, switchToOffline) {
2257 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2258 AvailableStream threshold = {kMaxStillWidth, kMaxStillHeight,
2259 static_cast<int32_t>(PixelFormat::BLOB)};
2260 int64_t bufferId = 1;
2261 int32_t frameNumber = 1;
2262 CameraMetadata settings;
2263
2264 for (const auto& name : cameraDeviceNames) {
2265 CameraMetadata meta;
2266 {
2267 std::shared_ptr<ICameraDevice> unusedDevice;
2268 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2269 &unusedDevice);
2270 camera_metadata_t* staticMetaBuffer = clone_camera_metadata(
2271 reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
2272 ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta(
2273 staticMetaBuffer);
2274
2275 if (isOfflineSessionSupported(staticMetaBuffer) != Status::OK) {
2276 ndk::ScopedAStatus ret = mSession->close();
2277 mSession = nullptr;
2278 ASSERT_TRUE(ret.isOk());
2279 continue;
2280 }
2281 ndk::ScopedAStatus ret = mSession->close();
2282 mSession = nullptr;
2283 ASSERT_TRUE(ret.isOk());
2284 }
2285
2286 bool supportsPartialResults = false;
2287 int32_t partialResultCount = 0;
2288 Stream stream;
2289 std::vector<HalStream> halStreams;
2290 std::shared_ptr<DeviceCb> cb;
2291 int32_t jpegBufferSize;
2292 bool useHalBufManager;
2293 configureOfflineStillStream(name, mProvider, &threshold, &mSession /*out*/, &stream /*out*/,
2294 &halStreams /*out*/, &supportsPartialResults /*out*/,
2295 &partialResultCount /*out*/, &cb /*out*/,
2296 &jpegBufferSize /*out*/, &useHalBufManager /*out*/);
2297
2298 auto ret = mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE,
2299 &settings);
2300 ASSERT_TRUE(ret.isOk());
2301
2302 ::aidl::android::hardware::common::fmq::MQDescriptor<
2303 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2304 descriptor;
2305
2306 ndk::ScopedAStatus resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2307 ASSERT_TRUE(resultQueueRet.isOk());
2308 std::shared_ptr<ResultMetadataQueue> resultQueue =
2309 std::make_shared<ResultMetadataQueue>(descriptor);
2310 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2311 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
2312 resultQueue = nullptr;
2313 // Don't use the queue onwards.
2314 }
2315
2316 ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
2317
2318 std::vector<buffer_handle_t> buffers(kBurstFrameCount);
2319 std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount);
2320 std::vector<CameraMetadata> requestSettings(kBurstFrameCount);
2321
2322 std::vector<CaptureRequest> requests(kBurstFrameCount);
2323
2324 HalStream halStream = halStreams[0];
2325 for (uint32_t i = 0; i < kBurstFrameCount; i++) {
2326 CaptureRequest& request = requests[i];
2327 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2328 outputBuffers.resize(1);
2329 StreamBuffer& outputBuffer = outputBuffers[0];
2330
2331 std::unique_lock<std::mutex> l(mLock);
2332 if (useHalBufManager) {
2333 outputBuffer = {halStream.id, 0, NativeHandle(), BufferStatus::OK, NativeHandle(),
2334 NativeHandle()};
2335 } else {
2336 // jpeg buffer (w,h) = (blobLen, 1)
2337 allocateGraphicBuffer(jpegBufferSize, /*height*/ 1,
2338 android_convertGralloc1To0Usage(
2339 static_cast<uint64_t>(halStream.producerUsage),
2340 static_cast<uint64_t>(halStream.consumerUsage)),
2341 halStream.overrideFormat, &buffers[i]);
2342 outputBuffer = {halStream.id, bufferId + i, ::android::makeToAidl(buffers[i]),
2343 BufferStatus::OK, NativeHandle(), NativeHandle()};
2344 }
2345
2346 requestMeta.clear();
2347 requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));
2348
2349 camera_metadata_t* metaBuffer = requestMeta.release();
2350 uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2351 requestSettings[i].metadata = std::vector(
2352 rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2353 overrideRotateAndCrop(&requestSettings[i]);
2354
2355 request.frameNumber = frameNumber + i;
2356 request.fmqSettingsSize = 0;
2357 request.settings = requestSettings[i];
2358 request.inputBuffer = {/*streamId*/ -1,
2359 /*bufferId*/ 0, NativeHandle(),
2360 BufferStatus::ERROR, NativeHandle(),
2361 NativeHandle()};
2362
2363 inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2364 partialResultCount, resultQueue);
2365 mInflightMap[frameNumber + i] = inflightReqs[i];
2366 }
2367
2368 int32_t numRequestProcessed = 0;
2369 std::vector<BufferCache> cachesToRemove;
2370
2371 ndk::ScopedAStatus returnStatus =
2372 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2373 ASSERT_TRUE(returnStatus.isOk());
2374 ASSERT_EQ(numRequestProcessed, kBurstFrameCount);
2375
2376 std::vector<int32_t> offlineStreamIds = {halStream.id};
2377 CameraOfflineSessionInfo offlineSessionInfo;
2378 std::shared_ptr<ICameraOfflineSession> offlineSession;
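        // Move the still capture stream to an offline session while the burst is still in
        // flight; HALs that don't support offline processing must reject the call.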
2379 returnStatus =
2380 mSession->switchToOffline(offlineStreamIds, &offlineSessionInfo, &offlineSession);
2381
2382 if (!halStreams[0].supportOffline) {
2383 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
2384 returnStatus.getServiceSpecificError());
2385 ret = mSession->close();
2386 mSession = nullptr;
2387 ASSERT_TRUE(ret.isOk());
2388 continue;
2389 }
2390
2391 ASSERT_TRUE(returnStatus.isOk());
2392         // The HAL might be unable to find any requests qualified for offline mode.
2393 if (offlineSession == nullptr) {
2394 ret = mSession->close();
2395 mSession = nullptr;
2396 ASSERT_TRUE(ret.isOk());
2397 continue;
2398 }
2399
2400 ASSERT_EQ(offlineSessionInfo.offlineStreams.size(), 1u);
2401 ASSERT_EQ(offlineSessionInfo.offlineStreams[0].id, halStream.id);
2402 ASSERT_NE(offlineSessionInfo.offlineRequests.size(), 0u);
2403
2404         // Close the device session to make sure the offline session does not rely on it
2405 ret = mSession->close();
2406 mSession = nullptr;
2407 ASSERT_TRUE(ret.isOk());
2408
2409 ::aidl::android::hardware::common::fmq::MQDescriptor<
2410 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2411 offlineResultDescriptor;
2412
2413 auto offlineResultQueueRet =
2414 offlineSession->getCaptureResultMetadataQueue(&offlineResultDescriptor);
2415 std::shared_ptr<ResultMetadataQueue> offlineResultQueue =
2416                 std::make_shared<ResultMetadataQueue>(offlineResultDescriptor);
2417 if (!offlineResultQueue->isValid() || offlineResultQueue->availableToWrite() <= 0) {
2418 ALOGE("%s: offline session returns empty result metadata fmq, not use it", __func__);
2419 offlineResultQueue = nullptr;
2420 // Don't use the queue onwards.
2421 }
2422 ASSERT_TRUE(offlineResultQueueRet.isOk());
2423
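        // Redirect pending in-flight requests to the offline session's result queue before
        // collecting the remaining results.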
2424 updateInflightResultQueue(offlineResultQueue);
2425
2426 ret = offlineSession->setCallback(cb);
2427 ASSERT_TRUE(ret.isOk());
2428
2429 for (size_t i = 0; i < kBurstFrameCount; i++) {
2430 std::unique_lock<std::mutex> l(mLock);
2431 while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
2432 (!inflightReqs[i]->haveResultMetadata))) {
2433 auto timeout = std::chrono::system_clock::now() +
2434 std::chrono::seconds(kStreamBufferTimeoutSec);
2435 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2436 }
2437
2438 ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
2439 ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
2440 ASSERT_EQ(stream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
2441 ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
2442 }
2443
2444 ret = offlineSession->close();
2445 ASSERT_TRUE(ret.isOk());
2446 }
2447}
2448
2449// Check whether an invalid capture request with missing output buffers
2450// will be reported correctly.
2451TEST_P(CameraAidlTest, processCaptureRequestInvalidBuffer) {
2452 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2453 std::vector<AvailableStream> outputBlobStreams;
2454 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2455 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2456 int32_t frameNumber = 1;
2457 CameraMetadata settings;
2458
2459 for (const auto& name : cameraDeviceNames) {
2460 Stream previewStream;
2461 std::vector<HalStream> halStreams;
2462 std::shared_ptr<DeviceCb> cb;
2463 bool supportsPartialResults = false;
2464 bool useHalBufManager = false;
2465 int32_t partialResultCount = 0;
2466 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2467 &previewStream /*out*/, &halStreams /*out*/,
2468 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2469 &useHalBufManager /*out*/, &cb /*out*/);
2470
2471 RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
2472 ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings);
2473 ASSERT_TRUE(ret.isOk());
2474 overrideRotateAndCrop(&settings);
2475
2476 std::vector<CaptureRequest> requests(1);
2477 CaptureRequest& request = requests[0];
2478 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2479 outputBuffers.resize(1);
2480 // Empty output buffer
2481 outputBuffers[0] = {
2482 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2483
2484 request.inputBuffer = {
2485 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2486 request.frameNumber = frameNumber;
2487 request.fmqSettingsSize = 0;
2488 request.settings = settings;
2489
2490         // Output buffers are missing, so the request should fail here
2491 int32_t numRequestProcessed = 0;
2492 std::vector<BufferCache> cachesToRemove;
2493 ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2494 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
2495 ASSERT_EQ(numRequestProcessed, 0u);
2496
2497 ret = mSession->close();
2498 mSession = nullptr;
2499 ASSERT_TRUE(ret.isOk());
2500 }
2501}
2502
2503// Generate, trigger and flush a preview request
2504TEST_P(CameraAidlTest, flushPreviewRequest) {
2505 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2506 std::vector<AvailableStream> outputPreviewStreams;
2507 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2508 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2509 int64_t bufferId = 1;
2510 int32_t frameNumber = 1;
2511 CameraMetadata settings;
2512
2513 for (const auto& name : cameraDeviceNames) {
2514 Stream previewStream;
2515 std::vector<HalStream> halStreams;
2516 std::shared_ptr<DeviceCb> cb;
2517 bool supportsPartialResults = false;
2518 bool useHalBufManager = false;
2519 int32_t partialResultCount = 0;
2520
2521 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2522 &previewStream /*out*/, &halStreams /*out*/,
2523 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2524 &useHalBufManager /*out*/, &cb /*out*/);
2525
2526 ASSERT_NE(mSession, nullptr);
2527 ASSERT_NE(cb, nullptr);
2528 ASSERT_FALSE(halStreams.empty());
2529
2530 ::aidl::android::hardware::common::fmq::MQDescriptor<
2531 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2532 descriptor;
2533
2534 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2535 std::shared_ptr<ResultMetadataQueue> resultQueue =
2536 std::make_shared<ResultMetadataQueue>(descriptor);
2537 ASSERT_TRUE(resultQueueRet.isOk());
2538 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2539 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
2540 resultQueue = nullptr;
2541 // Don't use the queue onwards.
2542 }
2543
2544 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
2545 1, false, supportsPartialResults, partialResultCount, resultQueue);
2546 RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
2547
2548 ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings);
2549 ASSERT_TRUE(ret.isOk());
2550 overrideRotateAndCrop(&settings);
2551
2552 buffer_handle_t buffer_handle;
2553 std::vector<CaptureRequest> requests(1);
2554 CaptureRequest& request = requests[0];
2555 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2556 outputBuffers.resize(1);
2557 StreamBuffer& outputBuffer = outputBuffers[0];
2558 if (useHalBufManager) {
2559 bufferId = 0;
2560 outputBuffer = {halStreams[0].id, bufferId, NativeHandle(),
2561 BufferStatus::OK, NativeHandle(), NativeHandle()};
2562 } else {
2563 allocateGraphicBuffer(previewStream.width, previewStream.height,
2564 android_convertGralloc1To0Usage(
2565 static_cast<uint64_t>(halStreams[0].producerUsage),
2566 static_cast<uint64_t>(halStreams[0].consumerUsage)),
2567 halStreams[0].overrideFormat, &buffer_handle);
2568 outputBuffer = {halStreams[0].id, bufferId, ::android::makeToAidl(buffer_handle),
2569 BufferStatus::OK, NativeHandle(), NativeHandle()};
2570 }
2571
2572 request.frameNumber = frameNumber;
2573 request.fmqSettingsSize = 0;
2574 request.settings = settings;
2575 request.inputBuffer = {
2576 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2577
2578 {
2579 std::unique_lock<std::mutex> l(mLock);
2580 mInflightMap.clear();
2581 mInflightMap[frameNumber] = inflightReq;
2582 }
2583
2584 int32_t numRequestProcessed = 0;
2585 std::vector<BufferCache> cachesToRemove;
2586 ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2587 ASSERT_TRUE(ret.isOk());
2588 ASSERT_EQ(numRequestProcessed, 1u);
2589
2590 // Flush before waiting for request to complete.
2591 ndk::ScopedAStatus returnStatus = mSession->flush();
2592 ASSERT_TRUE(returnStatus.isOk());
2593
2594 {
2595 std::unique_lock<std::mutex> l(mLock);
2596 while (!inflightReq->errorCodeValid &&
2597 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2598 auto timeout = std::chrono::system_clock::now() +
2599 std::chrono::seconds(kStreamBufferTimeoutSec);
2600 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2601 }
2602
2603 if (!inflightReq->errorCodeValid) {
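            // After a flush the request may either complete normally or be dropped with a
            // request/result/buffer error; both outcomes are acceptable.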
2604 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2605 ASSERT_EQ(previewStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId);
2606 } else {
2607 switch (inflightReq->errorCode) {
2608 case ErrorCode::ERROR_REQUEST:
2609 case ErrorCode::ERROR_RESULT:
2610 case ErrorCode::ERROR_BUFFER:
2611 // Expected
2612 break;
2613 case ErrorCode::ERROR_DEVICE:
2614 default:
2615 FAIL() << "Unexpected error:"
2616 << static_cast<uint32_t>(inflightReq->errorCode);
2617 }
2618 }
2619 }
2620
2621 if (useHalBufManager) {
2622 verifyBuffersReturned(mSession, previewStream.id, cb);
2623 }
2624
2625 ret = mSession->close();
2626 mSession = nullptr;
2627 ASSERT_TRUE(ret.isOk());
2628 }
2629}
2630
2631 // Verify that the camera flushes correctly without any pending requests.
2632TEST_P(CameraAidlTest, flushEmpty) {
2633 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2634 std::vector<AvailableStream> outputPreviewStreams;
2635 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2636 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2637
2638 for (const auto& name : cameraDeviceNames) {
2639 Stream previewStream;
2640 std::vector<HalStream> halStreams;
2641 std::shared_ptr<DeviceCb> cb;
2642 bool supportsPartialResults = false;
2643 bool useHalBufManager = false;
2644
2645 int32_t partialResultCount = 0;
2646 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2647 &previewStream /*out*/, &halStreams /*out*/,
2648 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2649 &useHalBufManager /*out*/, &cb /*out*/);
2650
2651 ndk::ScopedAStatus returnStatus = mSession->flush();
2652 ASSERT_TRUE(returnStatus.isOk());
2653
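        // No requests were submitted, so no result notifications are expected and the
        // wait below must time out.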
2654 {
2655 std::unique_lock<std::mutex> l(mLock);
2656 auto timeout = std::chrono::system_clock::now() +
2657 std::chrono::milliseconds(kEmptyFlushTimeoutMSec);
2658 ASSERT_EQ(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2659 }
2660
2661 ndk::ScopedAStatus ret = mSession->close();
2662 mSession = nullptr;
2663 ASSERT_TRUE(ret.isOk());
2664 }
2665}
2666
2667// Test camera provider notify method
2668TEST_P(CameraAidlTest, providerDeviceStateNotification) {
2669 notifyDeviceState(ICameraProvider::DEVICE_STATE_BACK_COVERED);
2670 notifyDeviceState(ICameraProvider::DEVICE_STATE_NORMAL);
2671}
2672
2673// Verify that all supported stream formats and sizes can be configured
2674 // successfully for the injection camera.
2675TEST_P(CameraAidlTest, configureInjectionStreamsAvailableOutputs) {
2676 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2677 std::vector<AvailableStream> outputStreams;
2678
2679 for (const auto& name : cameraDeviceNames) {
2680 CameraMetadata metadata;
2681
2682 std::shared_ptr<ICameraInjectionSession> injectionSession;
2683 std::shared_ptr<ICameraDevice> unusedDevice;
2684 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2685 &unusedDevice /*out*/);
2686 if (injectionSession == nullptr) {
2687 continue;
2688 }
2689
2690 camera_metadata_t* staticMetaBuffer =
2691 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2692 CameraMetadata chars;
2693 chars.metadata = metadata.metadata;
2694
2695 outputStreams.clear();
2696 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams));
2697 ASSERT_NE(0u, outputStreams.size());
2698
2699 int32_t jpegBufferSize = 0;
2700 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize));
2701 ASSERT_NE(0u, jpegBufferSize);
2702
2703 int32_t streamId = 0;
2704 int32_t streamConfigCounter = 0;
2705 for (auto& it : outputStreams) {
2706 Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format));
2707 Stream stream = {streamId,
2708 StreamType::OUTPUT,
2709 it.width,
2710 it.height,
2711 static_cast<PixelFormat>(it.format),
2712 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2713 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2714 dataspace,
2715 StreamRotation::ROTATION_0,
2716 std::string(),
2717 jpegBufferSize,
2718 0,
2719                             {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2720 RequestAvailableDynamicRangeProfilesMap::
2721 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2722
2723 std::vector<Stream> streams = {stream};
2724 StreamConfiguration config;
2725 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2726 jpegBufferSize);
2727
2728 config.streamConfigCounter = streamConfigCounter++;
2729 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
2730 ASSERT_TRUE(s.isOk());
2731 streamId++;
2732 }
2733
2734 std::shared_ptr<ICameraDeviceSession> session;
2735 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2736 ASSERT_TRUE(ret.isOk());
2737 ASSERT_NE(session, nullptr);
2738 ret = session->close();
2739 ASSERT_TRUE(ret.isOk());
2740 }
2741}
2742
2743 // Check for correct handling of invalid/incorrect configuration parameters for the injection camera.
2744TEST_P(CameraAidlTest, configureInjectionStreamsInvalidOutputs) {
2745 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2746 std::vector<AvailableStream> outputStreams;
2747
2748 for (const auto& name : cameraDeviceNames) {
2749 CameraMetadata metadata;
2750 std::shared_ptr<ICameraInjectionSession> injectionSession;
2751 std::shared_ptr<ICameraDevice> unusedDevice;
2752 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2753 &unusedDevice);
2754 if (injectionSession == nullptr) {
2755 continue;
2756 }
2757
2758 camera_metadata_t* staticMetaBuffer =
2759 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2760 std::shared_ptr<ICameraDeviceSession> session;
2761 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2762 ASSERT_TRUE(ret.isOk());
2763 ASSERT_NE(session, nullptr);
2764
2765 CameraMetadata chars;
2766 chars.metadata = metadata.metadata;
2767
2768 outputStreams.clear();
2769 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams));
2770 ASSERT_NE(0u, outputStreams.size());
2771
2772 int32_t jpegBufferSize = 0;
2773 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize));
2774 ASSERT_NE(0u, jpegBufferSize);
2775
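        // A stream with zero width and height must be rejected.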
2776 int32_t streamId = 0;
2777 Stream stream = {streamId++,
2778 StreamType::OUTPUT,
2779 0,
2780 0,
2781 static_cast<PixelFormat>(outputStreams[0].format),
2782 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2783 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2784 Dataspace::UNKNOWN,
2785 StreamRotation::ROTATION_0,
2786 std::string(),
2787 jpegBufferSize,
2788 0,
2789                         {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2790 RequestAvailableDynamicRangeProfilesMap::
2791 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2792
2793 int32_t streamConfigCounter = 0;
2794 std::vector<Stream> streams = {stream};
2795 StreamConfiguration config;
2796 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2797 jpegBufferSize);
2798
2799 config.streamConfigCounter = streamConfigCounter++;
2800 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
2801 ASSERT_TRUE(
2802 (static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) == s.getServiceSpecificError()) ||
2803 (static_cast<int32_t>(Status::INTERNAL_ERROR) == s.getServiceSpecificError()));
2804
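        // A stream with unreasonably large dimensions must be rejected with ILLEGAL_ARGUMENT.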
2805 stream = {streamId++,
2806 StreamType::OUTPUT,
2807 INT32_MAX,
2808 INT32_MAX,
2809 static_cast<PixelFormat>(outputStreams[0].format),
2810 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2811 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2812 Dataspace::UNKNOWN,
2813 StreamRotation::ROTATION_0,
2814 std::string(),
2815 jpegBufferSize,
2816 0,
2817                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2818 RequestAvailableDynamicRangeProfilesMap::
2819 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2820
2821        streams[0] = stream;
2822 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2823 jpegBufferSize);
2824 config.streamConfigCounter = streamConfigCounter++;
2825 s = injectionSession->configureInjectionStreams(config, chars);
2826 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
2827
2828 for (auto& it : outputStreams) {
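            // An unsupported pixel format must be rejected with ILLEGAL_ARGUMENT.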
2829 stream = {streamId++,
2830 StreamType::OUTPUT,
2831 it.width,
2832 it.height,
2833 static_cast<PixelFormat>(INT32_MAX),
2834 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2835 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2836 Dataspace::UNKNOWN,
2837 StreamRotation::ROTATION_0,
2838 std::string(),
2839 jpegBufferSize,
2840 0,
2841                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2842 RequestAvailableDynamicRangeProfilesMap::
2843 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2844            streams[0] = stream;
2845 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2846 jpegBufferSize);
2847 config.streamConfigCounter = streamConfigCounter++;
2848 s = injectionSession->configureInjectionStreams(config, chars);
2849 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
2850
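            // An invalid stream rotation must be rejected with ILLEGAL_ARGUMENT.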
2851 stream = {streamId++,
2852 StreamType::OUTPUT,
2853 it.width,
2854 it.height,
2855 static_cast<PixelFormat>(it.format),
2856 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2857 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2858 Dataspace::UNKNOWN,
2859 static_cast<StreamRotation>(INT32_MAX),
2860 std::string(),
2861 jpegBufferSize,
2862 0,
2863                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2864 RequestAvailableDynamicRangeProfilesMap::
2865 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2866            streams[0] = stream;
2867 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2868 jpegBufferSize);
2869 config.streamConfigCounter = streamConfigCounter++;
2870 s = injectionSession->configureInjectionStreams(config, chars);
2871 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
2872 }
2873
2874 ret = session->close();
2875 ASSERT_TRUE(ret.isOk());
2876 }
2877}
2878
2879// Check whether session parameters are supported for the injection camera. If the HAL supports
2880// them, try to configure a preview stream using them.
2881TEST_P(CameraAidlTest, configureInjectionStreamsWithSessionParameters) {
2882 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2883 std::vector<AvailableStream> outputPreviewStreams;
2884 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2885 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2886
2887 for (const auto& name : cameraDeviceNames) {
2888 CameraMetadata metadata;
2889 std::shared_ptr<ICameraInjectionSession> injectionSession;
2890 std::shared_ptr<ICameraDevice> unusedDevice;
2891 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2892 &unusedDevice /*out*/);
2893 if (injectionSession == nullptr) {
2894 continue;
2895 }
2896
2897 std::shared_ptr<ICameraDeviceSession> session;
2898 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2899 ASSERT_TRUE(ret.isOk());
2900 ASSERT_NE(session, nullptr);
2901
2902 camera_metadata_t* staticMetaBuffer =
2903 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2904 CameraMetadata chars;
2905 chars.metadata = metadata.metadata;
2906
2907 std::unordered_set<int32_t> availableSessionKeys;
2908 Status rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
2909 &availableSessionKeys);
2910 ASSERT_EQ(Status::OK, rc);
2911 if (availableSessionKeys.empty()) {
2912 ret = session->close();
2913 ASSERT_TRUE(ret.isOk());
2914 continue;
2915 }
2916
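        // Construct default PREVIEW settings and extract the subset advertised as session parameter keys.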
2917 android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings;
2918 android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams,
2919 modifiedSessionParams;
2920 constructFilteredSettings(session, availableSessionKeys, RequestTemplate::PREVIEW,
2921 &previewRequestSettings, &sessionParams);
2922 if (sessionParams.isEmpty()) {
2923 ret = session->close();
2924 ASSERT_TRUE(ret.isOk());
2925 continue;
2926 }
2927
2928 outputPreviewStreams.clear();
2929
2930 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams,
2931 &previewThreshold));
2932 ASSERT_NE(0u, outputPreviewStreams.size());
2933
2934 Stream previewStream = {
2935 0,
2936 StreamType::OUTPUT,
2937 outputPreviewStreams[0].width,
2938 outputPreviewStreams[0].height,
2939 static_cast<PixelFormat>(outputPreviewStreams[0].format),
2940 static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
2941 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2942 Dataspace::UNKNOWN,
2943 StreamRotation::ROTATION_0,
2944 std::string(),
2945 0,
2946 -1,
2947                {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2948 RequestAvailableDynamicRangeProfilesMap::
2949 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2950        std::vector<Stream> streams = {previewStream};
2951 StreamConfiguration config;
2952 config.streams = streams;
2953 config.operationMode = StreamConfigurationMode::NORMAL_MODE;
2954
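        // Serialize the session parameters and attach them to the stream configuration.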
2955 modifiedSessionParams = sessionParams;
2956 camera_metadata_t* sessionParamsBuffer = sessionParams.release();
2957 uint8_t* rawSessionParamsBuffer = reinterpret_cast<uint8_t*>(sessionParamsBuffer);
2958 config.sessionParams.metadata =
2959 std::vector(rawSessionParamsBuffer,
2960 rawSessionParamsBuffer + get_camera_metadata_size(sessionParamsBuffer));
2961
2962 config.streamConfigCounter = 0;
2964 config.multiResolutionInputImage = false;
2965
2966 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
2967 ASSERT_TRUE(s.isOk());
2968
2969 sessionParams.acquire(sessionParamsBuffer);
2971 ret = session->close();
2972 ASSERT_TRUE(ret.isOk());
2973 }
2974}
2975
2976// Verify that valid stream use cases can be configured successfully, and invalid use cases
2977// fail stream configuration.
2978TEST_P(CameraAidlTest, configureStreamsUseCases) {
2979 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2980
2981 for (const auto& name : cameraDeviceNames) {
2982 CameraMetadata meta;
2983 std::shared_ptr<ICameraDevice> cameraDevice;
2984
2985 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2986 &cameraDevice /*out*/);
2987
2988 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
2989        // Check if the camera supports depth only
2990 if (isDepthOnly(staticMeta)) {
2991 ndk::ScopedAStatus ret = mSession->close();
2992 mSession = nullptr;
2993 ASSERT_TRUE(ret.isOk());
2994 continue;
2995 }
2996
2997 std::vector<AvailableStream> outputPreviewStreams;
2998 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2999 static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
3000 ASSERT_EQ(Status::OK,
3001 getAvailableOutputStreams(staticMeta, outputPreviewStreams, &previewThreshold));
3002 ASSERT_NE(0u, outputPreviewStreams.size());
3003
3004 // Combine valid and invalid stream use cases
3005        std::vector<int64_t> useCases(kMandatoryUseCases);
3006        useCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL + 1);
3007
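        // Read the supported stream use cases from static metadata; fall back to DEFAULT when the tag is absent.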
3008        std::vector<int64_t> supportedUseCases;
3009        camera_metadata_ro_entry entry;
3010 auto retcode = find_camera_metadata_ro_entry(
3011 staticMeta, ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES, &entry);
3012 if ((0 == retcode) && (entry.count > 0)) {
3013            supportedUseCases.insert(supportedUseCases.end(), entry.data.i64,
3014 entry.data.i64 + entry.count);
3015        } else {
3016 supportedUseCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
3017 }
3018
3019 std::vector<Stream> streams(1);
3020        streams[0] = {0,
3021 StreamType::OUTPUT,
3022 outputPreviewStreams[0].width,
3023 outputPreviewStreams[0].height,
3024 static_cast<PixelFormat>(outputPreviewStreams[0].format),
3025 static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
3026 GRALLOC1_CONSUMER_USAGE_CPU_READ),
3027 Dataspace::UNKNOWN,
3028 StreamRotation::ROTATION_0,
3029 std::string(),
3030 0,
3031 -1,
3032 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
3033 RequestAvailableDynamicRangeProfilesMap::
3034 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
3035
3036 int32_t streamConfigCounter = 0;
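        // Use the default STILL_CAPTURE request settings as session parameters for this configuration.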
3037 CameraMetadata req;
3038 StreamConfiguration config;
3039 RequestTemplate reqTemplate = RequestTemplate::STILL_CAPTURE;
3040 ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &req);
3041 ASSERT_TRUE(ret.isOk());
3042 config.sessionParams = req;
3043
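        // For each use case, the stream combination query and configureStreams must agree with the advertised support.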
3044        for (int64_t useCase : useCases) {
3045            bool useCaseSupported = std::find(supportedUseCases.begin(), supportedUseCases.end(),
3046 useCase) != supportedUseCases.end();
3047
3048 streams[0].useCase = static_cast<
3049 aidl::android::hardware::camera::metadata::ScalerAvailableStreamUseCases>(
3050 useCase);
3051 config.streams = streams;
3052 config.operationMode = StreamConfigurationMode::NORMAL_MODE;
3053 config.streamConfigCounter = streamConfigCounter;
3054 config.multiResolutionInputImage = false;
3055
3056 bool combSupported;
3057 ret = cameraDevice->isStreamCombinationSupported(config, &combSupported);
3058            if (static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED) ==
3059 ret.getServiceSpecificError()) {
3060 continue;
3061            }
3062
3063            ASSERT_TRUE(ret.isOk());
3064            ASSERT_EQ(combSupported, useCaseSupported);
3065
3066 std::vector<HalStream> halStreams;
3067 ret = mSession->configureStreams(config, &halStreams);
3068 ALOGI("configureStreams returns status: %d", ret.getServiceSpecificError());
3069 if (useCaseSupported) {
3070 ASSERT_TRUE(ret.isOk());
3071 ASSERT_EQ(1u, halStreams.size());
3072 } else {
3073 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
3074 ret.getServiceSpecificError());
3075 }
3076 }
3077 ret = mSession->close();
3078 mSession = nullptr;
3079 ASSERT_TRUE(ret.isOk());
3080 }
3081}
3082
3083GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(CameraAidlTest);
3084INSTANTIATE_TEST_SUITE_P(
3085 PerInstance, CameraAidlTest,
3086 testing::ValuesIn(android::getAidlHalInstanceNames(ICameraProvider::descriptor)),
3087 android::hardware::PrintInstanceNameToString);