blob: 557061ad500ac9b82c15dbb2be30f8920b69eb48 [file] [log] [blame]
/*
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17#include <aidl/Vintf.h>
18#include <aidl/android/hardware/camera/common/VendorTagSection.h>
19#include <aidl/android/hardware/camera/device/ICameraDevice.h>
20#include <aidlcommonsupport/NativeHandle.h>
21#include <camera_aidl_test.h>
22#include <cutils/properties.h>
23#include <device_cb.h>
24#include <empty_device_cb.h>
25#include <grallocusage/GrallocUsageConversion.h>
26#include <gtest/gtest.h>
27#include <hardware/gralloc.h>
28#include <hardware/gralloc1.h>
29#include <hidl/GtestPrinter.h>
30#include <hidl/HidlSupport.h>
31#include <torch_provider_cb.h>
32#include <list>
33
34using ::aidl::android::hardware::camera::common::CameraDeviceStatus;
35using ::aidl::android::hardware::camera::common::CameraResourceCost;
36using ::aidl::android::hardware::camera::common::TorchModeStatus;
37using ::aidl::android::hardware::camera::common::VendorTagSection;
38using ::aidl::android::hardware::camera::device::ICameraDevice;
Austin Borger4728fc42022-07-15 11:27:53 -070039using ::aidl::android::hardware::camera::metadata::RequestAvailableColorSpaceProfilesMap;
Avichal Rakeshd3503a32022-02-25 06:23:14 +000040using ::aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap;
Avichal Rakesh362242f2022-02-08 12:40:53 -080041using ::aidl::android::hardware::camera::metadata::SensorPixelMode;
42using ::aidl::android::hardware::camera::provider::CameraIdAndStreamCombination;
Avichal Rakesh4bf91c72022-05-23 20:44:02 +000043using ::aidl::android::hardware::camera::provider::BnCameraProviderCallback;
Avichal Rakesh362242f2022-02-08 12:40:53 -080044
45using ::ndk::ScopedAStatus;
46
namespace {
// Number of frames per burst — presumably used by burst-capture tests later in
// this file; not referenced in this chunk. TODO confirm against full file.
const int32_t kBurstFrameCount = 10;
// Maximum still-capture resolution bounds — presumably used to pick a JPEG
// stream size in later tests; not referenced in this chunk.
const uint32_t kMaxStillWidth = 2048;
const uint32_t kMaxStillHeight = 1536;

// Timeout in milliseconds — by its name, the wait budget for flush() on a
// session with no in-flight requests; not referenced in this chunk.
const int64_t kEmptyFlushTimeoutMSec = 200;

// Stream use cases that a HAL advertising stream-use-case support is expected
// to handle; not referenced in this chunk.
const static std::vector<int64_t> kMandatoryUseCases = {
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL};
}  // namespace
62
63TEST_P(CameraAidlTest, getCameraIdList) {
64 std::vector<std::string> idList;
65 ScopedAStatus ret = mProvider->getCameraIdList(&idList);
66 ASSERT_TRUE(ret.isOk());
67
68 for (size_t i = 0; i < idList.size(); i++) {
69 ALOGI("Camera Id[%zu] is %s", i, idList[i].c_str());
70 }
71}
72
73// Test if ICameraProvider::getVendorTags returns Status::OK
74TEST_P(CameraAidlTest, getVendorTags) {
75 std::vector<VendorTagSection> vendorTags;
76 ScopedAStatus ret = mProvider->getVendorTags(&vendorTags);
77
78 ASSERT_TRUE(ret.isOk());
79 for (size_t i = 0; i < vendorTags.size(); i++) {
80 ALOGI("Vendor tag section %zu name %s", i, vendorTags[i].sectionName.c_str());
81 for (auto& tag : vendorTags[i].tags) {
82 ALOGI("Vendor tag id %u name %s type %d", tag.tagId, tag.tagName.c_str(),
83 (int)tag.tagType);
84 }
85 }
86}
87
88// Test if ICameraProvider::setCallback returns Status::OK
89TEST_P(CameraAidlTest, setCallback) {
Avichal Rakesh4bf91c72022-05-23 20:44:02 +000090 struct ProviderCb : public BnCameraProviderCallback {
Avichal Rakesh362242f2022-02-08 12:40:53 -080091 ScopedAStatus cameraDeviceStatusChange(const std::string& cameraDeviceName,
92 CameraDeviceStatus newStatus) override {
93 ALOGI("camera device status callback name %s, status %d", cameraDeviceName.c_str(),
94 (int)newStatus);
95 return ScopedAStatus::ok();
96 }
97 ScopedAStatus torchModeStatusChange(const std::string& cameraDeviceName,
98 TorchModeStatus newStatus) override {
99 ALOGI("Torch mode status callback name %s, status %d", cameraDeviceName.c_str(),
100 (int)newStatus);
101 return ScopedAStatus::ok();
102 }
103 ScopedAStatus physicalCameraDeviceStatusChange(const std::string& cameraDeviceName,
104 const std::string& physicalCameraDeviceName,
105 CameraDeviceStatus newStatus) override {
106 ALOGI("physical camera device status callback name %s, physical camera name %s,"
107 " status %d",
108 cameraDeviceName.c_str(), physicalCameraDeviceName.c_str(), (int)newStatus);
109 return ScopedAStatus::ok();
110 }
111 };
112
Avichal Rakesh4bf91c72022-05-23 20:44:02 +0000113 std::shared_ptr<ProviderCb> cb = ndk::SharedRefBase::make<ProviderCb>();
Avichal Rakesh362242f2022-02-08 12:40:53 -0800114 ScopedAStatus ret = mProvider->setCallback(cb);
115 ASSERT_TRUE(ret.isOk());
116 ret = mProvider->setCallback(nullptr);
Avichal Rakesh4bf91c72022-05-23 20:44:02 +0000117 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
Avichal Rakesh362242f2022-02-08 12:40:53 -0800118}
119
120// Test if ICameraProvider::getCameraDeviceInterface returns Status::OK and non-null device
121TEST_P(CameraAidlTest, getCameraDeviceInterface) {
122 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
123
124 for (const auto& name : cameraDeviceNames) {
125 std::shared_ptr<ICameraDevice> cameraDevice;
126 ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &cameraDevice);
127 ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
128 ret.getServiceSpecificError());
129 ASSERT_TRUE(ret.isOk());
130 ASSERT_NE(cameraDevice, nullptr);
131 }
132}
133
134// Verify that the device resource cost can be retrieved and the values are
135// correct.
136TEST_P(CameraAidlTest, getResourceCost) {
137 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
138
139 for (const auto& deviceName : cameraDeviceNames) {
140 std::shared_ptr<ICameraDevice> cameraDevice;
141 ScopedAStatus ret = mProvider->getCameraDeviceInterface(deviceName, &cameraDevice);
142 ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
143 ret.getServiceSpecificError());
144 ASSERT_TRUE(ret.isOk());
145 ASSERT_NE(cameraDevice, nullptr);
146
147 CameraResourceCost resourceCost;
148 ret = cameraDevice->getResourceCost(&resourceCost);
149 ALOGI("getResourceCost returns: %d:%d", ret.getExceptionCode(),
150 ret.getServiceSpecificError());
151 ASSERT_TRUE(ret.isOk());
152
153 ALOGI(" Resource cost is %d", resourceCost.resourceCost);
154 ASSERT_LE(resourceCost.resourceCost, 100u);
155
156 for (const auto& name : resourceCost.conflictingDevices) {
157 ALOGI(" Conflicting device: %s", name.c_str());
158 }
159 }
160}
161
162TEST_P(CameraAidlTest, systemCameraTest) {
163 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
164 std::map<std::string, std::vector<SystemCameraKind>> hiddenPhysicalIdToLogicalMap;
165 for (const auto& name : cameraDeviceNames) {
166 std::shared_ptr<ICameraDevice> device;
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +0000167 ALOGI("systemCameraTest: Testing camera device %s", name.c_str());
Avichal Rakesh362242f2022-02-08 12:40:53 -0800168 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
169 ASSERT_TRUE(ret.isOk());
170 ASSERT_NE(device, nullptr);
171
172 CameraMetadata cameraCharacteristics;
173 ret = device->getCameraCharacteristics(&cameraCharacteristics);
174 ASSERT_TRUE(ret.isOk());
175
176 const camera_metadata_t* staticMeta =
177 reinterpret_cast<const camera_metadata_t*>(cameraCharacteristics.metadata.data());
178 Status rc = isLogicalMultiCamera(staticMeta);
179 if (rc == Status::OPERATION_NOT_SUPPORTED) {
180 return;
181 }
182
183 ASSERT_EQ(rc, Status::OK);
184 std::unordered_set<std::string> physicalIds;
185 ASSERT_EQ(getPhysicalCameraIds(staticMeta, &physicalIds), Status::OK);
186 SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
187 Status retStatus = getSystemCameraKind(staticMeta, &systemCameraKind);
188 ASSERT_EQ(retStatus, Status::OK);
189
190 for (auto physicalId : physicalIds) {
191 bool isPublicId = false;
192 for (auto& deviceName : cameraDeviceNames) {
193 std::string publicVersion, publicId;
194 ASSERT_TRUE(matchDeviceName(deviceName, mProviderType, &publicVersion, &publicId));
195 if (physicalId == publicId) {
196 isPublicId = true;
197 break;
198 }
199 }
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +0000200
Avichal Rakesh362242f2022-02-08 12:40:53 -0800201 // For hidden physical cameras, collect their associated logical cameras
202 // and store the system camera kind.
203 if (!isPublicId) {
204 auto it = hiddenPhysicalIdToLogicalMap.find(physicalId);
205 if (it == hiddenPhysicalIdToLogicalMap.end()) {
206 hiddenPhysicalIdToLogicalMap.insert(std::make_pair(
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +0000207 physicalId, std::vector<SystemCameraKind>({systemCameraKind})));
Avichal Rakesh362242f2022-02-08 12:40:53 -0800208 } else {
209 it->second.push_back(systemCameraKind);
210 }
211 }
212 }
213 }
214
215 // Check that the system camera kind of the logical cameras associated with
216 // each hidden physical camera is the same.
217 for (const auto& it : hiddenPhysicalIdToLogicalMap) {
218 SystemCameraKind neededSystemCameraKind = it.second.front();
219 for (auto foundSystemCamera : it.second) {
220 ASSERT_EQ(neededSystemCameraKind, foundSystemCamera);
221 }
222 }
223}
224
225// Verify that the static camera characteristics can be retrieved
226// successfully.
227TEST_P(CameraAidlTest, getCameraCharacteristics) {
228 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
229
230 for (const auto& name : cameraDeviceNames) {
231 std::shared_ptr<ICameraDevice> device;
232 ALOGI("getCameraCharacteristics: Testing camera device %s", name.c_str());
233 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
234 ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
235 ret.getServiceSpecificError());
236 ASSERT_TRUE(ret.isOk());
237 ASSERT_NE(device, nullptr);
238
239 CameraMetadata chars;
240 ret = device->getCameraCharacteristics(&chars);
241 ASSERT_TRUE(ret.isOk());
242 verifyCameraCharacteristics(chars);
243 verifyMonochromeCharacteristics(chars);
244 verifyRecommendedConfigs(chars);
245 verifyLogicalOrUltraHighResCameraMetadata(name, device, chars, cameraDeviceNames);
246
247 ASSERT_TRUE(ret.isOk());
248
249 // getPhysicalCameraCharacteristics will fail for publicly
250 // advertised camera IDs.
251 std::string version, cameraId;
252 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &cameraId));
253 CameraMetadata devChars;
254 ret = device->getPhysicalCameraCharacteristics(cameraId, &devChars);
255 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
256 ASSERT_EQ(0, devChars.metadata.size());
257 }
258}
259
// Verify that the torch strength level can be set and retrieved successfully.
// Flow: read the default strength level from static metadata, turn the torch
// on at level 2, confirm the level reads back as 2, then turn the torch off
// and confirm the level resets to the default. Torch state transitions arrive
// asynchronously via TorchProviderCb, which updates mTorchStatus under
// mTorchLock and signals mTorchCond.
TEST_P(CameraAidlTest, turnOnTorchWithStrengthLevel) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    // Register the torch callback so status changes update mTorchStatus.
    std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this);
    ndk::ScopedAStatus ret = mProvider->setCallback(cb);
    ASSERT_TRUE(ret.isOk());

    for (const auto& name : cameraDeviceNames) {
        // Only assigned (and only read) on the torch-strength-supported path below.
        int32_t defaultLevel;
        std::shared_ptr<ICameraDevice> device;
        ALOGI("%s: Testing camera device %s", __FUNCTION__, name.c_str());

        ret = mProvider->getCameraDeviceInterface(name, &device);
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        CameraMetadata chars;
        ret = device->getCameraCharacteristics(&chars);
        ASSERT_TRUE(ret.isOk());

        const camera_metadata_t* staticMeta =
                reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());
        bool torchStrengthControlSupported = isTorchStrengthControlSupported(staticMeta);
        camera_metadata_ro_entry entry;
        int rc = find_camera_metadata_ro_entry(staticMeta,
                                               ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL, &entry);
        if (torchStrengthControlSupported) {
            // Devices supporting strength control must advertise a default level.
            ASSERT_EQ(rc, 0);
            ASSERT_GT(entry.count, 0);
            defaultLevel = *entry.data.i32;
            ALOGI("Default level is:%d", defaultLevel);
        }

        // Reset observed status before triggering the transition.
        mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
        ret = device->turnOnTorchWithStrengthLevel(2);
        ALOGI("turnOnTorchWithStrengthLevel returns status: %d", ret.getServiceSpecificError());
        // OPERATION_NOT_SUPPORTED check
        if (!torchStrengthControlSupported) {
            ALOGI("Torch strength control not supported.");
            ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED),
                      ret.getServiceSpecificError());
        } else {
            {
                ASSERT_TRUE(ret.isOk());
                // Wait (bounded by kTorchTimeoutSec) for the ON callback.
                std::unique_lock<std::mutex> l(mTorchLock);
                while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kTorchTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
                }
                ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
                mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
            }
            ALOGI("getTorchStrengthLevel: Testing");
            int32_t strengthLevel;
            ret = device->getTorchStrengthLevel(&strengthLevel);
            ASSERT_TRUE(ret.isOk());
            ALOGI("Torch strength level is : %d", strengthLevel);
            // The level just set must be read back verbatim.
            ASSERT_EQ(strengthLevel, 2);

            // Turn OFF the torch and verify torch strength level is reset to default level.
            ALOGI("Testing torch strength level reset after turning the torch OFF.");
            ret = device->setTorchMode(false);
            ASSERT_TRUE(ret.isOk());
            {
                // Wait for the OFF callback before querying the level again.
                std::unique_lock<std::mutex> l(mTorchLock);
                while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kTorchTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
                }
                ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
            }

            ret = device->getTorchStrengthLevel(&strengthLevel);
            ASSERT_TRUE(ret.isOk());
            ALOGI("Torch strength level after turning OFF torch is : %d", strengthLevel);
            ASSERT_EQ(strengthLevel, defaultLevel);
        }
    }
}
342
// In case it is supported verify that torch can be enabled.
// Check for corresponding torch callbacks as well.
// Torch state changes are delivered asynchronously through TorchProviderCb,
// which updates mTorchStatus under mTorchLock and signals mTorchCond; each
// wait is bounded by kTorchTimeoutSec.
TEST_P(CameraAidlTest, setTorchMode) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this);
    ndk::ScopedAStatus ret = mProvider->setCallback(cb);
    ALOGI("setCallback returns status: %d", ret.getServiceSpecificError());
    ASSERT_TRUE(ret.isOk());
    ASSERT_NE(cb, nullptr);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("setTorchMode: Testing camera device %s", name.c_str());
        ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        CameraMetadata metadata;
        ret = device->getCameraCharacteristics(&metadata);
        ALOGI("getCameraCharacteristics returns status:%d", ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        camera_metadata_t* staticMeta =
                reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
        bool torchSupported = isTorchSupported(staticMeta);

        // Reset the observed status, then request torch ON.
        mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
        ret = device->setTorchMode(true);
        ALOGI("setTorchMode returns status: %d", ret.getServiceSpecificError());
        if (!torchSupported) {
            // Devices without a flash unit must reject the request.
            ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED),
                      ret.getServiceSpecificError());
        } else {
            ASSERT_TRUE(ret.isOk());
            {
                // Wait for the AVAILABLE_ON callback on the first registered callback.
                std::unique_lock<std::mutex> l(mTorchLock);
                while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kTorchTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
                }
                ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
                mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
            }

            // register a new callback; make sure it receives the
            // flash-on callback.
            std::shared_ptr<TorchProviderCb> cb2 = ndk::SharedRefBase::make<TorchProviderCb>(this);
            ret = mProvider->setCallback(cb2);
            ASSERT_TRUE(ret.isOk());
            ASSERT_NE(cb2, nullptr);
            {
                std::unique_lock<std::mutex> l(mTorchLock);
                while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kTorchTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
                }
                ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
            }

            // Turn the torch back off and wait for the AVAILABLE_OFF callback.
            ret = device->setTorchMode(false);
            ASSERT_TRUE(ret.isOk());
            {
                std::unique_lock<std::mutex> l(mTorchLock);
                while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kTorchTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
                }
                ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
            }
        }
    }
}
420
421// Check dump functionality.
422TEST_P(CameraAidlTest, dump) {
423 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
424
425 for (const auto& name : cameraDeviceNames) {
426 std::shared_ptr<ICameraDevice> device;
427 ALOGI("dump: Testing camera device %s", name.c_str());
428
429 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
430 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
431 ret.getServiceSpecificError());
432 ASSERT_TRUE(ret.isOk());
433 ASSERT_NE(device, nullptr);
434
435 int raw_handle = open(kDumpOutput, O_RDWR);
436 ASSERT_GE(raw_handle, 0);
437
438 auto retStatus = device->dump(raw_handle, nullptr, 0);
439 ASSERT_EQ(retStatus, ::android::OK);
440 close(raw_handle);
441 }
442}
443
444// Open, dump, then close
445TEST_P(CameraAidlTest, openClose) {
446 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
447
448 for (const auto& name : cameraDeviceNames) {
449 std::shared_ptr<ICameraDevice> device;
450 ALOGI("openClose: Testing camera device %s", name.c_str());
451 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
452 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
453 ret.getServiceSpecificError());
454 ASSERT_TRUE(ret.isOk());
455 ASSERT_NE(device, nullptr);
456
457 std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
458
459 ret = device->open(cb, &mSession);
460 ASSERT_TRUE(ret.isOk());
461 ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
462 ret.getServiceSpecificError());
463 ASSERT_NE(mSession, nullptr);
464 int raw_handle = open(kDumpOutput, O_RDWR);
465 ASSERT_GE(raw_handle, 0);
466
467 auto retStatus = device->dump(raw_handle, nullptr, 0);
468 ASSERT_EQ(retStatus, ::android::OK);
469 close(raw_handle);
470
471 ret = mSession->close();
472 mSession = nullptr;
473 ASSERT_TRUE(ret.isOk());
474 // TODO: test all session API calls return INTERNAL_ERROR after close
475 // TODO: keep a wp copy here and verify session cannot be promoted out of this scope
476 }
477}
478
479// Check whether all common default request settings can be successfully
480// constructed.
481TEST_P(CameraAidlTest, constructDefaultRequestSettings) {
482 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
483
484 for (const auto& name : cameraDeviceNames) {
485 std::shared_ptr<ICameraDevice> device;
486 ALOGI("constructDefaultRequestSettings: Testing camera device %s", name.c_str());
487 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
488 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
489 ret.getServiceSpecificError());
490 ASSERT_TRUE(ret.isOk());
491 ASSERT_NE(device, nullptr);
492
493 std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
494 ret = device->open(cb, &mSession);
495 ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
496 ret.getServiceSpecificError());
497 ASSERT_TRUE(ret.isOk());
498 ASSERT_NE(mSession, nullptr);
499
500 for (int32_t t = (int32_t)RequestTemplate::PREVIEW; t <= (int32_t)RequestTemplate::MANUAL;
501 t++) {
502 RequestTemplate reqTemplate = (RequestTemplate)t;
503 CameraMetadata rawMetadata;
504 ret = mSession->constructDefaultRequestSettings(reqTemplate, &rawMetadata);
505 ALOGI("constructDefaultRequestSettings returns status:%d:%d", ret.getExceptionCode(),
506 ret.getServiceSpecificError());
Avichal Rakeshd3503a32022-02-25 06:23:14 +0000507
Avichal Rakesh362242f2022-02-08 12:40:53 -0800508 if (reqTemplate == RequestTemplate::ZERO_SHUTTER_LAG ||
509 reqTemplate == RequestTemplate::MANUAL) {
510 // optional templates
511 ASSERT_TRUE(ret.isOk() || static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
512 ret.getServiceSpecificError());
513 } else {
514 ASSERT_TRUE(ret.isOk());
515 }
516
517 if (ret.isOk()) {
518 const camera_metadata_t* metadata = (camera_metadata_t*)rawMetadata.metadata.data();
519 size_t expectedSize = rawMetadata.metadata.size();
520 int result = validate_camera_metadata_structure(metadata, &expectedSize);
521 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
522 verifyRequestTemplate(metadata, reqTemplate);
523 } else {
524 ASSERT_EQ(0u, rawMetadata.metadata.size());
525 }
526 }
527 ret = mSession->close();
528 mSession = nullptr;
529 ASSERT_TRUE(ret.isOk());
530 }
531}
532
// Verify that all supported stream formats and sizes can be configured
// successfully. Each advertised output stream is configured one at a time as
// a single-stream session, and the HAL must return exactly one HalStream with
// the requested id.
TEST_P(CameraAidlTest, configureStreamsAvailableOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputStreams;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> device;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/, &device /*out*/);

        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
        outputStreams.clear();
        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
        ASSERT_NE(0u, outputStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        int32_t streamConfigCounter = 0;
        for (auto& it : outputStreams) {
            // Build a single output stream matching the advertised config.
            Stream stream;
            Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format));
            stream.id = streamId;
            stream.streamType = StreamType::OUTPUT;
            stream.width = it.width;
            stream.height = it.height;
            stream.format = static_cast<PixelFormat>(it.format);
            stream.dataSpace = dataspace;
            stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                    GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
            stream.rotation = StreamRotation::ROTATION_0;
            stream.dynamicRangeProfile = RequestAvailableDynamicRangeProfilesMap::
                    ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;

            std::vector<Stream> streams = {stream};
            StreamConfiguration config;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);

            // Logical multi-cameras are expected to answer the combination
            // query; other devices may not implement it.
            bool expectStreamCombQuery = (isLogicalMultiCamera(staticMeta) == Status::OK);
            verifyStreamCombination(device, config, /*expectedStatus*/ true, expectStreamCombQuery);

            config.streamConfigCounter = streamConfigCounter++;
            std::vector<HalStream> halConfigs;
            ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
            ASSERT_TRUE(ret.isOk());
            // Exactly one HAL stream, echoing back the id we assigned.
            ASSERT_EQ(halConfigs.size(), 1);
            ASSERT_EQ(halConfigs[0].id, streamId);

            streamId++;
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
594
// Verify that mandatory concurrent streams and outputs are supported.
// For every advertised concurrent-camera id combination: open a session per
// camera, build its mandatory concurrent stream set, ask the provider whether
// the combined configuration is supported, then actually configure each
// session with it.
TEST_P(CameraAidlTest, configureConcurrentStreamsAvailableOutputs) {
    // Per-camera state gathered while building the combined configuration.
    struct CameraTestInfo {
        CameraMetadata staticMeta;
        std::shared_ptr<ICameraDeviceSession> session;
        std::shared_ptr<ICameraDevice> cameraDevice;
        StreamConfiguration config;
    };

    std::map<std::string, std::string> idToNameMap = getCameraDeviceIdToNameMap(mProvider);
    std::vector<ConcurrentCameraIdCombination> concurrentDeviceCombinations =
            getConcurrentDeviceCombinations(mProvider);
    std::vector<AvailableStream> outputStreams;
    for (const auto& cameraDeviceIds : concurrentDeviceCombinations) {
        std::vector<CameraIdAndStreamCombination> cameraIdsAndStreamCombinations;
        std::vector<CameraTestInfo> cameraTestInfos;
        // NOTE(review): `i` is incremented but never read — appears vestigial.
        size_t i = 0;
        for (const auto& id : cameraDeviceIds.combination) {
            CameraTestInfo cti;
            auto it = idToNameMap.find(id);
            ASSERT_TRUE(idToNameMap.end() != it);
            std::string name = it->second;

            openEmptyDeviceSession(name, mProvider, &cti.session /*out*/, &cti.staticMeta /*out*/,
                                   &cti.cameraDevice /*out*/);

            outputStreams.clear();
            camera_metadata_t* staticMeta =
                    reinterpret_cast<camera_metadata_t*>(cti.staticMeta.metadata.data());
            ASSERT_EQ(Status::OK, getMandatoryConcurrentStreams(staticMeta, &outputStreams));
            ASSERT_NE(0u, outputStreams.size());

            int32_t jpegBufferSize = 0;
            ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
            ASSERT_NE(0u, jpegBufferSize);

            // One Stream per mandatory concurrent output.
            int32_t streamId = 0;
            std::vector<Stream> streams(outputStreams.size());
            size_t j = 0;
            for (const auto& s : outputStreams) {
                Stream stream;
                Dataspace dataspace = getDataspace(static_cast<PixelFormat>(s.format));
                stream.id = streamId++;
                stream.streamType = StreamType::OUTPUT;
                stream.width = s.width;
                stream.height = s.height;
                stream.format = static_cast<PixelFormat>(s.format);
                stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                        GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
                stream.dataSpace = dataspace;
                stream.rotation = StreamRotation::ROTATION_0;
                stream.sensorPixelModesUsed = {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT};
                stream.dynamicRangeProfile = RequestAvailableDynamicRangeProfilesMap::
                        ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
                streams[j] = stream;
                j++;
            }

            // Add the created stream configs to cameraIdsAndStreamCombinations
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &cti.config,
                                      jpegBufferSize);

            cti.config.streamConfigCounter = outputStreams.size();
            CameraIdAndStreamCombination cameraIdAndStreamCombination;
            cameraIdAndStreamCombination.cameraId = id;
            cameraIdAndStreamCombination.streamConfiguration = cti.config;
            cameraIdsAndStreamCombinations.push_back(cameraIdAndStreamCombination);
            i++;
            cameraTestInfos.push_back(cti);
        }
        // Now verify that concurrent streams are supported
        bool combinationSupported;
        ndk::ScopedAStatus ret = mProvider->isConcurrentStreamCombinationSupported(
                cameraIdsAndStreamCombinations, &combinationSupported);
        ASSERT_TRUE(ret.isOk());
        ASSERT_EQ(combinationSupported, true);

        // Test the stream can actually be configured
        for (auto& cti : cameraTestInfos) {
            if (cti.session != nullptr) {
                camera_metadata_t* staticMeta =
                        reinterpret_cast<camera_metadata_t*>(cti.staticMeta.metadata.data());
                bool expectStreamCombQuery = (isLogicalMultiCamera(staticMeta) == Status::OK);
                verifyStreamCombination(cti.cameraDevice, cti.config, /*expectedStatus*/ true,
                                        expectStreamCombQuery);
            }

            if (cti.session != nullptr) {
                std::vector<HalStream> streamConfigs;
                ret = cti.session->configureStreams(cti.config, &streamConfigs);
                ASSERT_TRUE(ret.isOk());
                // Every requested stream must come back with a HAL config.
                ASSERT_EQ(cti.config.streams.size(), streamConfigs.size());
            }
        }

        // NOTE(review): unlike the loop above, close() is called without a
        // null check — presumably openEmptyDeviceSession() guarantees a
        // non-null session; confirm against its implementation.
        for (auto& cti : cameraTestInfos) {
            ret = cti.session->close();
            ASSERT_TRUE(ret.isOk());
        }
    }
}
696
697// Check for correct handling of invalid/incorrect configuration parameters.
698TEST_P(CameraAidlTest, configureStreamsInvalidOutputs) {
699 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
700 std::vector<AvailableStream> outputStreams;
701
702 for (const auto& name : cameraDeviceNames) {
703 CameraMetadata meta;
704 std::shared_ptr<ICameraDevice> cameraDevice;
705
706 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
707 &cameraDevice /*out*/);
708 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
709 outputStreams.clear();
710
711 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
712 ASSERT_NE(0u, outputStreams.size());
713
714 int32_t jpegBufferSize = 0;
715 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
716 ASSERT_NE(0u, jpegBufferSize);
717
718 int32_t streamId = 0;
719 Stream stream = {streamId++,
720 StreamType::OUTPUT,
721 static_cast<uint32_t>(0),
722 static_cast<uint32_t>(0),
723 static_cast<PixelFormat>(outputStreams[0].format),
724 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
725 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
726 Dataspace::UNKNOWN,
727 StreamRotation::ROTATION_0,
728 std::string(),
729 jpegBufferSize,
730 -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +0000731 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
732 RequestAvailableDynamicRangeProfilesMap::
733 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -0800734 int32_t streamConfigCounter = 0;
735 std::vector<Stream> streams = {stream};
736 StreamConfiguration config;
737 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
738 jpegBufferSize);
739
740 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ false,
741 /*expectStreamCombQuery*/ false);
742
743 config.streamConfigCounter = streamConfigCounter++;
744 std::vector<HalStream> halConfigs;
745 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
746 ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
747 ret.getServiceSpecificError() ||
748 static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError());
749
750 stream = {streamId++,
751 StreamType::OUTPUT,
752 /*width*/ INT32_MAX,
753 /*height*/ INT32_MAX,
754 static_cast<PixelFormat>(outputStreams[0].format),
755 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
756 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
757 Dataspace::UNKNOWN,
758 StreamRotation::ROTATION_0,
759 std::string(),
760 jpegBufferSize,
761 -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +0000762 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
763 RequestAvailableDynamicRangeProfilesMap::
764 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -0800765
766 streams[0] = stream;
767 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
768 jpegBufferSize);
769
770 config.streamConfigCounter = streamConfigCounter++;
771 halConfigs.clear();
772 ret = mSession->configureStreams(config, &halConfigs);
773 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
774
775 for (auto& it : outputStreams) {
776 stream = {streamId++,
777 StreamType::OUTPUT,
778 it.width,
779 it.height,
780 static_cast<PixelFormat>(UINT32_MAX),
781 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
782 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
783 Dataspace::UNKNOWN,
784 StreamRotation::ROTATION_0,
785 std::string(),
786 jpegBufferSize,
787 -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +0000788 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
789 RequestAvailableDynamicRangeProfilesMap::
790 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -0800791
792 streams[0] = stream;
793 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
794 jpegBufferSize);
795 config.streamConfigCounter = streamConfigCounter++;
796 halConfigs.clear();
797 ret = mSession->configureStreams(config, &halConfigs);
798 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
799 ret.getServiceSpecificError());
800
801 stream = {streamId++,
802 StreamType::OUTPUT,
803 it.width,
804 it.height,
805 static_cast<PixelFormat>(it.format),
806 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
807 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
808 Dataspace::UNKNOWN,
809 static_cast<StreamRotation>(UINT32_MAX),
810 std::string(),
811 jpegBufferSize,
812 -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +0000813 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
814 RequestAvailableDynamicRangeProfilesMap::
815 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -0800816
817 streams[0] = stream;
818 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
819 jpegBufferSize);
820
821 config.streamConfigCounter = streamConfigCounter++;
822 halConfigs.clear();
823 ret = mSession->configureStreams(config, &halConfigs);
824 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
825 ret.getServiceSpecificError());
826 }
827
828 ret = mSession->close();
829 mSession = nullptr;
830 ASSERT_TRUE(ret.isOk());
831 }
832}
833
// Check whether all supported ZSL output stream combinations can be
// configured successfully.
//
// For every (inputFormat -> outputFormat) pair advertised by the HAL, a
// three-stream session (reprocess input + ZSL output + processed output) is
// built and must configure without error. At the end, monochrome devices
// that expose Y8 are additionally required to have advertised the mandatory
// Y8 reprocessing paths.
TEST_P(CameraAidlTest, configureStreamsZSLInputOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> inputStreams;
    std::vector<AvailableZSLInputOutput> inputOutputMap;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        // ZSL reprocessing is optional; skip devices that do not support it.
        Status rc = isZSLModeAvailable(staticMeta);
        if (Status::OPERATION_NOT_SUPPORTED == rc) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }
        ASSERT_EQ(Status::OK, rc);

        // Input stream candidates come from the regular output stream list;
        // a ZSL input mirrors a previously captured output.
        inputStreams.clear();
        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, inputStreams));
        ASSERT_NE(0u, inputStreams.size());

        // (input format -> output format) pairs the HAL claims to support.
        inputOutputMap.clear();
        ASSERT_EQ(Status::OK, getZSLInputOutputMap(staticMeta, inputOutputMap));
        ASSERT_NE(0u, inputOutputMap.size());

        // Monochrome cameras that list Y8 must support the Y8 reprocessing
        // combinations verified after the configuration loop below.
        bool supportMonoY8 = false;
        if (Status::OK == isMonochromeCamera(staticMeta)) {
            for (auto& it : inputStreams) {
                if (it.format == static_cast<uint32_t>(PixelFormat::Y8)) {
                    supportMonoY8 = true;
                    break;
                }
            }
        }

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        bool hasPrivToY8 = false, hasY8ToY8 = false, hasY8ToBlob = false;
        uint32_t streamConfigCounter = 0;
        for (auto& inputIter : inputOutputMap) {
            AvailableStream input;
            // Use the largest advertised resolution for this input format.
            ASSERT_EQ(Status::OK, findLargestSize(inputStreams, inputIter.inputFormat, input));
            ASSERT_NE(0u, inputStreams.size());

            // Record which reprocessing paths were seen, for the monochrome
            // mandatory-combination checks below.
            if (inputIter.inputFormat ==
                        static_cast<uint32_t>(PixelFormat::IMPLEMENTATION_DEFINED) &&
                inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
                hasPrivToY8 = true;
            } else if (inputIter.inputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
                if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::BLOB)) {
                    hasY8ToBlob = true;
                } else if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
                    hasY8ToY8 = true;
                }
            }
            AvailableStream outputThreshold = {INT32_MAX, INT32_MAX, inputIter.outputFormat};
            std::vector<AvailableStream> outputStreams;
            ASSERT_EQ(Status::OK,
                      getAvailableOutputStreams(staticMeta, outputStreams, &outputThreshold));
            for (auto& outputIter : outputStreams) {
                Dataspace outputDataSpace =
                        getDataspace(static_cast<PixelFormat>(outputIter.format));
                // ZSL output stream: same dimensions/format as the input,
                // flagged with the ZSL gralloc usage.
                Stream zslStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        input.width,
                        input.height,
                        static_cast<PixelFormat>(input.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC_USAGE_HW_CAMERA_ZSL),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                // Reprocessing input stream fed from the ZSL output.
                Stream inputStream = {
                        streamId++,
                        StreamType::INPUT,
                        input.width,
                        input.height,
                        static_cast<PixelFormat>(input.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(0),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                // Final processed output stream for this format pair.
                Stream outputStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        outputIter.width,
                        outputIter.height,
                        static_cast<PixelFormat>(outputIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                        outputDataSpace,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

                std::vector<Stream> streams = {inputStream, zslStream, outputStream};

                StreamConfiguration config;
                createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                          jpegBufferSize);

                // Combination is advertised as supported, so both the query
                // and the actual configuration must succeed.
                verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
                                        /*expectStreamCombQuery*/ false);

                config.streamConfigCounter = streamConfigCounter++;
                std::vector<HalStream> halConfigs;
                ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(3u, halConfigs.size());
            }
        }

        // Monochrome + Y8 devices must have advertised the mandatory
        // reprocessing paths for whichever ZSL modes they support.
        if (supportMonoY8) {
            if (Status::OK == isZSLModeAvailable(staticMeta, PRIV_REPROCESS)) {
                ASSERT_TRUE(hasPrivToY8);
            }
            if (Status::OK == isZSLModeAvailable(staticMeta, YUV_REPROCESS)) {
                ASSERT_TRUE(hasY8ToY8);
                ASSERT_TRUE(hasY8ToBlob);
            }
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
986
987// Check whether session parameters are supported. If Hal support for them
988// exist, then try to configure a preview stream using them.
989TEST_P(CameraAidlTest, configureStreamsWithSessionParameters) {
990 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
991 std::vector<AvailableStream> outputPreviewStreams;
992 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
993 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
994
995 for (const auto& name : cameraDeviceNames) {
996 CameraMetadata meta;
997
998 std::shared_ptr<ICameraDevice> unusedCameraDevice;
999 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
1000 &unusedCameraDevice /*out*/);
1001 camera_metadata_t* staticMetaBuffer =
1002 reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1003
1004 std::unordered_set<int32_t> availableSessionKeys;
1005 auto rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
1006 &availableSessionKeys);
1007 ASSERT_TRUE(Status::OK == rc);
1008 if (availableSessionKeys.empty()) {
1009 ndk::ScopedAStatus ret = mSession->close();
1010 mSession = nullptr;
1011 ASSERT_TRUE(ret.isOk());
1012 continue;
1013 }
1014
1015 android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings;
1016 android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams,
1017 modifiedSessionParams;
1018 constructFilteredSettings(mSession, availableSessionKeys, RequestTemplate::PREVIEW,
1019 &previewRequestSettings, &sessionParams);
1020 if (sessionParams.isEmpty()) {
1021 ndk::ScopedAStatus ret = mSession->close();
1022 mSession = nullptr;
1023 ASSERT_TRUE(ret.isOk());
1024 continue;
1025 }
1026
1027 outputPreviewStreams.clear();
1028
1029 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams,
1030 &previewThreshold));
1031 ASSERT_NE(0u, outputPreviewStreams.size());
1032
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001033 Stream previewStream = {
1034 0,
1035 StreamType::OUTPUT,
1036 outputPreviewStreams[0].width,
1037 outputPreviewStreams[0].height,
1038 static_cast<PixelFormat>(outputPreviewStreams[0].format),
1039 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1040 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
1041 Dataspace::UNKNOWN,
1042 StreamRotation::ROTATION_0,
1043 std::string(),
1044 /*bufferSize*/ 0,
1045 /*groupId*/ -1,
1046 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1047 RequestAvailableDynamicRangeProfilesMap::
1048 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08001049
1050 std::vector<Stream> streams = {previewStream};
1051 StreamConfiguration config;
1052
1053 config.streams = streams;
1054 config.operationMode = StreamConfigurationMode::NORMAL_MODE;
1055 modifiedSessionParams = sessionParams;
1056 auto sessionParamsBuffer = sessionParams.release();
1057 std::vector<uint8_t> rawSessionParam =
1058 std::vector(reinterpret_cast<uint8_t*>(sessionParamsBuffer),
1059 reinterpret_cast<uint8_t*>(sessionParamsBuffer) +
1060 get_camera_metadata_size(sessionParamsBuffer));
1061
1062 config.sessionParams.metadata = rawSessionParam;
1063 config.streamConfigCounter = 0;
1064 config.streams = {previewStream};
1065 config.streamConfigCounter = 0;
1066 config.multiResolutionInputImage = false;
1067
1068 bool newSessionParamsAvailable = false;
1069 for (const auto& it : availableSessionKeys) {
1070 if (modifiedSessionParams.exists(it)) {
1071 modifiedSessionParams.erase(it);
1072 newSessionParamsAvailable = true;
1073 break;
1074 }
1075 }
1076 if (newSessionParamsAvailable) {
1077 auto modifiedSessionParamsBuffer = modifiedSessionParams.release();
1078 verifySessionReconfigurationQuery(mSession, sessionParamsBuffer,
1079 modifiedSessionParamsBuffer);
1080 modifiedSessionParams.acquire(modifiedSessionParamsBuffer);
1081 }
1082
1083 std::vector<HalStream> halConfigs;
1084 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1085 ASSERT_TRUE(ret.isOk());
1086 ASSERT_EQ(1u, halConfigs.size());
1087
1088 sessionParams.acquire(sessionParamsBuffer);
1089 ret = mSession->close();
1090 mSession = nullptr;
1091 ASSERT_TRUE(ret.isOk());
1092 }
1093}
1094
// Verify that all supported preview + still capture stream combinations
// can be configured successfully.
//
// Every advertised BLOB (JPEG still capture) size is paired with every
// preview-capable size; each pair must pass both the combination query and
// an actual configureStreams() call.
TEST_P(CameraAidlTest, configureStreamsPreviewStillOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputBlobStreams;
    std::vector<AvailableStream> outputPreviewStreams;
    AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
                                        static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
    // No size cap for still capture streams.
    AvailableStream blobThreshold = {INT32_MAX, INT32_MAX, static_cast<int32_t>(PixelFormat::BLOB)};

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;

        std::shared_ptr<ICameraDevice> cameraDevice;
        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);

        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        // Check if camera support depth only
        if (isDepthOnly(staticMeta)) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        outputBlobStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold));
        ASSERT_NE(0u, outputBlobStreams.size());

        outputPreviewStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputPreviewStreams, &previewThreshold));
        ASSERT_NE(0u, outputPreviewStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        uint32_t streamConfigCounter = 0;

        // Exhaustively test every (still capture size, preview size) pair.
        for (auto& blobIter : outputBlobStreams) {
            for (auto& previewIter : outputPreviewStreams) {
                Stream previewStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        previewIter.width,
                        previewIter.height,
                        static_cast<PixelFormat>(previewIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        /*bufferSize*/ 0,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                // JPEG still capture stream, CPU-readable, JFIF dataspace.
                Stream blobStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        blobIter.width,
                        blobIter.height,
                        static_cast<PixelFormat>(blobIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_CPU_READ),
                        Dataspace::JFIF,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        /*bufferSize*/ 0,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                std::vector<Stream> streams = {previewStream, blobStream};
                StreamConfiguration config;

                createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                          jpegBufferSize);
                config.streamConfigCounter = streamConfigCounter++;
                // Mandatory combination: both query and configuration must
                // succeed.
                verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
                                        /*expectStreamCombQuery*/ false);

                std::vector<HalStream> halConfigs;
                ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(2u, halConfigs.size());
            }
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
1194
// In case constrained mode is supported, test whether it can be
// configured. Additionally check for common invalid inputs when
// using this mode.
//
// One valid high-speed configuration is expected to succeed, followed by
// three invalid variants (zero size, max size, bogus pixel format) that the
// HAL must reject.
TEST_P(CameraAidlTest, configureStreamsConstrainedOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        // Constrained high-speed mode is optional; skip unsupported devices.
        Status rc = isConstrainedModeAvailable(staticMeta);
        if (Status::OPERATION_NOT_SUPPORTED == rc) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }
        ASSERT_EQ(Status::OK, rc);

        // Pick one advertised high-frame-rate size/format to build on.
        AvailableStream hfrStream;
        rc = pickConstrainedModeSize(staticMeta, hfrStream);
        ASSERT_EQ(Status::OK, rc);

        int32_t streamId = 0;
        uint32_t streamConfigCounter = 0;
        // Valid high-speed recording stream; NOTE(review): streamId is not
        // incremented here, so the first invalid stream below reuses id 0 —
        // acceptable since each configureStreams() call replaces the
        // previous configuration, but confirm this is intentional.
        Stream stream = {streamId,
                         StreamType::OUTPUT,
                         hfrStream.width,
                         hfrStream.height,
                         static_cast<PixelFormat>(hfrStream.format),
                         static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                         Dataspace::UNKNOWN,
                         StreamRotation::ROTATION_0,
                         std::string(),
                         /*bufferSize*/ 0,
                         /*groupId*/ -1,
                         {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                         RequestAvailableDynamicRangeProfilesMap::
                                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        std::vector<Stream> streams = {stream};
        StreamConfiguration config;
        createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
                                  &config);

        verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
                                /*expectStreamCombQuery*/ false);

        config.streamConfigCounter = streamConfigCounter++;
        std::vector<HalStream> halConfigs;
        ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_TRUE(ret.isOk());
        ASSERT_EQ(1u, halConfigs.size());
        ASSERT_EQ(halConfigs[0].id, streamId);

        // Invalid case 1: zero width/height must be rejected (some HALs
        // report INTERNAL_ERROR instead of ILLEGAL_ARGUMENT).
        stream = {streamId++,
                  StreamType::OUTPUT,
                  static_cast<uint32_t>(0),
                  static_cast<uint32_t>(0),
                  static_cast<PixelFormat>(hfrStream.format),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  /*bufferSize*/ 0,
                  /*groupId*/ -1,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
                                  &config);

        config.streamConfigCounter = streamConfigCounter++;
        std::vector<HalStream> halConfig;
        ret = mSession->configureStreams(config, &halConfig);
        ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
                            ret.getServiceSpecificError() ||
                    static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError());

        // Invalid case 2: unsupported (maximum) dimensions must be rejected.
        stream = {streamId++,
                  StreamType::OUTPUT,
                  INT32_MAX,
                  INT32_MAX,
                  static_cast<PixelFormat>(hfrStream.format),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  /*bufferSize*/ 0,
                  /*groupId*/ -1,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
                                  &config);

        config.streamConfigCounter = streamConfigCounter++;
        halConfigs.clear();
        ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());

        // Invalid case 3: bogus pixel format must be rejected.
        stream = {streamId++,
                  StreamType::OUTPUT,
                  hfrStream.width,
                  hfrStream.height,
                  static_cast<PixelFormat>(UINT32_MAX),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  /*bufferSize*/ 0,
                  /*groupId*/ -1,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
                                  &config);

        config.streamConfigCounter = streamConfigCounter++;
        halConfigs.clear();
        ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());

        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
1333
// Verify that all supported video + snapshot stream combinations can
// be configured successfully.
//
// Every BLOB (snapshot) size up to the maximum video resolution is paired
// with every video-capable size; each pair must pass the combination query
// and an actual configureStreams() call.
TEST_P(CameraAidlTest, configureStreamsVideoStillOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputBlobStreams;
    std::vector<AvailableStream> outputVideoStreams;
    AvailableStream videoThreshold = {kMaxVideoWidth, kMaxVideoHeight,
                                      static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
    // Snapshot sizes are capped at the video resolution for this combination.
    AvailableStream blobThreshold = {kMaxVideoWidth, kMaxVideoHeight,
                                     static_cast<int32_t>(PixelFormat::BLOB)};

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);

        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        // Check if camera support depth only
        if (isDepthOnly(staticMeta)) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        outputBlobStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold));
        ASSERT_NE(0u, outputBlobStreams.size());

        outputVideoStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputVideoStreams, &videoThreshold));
        ASSERT_NE(0u, outputVideoStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        uint32_t streamConfigCounter = 0;
        // Exhaustively test every (snapshot size, video size) pair.
        for (auto& blobIter : outputBlobStreams) {
            for (auto& videoIter : outputVideoStreams) {
                // Video recording stream destined for the encoder.
                Stream videoStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        videoIter.width,
                        videoIter.height,
                        static_cast<PixelFormat>(videoIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                // JPEG snapshot stream, CPU-readable, JFIF dataspace.
                Stream blobStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        blobIter.width,
                        blobIter.height,
                        static_cast<PixelFormat>(blobIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_CPU_READ),
                        Dataspace::JFIF,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                std::vector<Stream> streams = {videoStream, blobStream};
                StreamConfiguration config;

                createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                          jpegBufferSize);
                // Mandatory combination: both query and configuration must
                // succeed.
                verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
                                        /*expectStreamCombQuery*/ false);

                config.streamConfigCounter = streamConfigCounter++;
                std::vector<HalStream> halConfigs;
                ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(2u, halConfigs.size());
            }
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
1433
1434// Generate and verify a camera capture request
1435TEST_P(CameraAidlTest, processCaptureRequestPreview) {
1436 // TODO(b/220897574): Failing with BUFFER_ERROR
1437 processCaptureRequestInternal(GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, RequestTemplate::PREVIEW,
1438 false /*secureOnlyCameras*/);
1439}
1440
1441// Generate and verify a secure camera capture request
1442TEST_P(CameraAidlTest, processSecureCaptureRequest) {
1443 processCaptureRequestInternal(GRALLOC1_PRODUCER_USAGE_PROTECTED, RequestTemplate::STILL_CAPTURE,
1444 true /*secureOnlyCameras*/);
1445}
1446
1447TEST_P(CameraAidlTest, processCaptureRequestPreviewStabilization) {
1448 std::unordered_map<std::string, nsecs_t> cameraDeviceToTimeLag;
1449 processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ false,
1450 cameraDeviceToTimeLag);
1451 processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ true,
1452 cameraDeviceToTimeLag);
1453}
1454
1455// Generate and verify a multi-camera capture request
1456TEST_P(CameraAidlTest, processMultiCaptureRequestPreview) {
1457 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1458 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
1459 static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
1460 int64_t bufferId = 1;
1461 uint32_t frameNumber = 1;
1462 std::vector<uint8_t> settings;
1463 std::vector<uint8_t> emptySettings;
1464 std::string invalidPhysicalId = "-1";
1465
1466 for (const auto& name : cameraDeviceNames) {
1467 std::string version, deviceId;
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +00001468 ALOGI("processMultiCaptureRequestPreview: Test device %s", name.c_str());
Avichal Rakesh362242f2022-02-08 12:40:53 -08001469 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1470 CameraMetadata metadata;
1471
1472 std::shared_ptr<ICameraDevice> unusedDevice;
1473 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &metadata /*out*/,
1474 &unusedDevice /*out*/);
1475
1476 camera_metadata_t* staticMeta =
1477 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
1478 Status rc = isLogicalMultiCamera(staticMeta);
1479 if (Status::OPERATION_NOT_SUPPORTED == rc) {
1480 ndk::ScopedAStatus ret = mSession->close();
1481 mSession = nullptr;
1482 ASSERT_TRUE(ret.isOk());
1483 continue;
1484 }
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +00001485 ASSERT_EQ(Status::OK, rc);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001486
1487 std::unordered_set<std::string> physicalIds;
1488 rc = getPhysicalCameraIds(staticMeta, &physicalIds);
1489 ASSERT_TRUE(Status::OK == rc);
1490 ASSERT_TRUE(physicalIds.size() > 1);
1491
1492 std::unordered_set<int32_t> physicalRequestKeyIDs;
1493 rc = getSupportedKeys(staticMeta, ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS,
1494 &physicalRequestKeyIDs);
1495 ASSERT_TRUE(Status::OK == rc);
1496 if (physicalRequestKeyIDs.empty()) {
1497 ndk::ScopedAStatus ret = mSession->close();
1498 mSession = nullptr;
1499 ASSERT_TRUE(ret.isOk());
1500 // The logical camera doesn't support any individual physical requests.
1501 continue;
1502 }
1503
1504 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultPreviewSettings;
1505 android::hardware::camera::common::V1_0::helper::CameraMetadata filteredSettings;
1506 constructFilteredSettings(mSession, physicalRequestKeyIDs, RequestTemplate::PREVIEW,
1507 &defaultPreviewSettings, &filteredSettings);
1508 if (filteredSettings.isEmpty()) {
1509 // No physical device settings in default request.
1510 ndk::ScopedAStatus ret = mSession->close();
1511 mSession = nullptr;
1512 ASSERT_TRUE(ret.isOk());
1513 continue;
1514 }
1515
1516 const camera_metadata_t* settingsBuffer = defaultPreviewSettings.getAndLock();
1517 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1518 settings.assign(rawSettingsBuffer,
1519 rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1520 CameraMetadata settingsMetadata = {settings};
1521 overrideRotateAndCrop(&settingsMetadata);
1522
1523 ndk::ScopedAStatus ret = mSession->close();
1524 mSession = nullptr;
1525 ASSERT_TRUE(ret.isOk());
1526
1527 // Leave only 2 physical devices in the id set.
1528 auto it = physicalIds.begin();
1529 std::string physicalDeviceId = *it;
1530 it++;
1531 physicalIds.erase(++it, physicalIds.end());
1532 ASSERT_EQ(physicalIds.size(), 2u);
1533
1534 std::vector<HalStream> halStreams;
1535 bool supportsPartialResults = false;
1536 bool useHalBufManager = false;
1537 int32_t partialResultCount = 0;
1538 Stream previewStream;
1539 std::shared_ptr<DeviceCb> cb;
1540
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +00001541 configurePreviewStreams(
1542 name, mProvider, &previewThreshold, physicalIds, &mSession, &previewStream,
1543 &halStreams /*out*/, &supportsPartialResults /*out*/, &partialResultCount /*out*/,
1544 &useHalBufManager /*out*/, &cb /*out*/, 0 /*streamConfigCounter*/, true);
1545 if (mSession == nullptr) {
1546 // stream combination not supported by HAL, skip test for device
1547 continue;
1548 }
Avichal Rakesh362242f2022-02-08 12:40:53 -08001549
1550 ::aidl::android::hardware::common::fmq::MQDescriptor<
1551 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1552 descriptor;
1553 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1554 ASSERT_TRUE(resultQueueRet.isOk());
1555 std::shared_ptr<ResultMetadataQueue> resultQueue =
1556 std::make_shared<ResultMetadataQueue>(descriptor);
1557 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1558 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
1559 resultQueue = nullptr;
1560 // Don't use the queue onwards.
1561 }
1562
1563 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1564 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1565 partialResultCount, physicalIds, resultQueue);
1566
1567 std::vector<CaptureRequest> requests(1);
1568 CaptureRequest& request = requests[0];
1569 request.frameNumber = frameNumber;
1570 request.fmqSettingsSize = 0;
Emilian Peev3d919f92022-04-20 13:50:59 -07001571 request.settings = settingsMetadata;
Avichal Rakesh362242f2022-02-08 12:40:53 -08001572
1573 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1574
1575 std::vector<buffer_handle_t> graphicBuffers;
1576 graphicBuffers.reserve(halStreams.size());
1577 outputBuffers.resize(halStreams.size());
1578 size_t k = 0;
1579 for (const auto& halStream : halStreams) {
1580 buffer_handle_t buffer_handle;
1581 if (useHalBufManager) {
1582 outputBuffers[k] = {halStream.id, /*bufferId*/ 0, NativeHandle(),
1583 BufferStatus::OK, NativeHandle(), NativeHandle()};
1584 } else {
1585 allocateGraphicBuffer(previewStream.width, previewStream.height,
1586 android_convertGralloc1To0Usage(
1587 static_cast<uint64_t>(halStream.producerUsage),
1588 static_cast<uint64_t>(halStream.consumerUsage)),
1589 halStream.overrideFormat, &buffer_handle);
1590 graphicBuffers.push_back(buffer_handle);
1591 outputBuffers[k] = {
1592 halStream.id, bufferId, ::android::makeToAidl(buffer_handle),
1593 BufferStatus::OK, NativeHandle(), NativeHandle()};
1594 bufferId++;
1595 }
1596 k++;
1597 }
1598
1599 std::vector<PhysicalCameraSetting> camSettings(1);
1600 const camera_metadata_t* filteredSettingsBuffer = filteredSettings.getAndLock();
1601 uint8_t* rawFilteredSettingsBuffer = (uint8_t*)filteredSettingsBuffer;
1602 camSettings[0].settings = {std::vector(
1603 rawFilteredSettingsBuffer,
1604 rawFilteredSettingsBuffer + get_camera_metadata_size(filteredSettingsBuffer))};
1605 overrideRotateAndCrop(&camSettings[0].settings);
1606 camSettings[0].fmqSettingsSize = 0;
1607 camSettings[0].physicalCameraId = physicalDeviceId;
1608
1609 request.inputBuffer = {
1610 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
1611 request.physicalCameraSettings = camSettings;
1612
1613 {
1614 std::unique_lock<std::mutex> l(mLock);
1615 mInflightMap.clear();
1616 mInflightMap[frameNumber] = inflightReq;
1617 }
1618
1619 int32_t numRequestProcessed = 0;
1620 std::vector<BufferCache> cachesToRemove;
1621 ndk::ScopedAStatus returnStatus =
1622 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1623 ASSERT_TRUE(returnStatus.isOk());
1624 ASSERT_EQ(numRequestProcessed, 1u);
1625
1626 {
1627 std::unique_lock<std::mutex> l(mLock);
1628 while (!inflightReq->errorCodeValid &&
1629 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1630 auto timeout = std::chrono::system_clock::now() +
1631 std::chrono::seconds(kStreamBufferTimeoutSec);
1632 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1633 }
1634
1635 ASSERT_FALSE(inflightReq->errorCodeValid);
1636 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1637
1638 request.frameNumber++;
1639 // Empty settings should be supported after the first call
1640 // for repeating requests.
1641 request.settings.metadata.clear();
1642 request.physicalCameraSettings[0].settings.metadata.clear();
1643 // The buffer has been registered to HAL by bufferId, so per
1644 // API contract we should send a null handle for this buffer
1645 request.outputBuffers[0].buffer = NativeHandle();
1646 mInflightMap.clear();
1647 inflightReq = std::make_shared<InFlightRequest>(
1648 static_cast<ssize_t>(physicalIds.size()), false, supportsPartialResults,
1649 partialResultCount, physicalIds, resultQueue);
1650 mInflightMap[request.frameNumber] = inflightReq;
1651 }
1652
1653 returnStatus =
1654 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1655 ASSERT_TRUE(returnStatus.isOk());
1656 ASSERT_EQ(numRequestProcessed, 1u);
1657
1658 {
1659 std::unique_lock<std::mutex> l(mLock);
1660 while (!inflightReq->errorCodeValid &&
1661 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1662 auto timeout = std::chrono::system_clock::now() +
1663 std::chrono::seconds(kStreamBufferTimeoutSec);
1664 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1665 }
1666
1667 ASSERT_FALSE(inflightReq->errorCodeValid);
1668 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1669 }
1670
1671 // Invalid physical camera id should fail process requests
1672 frameNumber++;
1673 camSettings[0].physicalCameraId = invalidPhysicalId;
1674 camSettings[0].settings.metadata = settings;
1675
1676 request.physicalCameraSettings = camSettings; // Invalid camera settings
1677 returnStatus =
1678 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1679 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
1680 returnStatus.getServiceSpecificError());
1681
1682 defaultPreviewSettings.unlock(settingsBuffer);
1683 filteredSettings.unlock(filteredSettingsBuffer);
1684
1685 if (useHalBufManager) {
1686 std::vector<int32_t> streamIds(halStreams.size());
1687 for (size_t i = 0; i < streamIds.size(); i++) {
1688 streamIds[i] = halStreams[i].id;
1689 }
1690 verifyBuffersReturned(mSession, streamIds, cb);
1691 }
1692
1693 ret = mSession->close();
1694 mSession = nullptr;
1695 ASSERT_TRUE(ret.isOk());
1696 }
1697}
1698
1699// Generate and verify an ultra high resolution capture request
TEST_P(CameraAidlTest, processUltraHighResolutionRequest) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    // Buffer ids only need to be unique per-session; start at 1 (0 is reserved for
    // the HAL buffer manager case).
    int64_t bufferId = 1;
    int32_t frameNumber = 1;
    CameraMetadata settings;

    for (const auto& name : cameraDeviceNames) {
        std::string version, deviceId;
        ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
        CameraMetadata meta;

        // Open a throwaway session first, solely to read the static metadata and
        // construct the capture settings; the session is closed again before the
        // streams are configured below.
        std::shared_ptr<ICameraDevice> unusedDevice;
        openEmptyDeviceSession(name, mProvider, &mSession, &meta, &unusedDevice);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
        if (!isUltraHighResolution(staticMeta)) {
            // Device has no ultra-high-resolution sensor mode; skip it.
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }
        CameraMetadata req;
        android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
        ndk::ScopedAStatus ret =
                mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE, &req);
        ASSERT_TRUE(ret.isOk());

        // Sanity-check the returned default settings blob before using it.
        const camera_metadata_t* metadata =
                reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
        size_t expectedSize = req.metadata.size();
        int result = validate_camera_metadata_structure(metadata, &expectedSize);
        ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));

        size_t entryCount = get_camera_metadata_entry_count(metadata);
        ASSERT_GT(entryCount, 0u);
        defaultSettings = metadata;
        // Request the sensor's full (maximum) resolution pixel mode — the point of
        // this test.
        uint8_t sensorPixelMode =
                static_cast<uint8_t>(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
        ASSERT_EQ(::android::OK,
                  defaultSettings.update(ANDROID_SENSOR_PIXEL_MODE, &sensorPixelMode, 1));

        // Serialize the helper metadata object into the AIDL CameraMetadata vector.
        // NOTE(review): settingsBuffer is never unlocked via defaultSettings.unlock()
        // in this test (unlike the sibling tests) — looks benign since
        // defaultSettings is re-created each iteration, but confirm.
        const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
        uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
        settings.metadata = std::vector(
                rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
        overrideRotateAndCrop(&settings);

        // Close the metadata-probing session; configureStreams() below opens a new one.
        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());

        std::vector<HalStream> halStreams;
        bool supportsPartialResults = false;
        bool useHalBufManager = false;
        int32_t partialResultCount = 0;
        Stream previewStream;
        std::shared_ptr<DeviceCb> cb;

        // Exercise one max-resolution capture per output pixel format.
        std::list<PixelFormat> pixelFormats = {PixelFormat::YCBCR_420_888, PixelFormat::RAW16};
        for (PixelFormat format : pixelFormats) {
            previewStream.usage =
                    static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                            GRALLOC1_CONSUMER_USAGE_CPU_READ);
            previewStream.dataSpace = Dataspace::UNKNOWN;
            configureStreams(name, mProvider, format, &mSession, &previewStream, &halStreams,
                             &supportsPartialResults, &partialResultCount, &useHalBufManager, &cb,
                             0, /*maxResolution*/ true);
            ASSERT_NE(mSession, nullptr);

            // Fetch the result-metadata fast message queue; fall back to result
            // payloads in the callback when the queue is unusable.
            ::aidl::android::hardware::common::fmq::MQDescriptor<
                    int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
                    descriptor;
            auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
            ASSERT_TRUE(resultQueueRet.isOk());

            std::shared_ptr<ResultMetadataQueue> resultQueue =
                    std::make_shared<ResultMetadataQueue>(descriptor);
            if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
                ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
                resultQueue = nullptr;
                // Don't use the queue onwards.
            }

            std::vector<buffer_handle_t> graphicBuffers;
            graphicBuffers.reserve(halStreams.size());
            // One in-flight tracker covering a buffer per configured stream.
            std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
                    static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
                    partialResultCount, std::unordered_set<std::string>(), resultQueue);

            std::vector<CaptureRequest> requests(1);
            CaptureRequest& request = requests[0];
            std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
            outputBuffers.resize(halStreams.size());

            // Attach one output buffer per stream; with the HAL buffer manager the
            // HAL pulls buffers itself, so an empty handle with bufferId 0 is sent.
            size_t k = 0;
            for (const auto& halStream : halStreams) {
                buffer_handle_t buffer_handle;
                if (useHalBufManager) {
                    outputBuffers[k] = {halStream.id, 0,
                                        NativeHandle(), BufferStatus::OK,
                                        NativeHandle(), NativeHandle()};
                } else {
                    allocateGraphicBuffer(previewStream.width, previewStream.height,
                                          android_convertGralloc1To0Usage(
                                                  static_cast<uint64_t>(halStream.producerUsage),
                                                  static_cast<uint64_t>(halStream.consumerUsage)),
                                          halStream.overrideFormat, &buffer_handle);
                    graphicBuffers.push_back(buffer_handle);
                    outputBuffers[k] = {
                            halStream.id, bufferId, ::android::makeToAidl(buffer_handle),
                            BufferStatus::OK, NativeHandle(), NativeHandle()};
                    bufferId++;
                }
                k++;
            }

            // No reprocessing input for this request.
            request.inputBuffer = {
                    -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
            request.frameNumber = frameNumber;
            request.fmqSettingsSize = 0;
            request.settings = settings;
            request.inputWidth = 0;
            request.inputHeight = 0;

            {
                std::unique_lock<std::mutex> l(mLock);
                mInflightMap.clear();
                mInflightMap[frameNumber] = inflightReq;
            }

            int32_t numRequestProcessed = 0;
            std::vector<BufferCache> cachesToRemove;
            ndk::ScopedAStatus returnStatus =
                    mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
            ASSERT_TRUE(returnStatus.isOk());
            ASSERT_EQ(numRequestProcessed, 1u);

            {
                // Block until the callback delivers all buffers and metadata (or an
                // error), with a per-wakeup timeout.
                std::unique_lock<std::mutex> l(mLock);
                while (!inflightReq->errorCodeValid &&
                       ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kStreamBufferTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
                }

                ASSERT_FALSE(inflightReq->errorCodeValid);
                ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
            }
            if (useHalBufManager) {
                // Ask the HAL to return all stream buffers before closing.
                std::vector<int32_t> streamIds(halStreams.size());
                for (size_t i = 0; i < streamIds.size(); i++) {
                    streamIds[i] = halStreams[i].id;
                }
                verifyBuffersReturned(mSession, streamIds, cb);
            }

            ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
        }
    }
}
1862
1863// Generate and verify 10-bit dynamic range request
TEST_P(CameraAidlTest, process10BitDynamicRangeRequest) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    // Monotonic buffer id shared across all sessions in this test.
    int64_t bufferId = 1;
    CameraMetadata settings;

    for (const auto& name : cameraDeviceNames) {
        std::string version, deviceId;
        ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> device;
        // Probe static metadata with a temporary session; it is closed again before
        // the per-profile stream configuration below.
        openEmptyDeviceSession(name, mProvider, &mSession, &meta, &device);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
        if (!is10BitDynamicRangeCapable(staticMeta)) {
            // No 10-bit output support; skip this device.
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }
        // Every advertised 10-bit dynamic range profile is exercised below.
        std::vector<RequestAvailableDynamicRangeProfilesMap> profileList;
        get10BitDynamicRangeProfiles(staticMeta, &profileList);
        ASSERT_FALSE(profileList.empty());

        CameraMetadata req;
        android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
        ndk::ScopedAStatus ret =
                mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &req);
        ASSERT_TRUE(ret.isOk());

        // Validate the default settings blob before reusing it for every request.
        const camera_metadata_t* metadata =
                reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
        size_t expectedSize = req.metadata.size();
        int result = validate_camera_metadata_structure(metadata, &expectedSize);
        ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));

        size_t entryCount = get_camera_metadata_entry_count(metadata);
        ASSERT_GT(entryCount, 0u);
        defaultSettings = metadata;

        // Serialize into the AIDL CameraMetadata payload used by every request.
        // NOTE(review): settingsBuffer is never unlocked here — presumably fine
        // since defaultSettings is scoped to this loop iteration; confirm.
        const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
        uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
        settings.metadata = std::vector(
                rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
        overrideRotateAndCrop(&settings);

        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());

        std::vector<HalStream> halStreams;
        bool supportsPartialResults = false;
        bool useHalBufManager = false;
        int32_t partialResultCount = 0;
        Stream previewStream;
        std::shared_ptr<DeviceCb> cb;
        for (const auto& profile : profileList) {
            // HWCOMPOSER usage + IMPLEMENTATION_DEFINED format models a display-bound
            // 10-bit preview stream for the profile under test.
            previewStream.usage =
                    static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                            GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
            previewStream.dataSpace = getDataspace(PixelFormat::IMPLEMENTATION_DEFINED);
            configureStreams(name, mProvider, PixelFormat::IMPLEMENTATION_DEFINED, &mSession,
                             &previewStream, &halStreams, &supportsPartialResults,
                             &partialResultCount, &useHalBufManager, &cb, 0,
                             /*maxResolution*/ false, profile);
            ASSERT_NE(mSession, nullptr);

            ::aidl::android::hardware::common::fmq::MQDescriptor<
                    int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
                    descriptor;
            auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
            ASSERT_TRUE(resultQueueRet.isOk());

            std::shared_ptr<ResultMetadataQueue> resultQueue =
                    std::make_shared<ResultMetadataQueue>(descriptor);
            if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
                ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
                resultQueue = nullptr;
                // Don't use the queue onwards.
            }

            mInflightMap.clear();
            // Stream as long as needed to fill the Hal inflight queue
            std::vector<CaptureRequest> requests(halStreams[0].maxBuffers);

            // Build one request per queue slot; frame numbers start at 0 here.
            for (int32_t frameNumber = 0; frameNumber < requests.size(); frameNumber++) {
                std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
                        static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
                        partialResultCount, std::unordered_set<std::string>(), resultQueue);

                CaptureRequest& request = requests[frameNumber];
                std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
                outputBuffers.resize(halStreams.size());

                size_t k = 0;
                inflightReq->mOutstandingBufferIds.resize(halStreams.size());
                std::vector<buffer_handle_t> graphicBuffers;
                graphicBuffers.reserve(halStreams.size());

                // One output buffer per stream; with the HAL buffer manager the HAL
                // acquires buffers itself (empty handle, bufferId 0).
                for (const auto& halStream : halStreams) {
                    buffer_handle_t buffer_handle;
                    if (useHalBufManager) {
                        outputBuffers[k] = {halStream.id, 0,
                                            NativeHandle(), BufferStatus::OK,
                                            NativeHandle(), NativeHandle()};
                    } else {
                        auto usage = android_convertGralloc1To0Usage(
                                static_cast<uint64_t>(halStream.producerUsage),
                                static_cast<uint64_t>(halStream.consumerUsage));
                        allocateGraphicBuffer(previewStream.width, previewStream.height, usage,
                                              halStream.overrideFormat, &buffer_handle);

                        // Track the handle so verify10BitMetadata() can inspect it later.
                        inflightReq->mOutstandingBufferIds[halStream.id][bufferId] = buffer_handle;
                        graphicBuffers.push_back(buffer_handle);
                        outputBuffers[k] = {halStream.id, bufferId,
                                android::makeToAidl(buffer_handle), BufferStatus::OK, NativeHandle(),
                                NativeHandle()};
                        bufferId++;
                    }
                    k++;
                }

                // No reprocessing input.
                request.inputBuffer = {
                        -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
                request.frameNumber = frameNumber;
                request.fmqSettingsSize = 0;
                request.settings = settings;
                request.inputWidth = 0;
                request.inputHeight = 0;

                {
                    std::unique_lock<std::mutex> l(mLock);
                    mInflightMap[frameNumber] = inflightReq;
                }

            }

            // Submit the whole batch in a single call.
            int32_t numRequestProcessed = 0;
            std::vector<BufferCache> cachesToRemove;
            ndk::ScopedAStatus returnStatus =
                    mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
            ASSERT_TRUE(returnStatus.isOk());
            ASSERT_EQ(numRequestProcessed, requests.size());

            // Tell the HAL the repeating burst ends at the last submitted frame.
            returnStatus = mSession->repeatingRequestEnd(requests.size() - 1,
                    std::vector<int32_t> {halStreams[0].id});
            ASSERT_TRUE(returnStatus.isOk());

            // Wait for each frame's completion and verify the 10-bit output metadata.
            // NOTE(review): mInflightMap is read before mLock is taken here, unlike
            // the insertions above — looks racy with the result callback; confirm.
            for (int32_t frameNumber = 0; frameNumber < requests.size(); frameNumber++) {
                const auto& inflightReq = mInflightMap[frameNumber];
                std::unique_lock<std::mutex> l(mLock);
                while (!inflightReq->errorCodeValid &&
                       ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kStreamBufferTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
                }

                ASSERT_FALSE(inflightReq->errorCodeValid);
                ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
                verify10BitMetadata(mHandleImporter, *inflightReq, profile);
            }

            if (useHalBufManager) {
                // Flush and wait for all HAL-managed buffers to be returned.
                std::vector<int32_t> streamIds(halStreams.size());
                for (size_t i = 0; i < streamIds.size(); i++) {
                    streamIds[i] = halStreams[i].id;
                }
                mSession->signalStreamFlush(streamIds, /*streamConfigCounter*/ 0);
                cb->waitForBuffersReturned();
            }

            ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
        }
    }
}
2040
Austin Borger4728fc42022-07-15 11:27:53 -07002041TEST_P(CameraAidlTest, process8BitColorSpaceRequests) {
2042 static int profiles[] = {
2043 ColorSpaceNamed::BT709,
2044 ColorSpaceNamed::DCI_P3,
2045 ColorSpaceNamed::DISPLAY_P3,
2046 ColorSpaceNamed::EXTENDED_SRGB,
2047 ColorSpaceNamed::LINEAR_EXTENDED_SRGB,
2048 ColorSpaceNamed::NTSC_1953,
2049 ColorSpaceNamed::SMPTE_C,
2050 ColorSpaceNamed::SRGB
2051 };
2052
2053 for (int32_t i = 0; i < sizeof(profiles) / sizeof(profiles[0]); i++) {
2054 processColorSpaceRequest(static_cast<RequestAvailableColorSpaceProfilesMap>(profiles[i]),
2055 static_cast<RequestAvailableDynamicRangeProfilesMap>(
2056 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD));
2057 }
2058}
2059
2060TEST_P(CameraAidlTest, process10BitColorSpaceRequests) {
2061 static const camera_metadata_enum_android_request_available_dynamic_range_profiles_map
2062 dynamicRangeProfiles[] = {
2063 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10,
2064 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10,
2065 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS,
2066 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF,
2067 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO,
2068 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM,
2069 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO,
2070 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF,
2071 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF_PO,
2072 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM,
2073 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM_PO
2074 };
2075
2076 // Process all dynamic range profiles with BT2020
2077 for (int32_t i = 0; i < sizeof(dynamicRangeProfiles) / sizeof(dynamicRangeProfiles[0]); i++) {
2078 processColorSpaceRequest(
2079 static_cast<RequestAvailableColorSpaceProfilesMap>(ColorSpaceNamed::BT2020),
2080 static_cast<RequestAvailableDynamicRangeProfilesMap>(dynamicRangeProfiles[i]));
2081 }
2082}
2083
Avichal Rakesh362242f2022-02-08 12:40:53 -08002084// Generate and verify a burst containing alternating sensor sensitivity values
TEST_P(CameraAidlTest, processCaptureRequestBurstISO) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
                                        static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
    int64_t bufferId = 1;
    int32_t frameNumber = 1;
    // Allowed relative deviation between requested and reported sensitivity.
    float isoTol = .03f;
    CameraMetadata settings;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        settings.metadata.clear();
        // Open a probing session to read the static metadata; closed again before
        // the preview stream is configured below.
        std::shared_ptr<ICameraDevice> unusedDevice;
        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &unusedDevice /*out*/);
        camera_metadata_t* staticMetaBuffer =
                clone_camera_metadata(reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
        // staticMeta takes ownership of the cloned buffer.
        ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta(
                staticMetaBuffer);

        camera_metadata_entry_t hwLevel = staticMeta.find(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL);
        ASSERT_TRUE(0 < hwLevel.count);
        if (ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED == hwLevel.data.u8[0] ||
            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL == hwLevel.data.u8[0]) {
            // Limited/External devices can skip this test
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        // The burst alternates between the two sensitivity-range endpoints.
        camera_metadata_entry_t isoRange = staticMeta.find(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE);
        ASSERT_EQ(isoRange.count, 2u);

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());

        bool supportsPartialResults = false;
        bool useHalBufManager = false;
        int32_t partialResultCount = 0;
        Stream previewStream;
        std::vector<HalStream> halStreams;
        std::shared_ptr<DeviceCb> cb;
        configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
                               &previewStream /*out*/, &halStreams /*out*/,
                               &supportsPartialResults /*out*/, &partialResultCount /*out*/,
                               &useHalBufManager /*out*/, &cb /*out*/);

        // Result-metadata fast message queue; optional, results fall back to the
        // callback payload when the queue is unusable.
        ::aidl::android::hardware::common::fmq::MQDescriptor<
                int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
                descriptor;
        auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
        std::shared_ptr<ResultMetadataQueue> resultQueue =
                std::make_shared<ResultMetadataQueue>(descriptor);
        ASSERT_TRUE(resultQueueRet.isOk());
        if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
            ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
            resultQueue = nullptr;
            // Don't use the queue onwards.
        }

        ret = mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &settings);
        ASSERT_TRUE(ret.isOk());

        // Per-frame state for the whole burst.
        ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
        std::vector<CaptureRequest> requests(kBurstFrameCount);
        std::vector<buffer_handle_t> buffers(kBurstFrameCount);
        std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount);
        std::vector<int32_t> isoValues(kBurstFrameCount);
        std::vector<CameraMetadata> requestSettings(kBurstFrameCount);

        for (int32_t i = 0; i < kBurstFrameCount; i++) {
            std::unique_lock<std::mutex> l(mLock);
            CaptureRequest& request = requests[i];
            std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
            outputBuffers.resize(1);
            StreamBuffer& outputBuffer = outputBuffers[0];

            // Alternate between min and max sensitivity on even/odd frames.
            isoValues[i] = ((i % 2) == 0) ? isoRange.data.i32[0] : isoRange.data.i32[1];
            if (useHalBufManager) {
                outputBuffer = {halStreams[0].id, 0,
                                NativeHandle(), BufferStatus::OK,
                                NativeHandle(), NativeHandle()};
            } else {
                allocateGraphicBuffer(previewStream.width, previewStream.height,
                                      android_convertGralloc1To0Usage(
                                              static_cast<uint64_t>(halStreams[0].producerUsage),
                                              static_cast<uint64_t>(halStreams[0].consumerUsage)),
                                      halStreams[0].overrideFormat, &buffers[i]);
                outputBuffer = {halStreams[0].id, bufferId + i, ::android::makeToAidl(buffers[i]),
                                BufferStatus::OK, NativeHandle(), NativeHandle()};
            }

            // Start from the default preview settings each frame (release() below
            // empties requestMeta, so append() re-seeds it every iteration).
            requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));

            // Disable all 3A routines
            uint8_t mode = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF);
            ASSERT_EQ(::android::OK, requestMeta.update(ANDROID_CONTROL_MODE, &mode, 1));
            ASSERT_EQ(::android::OK,
                      requestMeta.update(ANDROID_SENSOR_SENSITIVITY, &isoValues[i], 1));
            camera_metadata_t* metaBuffer = requestMeta.release();
            uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
            requestSettings[i].metadata = std::vector(
                    rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
            overrideRotateAndCrop(&(requestSettings[i]));

            request.frameNumber = frameNumber + i;
            request.fmqSettingsSize = 0;
            request.settings = requestSettings[i];
            // No reprocessing input.
            request.inputBuffer = {
                    -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};

            inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
                                                                partialResultCount, resultQueue);
            mInflightMap[frameNumber + i] = inflightReqs[i];
        }

        // Submit the entire burst in one call.
        int32_t numRequestProcessed = 0;
        std::vector<BufferCache> cachesToRemove;

        ndk::ScopedAStatus returnStatus =
                mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
        ASSERT_TRUE(returnStatus.isOk());
        ASSERT_EQ(numRequestProcessed, kBurstFrameCount);

        // Wait for every frame and verify the reported sensitivity matches the
        // request within isoTol.
        for (size_t i = 0; i < kBurstFrameCount; i++) {
            std::unique_lock<std::mutex> l(mLock);
            while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
                                                        (!inflightReqs[i]->haveResultMetadata))) {
                auto timeout = std::chrono::system_clock::now() +
                               std::chrono::seconds(kStreamBufferTimeoutSec);
                ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
            }

            ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
            ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
            ASSERT_EQ(previewStream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
            ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
            ASSERT_TRUE(inflightReqs[i]->collectedResult.exists(ANDROID_SENSOR_SENSITIVITY));
            camera_metadata_entry_t isoResult =
                    inflightReqs[i]->collectedResult.find(ANDROID_SENSOR_SENSITIVITY);
            ASSERT_TRUE(std::abs(isoResult.data.i32[0] - isoValues[i]) <=
                        std::round(isoValues[i] * isoTol));
        }

        if (useHalBufManager) {
            verifyBuffersReturned(mSession, previewStream.id, cb);
        }
        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
2239
2240// Test whether an incorrect capture request with missing settings will
2241// be reported correctly.
2242TEST_P(CameraAidlTest, processCaptureRequestInvalidSinglePreview) {
2243 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2244 std::vector<AvailableStream> outputPreviewStreams;
2245 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2246 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2247 int64_t bufferId = 1;
2248 int32_t frameNumber = 1;
2249 CameraMetadata settings;
2250
2251 for (const auto& name : cameraDeviceNames) {
2252 Stream previewStream;
2253 std::vector<HalStream> halStreams;
2254 std::shared_ptr<DeviceCb> cb;
2255 bool supportsPartialResults = false;
2256 bool useHalBufManager = false;
2257 int32_t partialResultCount = 0;
2258 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2259 &previewStream /*out*/, &halStreams /*out*/,
2260 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2261 &useHalBufManager /*out*/, &cb /*out*/);
2262 ASSERT_NE(mSession, nullptr);
2263 ASSERT_FALSE(halStreams.empty());
2264
2265 buffer_handle_t buffer_handle = nullptr;
2266
2267 if (useHalBufManager) {
2268 bufferId = 0;
2269 } else {
2270 allocateGraphicBuffer(previewStream.width, previewStream.height,
2271 android_convertGralloc1To0Usage(
2272 static_cast<uint64_t>(halStreams[0].producerUsage),
2273 static_cast<uint64_t>(halStreams[0].consumerUsage)),
2274 halStreams[0].overrideFormat, &buffer_handle);
2275 }
2276
2277 std::vector<CaptureRequest> requests(1);
2278 CaptureRequest& request = requests[0];
2279 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2280 outputBuffers.resize(1);
2281 StreamBuffer& outputBuffer = outputBuffers[0];
2282
2283 outputBuffer = {
2284 halStreams[0].id,
2285 bufferId,
2286 buffer_handle == nullptr ? NativeHandle() : ::android::makeToAidl(buffer_handle),
2287 BufferStatus::OK,
2288 NativeHandle(),
2289 NativeHandle()};
2290
2291 request.inputBuffer = {
2292 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2293 request.frameNumber = frameNumber;
2294 request.fmqSettingsSize = 0;
2295 request.settings = settings;
2296
2297 // Settings were not correctly initialized, we should fail here
2298 int32_t numRequestProcessed = 0;
2299 std::vector<BufferCache> cachesToRemove;
2300 ndk::ScopedAStatus ret =
2301 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2302 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
2303 ASSERT_EQ(numRequestProcessed, 0u);
2304
2305 ret = mSession->close();
2306 mSession = nullptr;
2307 ASSERT_TRUE(ret.isOk());
2308 }
2309}
2310
2311// Verify camera offline session behavior
2312TEST_P(CameraAidlTest, switchToOffline) {
2313 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2314 AvailableStream threshold = {kMaxStillWidth, kMaxStillHeight,
2315 static_cast<int32_t>(PixelFormat::BLOB)};
2316 int64_t bufferId = 1;
2317 int32_t frameNumber = 1;
2318 CameraMetadata settings;
2319
2320 for (const auto& name : cameraDeviceNames) {
2321 CameraMetadata meta;
2322 {
2323 std::shared_ptr<ICameraDevice> unusedDevice;
2324 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2325 &unusedDevice);
2326 camera_metadata_t* staticMetaBuffer = clone_camera_metadata(
2327 reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
2328 ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta(
2329 staticMetaBuffer);
2330
2331 if (isOfflineSessionSupported(staticMetaBuffer) != Status::OK) {
2332 ndk::ScopedAStatus ret = mSession->close();
2333 mSession = nullptr;
2334 ASSERT_TRUE(ret.isOk());
2335 continue;
2336 }
2337 ndk::ScopedAStatus ret = mSession->close();
2338 mSession = nullptr;
2339 ASSERT_TRUE(ret.isOk());
2340 }
2341
2342 bool supportsPartialResults = false;
2343 int32_t partialResultCount = 0;
2344 Stream stream;
2345 std::vector<HalStream> halStreams;
2346 std::shared_ptr<DeviceCb> cb;
2347 int32_t jpegBufferSize;
2348 bool useHalBufManager;
2349 configureOfflineStillStream(name, mProvider, &threshold, &mSession /*out*/, &stream /*out*/,
2350 &halStreams /*out*/, &supportsPartialResults /*out*/,
2351 &partialResultCount /*out*/, &cb /*out*/,
2352 &jpegBufferSize /*out*/, &useHalBufManager /*out*/);
2353
2354 auto ret = mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE,
2355 &settings);
2356 ASSERT_TRUE(ret.isOk());
2357
2358 ::aidl::android::hardware::common::fmq::MQDescriptor<
2359 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2360 descriptor;
2361
2362 ndk::ScopedAStatus resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2363 ASSERT_TRUE(resultQueueRet.isOk());
2364 std::shared_ptr<ResultMetadataQueue> resultQueue =
2365 std::make_shared<ResultMetadataQueue>(descriptor);
2366 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2367 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
2368 resultQueue = nullptr;
2369 // Don't use the queue onwards.
2370 }
2371
2372 ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
2373
2374 std::vector<buffer_handle_t> buffers(kBurstFrameCount);
2375 std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount);
2376 std::vector<CameraMetadata> requestSettings(kBurstFrameCount);
2377
2378 std::vector<CaptureRequest> requests(kBurstFrameCount);
2379
2380 HalStream halStream = halStreams[0];
2381 for (uint32_t i = 0; i < kBurstFrameCount; i++) {
2382 CaptureRequest& request = requests[i];
2383 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2384 outputBuffers.resize(1);
2385 StreamBuffer& outputBuffer = outputBuffers[0];
2386
2387 std::unique_lock<std::mutex> l(mLock);
2388 if (useHalBufManager) {
2389 outputBuffer = {halStream.id, 0, NativeHandle(), BufferStatus::OK, NativeHandle(),
2390 NativeHandle()};
2391 } else {
2392 // jpeg buffer (w,h) = (blobLen, 1)
2393 allocateGraphicBuffer(jpegBufferSize, /*height*/ 1,
2394 android_convertGralloc1To0Usage(
2395 static_cast<uint64_t>(halStream.producerUsage),
2396 static_cast<uint64_t>(halStream.consumerUsage)),
2397 halStream.overrideFormat, &buffers[i]);
2398 outputBuffer = {halStream.id, bufferId + i, ::android::makeToAidl(buffers[i]),
2399 BufferStatus::OK, NativeHandle(), NativeHandle()};
2400 }
2401
2402 requestMeta.clear();
2403 requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));
2404
2405 camera_metadata_t* metaBuffer = requestMeta.release();
2406 uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2407 requestSettings[i].metadata = std::vector(
2408 rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2409 overrideRotateAndCrop(&requestSettings[i]);
2410
2411 request.frameNumber = frameNumber + i;
2412 request.fmqSettingsSize = 0;
2413 request.settings = requestSettings[i];
2414 request.inputBuffer = {/*streamId*/ -1,
2415 /*bufferId*/ 0, NativeHandle(),
2416 BufferStatus::ERROR, NativeHandle(),
2417 NativeHandle()};
2418
2419 inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2420 partialResultCount, resultQueue);
2421 mInflightMap[frameNumber + i] = inflightReqs[i];
2422 }
2423
2424 int32_t numRequestProcessed = 0;
2425 std::vector<BufferCache> cachesToRemove;
2426
2427 ndk::ScopedAStatus returnStatus =
2428 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2429 ASSERT_TRUE(returnStatus.isOk());
2430 ASSERT_EQ(numRequestProcessed, kBurstFrameCount);
2431
2432 std::vector<int32_t> offlineStreamIds = {halStream.id};
2433 CameraOfflineSessionInfo offlineSessionInfo;
2434 std::shared_ptr<ICameraOfflineSession> offlineSession;
2435 returnStatus =
2436 mSession->switchToOffline(offlineStreamIds, &offlineSessionInfo, &offlineSession);
2437
2438 if (!halStreams[0].supportOffline) {
2439 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
2440 returnStatus.getServiceSpecificError());
2441 ret = mSession->close();
2442 mSession = nullptr;
2443 ASSERT_TRUE(ret.isOk());
2444 continue;
2445 }
2446
2447 ASSERT_TRUE(returnStatus.isOk());
2448 // Hal might be unable to find any requests qualified for offline mode.
2449 if (offlineSession == nullptr) {
2450 ret = mSession->close();
2451 mSession = nullptr;
2452 ASSERT_TRUE(ret.isOk());
2453 continue;
2454 }
2455
2456 ASSERT_EQ(offlineSessionInfo.offlineStreams.size(), 1u);
2457 ASSERT_EQ(offlineSessionInfo.offlineStreams[0].id, halStream.id);
2458 ASSERT_NE(offlineSessionInfo.offlineRequests.size(), 0u);
2459
2460 // close device session to make sure offline session does not rely on it
2461 ret = mSession->close();
2462 mSession = nullptr;
2463 ASSERT_TRUE(ret.isOk());
2464
2465 ::aidl::android::hardware::common::fmq::MQDescriptor<
2466 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2467 offlineResultDescriptor;
2468
2469 auto offlineResultQueueRet =
2470 offlineSession->getCaptureResultMetadataQueue(&offlineResultDescriptor);
2471 std::shared_ptr<ResultMetadataQueue> offlineResultQueue =
2472 std::make_shared<ResultMetadataQueue>(descriptor);
2473 if (!offlineResultQueue->isValid() || offlineResultQueue->availableToWrite() <= 0) {
2474 ALOGE("%s: offline session returns empty result metadata fmq, not use it", __func__);
2475 offlineResultQueue = nullptr;
2476 // Don't use the queue onwards.
2477 }
2478 ASSERT_TRUE(offlineResultQueueRet.isOk());
2479
2480 updateInflightResultQueue(offlineResultQueue);
2481
2482 ret = offlineSession->setCallback(cb);
2483 ASSERT_TRUE(ret.isOk());
2484
2485 for (size_t i = 0; i < kBurstFrameCount; i++) {
2486 std::unique_lock<std::mutex> l(mLock);
2487 while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
2488 (!inflightReqs[i]->haveResultMetadata))) {
2489 auto timeout = std::chrono::system_clock::now() +
2490 std::chrono::seconds(kStreamBufferTimeoutSec);
2491 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2492 }
2493
2494 ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
2495 ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
2496 ASSERT_EQ(stream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
2497 ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
2498 }
2499
2500 ret = offlineSession->close();
2501 ASSERT_TRUE(ret.isOk());
2502 }
2503}
2504
2505// Check whether an invalid capture request with missing output buffers
2506// will be reported correctly.
2507TEST_P(CameraAidlTest, processCaptureRequestInvalidBuffer) {
2508 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2509 std::vector<AvailableStream> outputBlobStreams;
2510 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2511 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2512 int32_t frameNumber = 1;
2513 CameraMetadata settings;
2514
2515 for (const auto& name : cameraDeviceNames) {
2516 Stream previewStream;
2517 std::vector<HalStream> halStreams;
2518 std::shared_ptr<DeviceCb> cb;
2519 bool supportsPartialResults = false;
2520 bool useHalBufManager = false;
2521 int32_t partialResultCount = 0;
2522 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2523 &previewStream /*out*/, &halStreams /*out*/,
2524 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2525 &useHalBufManager /*out*/, &cb /*out*/);
2526
2527 RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
2528 ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings);
2529 ASSERT_TRUE(ret.isOk());
2530 overrideRotateAndCrop(&settings);
2531
2532 std::vector<CaptureRequest> requests(1);
2533 CaptureRequest& request = requests[0];
2534 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2535 outputBuffers.resize(1);
2536 // Empty output buffer
2537 outputBuffers[0] = {
2538 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2539
2540 request.inputBuffer = {
2541 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2542 request.frameNumber = frameNumber;
2543 request.fmqSettingsSize = 0;
2544 request.settings = settings;
2545
2546 // Output buffers are missing, we should fail here
2547 int32_t numRequestProcessed = 0;
2548 std::vector<BufferCache> cachesToRemove;
2549 ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2550 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
2551 ASSERT_EQ(numRequestProcessed, 0u);
2552
2553 ret = mSession->close();
2554 mSession = nullptr;
2555 ASSERT_TRUE(ret.isOk());
2556 }
2557}
2558
// Generate, trigger and flush a preview request
TEST_P(CameraAidlTest, flushPreviewRequest) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputPreviewStreams;
    AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
                                        static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
    int64_t bufferId = 1;
    int32_t frameNumber = 1;
    CameraMetadata settings;

    for (const auto& name : cameraDeviceNames) {
        Stream previewStream;
        std::vector<HalStream> halStreams;
        std::shared_ptr<DeviceCb> cb;
        bool supportsPartialResults = false;
        bool useHalBufManager = false;
        int32_t partialResultCount = 0;

        configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
                               &previewStream /*out*/, &halStreams /*out*/,
                               &supportsPartialResults /*out*/, &partialResultCount /*out*/,
                               &useHalBufManager /*out*/, &cb /*out*/);

        ASSERT_NE(mSession, nullptr);
        ASSERT_NE(cb, nullptr);
        ASSERT_FALSE(halStreams.empty());

        ::aidl::android::hardware::common::fmq::MQDescriptor<
                int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
                descriptor;

        auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
        std::shared_ptr<ResultMetadataQueue> resultQueue =
                std::make_shared<ResultMetadataQueue>(descriptor);
        ASSERT_TRUE(resultQueueRet.isOk());
        if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
            ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
            resultQueue = nullptr;
            // Don't use the queue onwards.
        }

        std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
                1, false, supportsPartialResults, partialResultCount, resultQueue);
        RequestTemplate reqTemplate = RequestTemplate::PREVIEW;

        ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings);
        ASSERT_TRUE(ret.isOk());
        overrideRotateAndCrop(&settings);

        buffer_handle_t buffer_handle;
        std::vector<CaptureRequest> requests(1);
        CaptureRequest& request = requests[0];
        std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
        outputBuffers.resize(1);
        StreamBuffer& outputBuffer = outputBuffers[0];
        if (useHalBufManager) {
            // HAL-side buffer manager allocates buffers: empty handle, id 0.
            bufferId = 0;
            outputBuffer = {halStreams[0].id, bufferId, NativeHandle(),
                            BufferStatus::OK, NativeHandle(), NativeHandle()};
        } else {
            // Allocate the preview buffer ourselves and attach its handle.
            allocateGraphicBuffer(previewStream.width, previewStream.height,
                                  android_convertGralloc1To0Usage(
                                          static_cast<uint64_t>(halStreams[0].producerUsage),
                                          static_cast<uint64_t>(halStreams[0].consumerUsage)),
                                  halStreams[0].overrideFormat, &buffer_handle);
            outputBuffer = {halStreams[0].id, bufferId, ::android::makeToAidl(buffer_handle),
                            BufferStatus::OK, NativeHandle(), NativeHandle()};
        }

        request.frameNumber = frameNumber;
        request.fmqSettingsSize = 0;
        request.settings = settings;
        request.inputBuffer = {
                -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};

        // Register the request in the in-flight map before submitting so the
        // device callback can find it.
        {
            std::unique_lock<std::mutex> l(mLock);
            mInflightMap.clear();
            mInflightMap[frameNumber] = inflightReq;
        }

        int32_t numRequestProcessed = 0;
        std::vector<BufferCache> cachesToRemove;
        ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
        ASSERT_TRUE(ret.isOk());
        ASSERT_EQ(numRequestProcessed, 1u);

        // Flush before waiting for request to complete.
        ndk::ScopedAStatus returnStatus = mSession->flush();
        ASSERT_TRUE(returnStatus.isOk());

        {
            std::unique_lock<std::mutex> l(mLock);
            // Wait until the request either reports an error or returns all
            // of its buffers and result metadata.
            while (!inflightReq->errorCodeValid &&
                   ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
                auto timeout = std::chrono::system_clock::now() +
                               std::chrono::seconds(kStreamBufferTimeoutSec);
                ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
            }

            if (!inflightReq->errorCodeValid) {
                // No error: the request completed normally despite the flush.
                ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
                ASSERT_EQ(previewStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId);
            } else {
                // Flushed: request/result/buffer errors are all acceptable
                // outcomes; a device error is not.
                switch (inflightReq->errorCode) {
                    case ErrorCode::ERROR_REQUEST:
                    case ErrorCode::ERROR_RESULT:
                    case ErrorCode::ERROR_BUFFER:
                        // Expected
                        break;
                    case ErrorCode::ERROR_DEVICE:
                    default:
                        FAIL() << "Unexpected error:"
                               << static_cast<uint32_t>(inflightReq->errorCode);
                }
            }
        }

        if (useHalBufManager) {
            verifyBuffersReturned(mSession, previewStream.id, cb);
        }

        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
2686
2687// Verify that camera flushes correctly without any pending requests.
2688TEST_P(CameraAidlTest, flushEmpty) {
2689 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2690 std::vector<AvailableStream> outputPreviewStreams;
2691 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2692 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2693
2694 for (const auto& name : cameraDeviceNames) {
2695 Stream previewStream;
2696 std::vector<HalStream> halStreams;
2697 std::shared_ptr<DeviceCb> cb;
2698 bool supportsPartialResults = false;
2699 bool useHalBufManager = false;
2700
2701 int32_t partialResultCount = 0;
2702 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2703 &previewStream /*out*/, &halStreams /*out*/,
2704 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2705 &useHalBufManager /*out*/, &cb /*out*/);
2706
2707 ndk::ScopedAStatus returnStatus = mSession->flush();
2708 ASSERT_TRUE(returnStatus.isOk());
2709
2710 {
2711 std::unique_lock<std::mutex> l(mLock);
2712 auto timeout = std::chrono::system_clock::now() +
2713 std::chrono::milliseconds(kEmptyFlushTimeoutMSec);
2714 ASSERT_EQ(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2715 }
2716
2717 ndk::ScopedAStatus ret = mSession->close();
2718 mSession = nullptr;
2719 ASSERT_TRUE(ret.isOk());
2720 }
2721}
2722
// Test camera provider notify method
TEST_P(CameraAidlTest, providerDeviceStateNotification) {
    // Notify a folded/covered state first, then restore DEVICE_STATE_NORMAL so
    // subsequent tests run with the provider back in its normal device state.
    notifyDeviceState(ICameraProvider::DEVICE_STATE_BACK_COVERED);
    notifyDeviceState(ICameraProvider::DEVICE_STATE_NORMAL);
}
2728
// Verify that all supported stream formats and sizes can be configured
// successfully for injection camera.
TEST_P(CameraAidlTest, configureInjectionStreamsAvailableOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputStreams;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata metadata;

        std::shared_ptr<ICameraInjectionSession> injectionSession;
        std::shared_ptr<ICameraDevice> unusedDevice;
        openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
                                  &unusedDevice /*out*/);
        // Injection sessions are optional; skip devices without support.
        if (injectionSession == nullptr) {
            continue;
        }

        // Raw, non-owning view into 'metadata'; 'chars' carries a copy of the
        // same static characteristics to pass to configureInjectionStreams().
        camera_metadata_t* staticMetaBuffer =
                reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
        CameraMetadata chars;
        chars.metadata = metadata.metadata;

        outputStreams.clear();
        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams));
        ASSERT_NE(0u, outputStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        // Configure each advertised size/format as its own single-stream
        // session; every one of them is expected to succeed.
        int32_t streamId = 0;
        int32_t streamConfigCounter = 0;
        for (auto& it : outputStreams) {
            Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format));
            Stream stream = {streamId,
                             StreamType::OUTPUT,
                             it.width,
                             it.height,
                             static_cast<PixelFormat>(it.format),
                             static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                     GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                             dataspace,
                             StreamRotation::ROTATION_0,
                             std::string(),
                             jpegBufferSize,
                             0,
                             {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                             RequestAvailableDynamicRangeProfilesMap::
                                     ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

            std::vector<Stream> streams = {stream};
            StreamConfiguration config;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);

            config.streamConfigCounter = streamConfigCounter++;
            ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
            ASSERT_TRUE(s.isOk());
            streamId++;
        }

        std::shared_ptr<ICameraDeviceSession> session;
        ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(session, nullptr);
        ret = session->close();
        ASSERT_TRUE(ret.isOk());
    }
}
2798
// Check for correct handling of invalid/incorrect configuration parameters for injection camera.
TEST_P(CameraAidlTest, configureInjectionStreamsInvalidOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputStreams;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata metadata;
        std::shared_ptr<ICameraInjectionSession> injectionSession;
        std::shared_ptr<ICameraDevice> unusedDevice;
        openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
                                  &unusedDevice);
        // Injection sessions are optional; skip devices without support.
        if (injectionSession == nullptr) {
            continue;
        }

        // Raw, non-owning view into 'metadata'.
        camera_metadata_t* staticMetaBuffer =
                reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
        std::shared_ptr<ICameraDeviceSession> session;
        ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(session, nullptr);

        CameraMetadata chars;
        chars.metadata = metadata.metadata;

        outputStreams.clear();
        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams));
        ASSERT_NE(0u, outputStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        // Case 1: zero width/height must be rejected (ILLEGAL_ARGUMENT or
        // INTERNAL_ERROR are both acceptable).
        int32_t streamId = 0;
        Stream stream = {streamId++,
                         StreamType::OUTPUT,
                         0,
                         0,
                         static_cast<PixelFormat>(outputStreams[0].format),
                         static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                         Dataspace::UNKNOWN,
                         StreamRotation::ROTATION_0,
                         std::string(),
                         jpegBufferSize,
                         0,
                         {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                         RequestAvailableDynamicRangeProfilesMap::
                                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

        int32_t streamConfigCounter = 0;
        std::vector<Stream> streams = {stream};
        StreamConfiguration config;
        createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                  jpegBufferSize);

        config.streamConfigCounter = streamConfigCounter++;
        ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
        ASSERT_TRUE(
                (static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) == s.getServiceSpecificError()) ||
                (static_cast<int32_t>(Status::INTERNAL_ERROR) == s.getServiceSpecificError()));

        // Case 2: unsupported INT32_MAX width/height must be rejected.
        stream = {streamId++,
                  StreamType::OUTPUT,
                  INT32_MAX,
                  INT32_MAX,
                  static_cast<PixelFormat>(outputStreams[0].format),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  jpegBufferSize,
                  0,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                  jpegBufferSize);
        config.streamConfigCounter = streamConfigCounter++;
        s = injectionSession->configureInjectionStreams(config, chars);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());

        // For each advertised size: an invalid pixel format, then an invalid
        // rotation, must each be rejected with ILLEGAL_ARGUMENT.
        for (auto& it : outputStreams) {
            stream = {streamId++,
                      StreamType::OUTPUT,
                      it.width,
                      it.height,
                      static_cast<PixelFormat>(INT32_MAX),
                      static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                              GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                      Dataspace::UNKNOWN,
                      StreamRotation::ROTATION_0,
                      std::string(),
                      jpegBufferSize,
                      0,
                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                      RequestAvailableDynamicRangeProfilesMap::
                              ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
            streams[0] = stream;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);
            config.streamConfigCounter = streamConfigCounter++;
            s = injectionSession->configureInjectionStreams(config, chars);
            ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());

            stream = {streamId++,
                      StreamType::OUTPUT,
                      it.width,
                      it.height,
                      static_cast<PixelFormat>(it.format),
                      static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                              GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                      Dataspace::UNKNOWN,
                      static_cast<StreamRotation>(INT32_MAX),
                      std::string(),
                      jpegBufferSize,
                      0,
                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                      RequestAvailableDynamicRangeProfilesMap::
                              ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
            streams[0] = stream;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);
            config.streamConfigCounter = streamConfigCounter++;
            s = injectionSession->configureInjectionStreams(config, chars);
            ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
        }

        ret = session->close();
        ASSERT_TRUE(ret.isOk());
    }
}
2934
2935// Check whether session parameters are supported for injection camera. If Hal support for them
2936// exist, then try to configure a preview stream using them.
2937TEST_P(CameraAidlTest, configureInjectionStreamsWithSessionParameters) {
2938 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2939 std::vector<AvailableStream> outputPreviewStreams;
2940 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2941 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2942
2943 for (const auto& name : cameraDeviceNames) {
2944 CameraMetadata metadata;
2945 std::shared_ptr<ICameraInjectionSession> injectionSession;
2946 std::shared_ptr<ICameraDevice> unusedDevice;
2947 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2948 &unusedDevice /*out*/);
2949 if (injectionSession == nullptr) {
2950 continue;
2951 }
2952
2953 std::shared_ptr<ICameraDeviceSession> session;
2954 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2955 ASSERT_TRUE(ret.isOk());
2956 ASSERT_NE(session, nullptr);
2957
2958 camera_metadata_t* staticMetaBuffer =
2959 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2960 CameraMetadata chars;
2961 chars.metadata = metadata.metadata;
2962
2963 std::unordered_set<int32_t> availableSessionKeys;
2964 Status rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
2965 &availableSessionKeys);
2966 ASSERT_EQ(Status::OK, rc);
2967 if (availableSessionKeys.empty()) {
2968 ret = session->close();
2969 ASSERT_TRUE(ret.isOk());
2970 continue;
2971 }
2972
2973 android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings;
2974 android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams,
2975 modifiedSessionParams;
2976 constructFilteredSettings(session, availableSessionKeys, RequestTemplate::PREVIEW,
2977 &previewRequestSettings, &sessionParams);
2978 if (sessionParams.isEmpty()) {
2979 ret = session->close();
2980 ASSERT_TRUE(ret.isOk());
2981 continue;
2982 }
2983
2984 outputPreviewStreams.clear();
2985
2986 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams,
2987 &previewThreshold));
2988 ASSERT_NE(0u, outputPreviewStreams.size());
2989
2990 Stream previewStream = {
2991 0,
2992 StreamType::OUTPUT,
2993 outputPreviewStreams[0].width,
2994 outputPreviewStreams[0].height,
2995 static_cast<PixelFormat>(outputPreviewStreams[0].format),
2996 static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
2997 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2998 Dataspace::UNKNOWN,
2999 StreamRotation::ROTATION_0,
3000 std::string(),
3001 0,
3002 -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00003003 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
3004 RequestAvailableDynamicRangeProfilesMap::
3005 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08003006 std::vector<Stream> streams = {previewStream};
3007 StreamConfiguration config;
3008 config.streams = streams;
3009 config.operationMode = StreamConfigurationMode::NORMAL_MODE;
3010
3011 modifiedSessionParams = sessionParams;
3012 camera_metadata_t* sessionParamsBuffer = sessionParams.release();
3013 uint8_t* rawSessionParamsBuffer = reinterpret_cast<uint8_t*>(sessionParamsBuffer);
3014 config.sessionParams.metadata =
3015 std::vector(rawSessionParamsBuffer,
3016 rawSessionParamsBuffer + get_camera_metadata_size(sessionParamsBuffer));
3017
3018 config.streamConfigCounter = 0;
3019 config.streamConfigCounter = 0;
3020 config.multiResolutionInputImage = false;
3021
3022 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
3023 ASSERT_TRUE(s.isOk());
3024
3025 sessionParams.acquire(sessionParamsBuffer);
3026 free_camera_metadata(staticMetaBuffer);
3027 ret = session->close();
3028 ASSERT_TRUE(ret.isOk());
3029 }
3030}
3031
3032// Verify that valid stream use cases can be configured successfully, and invalid use cases
3033// fail stream configuration.
3034TEST_P(CameraAidlTest, configureStreamsUseCases) {
3035 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
3036
3037 for (const auto& name : cameraDeviceNames) {
3038 CameraMetadata meta;
3039 std::shared_ptr<ICameraDevice> cameraDevice;
3040
3041 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
3042 &cameraDevice /*out*/);
3043
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
        // Depth-only cameras cannot provide the YCBCR_420_888 preview stream
        // configured below, so close the session and skip this camera.
        if (isDepthOnly(staticMeta)) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        // Find at least one preview-sized YUV output stream to exercise.
        std::vector<AvailableStream> outputPreviewStreams;
        AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
                                            static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputPreviewStreams, &previewThreshold));
        ASSERT_NE(0u, outputPreviewStreams.size());

        // Combine valid and invalid stream use cases: all mandatory use cases
        // plus one value past VIDEO_CALL, which is expected to be invalid.
        std::vector<int64_t> useCases(kMandatoryUseCases);
        useCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL + 1);

        // Collect the use cases this HAL advertises; if the static metadata
        // tag is absent, only DEFAULT is treated as supported.
        std::vector<int64_t> supportedUseCases;
        camera_metadata_ro_entry entry;
        auto retcode = find_camera_metadata_ro_entry(
                staticMeta, ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES, &entry);
        if ((0 == retcode) && (entry.count > 0)) {
            supportedUseCases.insert(supportedUseCases.end(), entry.data.i64,
                                     entry.data.i64 + entry.count);
        } else {
            supportedUseCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
        }

        // One CPU-readable output stream; its useCase field is rewritten on
        // each loop iteration below.
        std::vector<Stream> streams(1);
        streams[0] = {0,
                      StreamType::OUTPUT,
                      outputPreviewStreams[0].width,
                      outputPreviewStreams[0].height,
                      static_cast<PixelFormat>(outputPreviewStreams[0].format),
                      static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
                              GRALLOC1_CONSUMER_USAGE_CPU_READ),
                      Dataspace::UNKNOWN,
                      StreamRotation::ROTATION_0,
                      std::string(),
                      0,
                      -1,
                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                      RequestAvailableDynamicRangeProfilesMap::
                              ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

        int32_t streamConfigCounter = 0;
        CameraMetadata req;
        StreamConfiguration config;
        // Use the STILL_CAPTURE default request settings as session parameters.
        RequestTemplate reqTemplate = RequestTemplate::STILL_CAPTURE;
        ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &req);
        ASSERT_TRUE(ret.isOk());
        config.sessionParams = req;

        for (int64_t useCase : useCases) {
            // Whether the HAL claims to support this use case in its metadata.
            bool useCaseSupported = std::find(supportedUseCases.begin(), supportedUseCases.end(),
                                              useCase) != supportedUseCases.end();

            streams[0].useCase = static_cast<
                    aidl::android::hardware::camera::metadata::ScalerAvailableStreamUseCases>(
                    useCase);
            config.streams = streams;
            config.operationMode = StreamConfigurationMode::NORMAL_MODE;
            config.streamConfigCounter = streamConfigCounter;
            config.multiResolutionInputImage = false;

            // The stream-combination query must agree with the advertised use
            // cases; skip this use case if the HAL does not implement the query.
            bool combSupported;
            ret = cameraDevice->isStreamCombinationSupported(config, &combSupported);
            if (static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED) ==
                ret.getServiceSpecificError()) {
                continue;
            }

            ASSERT_TRUE(ret.isOk());
            ASSERT_EQ(combSupported, useCaseSupported);

            // configureStreams must succeed (with exactly one HAL stream) for a
            // supported use case and fail with ILLEGAL_ARGUMENT otherwise.
            std::vector<HalStream> halStreams;
            ret = mSession->configureStreams(config, &halStreams);
            ALOGI("configureStreams returns status: %d", ret.getServiceSpecificError());
            if (useCaseSupported) {
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(1u, halStreams.size());
            } else {
                ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
                          ret.getServiceSpecificError());
            }
        }
        // Done with this camera; release the session before the next iteration.
        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
3138
// Suppress GoogleTest's uninstantiated-parameterized-suite error for devices
// that expose no AIDL camera provider instances.
GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(CameraAidlTest);
// Run the full CameraAidlTest suite once per registered ICameraProvider AIDL
// instance, naming each instantiation after the provider instance.
INSTANTIATE_TEST_SUITE_P(
        PerInstance, CameraAidlTest,
        testing::ValuesIn(android::getAidlHalInstanceNames(ICameraProvider::descriptor)),
        android::hardware::PrintInstanceNameToString);