/*
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17#include <aidl/Vintf.h>
18#include <aidl/android/hardware/camera/common/VendorTagSection.h>
19#include <aidl/android/hardware/camera/device/ICameraDevice.h>
20#include <aidlcommonsupport/NativeHandle.h>
21#include <camera_aidl_test.h>
22#include <cutils/properties.h>
23#include <device_cb.h>
24#include <empty_device_cb.h>
25#include <grallocusage/GrallocUsageConversion.h>
26#include <gtest/gtest.h>
27#include <hardware/gralloc.h>
28#include <hardware/gralloc1.h>
29#include <hidl/GtestPrinter.h>
30#include <hidl/HidlSupport.h>
31#include <torch_provider_cb.h>
32#include <list>
33
34using ::aidl::android::hardware::camera::common::CameraDeviceStatus;
35using ::aidl::android::hardware::camera::common::CameraResourceCost;
36using ::aidl::android::hardware::camera::common::TorchModeStatus;
37using ::aidl::android::hardware::camera::common::VendorTagSection;
38using ::aidl::android::hardware::camera::device::ICameraDevice;
Avichal Rakeshd3503a32022-02-25 06:23:14 +000039using ::aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap;
Avichal Rakesh362242f2022-02-08 12:40:53 -080040using ::aidl::android::hardware::camera::metadata::SensorPixelMode;
41using ::aidl::android::hardware::camera::provider::CameraIdAndStreamCombination;
Avichal Rakesh4bf91c72022-05-23 20:44:02 +000042using ::aidl::android::hardware::camera::provider::BnCameraProviderCallback;
Avichal Rakesh362242f2022-02-08 12:40:53 -080043
44using ::ndk::ScopedAStatus;
45
namespace {
// Number of capture requests submitted in burst-capture test cases.
// NOTE(review): not referenced in this chunk — presumably used by tests later
// in the file; verify before removing.
const int32_t kBurstFrameCount = 10;

// Upper bounds on still-capture resolution for tests that need a bounded
// still/JPEG stream size.
const uint32_t kMaxStillWidth = 2048;
const uint32_t kMaxStillHeight = 1536;

// Timeout (milliseconds) for flush() on a session with no in-flight requests.
const int64_t kEmptyFlushTimeoutMSec = 200;

// Stream use cases (ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_*) that every
// HAL advertising stream-use-case support must accept.
const static std::vector<int64_t> kMandatoryUseCases = {
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL};
}  // namespace
61
62TEST_P(CameraAidlTest, getCameraIdList) {
63 std::vector<std::string> idList;
64 ScopedAStatus ret = mProvider->getCameraIdList(&idList);
65 ASSERT_TRUE(ret.isOk());
66
67 for (size_t i = 0; i < idList.size(); i++) {
68 ALOGI("Camera Id[%zu] is %s", i, idList[i].c_str());
69 }
70}
71
72// Test if ICameraProvider::getVendorTags returns Status::OK
73TEST_P(CameraAidlTest, getVendorTags) {
74 std::vector<VendorTagSection> vendorTags;
75 ScopedAStatus ret = mProvider->getVendorTags(&vendorTags);
76
77 ASSERT_TRUE(ret.isOk());
78 for (size_t i = 0; i < vendorTags.size(); i++) {
79 ALOGI("Vendor tag section %zu name %s", i, vendorTags[i].sectionName.c_str());
80 for (auto& tag : vendorTags[i].tags) {
81 ALOGI("Vendor tag id %u name %s type %d", tag.tagId, tag.tagName.c_str(),
82 (int)tag.tagType);
83 }
84 }
85}
86
87// Test if ICameraProvider::setCallback returns Status::OK
88TEST_P(CameraAidlTest, setCallback) {
Avichal Rakesh4bf91c72022-05-23 20:44:02 +000089 struct ProviderCb : public BnCameraProviderCallback {
Avichal Rakesh362242f2022-02-08 12:40:53 -080090 ScopedAStatus cameraDeviceStatusChange(const std::string& cameraDeviceName,
91 CameraDeviceStatus newStatus) override {
92 ALOGI("camera device status callback name %s, status %d", cameraDeviceName.c_str(),
93 (int)newStatus);
94 return ScopedAStatus::ok();
95 }
96 ScopedAStatus torchModeStatusChange(const std::string& cameraDeviceName,
97 TorchModeStatus newStatus) override {
98 ALOGI("Torch mode status callback name %s, status %d", cameraDeviceName.c_str(),
99 (int)newStatus);
100 return ScopedAStatus::ok();
101 }
102 ScopedAStatus physicalCameraDeviceStatusChange(const std::string& cameraDeviceName,
103 const std::string& physicalCameraDeviceName,
104 CameraDeviceStatus newStatus) override {
105 ALOGI("physical camera device status callback name %s, physical camera name %s,"
106 " status %d",
107 cameraDeviceName.c_str(), physicalCameraDeviceName.c_str(), (int)newStatus);
108 return ScopedAStatus::ok();
109 }
110 };
111
Avichal Rakesh4bf91c72022-05-23 20:44:02 +0000112 std::shared_ptr<ProviderCb> cb = ndk::SharedRefBase::make<ProviderCb>();
Avichal Rakesh362242f2022-02-08 12:40:53 -0800113 ScopedAStatus ret = mProvider->setCallback(cb);
114 ASSERT_TRUE(ret.isOk());
115 ret = mProvider->setCallback(nullptr);
Avichal Rakesh4bf91c72022-05-23 20:44:02 +0000116 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
Avichal Rakesh362242f2022-02-08 12:40:53 -0800117}
118
119// Test if ICameraProvider::getCameraDeviceInterface returns Status::OK and non-null device
120TEST_P(CameraAidlTest, getCameraDeviceInterface) {
121 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
122
123 for (const auto& name : cameraDeviceNames) {
124 std::shared_ptr<ICameraDevice> cameraDevice;
125 ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &cameraDevice);
126 ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
127 ret.getServiceSpecificError());
128 ASSERT_TRUE(ret.isOk());
129 ASSERT_NE(cameraDevice, nullptr);
130 }
131}
132
133// Verify that the device resource cost can be retrieved and the values are
134// correct.
135TEST_P(CameraAidlTest, getResourceCost) {
136 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
137
138 for (const auto& deviceName : cameraDeviceNames) {
139 std::shared_ptr<ICameraDevice> cameraDevice;
140 ScopedAStatus ret = mProvider->getCameraDeviceInterface(deviceName, &cameraDevice);
141 ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
142 ret.getServiceSpecificError());
143 ASSERT_TRUE(ret.isOk());
144 ASSERT_NE(cameraDevice, nullptr);
145
146 CameraResourceCost resourceCost;
147 ret = cameraDevice->getResourceCost(&resourceCost);
148 ALOGI("getResourceCost returns: %d:%d", ret.getExceptionCode(),
149 ret.getServiceSpecificError());
150 ASSERT_TRUE(ret.isOk());
151
152 ALOGI(" Resource cost is %d", resourceCost.resourceCost);
153 ASSERT_LE(resourceCost.resourceCost, 100u);
154
155 for (const auto& name : resourceCost.conflictingDevices) {
156 ALOGI(" Conflicting device: %s", name.c_str());
157 }
158 }
159}
160
161TEST_P(CameraAidlTest, systemCameraTest) {
162 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
163 std::map<std::string, std::vector<SystemCameraKind>> hiddenPhysicalIdToLogicalMap;
164 for (const auto& name : cameraDeviceNames) {
165 std::shared_ptr<ICameraDevice> device;
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +0000166 ALOGI("systemCameraTest: Testing camera device %s", name.c_str());
Avichal Rakesh362242f2022-02-08 12:40:53 -0800167 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
168 ASSERT_TRUE(ret.isOk());
169 ASSERT_NE(device, nullptr);
170
171 CameraMetadata cameraCharacteristics;
172 ret = device->getCameraCharacteristics(&cameraCharacteristics);
173 ASSERT_TRUE(ret.isOk());
174
175 const camera_metadata_t* staticMeta =
176 reinterpret_cast<const camera_metadata_t*>(cameraCharacteristics.metadata.data());
177 Status rc = isLogicalMultiCamera(staticMeta);
178 if (rc == Status::OPERATION_NOT_SUPPORTED) {
179 return;
180 }
181
182 ASSERT_EQ(rc, Status::OK);
183 std::unordered_set<std::string> physicalIds;
184 ASSERT_EQ(getPhysicalCameraIds(staticMeta, &physicalIds), Status::OK);
185 SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
186 Status retStatus = getSystemCameraKind(staticMeta, &systemCameraKind);
187 ASSERT_EQ(retStatus, Status::OK);
188
189 for (auto physicalId : physicalIds) {
190 bool isPublicId = false;
191 for (auto& deviceName : cameraDeviceNames) {
192 std::string publicVersion, publicId;
193 ASSERT_TRUE(matchDeviceName(deviceName, mProviderType, &publicVersion, &publicId));
194 if (physicalId == publicId) {
195 isPublicId = true;
196 break;
197 }
198 }
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +0000199
Avichal Rakesh362242f2022-02-08 12:40:53 -0800200 // For hidden physical cameras, collect their associated logical cameras
201 // and store the system camera kind.
202 if (!isPublicId) {
203 auto it = hiddenPhysicalIdToLogicalMap.find(physicalId);
204 if (it == hiddenPhysicalIdToLogicalMap.end()) {
205 hiddenPhysicalIdToLogicalMap.insert(std::make_pair(
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +0000206 physicalId, std::vector<SystemCameraKind>({systemCameraKind})));
Avichal Rakesh362242f2022-02-08 12:40:53 -0800207 } else {
208 it->second.push_back(systemCameraKind);
209 }
210 }
211 }
212 }
213
214 // Check that the system camera kind of the logical cameras associated with
215 // each hidden physical camera is the same.
216 for (const auto& it : hiddenPhysicalIdToLogicalMap) {
217 SystemCameraKind neededSystemCameraKind = it.second.front();
218 for (auto foundSystemCamera : it.second) {
219 ASSERT_EQ(neededSystemCameraKind, foundSystemCamera);
220 }
221 }
222}
223
224// Verify that the static camera characteristics can be retrieved
225// successfully.
226TEST_P(CameraAidlTest, getCameraCharacteristics) {
227 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
228
229 for (const auto& name : cameraDeviceNames) {
230 std::shared_ptr<ICameraDevice> device;
231 ALOGI("getCameraCharacteristics: Testing camera device %s", name.c_str());
232 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
233 ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
234 ret.getServiceSpecificError());
235 ASSERT_TRUE(ret.isOk());
236 ASSERT_NE(device, nullptr);
237
238 CameraMetadata chars;
239 ret = device->getCameraCharacteristics(&chars);
240 ASSERT_TRUE(ret.isOk());
241 verifyCameraCharacteristics(chars);
242 verifyMonochromeCharacteristics(chars);
243 verifyRecommendedConfigs(chars);
244 verifyLogicalOrUltraHighResCameraMetadata(name, device, chars, cameraDeviceNames);
245
246 ASSERT_TRUE(ret.isOk());
247
248 // getPhysicalCameraCharacteristics will fail for publicly
249 // advertised camera IDs.
250 std::string version, cameraId;
251 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &cameraId));
252 CameraMetadata devChars;
253 ret = device->getPhysicalCameraCharacteristics(cameraId, &devChars);
254 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
255 ASSERT_EQ(0, devChars.metadata.size());
256 }
257}
258
259// Verify that the torch strength level can be set and retrieved successfully.
260TEST_P(CameraAidlTest, turnOnTorchWithStrengthLevel) {
261 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
262
263 std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this);
264 ndk::ScopedAStatus ret = mProvider->setCallback(cb);
265 ASSERT_TRUE(ret.isOk());
266
267 for (const auto& name : cameraDeviceNames) {
268 int32_t defaultLevel;
269 std::shared_ptr<ICameraDevice> device;
270 ALOGI("%s: Testing camera device %s", __FUNCTION__, name.c_str());
271
272 ret = mProvider->getCameraDeviceInterface(name, &device);
273 ASSERT_TRUE(ret.isOk());
274 ASSERT_NE(device, nullptr);
275
276 CameraMetadata chars;
277 ret = device->getCameraCharacteristics(&chars);
278 ASSERT_TRUE(ret.isOk());
279
280 const camera_metadata_t* staticMeta =
281 reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());
282 bool torchStrengthControlSupported = isTorchStrengthControlSupported(staticMeta);
283 camera_metadata_ro_entry entry;
284 int rc = find_camera_metadata_ro_entry(staticMeta,
285 ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL, &entry);
286 if (torchStrengthControlSupported) {
287 ASSERT_EQ(rc, 0);
288 ASSERT_GT(entry.count, 0);
289 defaultLevel = *entry.data.i32;
290 ALOGI("Default level is:%d", defaultLevel);
291 }
292
293 mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
294 ret = device->turnOnTorchWithStrengthLevel(2);
295 ALOGI("turnOnTorchWithStrengthLevel returns status: %d", ret.getServiceSpecificError());
296 // OPERATION_NOT_SUPPORTED check
297 if (!torchStrengthControlSupported) {
298 ALOGI("Torch strength control not supported.");
299 ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED),
300 ret.getServiceSpecificError());
301 } else {
302 {
303 ASSERT_TRUE(ret.isOk());
304 std::unique_lock<std::mutex> l(mTorchLock);
305 while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
306 auto timeout = std::chrono::system_clock::now() +
307 std::chrono::seconds(kTorchTimeoutSec);
308 ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
309 }
310 ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
311 mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
312 }
313 ALOGI("getTorchStrengthLevel: Testing");
314 int32_t strengthLevel;
315 ret = device->getTorchStrengthLevel(&strengthLevel);
316 ASSERT_TRUE(ret.isOk());
317 ALOGI("Torch strength level is : %d", strengthLevel);
318 ASSERT_EQ(strengthLevel, 2);
319
320 // Turn OFF the torch and verify torch strength level is reset to default level.
321 ALOGI("Testing torch strength level reset after turning the torch OFF.");
322 ret = device->setTorchMode(false);
323 ASSERT_TRUE(ret.isOk());
324 {
325 std::unique_lock<std::mutex> l(mTorchLock);
326 while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
327 auto timeout = std::chrono::system_clock::now() +
328 std::chrono::seconds(kTorchTimeoutSec);
329 ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
330 }
331 ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
332 }
333
334 ret = device->getTorchStrengthLevel(&strengthLevel);
335 ASSERT_TRUE(ret.isOk());
336 ALOGI("Torch strength level after turning OFF torch is : %d", strengthLevel);
337 ASSERT_EQ(strengthLevel, defaultLevel);
338 }
339 }
340}
341
342// In case it is supported verify that torch can be enabled.
343// Check for corresponding torch callbacks as well.
344TEST_P(CameraAidlTest, setTorchMode) {
345 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
346
347 std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this);
348 ndk::ScopedAStatus ret = mProvider->setCallback(cb);
349 ALOGI("setCallback returns status: %d", ret.getServiceSpecificError());
350 ASSERT_TRUE(ret.isOk());
351 ASSERT_NE(cb, nullptr);
352
353 for (const auto& name : cameraDeviceNames) {
354 std::shared_ptr<ICameraDevice> device;
355 ALOGI("setTorchMode: Testing camera device %s", name.c_str());
356 ret = mProvider->getCameraDeviceInterface(name, &device);
357 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
358 ret.getServiceSpecificError());
359 ASSERT_TRUE(ret.isOk());
360 ASSERT_NE(device, nullptr);
361
362 CameraMetadata metadata;
363 ret = device->getCameraCharacteristics(&metadata);
364 ALOGI("getCameraCharacteristics returns status:%d", ret.getServiceSpecificError());
365 ASSERT_TRUE(ret.isOk());
366 camera_metadata_t* staticMeta =
367 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
368 bool torchSupported = isTorchSupported(staticMeta);
369
370 mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
371 ret = device->setTorchMode(true);
372 ALOGI("setTorchMode returns status: %d", ret.getServiceSpecificError());
373 if (!torchSupported) {
374 ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED),
375 ret.getServiceSpecificError());
376 } else {
377 ASSERT_TRUE(ret.isOk());
378 {
379 std::unique_lock<std::mutex> l(mTorchLock);
380 while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
381 auto timeout = std::chrono::system_clock::now() +
382 std::chrono::seconds(kTorchTimeoutSec);
383 ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
384 }
385 ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
386 mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
387 }
388
Shuzhen Wangf415f562022-09-19 18:28:09 -0700389 // register a new callback; make sure it receives the
390 // flash-on callback.
391 std::shared_ptr<TorchProviderCb> cb2 = ndk::SharedRefBase::make<TorchProviderCb>(this);
392 ret = mProvider->setCallback(cb2);
393 ASSERT_TRUE(ret.isOk());
394 ASSERT_NE(cb2, nullptr);
395 {
396 std::unique_lock<std::mutex> l(mTorchLock);
397 while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
398 auto timeout = std::chrono::system_clock::now() +
399 std::chrono::seconds(kTorchTimeoutSec);
400 ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
401 }
402 ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
403 }
404
Avichal Rakesh362242f2022-02-08 12:40:53 -0800405 ret = device->setTorchMode(false);
406 ASSERT_TRUE(ret.isOk());
407 {
408 std::unique_lock<std::mutex> l(mTorchLock);
409 while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
410 auto timeout = std::chrono::system_clock::now() +
411 std::chrono::seconds(kTorchTimeoutSec);
412 ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
413 }
414 ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
415 }
416 }
417 }
Avichal Rakesh362242f2022-02-08 12:40:53 -0800418}
419
420// Check dump functionality.
421TEST_P(CameraAidlTest, dump) {
422 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
423
424 for (const auto& name : cameraDeviceNames) {
425 std::shared_ptr<ICameraDevice> device;
426 ALOGI("dump: Testing camera device %s", name.c_str());
427
428 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
429 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
430 ret.getServiceSpecificError());
431 ASSERT_TRUE(ret.isOk());
432 ASSERT_NE(device, nullptr);
433
434 int raw_handle = open(kDumpOutput, O_RDWR);
435 ASSERT_GE(raw_handle, 0);
436
437 auto retStatus = device->dump(raw_handle, nullptr, 0);
438 ASSERT_EQ(retStatus, ::android::OK);
439 close(raw_handle);
440 }
441}
442
443// Open, dump, then close
444TEST_P(CameraAidlTest, openClose) {
445 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
446
447 for (const auto& name : cameraDeviceNames) {
448 std::shared_ptr<ICameraDevice> device;
449 ALOGI("openClose: Testing camera device %s", name.c_str());
450 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
451 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
452 ret.getServiceSpecificError());
453 ASSERT_TRUE(ret.isOk());
454 ASSERT_NE(device, nullptr);
455
456 std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
457
458 ret = device->open(cb, &mSession);
459 ASSERT_TRUE(ret.isOk());
460 ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
461 ret.getServiceSpecificError());
462 ASSERT_NE(mSession, nullptr);
463 int raw_handle = open(kDumpOutput, O_RDWR);
464 ASSERT_GE(raw_handle, 0);
465
466 auto retStatus = device->dump(raw_handle, nullptr, 0);
467 ASSERT_EQ(retStatus, ::android::OK);
468 close(raw_handle);
469
470 ret = mSession->close();
471 mSession = nullptr;
472 ASSERT_TRUE(ret.isOk());
473 // TODO: test all session API calls return INTERNAL_ERROR after close
474 // TODO: keep a wp copy here and verify session cannot be promoted out of this scope
475 }
476}
477
478// Check whether all common default request settings can be successfully
479// constructed.
480TEST_P(CameraAidlTest, constructDefaultRequestSettings) {
481 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
482
483 for (const auto& name : cameraDeviceNames) {
484 std::shared_ptr<ICameraDevice> device;
485 ALOGI("constructDefaultRequestSettings: Testing camera device %s", name.c_str());
486 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
487 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
488 ret.getServiceSpecificError());
489 ASSERT_TRUE(ret.isOk());
490 ASSERT_NE(device, nullptr);
491
492 std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
493 ret = device->open(cb, &mSession);
494 ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
495 ret.getServiceSpecificError());
496 ASSERT_TRUE(ret.isOk());
497 ASSERT_NE(mSession, nullptr);
498
499 for (int32_t t = (int32_t)RequestTemplate::PREVIEW; t <= (int32_t)RequestTemplate::MANUAL;
500 t++) {
501 RequestTemplate reqTemplate = (RequestTemplate)t;
502 CameraMetadata rawMetadata;
503 ret = mSession->constructDefaultRequestSettings(reqTemplate, &rawMetadata);
504 ALOGI("constructDefaultRequestSettings returns status:%d:%d", ret.getExceptionCode(),
505 ret.getServiceSpecificError());
Avichal Rakeshd3503a32022-02-25 06:23:14 +0000506
Avichal Rakesh362242f2022-02-08 12:40:53 -0800507 if (reqTemplate == RequestTemplate::ZERO_SHUTTER_LAG ||
508 reqTemplate == RequestTemplate::MANUAL) {
509 // optional templates
510 ASSERT_TRUE(ret.isOk() || static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
511 ret.getServiceSpecificError());
512 } else {
513 ASSERT_TRUE(ret.isOk());
514 }
515
516 if (ret.isOk()) {
517 const camera_metadata_t* metadata = (camera_metadata_t*)rawMetadata.metadata.data();
518 size_t expectedSize = rawMetadata.metadata.size();
519 int result = validate_camera_metadata_structure(metadata, &expectedSize);
520 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
521 verifyRequestTemplate(metadata, reqTemplate);
522 } else {
523 ASSERT_EQ(0u, rawMetadata.metadata.size());
524 }
525 }
526 ret = mSession->close();
527 mSession = nullptr;
528 ASSERT_TRUE(ret.isOk());
529 }
530}
531
532// Verify that all supported stream formats and sizes can be configured
533// successfully.
534TEST_P(CameraAidlTest, configureStreamsAvailableOutputs) {
535 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
536 std::vector<AvailableStream> outputStreams;
537
538 for (const auto& name : cameraDeviceNames) {
539 CameraMetadata meta;
540 std::shared_ptr<ICameraDevice> device;
541
542 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/, &device /*out*/);
543
544 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
545 outputStreams.clear();
546 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
547 ASSERT_NE(0u, outputStreams.size());
548
549 int32_t jpegBufferSize = 0;
550 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
551 ASSERT_NE(0u, jpegBufferSize);
552
553 int32_t streamId = 0;
554 int32_t streamConfigCounter = 0;
555 for (auto& it : outputStreams) {
556 Stream stream;
557 Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format));
558 stream.id = streamId;
559 stream.streamType = StreamType::OUTPUT;
560 stream.width = it.width;
561 stream.height = it.height;
562 stream.format = static_cast<PixelFormat>(it.format);
563 stream.dataSpace = dataspace;
564 stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
565 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
566 stream.rotation = StreamRotation::ROTATION_0;
Avichal Rakeshd3503a32022-02-25 06:23:14 +0000567 stream.dynamicRangeProfile = RequestAvailableDynamicRangeProfilesMap::
568 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
Avichal Rakesh362242f2022-02-08 12:40:53 -0800569
570 std::vector<Stream> streams = {stream};
571 StreamConfiguration config;
572 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
573 jpegBufferSize);
574
575 bool expectStreamCombQuery = (isLogicalMultiCamera(staticMeta) == Status::OK);
576 verifyStreamCombination(device, config, /*expectedStatus*/ true, expectStreamCombQuery);
577
578 config.streamConfigCounter = streamConfigCounter++;
579 std::vector<HalStream> halConfigs;
580 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
581 ASSERT_TRUE(ret.isOk());
582 ASSERT_EQ(halConfigs.size(), 1);
583 ASSERT_EQ(halConfigs[0].id, streamId);
584
585 streamId++;
586 }
587
588 ndk::ScopedAStatus ret = mSession->close();
589 mSession = nullptr;
590 ASSERT_TRUE(ret.isOk());
591 }
592}
593
594// Verify that mandatory concurrent streams and outputs are supported.
595TEST_P(CameraAidlTest, configureConcurrentStreamsAvailableOutputs) {
596 struct CameraTestInfo {
597 CameraMetadata staticMeta;
598 std::shared_ptr<ICameraDeviceSession> session;
599 std::shared_ptr<ICameraDevice> cameraDevice;
600 StreamConfiguration config;
601 };
602
603 std::map<std::string, std::string> idToNameMap = getCameraDeviceIdToNameMap(mProvider);
604 std::vector<ConcurrentCameraIdCombination> concurrentDeviceCombinations =
605 getConcurrentDeviceCombinations(mProvider);
606 std::vector<AvailableStream> outputStreams;
607 for (const auto& cameraDeviceIds : concurrentDeviceCombinations) {
608 std::vector<CameraIdAndStreamCombination> cameraIdsAndStreamCombinations;
609 std::vector<CameraTestInfo> cameraTestInfos;
610 size_t i = 0;
611 for (const auto& id : cameraDeviceIds.combination) {
612 CameraTestInfo cti;
613 auto it = idToNameMap.find(id);
614 ASSERT_TRUE(idToNameMap.end() != it);
615 std::string name = it->second;
616
617 openEmptyDeviceSession(name, mProvider, &cti.session /*out*/, &cti.staticMeta /*out*/,
618 &cti.cameraDevice /*out*/);
619
620 outputStreams.clear();
621 camera_metadata_t* staticMeta =
622 reinterpret_cast<camera_metadata_t*>(cti.staticMeta.metadata.data());
623 ASSERT_EQ(Status::OK, getMandatoryConcurrentStreams(staticMeta, &outputStreams));
624 ASSERT_NE(0u, outputStreams.size());
625
626 int32_t jpegBufferSize = 0;
627 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
628 ASSERT_NE(0u, jpegBufferSize);
629
630 int32_t streamId = 0;
631 std::vector<Stream> streams(outputStreams.size());
632 size_t j = 0;
633 for (const auto& s : outputStreams) {
634 Stream stream;
635 Dataspace dataspace = getDataspace(static_cast<PixelFormat>(s.format));
636 stream.id = streamId++;
637 stream.streamType = StreamType::OUTPUT;
638 stream.width = s.width;
639 stream.height = s.height;
640 stream.format = static_cast<PixelFormat>(s.format);
641 stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
642 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
643 stream.dataSpace = dataspace;
644 stream.rotation = StreamRotation::ROTATION_0;
645 stream.sensorPixelModesUsed = {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT};
Avichal Rakeshd3503a32022-02-25 06:23:14 +0000646 stream.dynamicRangeProfile = RequestAvailableDynamicRangeProfilesMap::
647 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
Avichal Rakesh362242f2022-02-08 12:40:53 -0800648 streams[j] = stream;
649 j++;
650 }
651
652 // Add the created stream configs to cameraIdsAndStreamCombinations
653 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &cti.config,
654 jpegBufferSize);
655
656 cti.config.streamConfigCounter = outputStreams.size();
657 CameraIdAndStreamCombination cameraIdAndStreamCombination;
658 cameraIdAndStreamCombination.cameraId = id;
659 cameraIdAndStreamCombination.streamConfiguration = cti.config;
660 cameraIdsAndStreamCombinations.push_back(cameraIdAndStreamCombination);
661 i++;
662 cameraTestInfos.push_back(cti);
663 }
664 // Now verify that concurrent streams are supported
665 bool combinationSupported;
666 ndk::ScopedAStatus ret = mProvider->isConcurrentStreamCombinationSupported(
667 cameraIdsAndStreamCombinations, &combinationSupported);
668 ASSERT_TRUE(ret.isOk());
669 ASSERT_EQ(combinationSupported, true);
670
671 // Test the stream can actually be configured
672 for (auto& cti : cameraTestInfos) {
673 if (cti.session != nullptr) {
674 camera_metadata_t* staticMeta =
675 reinterpret_cast<camera_metadata_t*>(cti.staticMeta.metadata.data());
676 bool expectStreamCombQuery = (isLogicalMultiCamera(staticMeta) == Status::OK);
677 verifyStreamCombination(cti.cameraDevice, cti.config, /*expectedStatus*/ true,
678 expectStreamCombQuery);
679 }
680
681 if (cti.session != nullptr) {
682 std::vector<HalStream> streamConfigs;
683 ret = cti.session->configureStreams(cti.config, &streamConfigs);
684 ASSERT_TRUE(ret.isOk());
685 ASSERT_EQ(cti.config.streams.size(), streamConfigs.size());
686 }
687 }
688
689 for (auto& cti : cameraTestInfos) {
690 ret = cti.session->close();
691 ASSERT_TRUE(ret.isOk());
692 }
693 }
694}
695
696// Check for correct handling of invalid/incorrect configuration parameters.
697TEST_P(CameraAidlTest, configureStreamsInvalidOutputs) {
698 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
699 std::vector<AvailableStream> outputStreams;
700
701 for (const auto& name : cameraDeviceNames) {
702 CameraMetadata meta;
703 std::shared_ptr<ICameraDevice> cameraDevice;
704
705 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
706 &cameraDevice /*out*/);
707 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
708 outputStreams.clear();
709
710 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
711 ASSERT_NE(0u, outputStreams.size());
712
713 int32_t jpegBufferSize = 0;
714 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
715 ASSERT_NE(0u, jpegBufferSize);
716
717 int32_t streamId = 0;
718 Stream stream = {streamId++,
719 StreamType::OUTPUT,
720 static_cast<uint32_t>(0),
721 static_cast<uint32_t>(0),
722 static_cast<PixelFormat>(outputStreams[0].format),
723 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
724 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
725 Dataspace::UNKNOWN,
726 StreamRotation::ROTATION_0,
727 std::string(),
728 jpegBufferSize,
729 -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +0000730 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
731 RequestAvailableDynamicRangeProfilesMap::
732 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -0800733 int32_t streamConfigCounter = 0;
734 std::vector<Stream> streams = {stream};
735 StreamConfiguration config;
736 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
737 jpegBufferSize);
738
739 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ false,
740 /*expectStreamCombQuery*/ false);
741
742 config.streamConfigCounter = streamConfigCounter++;
743 std::vector<HalStream> halConfigs;
744 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
745 ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
746 ret.getServiceSpecificError() ||
747 static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError());
748
749 stream = {streamId++,
750 StreamType::OUTPUT,
751 /*width*/ INT32_MAX,
752 /*height*/ INT32_MAX,
753 static_cast<PixelFormat>(outputStreams[0].format),
754 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
755 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
756 Dataspace::UNKNOWN,
757 StreamRotation::ROTATION_0,
758 std::string(),
759 jpegBufferSize,
760 -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +0000761 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
762 RequestAvailableDynamicRangeProfilesMap::
763 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -0800764
765 streams[0] = stream;
766 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
767 jpegBufferSize);
768
769 config.streamConfigCounter = streamConfigCounter++;
770 halConfigs.clear();
771 ret = mSession->configureStreams(config, &halConfigs);
772 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
773
774 for (auto& it : outputStreams) {
775 stream = {streamId++,
776 StreamType::OUTPUT,
777 it.width,
778 it.height,
779 static_cast<PixelFormat>(UINT32_MAX),
780 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
781 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
782 Dataspace::UNKNOWN,
783 StreamRotation::ROTATION_0,
784 std::string(),
785 jpegBufferSize,
786 -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +0000787 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
788 RequestAvailableDynamicRangeProfilesMap::
789 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -0800790
791 streams[0] = stream;
792 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
793 jpegBufferSize);
794 config.streamConfigCounter = streamConfigCounter++;
795 halConfigs.clear();
796 ret = mSession->configureStreams(config, &halConfigs);
797 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
798 ret.getServiceSpecificError());
799
800 stream = {streamId++,
801 StreamType::OUTPUT,
802 it.width,
803 it.height,
804 static_cast<PixelFormat>(it.format),
805 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
806 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
807 Dataspace::UNKNOWN,
808 static_cast<StreamRotation>(UINT32_MAX),
809 std::string(),
810 jpegBufferSize,
811 -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +0000812 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
813 RequestAvailableDynamicRangeProfilesMap::
814 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -0800815
816 streams[0] = stream;
817 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
818 jpegBufferSize);
819
820 config.streamConfigCounter = streamConfigCounter++;
821 halConfigs.clear();
822 ret = mSession->configureStreams(config, &halConfigs);
823 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
824 ret.getServiceSpecificError());
825 }
826
827 ret = mSession->close();
828 mSession = nullptr;
829 ASSERT_TRUE(ret.isOk());
830 }
831}
832
// Check whether all supported ZSL output stream combinations can be
// configured successfully.
//
// For every device advertising ZSL reprocessing, this walks each
// (inputFormat -> outputFormat) pair in the ZSL map, builds a three-stream
// session (ZSL output + reprocess input + final output) and expects
// configureStreams() to succeed with exactly three HAL streams.
TEST_P(CameraAidlTest, configureStreamsZSLInputOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> inputStreams;
    std::vector<AvailableZSLInputOutput> inputOutputMap;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        // Devices without ZSL support are skipped, not failed.
        Status rc = isZSLModeAvailable(staticMeta);
        if (Status::OPERATION_NOT_SUPPORTED == rc) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }
        ASSERT_EQ(Status::OK, rc);

        // ZSL input candidates are drawn from the available output streams
        // (input resolutions/formats mirror the output list).
        inputStreams.clear();
        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, inputStreams));
        ASSERT_NE(0u, inputStreams.size());

        inputOutputMap.clear();
        ASSERT_EQ(Status::OK, getZSLInputOutputMap(staticMeta, inputOutputMap));
        ASSERT_NE(0u, inputOutputMap.size());

        // Monochrome cameras that list a Y8 output must also expose the
        // matching Y8 reprocessing combinations; verified after the loop.
        bool supportMonoY8 = false;
        if (Status::OK == isMonochromeCamera(staticMeta)) {
            for (auto& it : inputStreams) {
                if (it.format == static_cast<uint32_t>(PixelFormat::Y8)) {
                    supportMonoY8 = true;
                    break;
                }
            }
        }

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        bool hasPrivToY8 = false, hasY8ToY8 = false, hasY8ToBlob = false;
        uint32_t streamConfigCounter = 0;
        for (auto& inputIter : inputOutputMap) {
            AvailableStream input;
            // Use the largest advertised size for this ZSL input format.
            ASSERT_EQ(Status::OK, findLargestSize(inputStreams, inputIter.inputFormat, input));
            ASSERT_NE(0u, inputStreams.size());

            // Record which monochrome reprocessing combinations were seen.
            if (inputIter.inputFormat ==
                        static_cast<uint32_t>(PixelFormat::IMPLEMENTATION_DEFINED) &&
                inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
                hasPrivToY8 = true;
            } else if (inputIter.inputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
                if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::BLOB)) {
                    hasY8ToBlob = true;
                } else if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
                    hasY8ToY8 = true;
                }
            }
            AvailableStream outputThreshold = {INT32_MAX, INT32_MAX, inputIter.outputFormat};
            std::vector<AvailableStream> outputStreams;
            ASSERT_EQ(Status::OK,
                      getAvailableOutputStreams(staticMeta, outputStreams, &outputThreshold));
            for (auto& outputIter : outputStreams) {
                Dataspace outputDataSpace =
                        getDataspace(static_cast<PixelFormat>(outputIter.format));
                // Output stream matching the reprocess input in size/format,
                // flagged with the ZSL gralloc usage.
                Stream zslStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        input.width,
                        input.height,
                        static_cast<PixelFormat>(input.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC_USAGE_HW_CAMERA_ZSL),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                // Reprocess input stream (usage 0: producer is the framework).
                Stream inputStream = {
                        streamId++,
                        StreamType::INPUT,
                        input.width,
                        input.height,
                        static_cast<PixelFormat>(input.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(0),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                // Final reprocessed output in the mapped output format.
                Stream outputStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        outputIter.width,
                        outputIter.height,
                        static_cast<PixelFormat>(outputIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                        outputDataSpace,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

                std::vector<Stream> streams = {inputStream, zslStream, outputStream};

                StreamConfiguration config;
                createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                          jpegBufferSize);

                // The combination must be reported as supported before the
                // actual configuration attempt.
                verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
                                        /*expectStreamCombQuery*/ false);

                config.streamConfigCounter = streamConfigCounter++;
                std::vector<HalStream> halConfigs;
                ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(3u, halConfigs.size());
            }
        }

        // Mono/Y8 devices advertising PRIV or YUV reprocessing must have
        // exposed the corresponding Y8 combinations above.
        if (supportMonoY8) {
            if (Status::OK == isZSLModeAvailable(staticMeta, PRIV_REPROCESS)) {
                ASSERT_TRUE(hasPrivToY8);
            }
            if (Status::OK == isZSLModeAvailable(staticMeta, YUV_REPROCESS)) {
                ASSERT_TRUE(hasY8ToY8);
                ASSERT_TRUE(hasY8ToBlob);
            }
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
985
986// Check whether session parameters are supported. If Hal support for them
987// exist, then try to configure a preview stream using them.
988TEST_P(CameraAidlTest, configureStreamsWithSessionParameters) {
989 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
990 std::vector<AvailableStream> outputPreviewStreams;
991 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
992 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
993
994 for (const auto& name : cameraDeviceNames) {
995 CameraMetadata meta;
996
997 std::shared_ptr<ICameraDevice> unusedCameraDevice;
998 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
999 &unusedCameraDevice /*out*/);
1000 camera_metadata_t* staticMetaBuffer =
1001 reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1002
1003 std::unordered_set<int32_t> availableSessionKeys;
1004 auto rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
1005 &availableSessionKeys);
1006 ASSERT_TRUE(Status::OK == rc);
1007 if (availableSessionKeys.empty()) {
1008 ndk::ScopedAStatus ret = mSession->close();
1009 mSession = nullptr;
1010 ASSERT_TRUE(ret.isOk());
1011 continue;
1012 }
1013
1014 android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings;
1015 android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams,
1016 modifiedSessionParams;
1017 constructFilteredSettings(mSession, availableSessionKeys, RequestTemplate::PREVIEW,
1018 &previewRequestSettings, &sessionParams);
1019 if (sessionParams.isEmpty()) {
1020 ndk::ScopedAStatus ret = mSession->close();
1021 mSession = nullptr;
1022 ASSERT_TRUE(ret.isOk());
1023 continue;
1024 }
1025
1026 outputPreviewStreams.clear();
1027
1028 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams,
1029 &previewThreshold));
1030 ASSERT_NE(0u, outputPreviewStreams.size());
1031
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001032 Stream previewStream = {
1033 0,
1034 StreamType::OUTPUT,
1035 outputPreviewStreams[0].width,
1036 outputPreviewStreams[0].height,
1037 static_cast<PixelFormat>(outputPreviewStreams[0].format),
1038 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1039 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
1040 Dataspace::UNKNOWN,
1041 StreamRotation::ROTATION_0,
1042 std::string(),
1043 /*bufferSize*/ 0,
1044 /*groupId*/ -1,
1045 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1046 RequestAvailableDynamicRangeProfilesMap::
1047 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08001048
1049 std::vector<Stream> streams = {previewStream};
1050 StreamConfiguration config;
1051
1052 config.streams = streams;
1053 config.operationMode = StreamConfigurationMode::NORMAL_MODE;
1054 modifiedSessionParams = sessionParams;
1055 auto sessionParamsBuffer = sessionParams.release();
1056 std::vector<uint8_t> rawSessionParam =
1057 std::vector(reinterpret_cast<uint8_t*>(sessionParamsBuffer),
1058 reinterpret_cast<uint8_t*>(sessionParamsBuffer) +
1059 get_camera_metadata_size(sessionParamsBuffer));
1060
1061 config.sessionParams.metadata = rawSessionParam;
1062 config.streamConfigCounter = 0;
1063 config.streams = {previewStream};
1064 config.streamConfigCounter = 0;
1065 config.multiResolutionInputImage = false;
1066
1067 bool newSessionParamsAvailable = false;
1068 for (const auto& it : availableSessionKeys) {
1069 if (modifiedSessionParams.exists(it)) {
1070 modifiedSessionParams.erase(it);
1071 newSessionParamsAvailable = true;
1072 break;
1073 }
1074 }
1075 if (newSessionParamsAvailable) {
1076 auto modifiedSessionParamsBuffer = modifiedSessionParams.release();
1077 verifySessionReconfigurationQuery(mSession, sessionParamsBuffer,
1078 modifiedSessionParamsBuffer);
1079 modifiedSessionParams.acquire(modifiedSessionParamsBuffer);
1080 }
1081
1082 std::vector<HalStream> halConfigs;
1083 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1084 ASSERT_TRUE(ret.isOk());
1085 ASSERT_EQ(1u, halConfigs.size());
1086
1087 sessionParams.acquire(sessionParamsBuffer);
1088 ret = mSession->close();
1089 mSession = nullptr;
1090 ASSERT_TRUE(ret.isOk());
1091 }
1092}
1093
// Verify that all supported preview + still capture stream combinations
// can be configured successfully.
//
// For each device (excluding depth-only cameras), every BLOB (JPEG) output
// size is paired with every preview-sized IMPLEMENTATION_DEFINED output, and
// each two-stream combination is expected to configure successfully.
TEST_P(CameraAidlTest, configureStreamsPreviewStillOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputBlobStreams;
    std::vector<AvailableStream> outputPreviewStreams;
    AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
                                        static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
    AvailableStream blobThreshold = {INT32_MAX, INT32_MAX, static_cast<int32_t>(PixelFormat::BLOB)};

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;

        std::shared_ptr<ICameraDevice> cameraDevice;
        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);

        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        // Check if camera support depth only
        if (isDepthOnly(staticMeta)) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        outputBlobStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold));
        ASSERT_NE(0u, outputBlobStreams.size());

        outputPreviewStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputPreviewStreams, &previewThreshold));
        ASSERT_NE(0u, outputPreviewStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        uint32_t streamConfigCounter = 0;

        // Cross-product of every still-capture size with every preview size.
        for (auto& blobIter : outputBlobStreams) {
            for (auto& previewIter : outputPreviewStreams) {
                // Preview stream consumed by the display (HWCOMPOSER usage).
                Stream previewStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        previewIter.width,
                        previewIter.height,
                        static_cast<PixelFormat>(previewIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        /*bufferSize*/ 0,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                // JPEG still-capture stream read back by the CPU (JFIF data).
                Stream blobStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        blobIter.width,
                        blobIter.height,
                        static_cast<PixelFormat>(blobIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_CPU_READ),
                        Dataspace::JFIF,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        /*bufferSize*/ 0,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                std::vector<Stream> streams = {previewStream, blobStream};
                StreamConfiguration config;

                createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                          jpegBufferSize);
                config.streamConfigCounter = streamConfigCounter++;
                // Combination must be reported supported before configuring.
                verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
                                        /*expectStreamCombQuery*/ false);

                std::vector<HalStream> halConfigs;
                ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(2u, halConfigs.size());
            }
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
1193
// In case constrained mode is supported, test whether it can be
// configured. Additionally check for common invalid inputs when
// using this mode.
//
// Sequence per device: configure one valid high-frame-rate stream (must
// succeed), then three invalid variants — zero size, INT32_MAX size, and an
// invalid pixel format — each of which must be rejected.
TEST_P(CameraAidlTest, configureStreamsConstrainedOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        // Constrained high-speed mode is optional; skip unsupported devices.
        Status rc = isConstrainedModeAvailable(staticMeta);
        if (Status::OPERATION_NOT_SUPPORTED == rc) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }
        ASSERT_EQ(Status::OK, rc);

        AvailableStream hfrStream;
        rc = pickConstrainedModeSize(staticMeta, hfrStream);
        ASSERT_EQ(Status::OK, rc);

        int32_t streamId = 0;
        uint32_t streamConfigCounter = 0;
        // Valid HFR stream; note streamId is intentionally not incremented
        // here so the HAL stream id check below compares against 0.
        Stream stream = {streamId,
                         StreamType::OUTPUT,
                         hfrStream.width,
                         hfrStream.height,
                         static_cast<PixelFormat>(hfrStream.format),
                         static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                         Dataspace::UNKNOWN,
                         StreamRotation::ROTATION_0,
                         std::string(),
                         /*bufferSize*/ 0,
                         /*groupId*/ -1,
                         {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                         RequestAvailableDynamicRangeProfilesMap::
                                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        std::vector<Stream> streams = {stream};
        StreamConfiguration config;
        createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
                                  &config);

        verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
                                /*expectStreamCombQuery*/ false);

        config.streamConfigCounter = streamConfigCounter++;
        std::vector<HalStream> halConfigs;
        ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_TRUE(ret.isOk());
        ASSERT_EQ(1u, halConfigs.size());
        ASSERT_EQ(halConfigs[0].id, streamId);

        // Invalid case 1: zero width/height. HALs may report either
        // ILLEGAL_ARGUMENT or INTERNAL_ERROR for this combination.
        stream = {streamId++,
                  StreamType::OUTPUT,
                  static_cast<uint32_t>(0),
                  static_cast<uint32_t>(0),
                  static_cast<PixelFormat>(hfrStream.format),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  /*bufferSize*/ 0,
                  /*groupId*/ -1,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
                                  &config);

        config.streamConfigCounter = streamConfigCounter++;
        std::vector<HalStream> halConfig;
        ret = mSession->configureStreams(config, &halConfig);
        ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
                            ret.getServiceSpecificError() ||
                    static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError());

        // Invalid case 2: absurdly large resolution must be rejected.
        stream = {streamId++,
                  StreamType::OUTPUT,
                  INT32_MAX,
                  INT32_MAX,
                  static_cast<PixelFormat>(hfrStream.format),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  /*bufferSize*/ 0,
                  /*groupId*/ -1,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
                                  &config);

        config.streamConfigCounter = streamConfigCounter++;
        halConfigs.clear();
        ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());

        // Invalid case 3: bogus pixel format must be rejected.
        stream = {streamId++,
                  StreamType::OUTPUT,
                  hfrStream.width,
                  hfrStream.height,
                  static_cast<PixelFormat>(UINT32_MAX),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  /*bufferSize*/ 0,
                  /*groupId*/ -1,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
                                  &config);

        config.streamConfigCounter = streamConfigCounter++;
        halConfigs.clear();
        ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());

        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
1332
// Verify that all supported video + snapshot stream combinations can
// be configured successfully.
//
// For each device (excluding depth-only cameras), every BLOB snapshot size
// is paired with every video-sized IMPLEMENTATION_DEFINED output, and each
// two-stream combination is expected to configure successfully.
TEST_P(CameraAidlTest, configureStreamsVideoStillOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputBlobStreams;
    std::vector<AvailableStream> outputVideoStreams;
    AvailableStream videoThreshold = {kMaxVideoWidth, kMaxVideoHeight,
                                      static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
    AvailableStream blobThreshold = {kMaxVideoWidth, kMaxVideoHeight,
                                     static_cast<int32_t>(PixelFormat::BLOB)};

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);

        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        // Check if camera support depth only
        if (isDepthOnly(staticMeta)) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        outputBlobStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold));
        ASSERT_NE(0u, outputBlobStreams.size());

        outputVideoStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputVideoStreams, &videoThreshold));
        ASSERT_NE(0u, outputVideoStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        uint32_t streamConfigCounter = 0;
        // Cross-product of every snapshot size with every video size.
        for (auto& blobIter : outputBlobStreams) {
            for (auto& videoIter : outputVideoStreams) {
                // Video stream consumed by the encoder.
                Stream videoStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        videoIter.width,
                        videoIter.height,
                        static_cast<PixelFormat>(videoIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                // JPEG snapshot stream read back by the CPU (JFIF data).
                Stream blobStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        blobIter.width,
                        blobIter.height,
                        static_cast<PixelFormat>(blobIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_CPU_READ),
                        Dataspace::JFIF,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                std::vector<Stream> streams = {videoStream, blobStream};
                StreamConfiguration config;

                createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                          jpegBufferSize);
                // Combination must be reported supported before configuring.
                verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
                                        /*expectStreamCombQuery*/ false);

                config.streamConfigCounter = streamConfigCounter++;
                std::vector<HalStream> halConfigs;
                ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(2u, halConfigs.size());
            }
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
1432
1433// Generate and verify a camera capture request
1434TEST_P(CameraAidlTest, processCaptureRequestPreview) {
1435 // TODO(b/220897574): Failing with BUFFER_ERROR
1436 processCaptureRequestInternal(GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, RequestTemplate::PREVIEW,
1437 false /*secureOnlyCameras*/);
1438}
1439
1440// Generate and verify a secure camera capture request
1441TEST_P(CameraAidlTest, processSecureCaptureRequest) {
1442 processCaptureRequestInternal(GRALLOC1_PRODUCER_USAGE_PROTECTED, RequestTemplate::STILL_CAPTURE,
1443 true /*secureOnlyCameras*/);
1444}
1445
1446TEST_P(CameraAidlTest, processCaptureRequestPreviewStabilization) {
1447 std::unordered_map<std::string, nsecs_t> cameraDeviceToTimeLag;
1448 processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ false,
1449 cameraDeviceToTimeLag);
1450 processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ true,
1451 cameraDeviceToTimeLag);
1452}
1453
1454// Generate and verify a multi-camera capture request
1455TEST_P(CameraAidlTest, processMultiCaptureRequestPreview) {
1456 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1457 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
1458 static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
1459 int64_t bufferId = 1;
1460 uint32_t frameNumber = 1;
1461 std::vector<uint8_t> settings;
1462 std::vector<uint8_t> emptySettings;
1463 std::string invalidPhysicalId = "-1";
1464
1465 for (const auto& name : cameraDeviceNames) {
1466 std::string version, deviceId;
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +00001467 ALOGI("processMultiCaptureRequestPreview: Test device %s", name.c_str());
Avichal Rakesh362242f2022-02-08 12:40:53 -08001468 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1469 CameraMetadata metadata;
1470
1471 std::shared_ptr<ICameraDevice> unusedDevice;
1472 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &metadata /*out*/,
1473 &unusedDevice /*out*/);
1474
1475 camera_metadata_t* staticMeta =
1476 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
1477 Status rc = isLogicalMultiCamera(staticMeta);
1478 if (Status::OPERATION_NOT_SUPPORTED == rc) {
1479 ndk::ScopedAStatus ret = mSession->close();
1480 mSession = nullptr;
1481 ASSERT_TRUE(ret.isOk());
1482 continue;
1483 }
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +00001484 ASSERT_EQ(Status::OK, rc);
Avichal Rakesh362242f2022-02-08 12:40:53 -08001485
1486 std::unordered_set<std::string> physicalIds;
1487 rc = getPhysicalCameraIds(staticMeta, &physicalIds);
1488 ASSERT_TRUE(Status::OK == rc);
1489 ASSERT_TRUE(physicalIds.size() > 1);
1490
1491 std::unordered_set<int32_t> physicalRequestKeyIDs;
1492 rc = getSupportedKeys(staticMeta, ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS,
1493 &physicalRequestKeyIDs);
1494 ASSERT_TRUE(Status::OK == rc);
1495 if (physicalRequestKeyIDs.empty()) {
1496 ndk::ScopedAStatus ret = mSession->close();
1497 mSession = nullptr;
1498 ASSERT_TRUE(ret.isOk());
1499 // The logical camera doesn't support any individual physical requests.
1500 continue;
1501 }
1502
1503 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultPreviewSettings;
1504 android::hardware::camera::common::V1_0::helper::CameraMetadata filteredSettings;
1505 constructFilteredSettings(mSession, physicalRequestKeyIDs, RequestTemplate::PREVIEW,
1506 &defaultPreviewSettings, &filteredSettings);
1507 if (filteredSettings.isEmpty()) {
1508 // No physical device settings in default request.
1509 ndk::ScopedAStatus ret = mSession->close();
1510 mSession = nullptr;
1511 ASSERT_TRUE(ret.isOk());
1512 continue;
1513 }
1514
1515 const camera_metadata_t* settingsBuffer = defaultPreviewSettings.getAndLock();
1516 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1517 settings.assign(rawSettingsBuffer,
1518 rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1519 CameraMetadata settingsMetadata = {settings};
1520 overrideRotateAndCrop(&settingsMetadata);
1521
1522 ndk::ScopedAStatus ret = mSession->close();
1523 mSession = nullptr;
1524 ASSERT_TRUE(ret.isOk());
1525
1526 // Leave only 2 physical devices in the id set.
1527 auto it = physicalIds.begin();
1528 std::string physicalDeviceId = *it;
1529 it++;
1530 physicalIds.erase(++it, physicalIds.end());
1531 ASSERT_EQ(physicalIds.size(), 2u);
1532
1533 std::vector<HalStream> halStreams;
1534 bool supportsPartialResults = false;
1535 bool useHalBufManager = false;
1536 int32_t partialResultCount = 0;
1537 Stream previewStream;
1538 std::shared_ptr<DeviceCb> cb;
1539
Avichal Rakeshfbcf7ea2022-03-09 01:00:34 +00001540 configurePreviewStreams(
1541 name, mProvider, &previewThreshold, physicalIds, &mSession, &previewStream,
1542 &halStreams /*out*/, &supportsPartialResults /*out*/, &partialResultCount /*out*/,
1543 &useHalBufManager /*out*/, &cb /*out*/, 0 /*streamConfigCounter*/, true);
1544 if (mSession == nullptr) {
1545 // stream combination not supported by HAL, skip test for device
1546 continue;
1547 }
Avichal Rakesh362242f2022-02-08 12:40:53 -08001548
1549 ::aidl::android::hardware::common::fmq::MQDescriptor<
1550 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1551 descriptor;
1552 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1553 ASSERT_TRUE(resultQueueRet.isOk());
1554 std::shared_ptr<ResultMetadataQueue> resultQueue =
1555 std::make_shared<ResultMetadataQueue>(descriptor);
1556 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1557 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
1558 resultQueue = nullptr;
1559 // Don't use the queue onwards.
1560 }
1561
1562 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1563 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1564 partialResultCount, physicalIds, resultQueue);
1565
1566 std::vector<CaptureRequest> requests(1);
1567 CaptureRequest& request = requests[0];
1568 request.frameNumber = frameNumber;
1569 request.fmqSettingsSize = 0;
Emilian Peev3d919f92022-04-20 13:50:59 -07001570 request.settings = settingsMetadata;
Avichal Rakesh362242f2022-02-08 12:40:53 -08001571
1572 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1573
1574 std::vector<buffer_handle_t> graphicBuffers;
1575 graphicBuffers.reserve(halStreams.size());
1576 outputBuffers.resize(halStreams.size());
1577 size_t k = 0;
1578 for (const auto& halStream : halStreams) {
1579 buffer_handle_t buffer_handle;
1580 if (useHalBufManager) {
1581 outputBuffers[k] = {halStream.id, /*bufferId*/ 0, NativeHandle(),
1582 BufferStatus::OK, NativeHandle(), NativeHandle()};
1583 } else {
1584 allocateGraphicBuffer(previewStream.width, previewStream.height,
1585 android_convertGralloc1To0Usage(
1586 static_cast<uint64_t>(halStream.producerUsage),
1587 static_cast<uint64_t>(halStream.consumerUsage)),
1588 halStream.overrideFormat, &buffer_handle);
1589 graphicBuffers.push_back(buffer_handle);
1590 outputBuffers[k] = {
1591 halStream.id, bufferId, ::android::makeToAidl(buffer_handle),
1592 BufferStatus::OK, NativeHandle(), NativeHandle()};
1593 bufferId++;
1594 }
1595 k++;
1596 }
1597
1598 std::vector<PhysicalCameraSetting> camSettings(1);
1599 const camera_metadata_t* filteredSettingsBuffer = filteredSettings.getAndLock();
1600 uint8_t* rawFilteredSettingsBuffer = (uint8_t*)filteredSettingsBuffer;
1601 camSettings[0].settings = {std::vector(
1602 rawFilteredSettingsBuffer,
1603 rawFilteredSettingsBuffer + get_camera_metadata_size(filteredSettingsBuffer))};
1604 overrideRotateAndCrop(&camSettings[0].settings);
1605 camSettings[0].fmqSettingsSize = 0;
1606 camSettings[0].physicalCameraId = physicalDeviceId;
1607
1608 request.inputBuffer = {
1609 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
1610 request.physicalCameraSettings = camSettings;
1611
1612 {
1613 std::unique_lock<std::mutex> l(mLock);
1614 mInflightMap.clear();
1615 mInflightMap[frameNumber] = inflightReq;
1616 }
1617
1618 int32_t numRequestProcessed = 0;
1619 std::vector<BufferCache> cachesToRemove;
1620 ndk::ScopedAStatus returnStatus =
1621 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1622 ASSERT_TRUE(returnStatus.isOk());
1623 ASSERT_EQ(numRequestProcessed, 1u);
1624
1625 {
1626 std::unique_lock<std::mutex> l(mLock);
1627 while (!inflightReq->errorCodeValid &&
1628 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1629 auto timeout = std::chrono::system_clock::now() +
1630 std::chrono::seconds(kStreamBufferTimeoutSec);
1631 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1632 }
1633
1634 ASSERT_FALSE(inflightReq->errorCodeValid);
1635 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1636
1637 request.frameNumber++;
1638 // Empty settings should be supported after the first call
1639 // for repeating requests.
1640 request.settings.metadata.clear();
1641 request.physicalCameraSettings[0].settings.metadata.clear();
1642 // The buffer has been registered to HAL by bufferId, so per
1643 // API contract we should send a null handle for this buffer
1644 request.outputBuffers[0].buffer = NativeHandle();
1645 mInflightMap.clear();
1646 inflightReq = std::make_shared<InFlightRequest>(
1647 static_cast<ssize_t>(physicalIds.size()), false, supportsPartialResults,
1648 partialResultCount, physicalIds, resultQueue);
1649 mInflightMap[request.frameNumber] = inflightReq;
1650 }
1651
1652 returnStatus =
1653 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1654 ASSERT_TRUE(returnStatus.isOk());
1655 ASSERT_EQ(numRequestProcessed, 1u);
1656
1657 {
1658 std::unique_lock<std::mutex> l(mLock);
1659 while (!inflightReq->errorCodeValid &&
1660 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1661 auto timeout = std::chrono::system_clock::now() +
1662 std::chrono::seconds(kStreamBufferTimeoutSec);
1663 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1664 }
1665
1666 ASSERT_FALSE(inflightReq->errorCodeValid);
1667 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1668 }
1669
1670 // Invalid physical camera id should fail process requests
1671 frameNumber++;
1672 camSettings[0].physicalCameraId = invalidPhysicalId;
1673 camSettings[0].settings.metadata = settings;
1674
1675 request.physicalCameraSettings = camSettings; // Invalid camera settings
1676 returnStatus =
1677 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1678 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
1679 returnStatus.getServiceSpecificError());
1680
1681 defaultPreviewSettings.unlock(settingsBuffer);
1682 filteredSettings.unlock(filteredSettingsBuffer);
1683
1684 if (useHalBufManager) {
1685 std::vector<int32_t> streamIds(halStreams.size());
1686 for (size_t i = 0; i < streamIds.size(); i++) {
1687 streamIds[i] = halStreams[i].id;
1688 }
1689 verifyBuffersReturned(mSession, streamIds, cb);
1690 }
1691
1692 ret = mSession->close();
1693 mSession = nullptr;
1694 ASSERT_TRUE(ret.isOk());
1695 }
1696}
1697
1698// Generate and verify an ultra high resolution capture request
TEST_P(CameraAidlTest, processUltraHighResolutionRequest) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    int64_t bufferId = 1;
    int32_t frameNumber = 1;
    CameraMetadata settings;

    for (const auto& name : cameraDeviceNames) {
        std::string version, deviceId;
        ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
        CameraMetadata meta;

        // Open a throwaway session just to read the static metadata; devices
        // that do not advertise ultra high resolution support skip this test.
        std::shared_ptr<ICameraDevice> unusedDevice;
        openEmptyDeviceSession(name, mProvider, &mSession, &meta, &unusedDevice);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
        if (!isUltraHighResolution(staticMeta)) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }
        // Build the request settings from the STILL_CAPTURE template, then
        // force SENSOR_PIXEL_MODE to MAXIMUM_RESOLUTION so the HAL processes
        // the request in the ultra-high-resolution sensor mode.
        CameraMetadata req;
        android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
        ndk::ScopedAStatus ret =
                mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE, &req);
        ASSERT_TRUE(ret.isOk());

        // Sanity-check the returned template before using it.
        const camera_metadata_t* metadata =
                reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
        size_t expectedSize = req.metadata.size();
        int result = validate_camera_metadata_structure(metadata, &expectedSize);
        ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));

        size_t entryCount = get_camera_metadata_entry_count(metadata);
        ASSERT_GT(entryCount, 0u);
        defaultSettings = metadata;
        uint8_t sensorPixelMode =
                static_cast<uint8_t>(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
        ASSERT_EQ(::android::OK,
                  defaultSettings.update(ANDROID_SENSOR_PIXEL_MODE, &sensorPixelMode, 1));

        // Serialize the edited settings into the AIDL CameraMetadata payload.
        const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
        uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
        settings.metadata = std::vector(
                rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
        overrideRotateAndCrop(&settings);

        // Close the metadata-inspection session; a fresh session is created by
        // configureStreams() below for each pixel format.
        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());

        std::vector<HalStream> halStreams;
        bool supportsPartialResults = false;
        bool useHalBufManager = false;
        int32_t partialResultCount = 0;
        Stream previewStream;
        std::shared_ptr<DeviceCb> cb;

        // Exercise both a YUV and a RAW max-resolution stream configuration.
        std::list<PixelFormat> pixelFormats = {PixelFormat::YCBCR_420_888, PixelFormat::RAW16};
        for (PixelFormat format : pixelFormats) {
            previewStream.usage =
                static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                        GRALLOC1_CONSUMER_USAGE_CPU_READ);
            previewStream.dataSpace = Dataspace::UNKNOWN;
            configureStreams(name, mProvider, format, &mSession, &previewStream, &halStreams,
                             &supportsPartialResults, &partialResultCount, &useHalBufManager, &cb,
                             0, /*maxResolution*/ true);
            ASSERT_NE(mSession, nullptr);

            // Fetch the result-metadata fast message queue; fall back to
            // result callbacks only if the queue is unusable.
            ::aidl::android::hardware::common::fmq::MQDescriptor<
                    int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
                    descriptor;
            auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
            ASSERT_TRUE(resultQueueRet.isOk());

            std::shared_ptr<ResultMetadataQueue> resultQueue =
                    std::make_shared<ResultMetadataQueue>(descriptor);
            if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
                ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
                resultQueue = nullptr;
                // Don't use the queue onwards.
            }

            std::vector<buffer_handle_t> graphicBuffers;
            graphicBuffers.reserve(halStreams.size());
            // Track completion of one request covering every configured stream.
            std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
                    static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
                    partialResultCount, std::unordered_set<std::string>(), resultQueue);

            std::vector<CaptureRequest> requests(1);
            CaptureRequest& request = requests[0];
            std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
            outputBuffers.resize(halStreams.size());

            // Attach one output buffer per HAL stream. With the HAL buffer
            // manager the HAL pulls buffers itself (bufferId 0, null handle);
            // otherwise allocate and hand over a gralloc buffer.
            size_t k = 0;
            for (const auto& halStream : halStreams) {
                buffer_handle_t buffer_handle;
                if (useHalBufManager) {
                    outputBuffers[k] = {halStream.id, 0,
                                        NativeHandle(), BufferStatus::OK,
                                        NativeHandle(), NativeHandle()};
                } else {
                    allocateGraphicBuffer(previewStream.width, previewStream.height,
                                          android_convertGralloc1To0Usage(
                                                  static_cast<uint64_t>(halStream.producerUsage),
                                                  static_cast<uint64_t>(halStream.consumerUsage)),
                                          halStream.overrideFormat, &buffer_handle);
                    graphicBuffers.push_back(buffer_handle);
                    outputBuffers[k] = {
                            halStream.id, bufferId, ::android::makeToAidl(buffer_handle),
                            BufferStatus::OK, NativeHandle(), NativeHandle()};
                    bufferId++;
                }
                k++;
            }

            // No reprocessing input: streamId -1 with an ERROR status buffer.
            request.inputBuffer = {
                    -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
            request.frameNumber = frameNumber;
            request.fmqSettingsSize = 0;
            request.settings = settings;
            request.inputWidth = 0;
            request.inputHeight = 0;

            {
                std::unique_lock<std::mutex> l(mLock);
                mInflightMap.clear();
                mInflightMap[frameNumber] = inflightReq;
            }

            int32_t numRequestProcessed = 0;
            std::vector<BufferCache> cachesToRemove;
            ndk::ScopedAStatus returnStatus =
                    mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
            ASSERT_TRUE(returnStatus.isOk());
            ASSERT_EQ(numRequestProcessed, 1u);

            // Wait (bounded by kStreamBufferTimeoutSec per wakeup) until all
            // buffers and the final result metadata arrive, then verify the
            // capture completed without an error notification.
            {
                std::unique_lock<std::mutex> l(mLock);
                while (!inflightReq->errorCodeValid &&
                       ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kStreamBufferTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
                }

                ASSERT_FALSE(inflightReq->errorCodeValid);
                ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
            }
            if (useHalBufManager) {
                // HAL-managed buffers must all be returned before shutdown.
                std::vector<int32_t> streamIds(halStreams.size());
                for (size_t i = 0; i < streamIds.size(); i++) {
                    streamIds[i] = halStreams[i].id;
                }
                verifyBuffersReturned(mSession, streamIds, cb);
            }

            ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
        }
    }
}
1861
1862// Generate and verify 10-bit dynamic range request
TEST_P(CameraAidlTest, process10BitDynamicRangeRequest) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    int64_t bufferId = 1;
    CameraMetadata settings;

    for (const auto& name : cameraDeviceNames) {
        std::string version, deviceId;
        ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> device;
        openEmptyDeviceSession(name, mProvider, &mSession, &meta, &device);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
        // Devices without 10-bit dynamic range output skip this test.
        if (!is10BitDynamicRangeCapable(staticMeta)) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }
        // Every advertised 10-bit profile is exercised in the loop below.
        std::vector<RequestAvailableDynamicRangeProfilesMap> profileList;
        get10BitDynamicRangeProfiles(staticMeta, &profileList);
        ASSERT_FALSE(profileList.empty());

        // Build request settings from the PREVIEW template and validate the
        // returned metadata structure before use.
        CameraMetadata req;
        android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
        ndk::ScopedAStatus ret =
                mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &req);
        ASSERT_TRUE(ret.isOk());

        const camera_metadata_t* metadata =
                reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
        size_t expectedSize = req.metadata.size();
        int result = validate_camera_metadata_structure(metadata, &expectedSize);
        ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));

        size_t entryCount = get_camera_metadata_entry_count(metadata);
        ASSERT_GT(entryCount, 0u);
        defaultSettings = metadata;

        // Serialize the template into the AIDL CameraMetadata payload.
        const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
        uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
        settings.metadata = std::vector(
                rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
        overrideRotateAndCrop(&settings);

        // Close the metadata-inspection session; configureStreams() below
        // opens a fresh session per dynamic range profile.
        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());

        std::vector<HalStream> halStreams;
        bool supportsPartialResults = false;
        bool useHalBufManager = false;
        int32_t partialResultCount = 0;
        Stream previewStream;
        std::shared_ptr<DeviceCb> cb;
        for (const auto& profile : profileList) {
            // HWCOMPOSER usage + IMPLEMENTATION_DEFINED is the typical display
            // path for 10-bit output.
            previewStream.usage =
                static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                        GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
            previewStream.dataSpace = getDataspace(PixelFormat::IMPLEMENTATION_DEFINED);
            configureStreams(name, mProvider, PixelFormat::IMPLEMENTATION_DEFINED, &mSession,
                             &previewStream, &halStreams, &supportsPartialResults,
                             &partialResultCount, &useHalBufManager, &cb, 0,
                             /*maxResolution*/ false, profile);
            ASSERT_NE(mSession, nullptr);

            // Fetch the result-metadata fast message queue; fall back to
            // result callbacks only if the queue is unusable.
            ::aidl::android::hardware::common::fmq::MQDescriptor<
                    int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
                    descriptor;
            auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
            ASSERT_TRUE(resultQueueRet.isOk());

            std::shared_ptr<ResultMetadataQueue> resultQueue =
                    std::make_shared<ResultMetadataQueue>(descriptor);
            if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
                ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
                resultQueue = nullptr;
                // Don't use the queue onwards.
            }

            // NOTE(review): mInflightMap is cleared here without holding mLock;
            // presumably safe because no request is in flight yet — confirm.
            mInflightMap.clear();
            // Stream as long as needed to fill the Hal inflight queue
            std::vector<CaptureRequest> requests(halStreams[0].maxBuffers);

            for (int32_t frameNumber = 0; frameNumber < requests.size(); frameNumber++) {
                std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
                        static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
                        partialResultCount, std::unordered_set<std::string>(), resultQueue);

                CaptureRequest& request = requests[frameNumber];
                std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
                outputBuffers.resize(halStreams.size());

                size_t k = 0;
                inflightReq->mOutstandingBufferIds.resize(halStreams.size());
                std::vector<buffer_handle_t> graphicBuffers;
                graphicBuffers.reserve(halStreams.size());

                // One output buffer per HAL stream: the HAL buffer manager
                // pulls its own buffers (id 0, null handle); otherwise a
                // gralloc buffer is allocated and recorded as outstanding.
                for (const auto& halStream : halStreams) {
                    buffer_handle_t buffer_handle;
                    if (useHalBufManager) {
                        outputBuffers[k] = {halStream.id, 0,
                                            NativeHandle(), BufferStatus::OK,
                                            NativeHandle(), NativeHandle()};
                    } else {
                        auto usage = android_convertGralloc1To0Usage(
                                static_cast<uint64_t>(halStream.producerUsage),
                                static_cast<uint64_t>(halStream.consumerUsage));
                        allocateGraphicBuffer(previewStream.width, previewStream.height, usage,
                                              halStream.overrideFormat, &buffer_handle);

                        inflightReq->mOutstandingBufferIds[halStream.id][bufferId] = buffer_handle;
                        graphicBuffers.push_back(buffer_handle);
                        outputBuffers[k] = {halStream.id, bufferId,
                            android::makeToAidl(buffer_handle), BufferStatus::OK, NativeHandle(),
                            NativeHandle()};
                        bufferId++;
                    }
                    k++;
                }

                // No reprocessing input: streamId -1, ERROR status buffer.
                request.inputBuffer = {
                        -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
                request.frameNumber = frameNumber;
                request.fmqSettingsSize = 0;
                request.settings = settings;
                request.inputWidth = 0;
                request.inputHeight = 0;

                {
                    std::unique_lock<std::mutex> l(mLock);
                    mInflightMap[frameNumber] = inflightReq;
                }

            }

            // Submit the whole batch in one processCaptureRequest call.
            int32_t numRequestProcessed = 0;
            std::vector<BufferCache> cachesToRemove;
            ndk::ScopedAStatus returnStatus =
                mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
            ASSERT_TRUE(returnStatus.isOk());
            ASSERT_EQ(numRequestProcessed, requests.size());

            // Signal the end of the simulated repeating stream on the first
            // configured stream, at the last submitted frame number.
            returnStatus = mSession->repeatingRequestEnd(requests.size() - 1,
                    std::vector<int32_t> {halStreams[0].id});
            ASSERT_TRUE(returnStatus.isOk());

            // Wait for every frame to complete, then check its 10-bit output.
            for (int32_t frameNumber = 0; frameNumber < requests.size(); frameNumber++) {
                // NOTE(review): map lookup happens before mLock is taken —
                // presumably safe because result callbacks only mutate the
                // InFlightRequest, not the map itself; confirm.
                const auto& inflightReq = mInflightMap[frameNumber];
                std::unique_lock<std::mutex> l(mLock);
                while (!inflightReq->errorCodeValid &&
                       ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kStreamBufferTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
                }

                ASSERT_FALSE(inflightReq->errorCodeValid);
                ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
                verify10BitMetadata(mHandleImporter, *inflightReq, profile);
            }

            if (useHalBufManager) {
                // Flush and wait so all HAL-managed buffers come back.
                std::vector<int32_t> streamIds(halStreams.size());
                for (size_t i = 0; i < streamIds.size(); i++) {
                    streamIds[i] = halStreams[i].id;
                }
                mSession->signalStreamFlush(streamIds, /*streamConfigCounter*/ 0);
                cb->waitForBuffersReturned();
            }

            ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
        }
    }
}
2039
2040// Generate and verify a burst containing alternating sensor sensitivity values
TEST_P(CameraAidlTest, processCaptureRequestBurstISO) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
                                        static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
    int64_t bufferId = 1;
    int32_t frameNumber = 1;
    float isoTol = .03f;  // 3% tolerance when comparing reported vs requested ISO
    CameraMetadata settings;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        settings.metadata.clear();
        std::shared_ptr<ICameraDevice> unusedDevice;
        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &unusedDevice /*out*/);
        // Clone the static metadata: the helper CameraMetadata takes ownership
        // of the buffer passed to its constructor.
        camera_metadata_t* staticMetaBuffer =
                clone_camera_metadata(reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
        ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta(
                staticMetaBuffer);

        // Manual sensor control is not required below FULL hardware level.
        camera_metadata_entry_t hwLevel = staticMeta.find(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL);
        ASSERT_TRUE(0 < hwLevel.count);
        if (ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED == hwLevel.data.u8[0] ||
            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL == hwLevel.data.u8[0]) {
            // Limited/External devices can skip this test
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        // The burst alternates between the two ends of this [min, max] range.
        camera_metadata_entry_t isoRange = staticMeta.find(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE);
        ASSERT_EQ(isoRange.count, 2u);

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());

        // Reopen with a configured preview stream for the actual burst.
        bool supportsPartialResults = false;
        bool useHalBufManager = false;
        int32_t partialResultCount = 0;
        Stream previewStream;
        std::vector<HalStream> halStreams;
        std::shared_ptr<DeviceCb> cb;
        configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
                               &previewStream /*out*/, &halStreams /*out*/,
                               &supportsPartialResults /*out*/, &partialResultCount /*out*/,
                               &useHalBufManager /*out*/, &cb /*out*/);

        // Fetch the result-metadata fast message queue; fall back to result
        // callbacks only if the queue is unusable.
        ::aidl::android::hardware::common::fmq::MQDescriptor<
                int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
                descriptor;
        auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
        std::shared_ptr<ResultMetadataQueue> resultQueue =
                std::make_shared<ResultMetadataQueue>(descriptor);
        ASSERT_TRUE(resultQueueRet.isOk());
        if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
            ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
            resultQueue = nullptr;
            // Don't use the queue onwards.
        }

        ret = mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &settings);
        ASSERT_TRUE(ret.isOk());

        ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
        std::vector<CaptureRequest> requests(kBurstFrameCount);
        std::vector<buffer_handle_t> buffers(kBurstFrameCount);
        std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount);
        std::vector<int32_t> isoValues(kBurstFrameCount);
        // Each request keeps its own settings copy alive until submission.
        std::vector<CameraMetadata> requestSettings(kBurstFrameCount);

        for (int32_t i = 0; i < kBurstFrameCount; i++) {
            // Lock per iteration: mInflightMap is populated at the end of
            // each pass and is shared with the result callback thread.
            std::unique_lock<std::mutex> l(mLock);
            CaptureRequest& request = requests[i];
            std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
            outputBuffers.resize(1);
            StreamBuffer& outputBuffer = outputBuffers[0];

            // Alternate min/max sensitivity on even/odd frames.
            isoValues[i] = ((i % 2) == 0) ? isoRange.data.i32[0] : isoRange.data.i32[1];
            if (useHalBufManager) {
                outputBuffer = {halStreams[0].id, 0,
                                NativeHandle(), BufferStatus::OK,
                                NativeHandle(), NativeHandle()};
            } else {
                allocateGraphicBuffer(previewStream.width, previewStream.height,
                                      android_convertGralloc1To0Usage(
                                              static_cast<uint64_t>(halStreams[0].producerUsage),
                                              static_cast<uint64_t>(halStreams[0].consumerUsage)),
                                      halStreams[0].overrideFormat, &buffers[i]);
                outputBuffer = {halStreams[0].id, bufferId + i, ::android::makeToAidl(buffers[i]),
                                BufferStatus::OK, NativeHandle(), NativeHandle()};
            }

            // Start from the PREVIEW template each iteration (requestMeta was
            // released at the end of the previous pass).
            requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));

            // Disable all 3A routines
            uint8_t mode = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF);
            ASSERT_EQ(::android::OK, requestMeta.update(ANDROID_CONTROL_MODE, &mode, 1));
            ASSERT_EQ(::android::OK,
                      requestMeta.update(ANDROID_SENSOR_SENSITIVITY, &isoValues[i], 1));
            camera_metadata_t* metaBuffer = requestMeta.release();
            uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
            requestSettings[i].metadata = std::vector(
                    rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
            overrideRotateAndCrop(&(requestSettings[i]));

            request.frameNumber = frameNumber + i;
            request.fmqSettingsSize = 0;
            request.settings = requestSettings[i];
            // No reprocessing input: streamId -1, ERROR status buffer.
            request.inputBuffer = {
                    -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};

            inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
                                                                partialResultCount, resultQueue);
            mInflightMap[frameNumber + i] = inflightReqs[i];
        }

        // Submit the whole burst in a single processCaptureRequest call.
        int32_t numRequestProcessed = 0;
        std::vector<BufferCache> cachesToRemove;

        ndk::ScopedAStatus returnStatus =
                mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
        ASSERT_TRUE(returnStatus.isOk());
        ASSERT_EQ(numRequestProcessed, kBurstFrameCount);

        // Wait for each frame and verify the reported sensitivity matches the
        // requested value within isoTol.
        for (size_t i = 0; i < kBurstFrameCount; i++) {
            std::unique_lock<std::mutex> l(mLock);
            while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
                                                        (!inflightReqs[i]->haveResultMetadata))) {
                auto timeout = std::chrono::system_clock::now() +
                               std::chrono::seconds(kStreamBufferTimeoutSec);
                ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
            }

            ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
            ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
            ASSERT_EQ(previewStream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
            ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
            ASSERT_TRUE(inflightReqs[i]->collectedResult.exists(ANDROID_SENSOR_SENSITIVITY));
            camera_metadata_entry_t isoResult =
                    inflightReqs[i]->collectedResult.find(ANDROID_SENSOR_SENSITIVITY);
            ASSERT_TRUE(std::abs(isoResult.data.i32[0] - isoValues[i]) <=
                        std::round(isoValues[i] * isoTol));
        }

        if (useHalBufManager) {
            verifyBuffersReturned(mSession, previewStream.id, cb);
        }
        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
2195
2196// Test whether an incorrect capture request with missing settings will
2197// be reported correctly.
2198TEST_P(CameraAidlTest, processCaptureRequestInvalidSinglePreview) {
2199 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2200 std::vector<AvailableStream> outputPreviewStreams;
2201 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2202 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2203 int64_t bufferId = 1;
2204 int32_t frameNumber = 1;
2205 CameraMetadata settings;
2206
2207 for (const auto& name : cameraDeviceNames) {
2208 Stream previewStream;
2209 std::vector<HalStream> halStreams;
2210 std::shared_ptr<DeviceCb> cb;
2211 bool supportsPartialResults = false;
2212 bool useHalBufManager = false;
2213 int32_t partialResultCount = 0;
2214 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2215 &previewStream /*out*/, &halStreams /*out*/,
2216 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2217 &useHalBufManager /*out*/, &cb /*out*/);
2218 ASSERT_NE(mSession, nullptr);
2219 ASSERT_FALSE(halStreams.empty());
2220
2221 buffer_handle_t buffer_handle = nullptr;
2222
2223 if (useHalBufManager) {
2224 bufferId = 0;
2225 } else {
2226 allocateGraphicBuffer(previewStream.width, previewStream.height,
2227 android_convertGralloc1To0Usage(
2228 static_cast<uint64_t>(halStreams[0].producerUsage),
2229 static_cast<uint64_t>(halStreams[0].consumerUsage)),
2230 halStreams[0].overrideFormat, &buffer_handle);
2231 }
2232
2233 std::vector<CaptureRequest> requests(1);
2234 CaptureRequest& request = requests[0];
2235 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2236 outputBuffers.resize(1);
2237 StreamBuffer& outputBuffer = outputBuffers[0];
2238
2239 outputBuffer = {
2240 halStreams[0].id,
2241 bufferId,
2242 buffer_handle == nullptr ? NativeHandle() : ::android::makeToAidl(buffer_handle),
2243 BufferStatus::OK,
2244 NativeHandle(),
2245 NativeHandle()};
2246
2247 request.inputBuffer = {
2248 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2249 request.frameNumber = frameNumber;
2250 request.fmqSettingsSize = 0;
2251 request.settings = settings;
2252
2253 // Settings were not correctly initialized, we should fail here
2254 int32_t numRequestProcessed = 0;
2255 std::vector<BufferCache> cachesToRemove;
2256 ndk::ScopedAStatus ret =
2257 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2258 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
2259 ASSERT_EQ(numRequestProcessed, 0u);
2260
2261 ret = mSession->close();
2262 mSession = nullptr;
2263 ASSERT_TRUE(ret.isOk());
2264 }
2265}
2266
2267// Verify camera offline session behavior
2268TEST_P(CameraAidlTest, switchToOffline) {
2269 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2270 AvailableStream threshold = {kMaxStillWidth, kMaxStillHeight,
2271 static_cast<int32_t>(PixelFormat::BLOB)};
2272 int64_t bufferId = 1;
2273 int32_t frameNumber = 1;
2274 CameraMetadata settings;
2275
2276 for (const auto& name : cameraDeviceNames) {
2277 CameraMetadata meta;
2278 {
2279 std::shared_ptr<ICameraDevice> unusedDevice;
2280 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2281 &unusedDevice);
2282 camera_metadata_t* staticMetaBuffer = clone_camera_metadata(
2283 reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
2284 ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta(
2285 staticMetaBuffer);
2286
2287 if (isOfflineSessionSupported(staticMetaBuffer) != Status::OK) {
2288 ndk::ScopedAStatus ret = mSession->close();
2289 mSession = nullptr;
2290 ASSERT_TRUE(ret.isOk());
2291 continue;
2292 }
2293 ndk::ScopedAStatus ret = mSession->close();
2294 mSession = nullptr;
2295 ASSERT_TRUE(ret.isOk());
2296 }
2297
2298 bool supportsPartialResults = false;
2299 int32_t partialResultCount = 0;
2300 Stream stream;
2301 std::vector<HalStream> halStreams;
2302 std::shared_ptr<DeviceCb> cb;
2303 int32_t jpegBufferSize;
2304 bool useHalBufManager;
2305 configureOfflineStillStream(name, mProvider, &threshold, &mSession /*out*/, &stream /*out*/,
2306 &halStreams /*out*/, &supportsPartialResults /*out*/,
2307 &partialResultCount /*out*/, &cb /*out*/,
2308 &jpegBufferSize /*out*/, &useHalBufManager /*out*/);
2309
2310 auto ret = mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE,
2311 &settings);
2312 ASSERT_TRUE(ret.isOk());
2313
2314 ::aidl::android::hardware::common::fmq::MQDescriptor<
2315 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2316 descriptor;
2317
2318 ndk::ScopedAStatus resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2319 ASSERT_TRUE(resultQueueRet.isOk());
2320 std::shared_ptr<ResultMetadataQueue> resultQueue =
2321 std::make_shared<ResultMetadataQueue>(descriptor);
2322 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2323 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
2324 resultQueue = nullptr;
2325 // Don't use the queue onwards.
2326 }
2327
2328 ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
2329
2330 std::vector<buffer_handle_t> buffers(kBurstFrameCount);
2331 std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount);
2332 std::vector<CameraMetadata> requestSettings(kBurstFrameCount);
2333
2334 std::vector<CaptureRequest> requests(kBurstFrameCount);
2335
2336 HalStream halStream = halStreams[0];
2337 for (uint32_t i = 0; i < kBurstFrameCount; i++) {
2338 CaptureRequest& request = requests[i];
2339 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2340 outputBuffers.resize(1);
2341 StreamBuffer& outputBuffer = outputBuffers[0];
2342
2343 std::unique_lock<std::mutex> l(mLock);
2344 if (useHalBufManager) {
2345 outputBuffer = {halStream.id, 0, NativeHandle(), BufferStatus::OK, NativeHandle(),
2346 NativeHandle()};
2347 } else {
2348 // jpeg buffer (w,h) = (blobLen, 1)
2349 allocateGraphicBuffer(jpegBufferSize, /*height*/ 1,
2350 android_convertGralloc1To0Usage(
2351 static_cast<uint64_t>(halStream.producerUsage),
2352 static_cast<uint64_t>(halStream.consumerUsage)),
2353 halStream.overrideFormat, &buffers[i]);
2354 outputBuffer = {halStream.id, bufferId + i, ::android::makeToAidl(buffers[i]),
2355 BufferStatus::OK, NativeHandle(), NativeHandle()};
2356 }
2357
2358 requestMeta.clear();
2359 requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));
2360
2361 camera_metadata_t* metaBuffer = requestMeta.release();
2362 uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2363 requestSettings[i].metadata = std::vector(
2364 rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2365 overrideRotateAndCrop(&requestSettings[i]);
2366
2367 request.frameNumber = frameNumber + i;
2368 request.fmqSettingsSize = 0;
2369 request.settings = requestSettings[i];
2370 request.inputBuffer = {/*streamId*/ -1,
2371 /*bufferId*/ 0, NativeHandle(),
2372 BufferStatus::ERROR, NativeHandle(),
2373 NativeHandle()};
2374
2375 inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2376 partialResultCount, resultQueue);
2377 mInflightMap[frameNumber + i] = inflightReqs[i];
2378 }
2379
2380 int32_t numRequestProcessed = 0;
2381 std::vector<BufferCache> cachesToRemove;
2382
2383 ndk::ScopedAStatus returnStatus =
2384 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2385 ASSERT_TRUE(returnStatus.isOk());
2386 ASSERT_EQ(numRequestProcessed, kBurstFrameCount);
2387
2388 std::vector<int32_t> offlineStreamIds = {halStream.id};
2389 CameraOfflineSessionInfo offlineSessionInfo;
2390 std::shared_ptr<ICameraOfflineSession> offlineSession;
2391 returnStatus =
2392 mSession->switchToOffline(offlineStreamIds, &offlineSessionInfo, &offlineSession);
2393
2394 if (!halStreams[0].supportOffline) {
2395 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
2396 returnStatus.getServiceSpecificError());
2397 ret = mSession->close();
2398 mSession = nullptr;
2399 ASSERT_TRUE(ret.isOk());
2400 continue;
2401 }
2402
2403 ASSERT_TRUE(returnStatus.isOk());
2404 // Hal might be unable to find any requests qualified for offline mode.
2405 if (offlineSession == nullptr) {
2406 ret = mSession->close();
2407 mSession = nullptr;
2408 ASSERT_TRUE(ret.isOk());
2409 continue;
2410 }
2411
2412 ASSERT_EQ(offlineSessionInfo.offlineStreams.size(), 1u);
2413 ASSERT_EQ(offlineSessionInfo.offlineStreams[0].id, halStream.id);
2414 ASSERT_NE(offlineSessionInfo.offlineRequests.size(), 0u);
2415
2416 // close device session to make sure offline session does not rely on it
2417 ret = mSession->close();
2418 mSession = nullptr;
2419 ASSERT_TRUE(ret.isOk());
2420
2421 ::aidl::android::hardware::common::fmq::MQDescriptor<
2422 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2423 offlineResultDescriptor;
2424
2425 auto offlineResultQueueRet =
2426 offlineSession->getCaptureResultMetadataQueue(&offlineResultDescriptor);
2427 std::shared_ptr<ResultMetadataQueue> offlineResultQueue =
2428 std::make_shared<ResultMetadataQueue>(descriptor);
2429 if (!offlineResultQueue->isValid() || offlineResultQueue->availableToWrite() <= 0) {
2430 ALOGE("%s: offline session returns empty result metadata fmq, not use it", __func__);
2431 offlineResultQueue = nullptr;
2432 // Don't use the queue onwards.
2433 }
2434 ASSERT_TRUE(offlineResultQueueRet.isOk());
2435
2436 updateInflightResultQueue(offlineResultQueue);
2437
2438 ret = offlineSession->setCallback(cb);
2439 ASSERT_TRUE(ret.isOk());
2440
2441 for (size_t i = 0; i < kBurstFrameCount; i++) {
2442 std::unique_lock<std::mutex> l(mLock);
2443 while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
2444 (!inflightReqs[i]->haveResultMetadata))) {
2445 auto timeout = std::chrono::system_clock::now() +
2446 std::chrono::seconds(kStreamBufferTimeoutSec);
2447 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2448 }
2449
2450 ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
2451 ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
2452 ASSERT_EQ(stream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
2453 ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
2454 }
2455
2456 ret = offlineSession->close();
2457 ASSERT_TRUE(ret.isOk());
2458 }
2459}
2460
2461// Check whether an invalid capture request with missing output buffers
2462// will be reported correctly.
2463TEST_P(CameraAidlTest, processCaptureRequestInvalidBuffer) {
2464 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2465 std::vector<AvailableStream> outputBlobStreams;
2466 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2467 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2468 int32_t frameNumber = 1;
2469 CameraMetadata settings;
2470
2471 for (const auto& name : cameraDeviceNames) {
2472 Stream previewStream;
2473 std::vector<HalStream> halStreams;
2474 std::shared_ptr<DeviceCb> cb;
2475 bool supportsPartialResults = false;
2476 bool useHalBufManager = false;
2477 int32_t partialResultCount = 0;
2478 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2479 &previewStream /*out*/, &halStreams /*out*/,
2480 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2481 &useHalBufManager /*out*/, &cb /*out*/);
2482
2483 RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
2484 ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings);
2485 ASSERT_TRUE(ret.isOk());
2486 overrideRotateAndCrop(&settings);
2487
2488 std::vector<CaptureRequest> requests(1);
2489 CaptureRequest& request = requests[0];
2490 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2491 outputBuffers.resize(1);
2492 // Empty output buffer
2493 outputBuffers[0] = {
2494 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2495
2496 request.inputBuffer = {
2497 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2498 request.frameNumber = frameNumber;
2499 request.fmqSettingsSize = 0;
2500 request.settings = settings;
2501
2502 // Output buffers are missing, we should fail here
2503 int32_t numRequestProcessed = 0;
2504 std::vector<BufferCache> cachesToRemove;
2505 ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2506 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
2507 ASSERT_EQ(numRequestProcessed, 0u);
2508
2509 ret = mSession->close();
2510 mSession = nullptr;
2511 ASSERT_TRUE(ret.isOk());
2512 }
2513}
2514
// Generate, trigger and flush a preview request
// Submits one preview capture request, immediately flushes the session, and
// then accepts either a normal completion or one of the flush-related error
// notifications (ERROR_REQUEST / ERROR_RESULT / ERROR_BUFFER).
TEST_P(CameraAidlTest, flushPreviewRequest) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputPreviewStreams;
    AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
                                        static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
    int64_t bufferId = 1;
    int32_t frameNumber = 1;
    CameraMetadata settings;

    for (const auto& name : cameraDeviceNames) {
        Stream previewStream;
        std::vector<HalStream> halStreams;
        std::shared_ptr<DeviceCb> cb;
        bool supportsPartialResults = false;
        bool useHalBufManager = false;
        int32_t partialResultCount = 0;

        // Configure a single preview stream on the session under test.
        configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
                               &previewStream /*out*/, &halStreams /*out*/,
                               &supportsPartialResults /*out*/, &partialResultCount /*out*/,
                               &useHalBufManager /*out*/, &cb /*out*/);

        ASSERT_NE(mSession, nullptr);
        ASSERT_NE(cb, nullptr);
        ASSERT_FALSE(halStreams.empty());

        // Fetch the HAL's capture-result metadata fast message queue, if usable.
        ::aidl::android::hardware::common::fmq::MQDescriptor<
                int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
                descriptor;

        auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
        std::shared_ptr<ResultMetadataQueue> resultQueue =
                std::make_shared<ResultMetadataQueue>(descriptor);
        ASSERT_TRUE(resultQueueRet.isOk());
        if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
            ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
            resultQueue = nullptr;
            // Don't use the queue onwards.
        }

        // Track the single request: one output buffer expected.
        std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
                1, false, supportsPartialResults, partialResultCount, resultQueue);
        RequestTemplate reqTemplate = RequestTemplate::PREVIEW;

        ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings);
        ASSERT_TRUE(ret.isOk());
        overrideRotateAndCrop(&settings);

        buffer_handle_t buffer_handle;
        std::vector<CaptureRequest> requests(1);
        CaptureRequest& request = requests[0];
        std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
        outputBuffers.resize(1);
        StreamBuffer& outputBuffer = outputBuffers[0];
        if (useHalBufManager) {
            // HAL buffer manager mode: HAL pulls buffers itself, so pass an
            // empty handle and bufferId 0.
            bufferId = 0;
            outputBuffer = {halStreams[0].id, bufferId, NativeHandle(),
                            BufferStatus::OK, NativeHandle(), NativeHandle()};
        } else {
            // Allocate a gralloc buffer matching the configured preview stream.
            allocateGraphicBuffer(previewStream.width, previewStream.height,
                                  android_convertGralloc1To0Usage(
                                          static_cast<uint64_t>(halStreams[0].producerUsage),
                                          static_cast<uint64_t>(halStreams[0].consumerUsage)),
                                  halStreams[0].overrideFormat, &buffer_handle);
            outputBuffer = {halStreams[0].id, bufferId, ::android::makeToAidl(buffer_handle),
                            BufferStatus::OK, NativeHandle(), NativeHandle()};
        }

        request.frameNumber = frameNumber;
        request.fmqSettingsSize = 0;
        request.settings = settings;
        request.inputBuffer = {
                -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};

        // Register the in-flight request before submitting it, under the lock.
        {
            std::unique_lock<std::mutex> l(mLock);
            mInflightMap.clear();
            mInflightMap[frameNumber] = inflightReq;
        }

        int32_t numRequestProcessed = 0;
        std::vector<BufferCache> cachesToRemove;
        ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
        ASSERT_TRUE(ret.isOk());
        ASSERT_EQ(numRequestProcessed, 1u);

        // Flush before waiting for request to complete.
        ndk::ScopedAStatus returnStatus = mSession->flush();
        ASSERT_TRUE(returnStatus.isOk());

        {
            // Wait (bounded per iteration) until the request either errors out
            // or all buffers and the result metadata have arrived.
            std::unique_lock<std::mutex> l(mLock);
            while (!inflightReq->errorCodeValid &&
                   ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
                auto timeout = std::chrono::system_clock::now() +
                               std::chrono::seconds(kStreamBufferTimeoutSec);
                ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
            }

            if (!inflightReq->errorCodeValid) {
                // Request completed normally despite the flush — verify output.
                ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
                ASSERT_EQ(previewStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId);
            } else {
                // Flush may legitimately abort the request with these errors only.
                switch (inflightReq->errorCode) {
                    case ErrorCode::ERROR_REQUEST:
                    case ErrorCode::ERROR_RESULT:
                    case ErrorCode::ERROR_BUFFER:
                        // Expected
                        break;
                    case ErrorCode::ERROR_DEVICE:
                    default:
                        FAIL() << "Unexpected error:"
                               << static_cast<uint32_t>(inflightReq->errorCode);
                }
            }
        }

        if (useHalBufManager) {
            // All HAL-managed buffers must be returned before shutdown.
            verifyBuffersReturned(mSession, previewStream.id, cb);
        }

        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
2642
2643// Verify that camera flushes correctly without any pending requests.
2644TEST_P(CameraAidlTest, flushEmpty) {
2645 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2646 std::vector<AvailableStream> outputPreviewStreams;
2647 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2648 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2649
2650 for (const auto& name : cameraDeviceNames) {
2651 Stream previewStream;
2652 std::vector<HalStream> halStreams;
2653 std::shared_ptr<DeviceCb> cb;
2654 bool supportsPartialResults = false;
2655 bool useHalBufManager = false;
2656
2657 int32_t partialResultCount = 0;
2658 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2659 &previewStream /*out*/, &halStreams /*out*/,
2660 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2661 &useHalBufManager /*out*/, &cb /*out*/);
2662
2663 ndk::ScopedAStatus returnStatus = mSession->flush();
2664 ASSERT_TRUE(returnStatus.isOk());
2665
2666 {
2667 std::unique_lock<std::mutex> l(mLock);
2668 auto timeout = std::chrono::system_clock::now() +
2669 std::chrono::milliseconds(kEmptyFlushTimeoutMSec);
2670 ASSERT_EQ(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2671 }
2672
2673 ndk::ScopedAStatus ret = mSession->close();
2674 mSession = nullptr;
2675 ASSERT_TRUE(ret.isOk());
2676 }
2677}
2678
2679// Test camera provider notify method
2680TEST_P(CameraAidlTest, providerDeviceStateNotification) {
2681 notifyDeviceState(ICameraProvider::DEVICE_STATE_BACK_COVERED);
2682 notifyDeviceState(ICameraProvider::DEVICE_STATE_NORMAL);
2683}
2684
// Verify that all supported stream formats and sizes can be configured
// successfully for injection camera.
TEST_P(CameraAidlTest, configureInjectionStreamsAvailableOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputStreams;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata metadata;

        std::shared_ptr<ICameraInjectionSession> injectionSession;
        std::shared_ptr<ICameraDevice> unusedDevice;
        openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
                                  &unusedDevice /*out*/);
        if (injectionSession == nullptr) {
            // Device does not support camera injection; skip it.
            continue;
        }

        // NOTE(review): staticMetaBuffer aliases storage owned by
        // metadata.metadata — it must not be freed independently.
        camera_metadata_t* staticMetaBuffer =
                reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
        CameraMetadata chars;
        chars.metadata = metadata.metadata;

        outputStreams.clear();
        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams));
        ASSERT_NE(0u, outputStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        // Configure each advertised output size/format as a single-stream
        // injection configuration; every one must succeed.
        int32_t streamId = 0;
        int32_t streamConfigCounter = 0;
        for (auto& it : outputStreams) {
            Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format));
            Stream stream = {streamId,
                             StreamType::OUTPUT,
                             it.width,
                             it.height,
                             static_cast<PixelFormat>(it.format),
                             static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                     GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                             dataspace,
                             StreamRotation::ROTATION_0,
                             std::string(),
                             jpegBufferSize,
                             0,
                             {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                             RequestAvailableDynamicRangeProfilesMap::
                                     ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

            std::vector<Stream> streams = {stream};
            StreamConfiguration config;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);

            config.streamConfigCounter = streamConfigCounter++;
            ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
            ASSERT_TRUE(s.isOk());
            streamId++;
        }

        // Close via the underlying device session.
        std::shared_ptr<ICameraDeviceSession> session;
        ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(session, nullptr);
        ret = session->close();
        ASSERT_TRUE(ret.isOk());
    }
}
2754
// Check for correct handling of invalid/incorrect configuration parameters for injection camera.
// Each invalid configuration (zero size, overflow size, bogus format, bogus
// rotation) must be rejected with ILLEGAL_ARGUMENT (or INTERNAL_ERROR where noted).
TEST_P(CameraAidlTest, configureInjectionStreamsInvalidOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputStreams;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata metadata;
        std::shared_ptr<ICameraInjectionSession> injectionSession;
        std::shared_ptr<ICameraDevice> unusedDevice;
        openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
                                  &unusedDevice);
        if (injectionSession == nullptr) {
            // Device does not support camera injection; skip it.
            continue;
        }

        // NOTE(review): aliases storage owned by metadata.metadata; not freed here.
        camera_metadata_t* staticMetaBuffer =
                reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
        std::shared_ptr<ICameraDeviceSession> session;
        ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(session, nullptr);

        CameraMetadata chars;
        chars.metadata = metadata.metadata;

        outputStreams.clear();
        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams));
        ASSERT_NE(0u, outputStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        // Case 1: zero width/height — must fail.
        int32_t streamId = 0;
        Stream stream = {streamId++,
                         StreamType::OUTPUT,
                         0,
                         0,
                         static_cast<PixelFormat>(outputStreams[0].format),
                         static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                         Dataspace::UNKNOWN,
                         StreamRotation::ROTATION_0,
                         std::string(),
                         jpegBufferSize,
                         0,
                         {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                         RequestAvailableDynamicRangeProfilesMap::
                                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

        int32_t streamConfigCounter = 0;
        std::vector<Stream> streams = {stream};
        StreamConfiguration config;
        createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                  jpegBufferSize);

        config.streamConfigCounter = streamConfigCounter++;
        ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
        // Zero-sized streams may surface as either error code.
        ASSERT_TRUE(
                (static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) == s.getServiceSpecificError()) ||
                (static_cast<int32_t>(Status::INTERNAL_ERROR) == s.getServiceSpecificError()));

        // Case 2: absurdly large dimensions — must fail with ILLEGAL_ARGUMENT.
        stream = {streamId++,
                  StreamType::OUTPUT,
                  INT32_MAX,
                  INT32_MAX,
                  static_cast<PixelFormat>(outputStreams[0].format),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  jpegBufferSize,
                  0,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                  jpegBufferSize);
        config.streamConfigCounter = streamConfigCounter++;
        s = injectionSession->configureInjectionStreams(config, chars);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());

        for (auto& it : outputStreams) {
            // Case 3: valid size but invalid pixel format — must fail.
            stream = {streamId++,
                      StreamType::OUTPUT,
                      it.width,
                      it.height,
                      static_cast<PixelFormat>(INT32_MAX),
                      static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                              GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                      Dataspace::UNKNOWN,
                      StreamRotation::ROTATION_0,
                      std::string(),
                      jpegBufferSize,
                      0,
                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                      RequestAvailableDynamicRangeProfilesMap::
                              ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
            streams[0] = stream;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);
            config.streamConfigCounter = streamConfigCounter++;
            s = injectionSession->configureInjectionStreams(config, chars);
            ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());

            // Case 4: valid size/format but invalid rotation — must fail.
            stream = {streamId++,
                      StreamType::OUTPUT,
                      it.width,
                      it.height,
                      static_cast<PixelFormat>(it.format),
                      static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                              GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                      Dataspace::UNKNOWN,
                      static_cast<StreamRotation>(INT32_MAX),
                      std::string(),
                      jpegBufferSize,
                      0,
                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                      RequestAvailableDynamicRangeProfilesMap::
                              ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
            streams[0] = stream;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);
            config.streamConfigCounter = streamConfigCounter++;
            s = injectionSession->configureInjectionStreams(config, chars);
            ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
        }

        ret = session->close();
        ASSERT_TRUE(ret.isOk());
    }
}
2890
2891// Check whether session parameters are supported for injection camera. If Hal support for them
2892// exist, then try to configure a preview stream using them.
2893TEST_P(CameraAidlTest, configureInjectionStreamsWithSessionParameters) {
2894 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2895 std::vector<AvailableStream> outputPreviewStreams;
2896 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2897 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2898
2899 for (const auto& name : cameraDeviceNames) {
2900 CameraMetadata metadata;
2901 std::shared_ptr<ICameraInjectionSession> injectionSession;
2902 std::shared_ptr<ICameraDevice> unusedDevice;
2903 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2904 &unusedDevice /*out*/);
2905 if (injectionSession == nullptr) {
2906 continue;
2907 }
2908
2909 std::shared_ptr<ICameraDeviceSession> session;
2910 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2911 ASSERT_TRUE(ret.isOk());
2912 ASSERT_NE(session, nullptr);
2913
2914 camera_metadata_t* staticMetaBuffer =
2915 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2916 CameraMetadata chars;
2917 chars.metadata = metadata.metadata;
2918
2919 std::unordered_set<int32_t> availableSessionKeys;
2920 Status rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
2921 &availableSessionKeys);
2922 ASSERT_EQ(Status::OK, rc);
2923 if (availableSessionKeys.empty()) {
2924 ret = session->close();
2925 ASSERT_TRUE(ret.isOk());
2926 continue;
2927 }
2928
2929 android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings;
2930 android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams,
2931 modifiedSessionParams;
2932 constructFilteredSettings(session, availableSessionKeys, RequestTemplate::PREVIEW,
2933 &previewRequestSettings, &sessionParams);
2934 if (sessionParams.isEmpty()) {
2935 ret = session->close();
2936 ASSERT_TRUE(ret.isOk());
2937 continue;
2938 }
2939
2940 outputPreviewStreams.clear();
2941
2942 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams,
2943 &previewThreshold));
2944 ASSERT_NE(0u, outputPreviewStreams.size());
2945
2946 Stream previewStream = {
2947 0,
2948 StreamType::OUTPUT,
2949 outputPreviewStreams[0].width,
2950 outputPreviewStreams[0].height,
2951 static_cast<PixelFormat>(outputPreviewStreams[0].format),
2952 static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
2953 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2954 Dataspace::UNKNOWN,
2955 StreamRotation::ROTATION_0,
2956 std::string(),
2957 0,
2958 -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00002959 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2960 RequestAvailableDynamicRangeProfilesMap::
2961 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08002962 std::vector<Stream> streams = {previewStream};
2963 StreamConfiguration config;
2964 config.streams = streams;
2965 config.operationMode = StreamConfigurationMode::NORMAL_MODE;
2966
2967 modifiedSessionParams = sessionParams;
2968 camera_metadata_t* sessionParamsBuffer = sessionParams.release();
2969 uint8_t* rawSessionParamsBuffer = reinterpret_cast<uint8_t*>(sessionParamsBuffer);
2970 config.sessionParams.metadata =
2971 std::vector(rawSessionParamsBuffer,
2972 rawSessionParamsBuffer + get_camera_metadata_size(sessionParamsBuffer));
2973
2974 config.streamConfigCounter = 0;
2975 config.streamConfigCounter = 0;
2976 config.multiResolutionInputImage = false;
2977
2978 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
2979 ASSERT_TRUE(s.isOk());
2980
2981 sessionParams.acquire(sessionParamsBuffer);
2982 free_camera_metadata(staticMetaBuffer);
2983 ret = session->close();
2984 ASSERT_TRUE(ret.isOk());
2985 }
2986}
2987
// Verify that valid stream use cases can be configured successfully, and invalid use cases
// fail stream configuration.
TEST_P(CameraAidlTest, configureStreamsUseCases) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);

        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
        // Check if camera support depth only
        if (isDepthOnly(staticMeta)) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        std::vector<AvailableStream> outputPreviewStreams;
        AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
                                            static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputPreviewStreams, &previewThreshold));
        ASSERT_NE(0u, outputPreviewStreams.size());

        // Combine valid and invalid stream use cases
        // (VIDEO_CALL + 1 is intentionally outside any defined use case.)
        std::vector<int64_t> useCases(kMandatoryUseCases);
        useCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL + 1);

        // Build the set of use cases this device claims to support; devices
        // without the static entry implicitly support only DEFAULT.
        std::vector<int64_t> supportedUseCases;
        camera_metadata_ro_entry entry;
        auto retcode = find_camera_metadata_ro_entry(
                staticMeta, ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES, &entry);
        if ((0 == retcode) && (entry.count > 0)) {
            supportedUseCases.insert(supportedUseCases.end(), entry.data.i64,
                                     entry.data.i64 + entry.count);
        } else {
            supportedUseCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
        }

        std::vector<Stream> streams(1);
        streams[0] = {0,
                      StreamType::OUTPUT,
                      outputPreviewStreams[0].width,
                      outputPreviewStreams[0].height,
                      static_cast<PixelFormat>(outputPreviewStreams[0].format),
                      static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
                              GRALLOC1_CONSUMER_USAGE_CPU_READ),
                      Dataspace::UNKNOWN,
                      StreamRotation::ROTATION_0,
                      std::string(),
                      0,
                      -1,
                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                      RequestAvailableDynamicRangeProfilesMap::
                              ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

        int32_t streamConfigCounter = 0;
        CameraMetadata req;
        StreamConfiguration config;
        RequestTemplate reqTemplate = RequestTemplate::STILL_CAPTURE;
        ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &req);
        ASSERT_TRUE(ret.isOk());
        config.sessionParams = req;

        for (int64_t useCase : useCases) {
            bool useCaseSupported = std::find(supportedUseCases.begin(), supportedUseCases.end(),
                                              useCase) != supportedUseCases.end();

            streams[0].useCase = static_cast<
                    aidl::android::hardware::camera::metadata::ScalerAvailableStreamUseCases>(
                    useCase);
            config.streams = streams;
            config.operationMode = StreamConfigurationMode::NORMAL_MODE;
            config.streamConfigCounter = streamConfigCounter;
            config.multiResolutionInputImage = false;

            // isStreamCombinationSupported must agree with the static metadata;
            // OPERATION_NOT_SUPPORTED means the query itself is unimplemented.
            bool combSupported;
            ret = cameraDevice->isStreamCombinationSupported(config, &combSupported);
            if (static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED) ==
                ret.getServiceSpecificError()) {
                continue;
            }

            ASSERT_TRUE(ret.isOk());
            ASSERT_EQ(combSupported, useCaseSupported);

            // configureStreams must succeed exactly for supported use cases.
            std::vector<HalStream> halStreams;
            ret = mSession->configureStreams(config, &halStreams);
            ALOGI("configureStreams returns status: %d", ret.getServiceSpecificError());
            if (useCaseSupported) {
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(1u, halStreams.size());
            } else {
                ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
                          ret.getServiceSpecificError());
            }
        }
        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
3094
// Instantiate the parameterized suite once per registered AIDL camera provider
// instance; allow zero instantiations on devices with no AIDL camera HAL.
GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(CameraAidlTest);
INSTANTIATE_TEST_SUITE_P(
        PerInstance, CameraAidlTest,
        testing::ValuesIn(android::getAidlHalInstanceNames(ICameraProvider::descriptor)),
        android::hardware::PrintInstanceNameToString);