blob: d1fa94e5e3d4ade9c16485a028b8900a3d92b19c [file] [log] [blame]
Avichal Rakesh362242f2022-02-08 12:40:53 -08001/*
2 * Copyright (C) 2022 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include <aidl/Vintf.h>
18#include <aidl/android/hardware/camera/common/VendorTagSection.h>
19#include <aidl/android/hardware/camera/device/ICameraDevice.h>
20#include <aidlcommonsupport/NativeHandle.h>
21#include <camera_aidl_test.h>
22#include <cutils/properties.h>
23#include <device_cb.h>
24#include <empty_device_cb.h>
25#include <grallocusage/GrallocUsageConversion.h>
26#include <gtest/gtest.h>
27#include <hardware/gralloc.h>
28#include <hardware/gralloc1.h>
29#include <hidl/GtestPrinter.h>
30#include <hidl/HidlSupport.h>
31#include <torch_provider_cb.h>
32#include <list>
33
34using ::aidl::android::hardware::camera::common::CameraDeviceStatus;
35using ::aidl::android::hardware::camera::common::CameraResourceCost;
36using ::aidl::android::hardware::camera::common::TorchModeStatus;
37using ::aidl::android::hardware::camera::common::VendorTagSection;
38using ::aidl::android::hardware::camera::device::ICameraDevice;
Avichal Rakeshd3503a32022-02-25 06:23:14 +000039using ::aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap;
Avichal Rakesh362242f2022-02-08 12:40:53 -080040using ::aidl::android::hardware::camera::metadata::SensorPixelMode;
41using ::aidl::android::hardware::camera::provider::CameraIdAndStreamCombination;
42using ::aidl::android::hardware::camera::provider::ICameraProviderCallbackDefault;
43
44using ::ndk::ScopedAStatus;
45
namespace {
// Number of frames requested in burst-capture test cases.
const int32_t kBurstFrameCount = 10;
// Upper bound on still-capture resolution exercised by the tests.
const uint32_t kMaxStillWidth = 2048;
const uint32_t kMaxStillHeight = 1536;

// Time budget (milliseconds) for a flush() issued with no in-flight requests.
const int64_t kEmptyFlushTimeoutMSec = 200;

// Stream use cases that a device advertising stream-use-case support is
// required to accept.
const static std::vector<int32_t> kMandatoryUseCases = {
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL};
}  // namespace
61
62TEST_P(CameraAidlTest, getCameraIdList) {
63 std::vector<std::string> idList;
64 ScopedAStatus ret = mProvider->getCameraIdList(&idList);
65 ASSERT_TRUE(ret.isOk());
66
67 for (size_t i = 0; i < idList.size(); i++) {
68 ALOGI("Camera Id[%zu] is %s", i, idList[i].c_str());
69 }
70}
71
72// Test if ICameraProvider::getVendorTags returns Status::OK
73TEST_P(CameraAidlTest, getVendorTags) {
74 std::vector<VendorTagSection> vendorTags;
75 ScopedAStatus ret = mProvider->getVendorTags(&vendorTags);
76
77 ASSERT_TRUE(ret.isOk());
78 for (size_t i = 0; i < vendorTags.size(); i++) {
79 ALOGI("Vendor tag section %zu name %s", i, vendorTags[i].sectionName.c_str());
80 for (auto& tag : vendorTags[i].tags) {
81 ALOGI("Vendor tag id %u name %s type %d", tag.tagId, tag.tagName.c_str(),
82 (int)tag.tagType);
83 }
84 }
85}
86
87// Test if ICameraProvider::setCallback returns Status::OK
88TEST_P(CameraAidlTest, setCallback) {
89 struct ProviderCb : public ICameraProviderCallbackDefault {
90 ScopedAStatus cameraDeviceStatusChange(const std::string& cameraDeviceName,
91 CameraDeviceStatus newStatus) override {
92 ALOGI("camera device status callback name %s, status %d", cameraDeviceName.c_str(),
93 (int)newStatus);
94 return ScopedAStatus::ok();
95 }
96 ScopedAStatus torchModeStatusChange(const std::string& cameraDeviceName,
97 TorchModeStatus newStatus) override {
98 ALOGI("Torch mode status callback name %s, status %d", cameraDeviceName.c_str(),
99 (int)newStatus);
100 return ScopedAStatus::ok();
101 }
102 ScopedAStatus physicalCameraDeviceStatusChange(const std::string& cameraDeviceName,
103 const std::string& physicalCameraDeviceName,
104 CameraDeviceStatus newStatus) override {
105 ALOGI("physical camera device status callback name %s, physical camera name %s,"
106 " status %d",
107 cameraDeviceName.c_str(), physicalCameraDeviceName.c_str(), (int)newStatus);
108 return ScopedAStatus::ok();
109 }
110 };
111
112 std::shared_ptr<ProviderCb> cb = ProviderCb::make<ProviderCb>();
113 ScopedAStatus ret = mProvider->setCallback(cb);
114 ASSERT_TRUE(ret.isOk());
115 ret = mProvider->setCallback(nullptr);
116 ASSERT_TRUE(ret.isOk());
117}
118
119// Test if ICameraProvider::getCameraDeviceInterface returns Status::OK and non-null device
120TEST_P(CameraAidlTest, getCameraDeviceInterface) {
121 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
122
123 for (const auto& name : cameraDeviceNames) {
124 std::shared_ptr<ICameraDevice> cameraDevice;
125 ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &cameraDevice);
126 ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
127 ret.getServiceSpecificError());
128 ASSERT_TRUE(ret.isOk());
129 ASSERT_NE(cameraDevice, nullptr);
130 }
131}
132
133// Verify that the device resource cost can be retrieved and the values are
134// correct.
135TEST_P(CameraAidlTest, getResourceCost) {
136 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
137
138 for (const auto& deviceName : cameraDeviceNames) {
139 std::shared_ptr<ICameraDevice> cameraDevice;
140 ScopedAStatus ret = mProvider->getCameraDeviceInterface(deviceName, &cameraDevice);
141 ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
142 ret.getServiceSpecificError());
143 ASSERT_TRUE(ret.isOk());
144 ASSERT_NE(cameraDevice, nullptr);
145
146 CameraResourceCost resourceCost;
147 ret = cameraDevice->getResourceCost(&resourceCost);
148 ALOGI("getResourceCost returns: %d:%d", ret.getExceptionCode(),
149 ret.getServiceSpecificError());
150 ASSERT_TRUE(ret.isOk());
151
152 ALOGI(" Resource cost is %d", resourceCost.resourceCost);
153 ASSERT_LE(resourceCost.resourceCost, 100u);
154
155 for (const auto& name : resourceCost.conflictingDevices) {
156 ALOGI(" Conflicting device: %s", name.c_str());
157 }
158 }
159}
160
161TEST_P(CameraAidlTest, systemCameraTest) {
162 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
163 std::map<std::string, std::vector<SystemCameraKind>> hiddenPhysicalIdToLogicalMap;
164 for (const auto& name : cameraDeviceNames) {
165 std::shared_ptr<ICameraDevice> device;
166 ALOGI("getCameraCharacteristics: Testing camera device %s", name.c_str());
167 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
168 ASSERT_TRUE(ret.isOk());
169 ASSERT_NE(device, nullptr);
170
171 CameraMetadata cameraCharacteristics;
172 ret = device->getCameraCharacteristics(&cameraCharacteristics);
173 ASSERT_TRUE(ret.isOk());
174
175 const camera_metadata_t* staticMeta =
176 reinterpret_cast<const camera_metadata_t*>(cameraCharacteristics.metadata.data());
177 Status rc = isLogicalMultiCamera(staticMeta);
178 if (rc == Status::OPERATION_NOT_SUPPORTED) {
179 return;
180 }
181
182 ASSERT_EQ(rc, Status::OK);
183 std::unordered_set<std::string> physicalIds;
184 ASSERT_EQ(getPhysicalCameraIds(staticMeta, &physicalIds), Status::OK);
185 SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
186 Status retStatus = getSystemCameraKind(staticMeta, &systemCameraKind);
187 ASSERT_EQ(retStatus, Status::OK);
188
189 for (auto physicalId : physicalIds) {
190 bool isPublicId = false;
191 for (auto& deviceName : cameraDeviceNames) {
192 std::string publicVersion, publicId;
193 ASSERT_TRUE(matchDeviceName(deviceName, mProviderType, &publicVersion, &publicId));
194 if (physicalId == publicId) {
195 isPublicId = true;
196 break;
197 }
198 }
199 // For hidden physical cameras, collect their associated logical cameras
200 // and store the system camera kind.
201 if (!isPublicId) {
202 auto it = hiddenPhysicalIdToLogicalMap.find(physicalId);
203 if (it == hiddenPhysicalIdToLogicalMap.end()) {
204 hiddenPhysicalIdToLogicalMap.insert(std::make_pair(
205 physicalId, std::vector<SystemCameraKind>(systemCameraKind)));
206 } else {
207 it->second.push_back(systemCameraKind);
208 }
209 }
210 }
211 }
212
213 // Check that the system camera kind of the logical cameras associated with
214 // each hidden physical camera is the same.
215 for (const auto& it : hiddenPhysicalIdToLogicalMap) {
216 SystemCameraKind neededSystemCameraKind = it.second.front();
217 for (auto foundSystemCamera : it.second) {
218 ASSERT_EQ(neededSystemCameraKind, foundSystemCamera);
219 }
220 }
221}
222
223// Verify that the static camera characteristics can be retrieved
224// successfully.
225TEST_P(CameraAidlTest, getCameraCharacteristics) {
226 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
227
228 for (const auto& name : cameraDeviceNames) {
229 std::shared_ptr<ICameraDevice> device;
230 ALOGI("getCameraCharacteristics: Testing camera device %s", name.c_str());
231 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
232 ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
233 ret.getServiceSpecificError());
234 ASSERT_TRUE(ret.isOk());
235 ASSERT_NE(device, nullptr);
236
237 CameraMetadata chars;
238 ret = device->getCameraCharacteristics(&chars);
239 ASSERT_TRUE(ret.isOk());
240 verifyCameraCharacteristics(chars);
241 verifyMonochromeCharacteristics(chars);
242 verifyRecommendedConfigs(chars);
243 verifyLogicalOrUltraHighResCameraMetadata(name, device, chars, cameraDeviceNames);
244
245 ASSERT_TRUE(ret.isOk());
246
247 // getPhysicalCameraCharacteristics will fail for publicly
248 // advertised camera IDs.
249 std::string version, cameraId;
250 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &cameraId));
251 CameraMetadata devChars;
252 ret = device->getPhysicalCameraCharacteristics(cameraId, &devChars);
253 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
254 ASSERT_EQ(0, devChars.metadata.size());
255 }
256}
257
// Verify that the torch strength level can be set and retrieved successfully.
TEST_P(CameraAidlTest, turnOnTorchWithStrengthLevel) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    // Torch state transitions are delivered via TorchProviderCb and observed
    // below through mTorchStatus / mTorchCond (see the wait loops).
    std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this);
    ndk::ScopedAStatus ret = mProvider->setCallback(cb);
    ASSERT_TRUE(ret.isOk());

    for (const auto& name : cameraDeviceNames) {
        int32_t defaultLevel;  // only assigned/read when strength control is supported
        std::shared_ptr<ICameraDevice> device;
        ALOGI("%s: Testing camera device %s", __FUNCTION__, name.c_str());

        ret = mProvider->getCameraDeviceInterface(name, &device);
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        CameraMetadata chars;
        ret = device->getCameraCharacteristics(&chars);
        ASSERT_TRUE(ret.isOk());

        const camera_metadata_t* staticMeta =
                reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());
        bool torchStrengthControlSupported = isTorchStrengthControlSupported(staticMeta);
        camera_metadata_ro_entry entry;
        int rc = find_camera_metadata_ro_entry(staticMeta,
                                               ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL, &entry);
        if (torchStrengthControlSupported) {
            // Devices that support strength control must advertise a default level.
            ASSERT_EQ(rc, 0);
            ASSERT_GT(entry.count, 0);
            defaultLevel = *entry.data.i32;
            ALOGI("Default level is:%d", defaultLevel);
        }

        // Reset the observed state, then request strength level 2.
        mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
        ret = device->turnOnTorchWithStrengthLevel(2);
        ALOGI("turnOnTorchWithStrengthLevel returns status: %d", ret.getServiceSpecificError());
        // OPERATION_NOT_SUPPORTED check
        if (!torchStrengthControlSupported) {
            ALOGI("Torch strength control not supported.");
            ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED),
                      ret.getServiceSpecificError());
        } else {
            {
                ASSERT_TRUE(ret.isOk());
                // Block (bounded by kTorchTimeoutSec) until the callback
                // reports the torch as ON.
                std::unique_lock<std::mutex> l(mTorchLock);
                while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kTorchTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
                }
                ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
                mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
            }
            // The level read back must match the level just requested.
            ALOGI("getTorchStrengthLevel: Testing");
            int32_t strengthLevel;
            ret = device->getTorchStrengthLevel(&strengthLevel);
            ASSERT_TRUE(ret.isOk());
            ALOGI("Torch strength level is : %d", strengthLevel);
            ASSERT_EQ(strengthLevel, 2);

            // Turn OFF the torch and verify torch strength level is reset to default level.
            ALOGI("Testing torch strength level reset after turning the torch OFF.");
            ret = device->setTorchMode(false);
            ASSERT_TRUE(ret.isOk());
            {
                // Wait (bounded) for the callback to confirm the torch is OFF.
                std::unique_lock<std::mutex> l(mTorchLock);
                while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kTorchTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
                }
                ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
            }

            ret = device->getTorchStrengthLevel(&strengthLevel);
            ASSERT_TRUE(ret.isOk());
            ALOGI("Torch strength level after turning OFF torch is : %d", strengthLevel);
            ASSERT_EQ(strengthLevel, defaultLevel);
        }
    }
}
340
// In case it is supported verify that torch can be enabled.
// Check for corresponding torch callbacks as well.
TEST_P(CameraAidlTest, setTorchMode) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    // Torch state transitions are delivered via TorchProviderCb and observed
    // below through mTorchStatus / mTorchCond (see the wait loops).
    std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this);
    ndk::ScopedAStatus ret = mProvider->setCallback(cb);
    ALOGI("setCallback returns status: %d", ret.getServiceSpecificError());
    ASSERT_TRUE(ret.isOk());
    ASSERT_NE(cb, nullptr);

    for (const auto& name : cameraDeviceNames) {
        std::shared_ptr<ICameraDevice> device;
        ALOGI("setTorchMode: Testing camera device %s", name.c_str());
        ret = mProvider->getCameraDeviceInterface(name, &device);
        ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
              ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(device, nullptr);

        CameraMetadata metadata;
        ret = device->getCameraCharacteristics(&metadata);
        ALOGI("getCameraCharacteristics returns status:%d", ret.getServiceSpecificError());
        ASSERT_TRUE(ret.isOk());
        camera_metadata_t* staticMeta =
                reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
        bool torchSupported = isTorchSupported(staticMeta);

        // Reset the observed state, then attempt to enable the torch.
        mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
        ret = device->setTorchMode(true);
        ALOGI("setTorchMode returns status: %d", ret.getServiceSpecificError());
        if (!torchSupported) {
            // Devices without a flash unit must reject the request.
            ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED),
                      ret.getServiceSpecificError());
        } else {
            ASSERT_TRUE(ret.isOk());
            {
                // Block (bounded by kTorchTimeoutSec) until the callback
                // reports the torch as ON.
                std::unique_lock<std::mutex> l(mTorchLock);
                while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kTorchTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
                }
                ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
                mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
            }

            // Turn the torch back off and wait for the OFF notification.
            ret = device->setTorchMode(false);
            ASSERT_TRUE(ret.isOk());
            {
                std::unique_lock<std::mutex> l(mTorchLock);
                while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kTorchTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
                }
                ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
            }
        }
    }

    // Unregister the provider callback before leaving the test.
    ret = mProvider->setCallback(nullptr);
    ASSERT_TRUE(ret.isOk());
}
405
406// Check dump functionality.
407TEST_P(CameraAidlTest, dump) {
408 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
409
410 for (const auto& name : cameraDeviceNames) {
411 std::shared_ptr<ICameraDevice> device;
412 ALOGI("dump: Testing camera device %s", name.c_str());
413
414 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
415 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
416 ret.getServiceSpecificError());
417 ASSERT_TRUE(ret.isOk());
418 ASSERT_NE(device, nullptr);
419
420 int raw_handle = open(kDumpOutput, O_RDWR);
421 ASSERT_GE(raw_handle, 0);
422
423 auto retStatus = device->dump(raw_handle, nullptr, 0);
424 ASSERT_EQ(retStatus, ::android::OK);
425 close(raw_handle);
426 }
427}
428
429// Open, dump, then close
430TEST_P(CameraAidlTest, openClose) {
431 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
432
433 for (const auto& name : cameraDeviceNames) {
434 std::shared_ptr<ICameraDevice> device;
435 ALOGI("openClose: Testing camera device %s", name.c_str());
436 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
437 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
438 ret.getServiceSpecificError());
439 ASSERT_TRUE(ret.isOk());
440 ASSERT_NE(device, nullptr);
441
442 std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
443
444 ret = device->open(cb, &mSession);
445 ASSERT_TRUE(ret.isOk());
446 ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
447 ret.getServiceSpecificError());
448 ASSERT_NE(mSession, nullptr);
449 int raw_handle = open(kDumpOutput, O_RDWR);
450 ASSERT_GE(raw_handle, 0);
451
452 auto retStatus = device->dump(raw_handle, nullptr, 0);
453 ASSERT_EQ(retStatus, ::android::OK);
454 close(raw_handle);
455
456 ret = mSession->close();
457 mSession = nullptr;
458 ASSERT_TRUE(ret.isOk());
459 // TODO: test all session API calls return INTERNAL_ERROR after close
460 // TODO: keep a wp copy here and verify session cannot be promoted out of this scope
461 }
462}
463
464// Check whether all common default request settings can be successfully
465// constructed.
466TEST_P(CameraAidlTest, constructDefaultRequestSettings) {
467 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
468
469 for (const auto& name : cameraDeviceNames) {
470 std::shared_ptr<ICameraDevice> device;
471 ALOGI("constructDefaultRequestSettings: Testing camera device %s", name.c_str());
472 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
473 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
474 ret.getServiceSpecificError());
475 ASSERT_TRUE(ret.isOk());
476 ASSERT_NE(device, nullptr);
477
478 std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
479 ret = device->open(cb, &mSession);
480 ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
481 ret.getServiceSpecificError());
482 ASSERT_TRUE(ret.isOk());
483 ASSERT_NE(mSession, nullptr);
484
485 for (int32_t t = (int32_t)RequestTemplate::PREVIEW; t <= (int32_t)RequestTemplate::MANUAL;
486 t++) {
487 RequestTemplate reqTemplate = (RequestTemplate)t;
488 CameraMetadata rawMetadata;
489 ret = mSession->constructDefaultRequestSettings(reqTemplate, &rawMetadata);
490 ALOGI("constructDefaultRequestSettings returns status:%d:%d", ret.getExceptionCode(),
491 ret.getServiceSpecificError());
Avichal Rakeshd3503a32022-02-25 06:23:14 +0000492
Avichal Rakesh362242f2022-02-08 12:40:53 -0800493 if (reqTemplate == RequestTemplate::ZERO_SHUTTER_LAG ||
494 reqTemplate == RequestTemplate::MANUAL) {
495 // optional templates
496 ASSERT_TRUE(ret.isOk() || static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
497 ret.getServiceSpecificError());
498 } else {
499 ASSERT_TRUE(ret.isOk());
500 }
501
502 if (ret.isOk()) {
503 const camera_metadata_t* metadata = (camera_metadata_t*)rawMetadata.metadata.data();
504 size_t expectedSize = rawMetadata.metadata.size();
505 int result = validate_camera_metadata_structure(metadata, &expectedSize);
506 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
507 verifyRequestTemplate(metadata, reqTemplate);
508 } else {
509 ASSERT_EQ(0u, rawMetadata.metadata.size());
510 }
511 }
512 ret = mSession->close();
513 mSession = nullptr;
514 ASSERT_TRUE(ret.isOk());
515 }
516}
517
// Verify that all supported stream formats and sizes can be configured
// successfully.
TEST_P(CameraAidlTest, configureStreamsAvailableOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputStreams;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> device;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/, &device /*out*/);

        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
        outputStreams.clear();
        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
        ASSERT_NE(0u, outputStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        // Configure each advertised output stream, one at a time.
        int32_t streamId = 0;
        int32_t streamConfigCounter = 0;
        for (auto& it : outputStreams) {
            Stream stream;
            Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format));
            stream.id = streamId;
            stream.streamType = StreamType::OUTPUT;
            stream.width = it.width;
            stream.height = it.height;
            stream.format = static_cast<PixelFormat>(it.format);
            stream.dataSpace = dataspace;
            stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                    GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
            stream.rotation = StreamRotation::ROTATION_0;
            stream.dynamicRangeProfile = RequestAvailableDynamicRangeProfilesMap::
                    ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;

            std::vector<Stream> streams = {stream};
            StreamConfiguration config;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);

            // Logical multi-cameras answer stream-combination queries; others
            // may not — verifyStreamCombination takes that into account.
            bool expectStreamCombQuery = (isLogicalMultiCamera(staticMeta) == Status::OK);
            verifyStreamCombination(device, config, /*expectedStatus*/ true, expectStreamCombQuery);

            config.streamConfigCounter = streamConfigCounter++;
            std::vector<HalStream> halConfigs;
            ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
            ASSERT_TRUE(ret.isOk());
            // Exactly one HAL stream must come back, with the id we assigned.
            ASSERT_EQ(halConfigs.size(), 1);
            ASSERT_EQ(halConfigs[0].id, streamId);

            streamId++;
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
579
580// Verify that mandatory concurrent streams and outputs are supported.
581TEST_P(CameraAidlTest, configureConcurrentStreamsAvailableOutputs) {
582 struct CameraTestInfo {
583 CameraMetadata staticMeta;
584 std::shared_ptr<ICameraDeviceSession> session;
585 std::shared_ptr<ICameraDevice> cameraDevice;
586 StreamConfiguration config;
587 };
588
589 std::map<std::string, std::string> idToNameMap = getCameraDeviceIdToNameMap(mProvider);
590 std::vector<ConcurrentCameraIdCombination> concurrentDeviceCombinations =
591 getConcurrentDeviceCombinations(mProvider);
592 std::vector<AvailableStream> outputStreams;
593 for (const auto& cameraDeviceIds : concurrentDeviceCombinations) {
594 std::vector<CameraIdAndStreamCombination> cameraIdsAndStreamCombinations;
595 std::vector<CameraTestInfo> cameraTestInfos;
596 size_t i = 0;
597 for (const auto& id : cameraDeviceIds.combination) {
598 CameraTestInfo cti;
599 auto it = idToNameMap.find(id);
600 ASSERT_TRUE(idToNameMap.end() != it);
601 std::string name = it->second;
602
603 openEmptyDeviceSession(name, mProvider, &cti.session /*out*/, &cti.staticMeta /*out*/,
604 &cti.cameraDevice /*out*/);
605
606 outputStreams.clear();
607 camera_metadata_t* staticMeta =
608 reinterpret_cast<camera_metadata_t*>(cti.staticMeta.metadata.data());
609 ASSERT_EQ(Status::OK, getMandatoryConcurrentStreams(staticMeta, &outputStreams));
610 ASSERT_NE(0u, outputStreams.size());
611
612 int32_t jpegBufferSize = 0;
613 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
614 ASSERT_NE(0u, jpegBufferSize);
615
616 int32_t streamId = 0;
617 std::vector<Stream> streams(outputStreams.size());
618 size_t j = 0;
619 for (const auto& s : outputStreams) {
620 Stream stream;
621 Dataspace dataspace = getDataspace(static_cast<PixelFormat>(s.format));
622 stream.id = streamId++;
623 stream.streamType = StreamType::OUTPUT;
624 stream.width = s.width;
625 stream.height = s.height;
626 stream.format = static_cast<PixelFormat>(s.format);
627 stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
628 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
629 stream.dataSpace = dataspace;
630 stream.rotation = StreamRotation::ROTATION_0;
631 stream.sensorPixelModesUsed = {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT};
Avichal Rakeshd3503a32022-02-25 06:23:14 +0000632 stream.dynamicRangeProfile = RequestAvailableDynamicRangeProfilesMap::
633 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
Avichal Rakesh362242f2022-02-08 12:40:53 -0800634 streams[j] = stream;
635 j++;
636 }
637
638 // Add the created stream configs to cameraIdsAndStreamCombinations
639 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &cti.config,
640 jpegBufferSize);
641
642 cti.config.streamConfigCounter = outputStreams.size();
643 CameraIdAndStreamCombination cameraIdAndStreamCombination;
644 cameraIdAndStreamCombination.cameraId = id;
645 cameraIdAndStreamCombination.streamConfiguration = cti.config;
646 cameraIdsAndStreamCombinations.push_back(cameraIdAndStreamCombination);
647 i++;
648 cameraTestInfos.push_back(cti);
649 }
650 // Now verify that concurrent streams are supported
651 bool combinationSupported;
652 ndk::ScopedAStatus ret = mProvider->isConcurrentStreamCombinationSupported(
653 cameraIdsAndStreamCombinations, &combinationSupported);
654 ASSERT_TRUE(ret.isOk());
655 ASSERT_EQ(combinationSupported, true);
656
657 // Test the stream can actually be configured
658 for (auto& cti : cameraTestInfos) {
659 if (cti.session != nullptr) {
660 camera_metadata_t* staticMeta =
661 reinterpret_cast<camera_metadata_t*>(cti.staticMeta.metadata.data());
662 bool expectStreamCombQuery = (isLogicalMultiCamera(staticMeta) == Status::OK);
663 verifyStreamCombination(cti.cameraDevice, cti.config, /*expectedStatus*/ true,
664 expectStreamCombQuery);
665 }
666
667 if (cti.session != nullptr) {
668 std::vector<HalStream> streamConfigs;
669 ret = cti.session->configureStreams(cti.config, &streamConfigs);
670 ASSERT_TRUE(ret.isOk());
671 ASSERT_EQ(cti.config.streams.size(), streamConfigs.size());
672 }
673 }
674
675 for (auto& cti : cameraTestInfos) {
676 ret = cti.session->close();
677 ASSERT_TRUE(ret.isOk());
678 }
679 }
680}
681
// Check for correct handling of invalid/incorrect configuration parameters.
TEST_P(CameraAidlTest, configureStreamsInvalidOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputStreams;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
        outputStreams.clear();

        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
        ASSERT_NE(0u, outputStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        // Case 1: zero width/height must be rejected.
        int32_t streamId = 0;
        Stream stream = {streamId++,
                         StreamType::OUTPUT,
                         static_cast<uint32_t>(0),
                         static_cast<uint32_t>(0),
                         static_cast<PixelFormat>(outputStreams[0].format),
                         static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                         Dataspace::UNKNOWN,
                         StreamRotation::ROTATION_0,
                         std::string(),
                         jpegBufferSize,
                         -1,
                         {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                         RequestAvailableDynamicRangeProfilesMap::
                                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        int32_t streamConfigCounter = 0;
        std::vector<Stream> streams = {stream};
        StreamConfiguration config;
        createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                  jpegBufferSize);

        verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ false,
                                /*expectStreamCombQuery*/ false);

        // Either ILLEGAL_ARGUMENT or INTERNAL_ERROR is acceptable here.
        config.streamConfigCounter = streamConfigCounter++;
        std::vector<HalStream> halConfigs;
        ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
                            ret.getServiceSpecificError() ||
                    static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError());

        // Case 2: absurdly large resolution must be rejected.
        stream = {streamId++,
                  StreamType::OUTPUT,
                  /*width*/ INT32_MAX,
                  /*height*/ INT32_MAX,
                  static_cast<PixelFormat>(outputStreams[0].format),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  jpegBufferSize,
                  -1,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                  jpegBufferSize);

        config.streamConfigCounter = streamConfigCounter++;
        halConfigs.clear();
        ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());

        // Cases 3 and 4, for every supported size: invalid pixel format and
        // invalid rotation must both be rejected.
        for (auto& it : outputStreams) {
            stream = {streamId++,
                      StreamType::OUTPUT,
                      it.width,
                      it.height,
                      static_cast<PixelFormat>(UINT32_MAX),
                      static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                              GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                      Dataspace::UNKNOWN,
                      StreamRotation::ROTATION_0,
                      std::string(),
                      jpegBufferSize,
                      -1,
                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                      RequestAvailableDynamicRangeProfilesMap::
                              ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

            streams[0] = stream;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);
            config.streamConfigCounter = streamConfigCounter++;
            halConfigs.clear();
            ret = mSession->configureStreams(config, &halConfigs);
            ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
                      ret.getServiceSpecificError());

            stream = {streamId++,
                      StreamType::OUTPUT,
                      it.width,
                      it.height,
                      static_cast<PixelFormat>(it.format),
                      static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                              GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                      Dataspace::UNKNOWN,
                      static_cast<StreamRotation>(UINT32_MAX),
                      std::string(),
                      jpegBufferSize,
                      -1,
                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                      RequestAvailableDynamicRangeProfilesMap::
                              ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

            streams[0] = stream;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);

            config.streamConfigCounter = streamConfigCounter++;
            halConfigs.clear();
            ret = mSession->configureStreams(config, &halConfigs);
            ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
                      ret.getServiceSpecificError());
        }

        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
818
819// Check whether all supported ZSL output stream combinations can be
820// configured successfully.
TEST_P(CameraAidlTest, configureStreamsZSLInputOutputs) {
    // For every advertised ZSL input/output format pair, configure a
    // (input, ZSL output, regular output) triple and expect success.
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> inputStreams;
    std::vector<AvailableZSLInputOutput> inputOutputMap;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        // Skip devices that do not advertise any ZSL reprocessing capability.
        Status rc = isZSLModeAvailable(staticMeta);
        if (Status::OPERATION_NOT_SUPPORTED == rc) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }
        ASSERT_EQ(Status::OK, rc);

        // ZSL input sizes/formats are drawn from the available output streams.
        inputStreams.clear();
        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, inputStreams));
        ASSERT_NE(0u, inputStreams.size());

        inputOutputMap.clear();
        ASSERT_EQ(Status::OK, getZSLInputOutputMap(staticMeta, inputOutputMap));
        ASSERT_NE(0u, inputOutputMap.size());

        // Monochrome cameras that list a Y8 output must additionally support
        // specific reprocess paths; record that for the checks after the loop.
        bool supportMonoY8 = false;
        if (Status::OK == isMonochromeCamera(staticMeta)) {
            for (auto& it : inputStreams) {
                if (it.format == static_cast<uint32_t>(PixelFormat::Y8)) {
                    supportMonoY8 = true;
                    break;
                }
            }
        }

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        // Track which reprocess format pairs were actually seen.
        bool hasPrivToY8 = false, hasY8ToY8 = false, hasY8ToBlob = false;
        uint32_t streamConfigCounter = 0;
        for (auto& inputIter : inputOutputMap) {
            AvailableStream input;
            // Use the largest resolution advertised for this input format.
            ASSERT_EQ(Status::OK, findLargestSize(inputStreams, inputIter.inputFormat, input));
            ASSERT_NE(0u, inputStreams.size());

            if (inputIter.inputFormat ==
                        static_cast<uint32_t>(PixelFormat::IMPLEMENTATION_DEFINED) &&
                inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
                hasPrivToY8 = true;
            } else if (inputIter.inputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
                if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::BLOB)) {
                    hasY8ToBlob = true;
                } else if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
                    hasY8ToY8 = true;
                }
            }
            // Any output resolution up to INT32_MAX x INT32_MAX qualifies.
            AvailableStream outputThreshold = {INT32_MAX, INT32_MAX, inputIter.outputFormat};
            std::vector<AvailableStream> outputStreams;
            ASSERT_EQ(Status::OK,
                      getAvailableOutputStreams(staticMeta, outputStreams, &outputThreshold));
            for (auto& outputIter : outputStreams) {
                Dataspace outputDataSpace =
                        getDataspace(static_cast<PixelFormat>(outputIter.format));
                // ZSL output: same size/format as the input stream, tagged with
                // the camera-ZSL gralloc usage.
                Stream zslStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        input.width,
                        input.height,
                        static_cast<PixelFormat>(input.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC_USAGE_HW_CAMERA_ZSL),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                // Reprocess input stream mirroring the ZSL output geometry.
                Stream inputStream = {
                        streamId++,
                        StreamType::INPUT,
                        input.width,
                        input.height,
                        static_cast<PixelFormat>(input.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(0),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                // Final output stream in the mapped output format/dataspace.
                Stream outputStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        outputIter.width,
                        outputIter.height,
                        static_cast<PixelFormat>(outputIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                        outputDataSpace,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

                std::vector<Stream> streams = {inputStream, zslStream, outputStream};

                StreamConfiguration config;
                createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                          jpegBufferSize);

                // The combination must be reported supported and then
                // configure successfully with one HalStream per stream.
                verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
                                        /*expectStreamCombQuery*/ false);

                config.streamConfigCounter = streamConfigCounter++;
                std::vector<HalStream> halConfigs;
                ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(3u, halConfigs.size());
            }
        }

        // Monochrome + Y8 devices must expose the corresponding reprocess
        // pairs when the matching ZSL mode is advertised.
        if (supportMonoY8) {
            if (Status::OK == isZSLModeAvailable(staticMeta, PRIV_REPROCESS)) {
                ASSERT_TRUE(hasPrivToY8);
            }
            if (Status::OK == isZSLModeAvailable(staticMeta, YUV_REPROCESS)) {
                ASSERT_TRUE(hasY8ToY8);
                ASSERT_TRUE(hasY8ToBlob);
            }
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
971
972// Check whether session parameters are supported. If Hal support for them
973// exist, then try to configure a preview stream using them.
974TEST_P(CameraAidlTest, configureStreamsWithSessionParameters) {
975 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
976 std::vector<AvailableStream> outputPreviewStreams;
977 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
978 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
979
980 for (const auto& name : cameraDeviceNames) {
981 CameraMetadata meta;
982
983 std::shared_ptr<ICameraDevice> unusedCameraDevice;
984 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
985 &unusedCameraDevice /*out*/);
986 camera_metadata_t* staticMetaBuffer =
987 reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
988
989 std::unordered_set<int32_t> availableSessionKeys;
990 auto rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
991 &availableSessionKeys);
992 ASSERT_TRUE(Status::OK == rc);
993 if (availableSessionKeys.empty()) {
994 ndk::ScopedAStatus ret = mSession->close();
995 mSession = nullptr;
996 ASSERT_TRUE(ret.isOk());
997 continue;
998 }
999
1000 android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings;
1001 android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams,
1002 modifiedSessionParams;
1003 constructFilteredSettings(mSession, availableSessionKeys, RequestTemplate::PREVIEW,
1004 &previewRequestSettings, &sessionParams);
1005 if (sessionParams.isEmpty()) {
1006 ndk::ScopedAStatus ret = mSession->close();
1007 mSession = nullptr;
1008 ASSERT_TRUE(ret.isOk());
1009 continue;
1010 }
1011
1012 outputPreviewStreams.clear();
1013
1014 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams,
1015 &previewThreshold));
1016 ASSERT_NE(0u, outputPreviewStreams.size());
1017
Avichal Rakeshd3503a32022-02-25 06:23:14 +00001018 Stream previewStream = {
1019 0,
1020 StreamType::OUTPUT,
1021 outputPreviewStreams[0].width,
1022 outputPreviewStreams[0].height,
1023 static_cast<PixelFormat>(outputPreviewStreams[0].format),
1024 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1025 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
1026 Dataspace::UNKNOWN,
1027 StreamRotation::ROTATION_0,
1028 std::string(),
1029 /*bufferSize*/ 0,
1030 /*groupId*/ -1,
1031 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1032 RequestAvailableDynamicRangeProfilesMap::
1033 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08001034
1035 std::vector<Stream> streams = {previewStream};
1036 StreamConfiguration config;
1037
1038 config.streams = streams;
1039 config.operationMode = StreamConfigurationMode::NORMAL_MODE;
1040 modifiedSessionParams = sessionParams;
1041 auto sessionParamsBuffer = sessionParams.release();
1042 std::vector<uint8_t> rawSessionParam =
1043 std::vector(reinterpret_cast<uint8_t*>(sessionParamsBuffer),
1044 reinterpret_cast<uint8_t*>(sessionParamsBuffer) +
1045 get_camera_metadata_size(sessionParamsBuffer));
1046
1047 config.sessionParams.metadata = rawSessionParam;
1048 config.streamConfigCounter = 0;
1049 config.streams = {previewStream};
1050 config.streamConfigCounter = 0;
1051 config.multiResolutionInputImage = false;
1052
1053 bool newSessionParamsAvailable = false;
1054 for (const auto& it : availableSessionKeys) {
1055 if (modifiedSessionParams.exists(it)) {
1056 modifiedSessionParams.erase(it);
1057 newSessionParamsAvailable = true;
1058 break;
1059 }
1060 }
1061 if (newSessionParamsAvailable) {
1062 auto modifiedSessionParamsBuffer = modifiedSessionParams.release();
1063 verifySessionReconfigurationQuery(mSession, sessionParamsBuffer,
1064 modifiedSessionParamsBuffer);
1065 modifiedSessionParams.acquire(modifiedSessionParamsBuffer);
1066 }
1067
1068 std::vector<HalStream> halConfigs;
1069 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1070 ASSERT_TRUE(ret.isOk());
1071 ASSERT_EQ(1u, halConfigs.size());
1072
1073 sessionParams.acquire(sessionParamsBuffer);
1074 ret = mSession->close();
1075 mSession = nullptr;
1076 ASSERT_TRUE(ret.isOk());
1077 }
1078}
1079
1080// Verify that all supported preview + still capture stream combinations
1081// can be configured successfully.
TEST_P(CameraAidlTest, configureStreamsPreviewStillOutputs) {
    // Configure every (preview, JPEG blob) output pair the device advertises
    // and expect each combination to succeed.
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputBlobStreams;
    std::vector<AvailableStream> outputPreviewStreams;
    AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
                                        static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
    AvailableStream blobThreshold = {INT32_MAX, INT32_MAX, static_cast<int32_t>(PixelFormat::BLOB)};

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;

        std::shared_ptr<ICameraDevice> cameraDevice;
        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);

        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        // Check if camera support depth only
        if (isDepthOnly(staticMeta)) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        outputBlobStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold));
        ASSERT_NE(0u, outputBlobStreams.size());

        outputPreviewStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputPreviewStreams, &previewThreshold));
        ASSERT_NE(0u, outputPreviewStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        uint32_t streamConfigCounter = 0;

        // Cross product: every blob size/format against every preview stream.
        for (auto& blobIter : outputBlobStreams) {
            for (auto& previewIter : outputPreviewStreams) {
                // Display-bound preview stream.
                Stream previewStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        previewIter.width,
                        previewIter.height,
                        static_cast<PixelFormat>(previewIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        /*bufferSize*/ 0,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                // CPU-readable JPEG (JFIF) still-capture stream.
                Stream blobStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        blobIter.width,
                        blobIter.height,
                        static_cast<PixelFormat>(blobIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_CPU_READ),
                        Dataspace::JFIF,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        /*bufferSize*/ 0,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                std::vector<Stream> streams = {previewStream, blobStream};
                StreamConfiguration config;

                createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                          jpegBufferSize);
                config.streamConfigCounter = streamConfigCounter++;
                // Combination must be reported supported, then configure with
                // one HalStream per requested stream.
                verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
                                        /*expectStreamCombQuery*/ false);

                std::vector<HalStream> halConfigs;
                ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(2u, halConfigs.size());
            }
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
1179
1180// In case constrained mode is supported, test whether it can be
1181// configured. Additionally check for common invalid inputs when
1182// using this mode.
TEST_P(CameraAidlTest, configureStreamsConstrainedOutputs) {
    // Configure a valid constrained high-speed stream, then verify that
    // obviously invalid variants (zero size, oversized, bad format) fail.
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        // Constrained high-speed recording is optional; skip if absent.
        Status rc = isConstrainedModeAvailable(staticMeta);
        if (Status::OPERATION_NOT_SUPPORTED == rc) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }
        ASSERT_EQ(Status::OK, rc);

        // Pick a size/format advertised for the constrained mode.
        AvailableStream hfrStream;
        rc = pickConstrainedModeSize(staticMeta, hfrStream);
        ASSERT_EQ(Status::OK, rc);

        int32_t streamId = 0;
        uint32_t streamConfigCounter = 0;
        // Valid high-frame-rate video stream (encoder-consumable).
        Stream stream = {streamId,
                         StreamType::OUTPUT,
                         hfrStream.width,
                         hfrStream.height,
                         static_cast<PixelFormat>(hfrStream.format),
                         static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                         Dataspace::UNKNOWN,
                         StreamRotation::ROTATION_0,
                         std::string(),
                         /*bufferSize*/ 0,
                         /*groupId*/ -1,
                         {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                         RequestAvailableDynamicRangeProfilesMap::
                                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        std::vector<Stream> streams = {stream};
        StreamConfiguration config;
        createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
                                  &config);

        verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
                                /*expectStreamCombQuery*/ false);

        config.streamConfigCounter = streamConfigCounter++;
        std::vector<HalStream> halConfigs;
        ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_TRUE(ret.isOk());
        ASSERT_EQ(1u, halConfigs.size());
        ASSERT_EQ(halConfigs[0].id, streamId);

        // Invalid case 1: zero width/height. Note the first stream used
        // `streamId` without incrementing, so this post-increment reuses id 0;
        // ids only need to be unique within a single configuration.
        stream = {streamId++,
                  StreamType::OUTPUT,
                  static_cast<uint32_t>(0),
                  static_cast<uint32_t>(0),
                  static_cast<PixelFormat>(hfrStream.format),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  /*bufferSize*/ 0,
                  /*groupId*/ -1,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
                                  &config);

        config.streamConfigCounter = streamConfigCounter++;
        std::vector<HalStream> halConfig;
        ret = mSession->configureStreams(config, &halConfig);
        // Zero-sized streams may surface as either an argument or an internal
        // error depending on where the HAL validates.
        ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
                            ret.getServiceSpecificError() ||
                    static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError());

        // Invalid case 2: absurdly large resolution must be rejected.
        stream = {streamId++,
                  StreamType::OUTPUT,
                  INT32_MAX,
                  INT32_MAX,
                  static_cast<PixelFormat>(hfrStream.format),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  /*bufferSize*/ 0,
                  /*groupId*/ -1,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
                                  &config);

        config.streamConfigCounter = streamConfigCounter++;
        halConfigs.clear();
        ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());

        // Invalid case 3: nonsensical pixel format must be rejected.
        stream = {streamId++,
                  StreamType::OUTPUT,
                  hfrStream.width,
                  hfrStream.height,
                  static_cast<PixelFormat>(UINT32_MAX),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  /*bufferSize*/ 0,
                  /*groupId*/ -1,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
                                  &config);

        config.streamConfigCounter = streamConfigCounter++;
        halConfigs.clear();
        ret = mSession->configureStreams(config, &halConfigs);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());

        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
1318
1319// Verify that all supported video + snapshot stream combinations can
1320// be configured successfully.
TEST_P(CameraAidlTest, configureStreamsVideoStillOutputs) {
    // Configure every (video, JPEG blob) output pair bounded by the maximum
    // video resolution and expect each combination to succeed.
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputBlobStreams;
    std::vector<AvailableStream> outputVideoStreams;
    AvailableStream videoThreshold = {kMaxVideoWidth, kMaxVideoHeight,
                                      static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
    AvailableStream blobThreshold = {kMaxVideoWidth, kMaxVideoHeight,
                                     static_cast<int32_t>(PixelFormat::BLOB)};

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);

        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());

        // Check if camera support depth only
        if (isDepthOnly(staticMeta)) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        outputBlobStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold));
        ASSERT_NE(0u, outputBlobStreams.size());

        outputVideoStreams.clear();
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputVideoStreams, &videoThreshold));
        ASSERT_NE(0u, outputVideoStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        int32_t streamId = 0;
        uint32_t streamConfigCounter = 0;
        for (auto& blobIter : outputBlobStreams) {
            for (auto& videoIter : outputVideoStreams) {
                // Encoder-bound video stream.
                // NOTE(review): jpegBufferSize is passed as this stream's
                // bufferSize too; it appears only BLOB streams consume it —
                // confirm against the Stream AIDL contract.
                Stream videoStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        videoIter.width,
                        videoIter.height,
                        static_cast<PixelFormat>(videoIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
                        Dataspace::UNKNOWN,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                // CPU-readable JPEG (JFIF) snapshot stream.
                Stream blobStream = {
                        streamId++,
                        StreamType::OUTPUT,
                        blobIter.width,
                        blobIter.height,
                        static_cast<PixelFormat>(blobIter.format),
                        static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                GRALLOC1_CONSUMER_USAGE_CPU_READ),
                        Dataspace::JFIF,
                        StreamRotation::ROTATION_0,
                        std::string(),
                        jpegBufferSize,
                        /*groupId*/ -1,
                        {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                        RequestAvailableDynamicRangeProfilesMap::
                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
                std::vector<Stream> streams = {videoStream, blobStream};
                StreamConfiguration config;

                createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                          jpegBufferSize);
                // Combination must be reported supported, then configure with
                // one HalStream per requested stream.
                verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
                                        /*expectStreamCombQuery*/ false);

                config.streamConfigCounter = streamConfigCounter++;
                std::vector<HalStream> halConfigs;
                ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(2u, halConfigs.size());
            }
        }

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
1418
1419// Generate and verify a camera capture request
1420TEST_P(CameraAidlTest, processCaptureRequestPreview) {
1421 // TODO(b/220897574): Failing with BUFFER_ERROR
1422 processCaptureRequestInternal(GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, RequestTemplate::PREVIEW,
1423 false /*secureOnlyCameras*/);
1424}
1425
1426// Generate and verify a secure camera capture request
1427TEST_P(CameraAidlTest, processSecureCaptureRequest) {
1428 processCaptureRequestInternal(GRALLOC1_PRODUCER_USAGE_PROTECTED, RequestTemplate::STILL_CAPTURE,
1429 true /*secureOnlyCameras*/);
1430}
1431
1432TEST_P(CameraAidlTest, processCaptureRequestPreviewStabilization) {
1433 std::unordered_map<std::string, nsecs_t> cameraDeviceToTimeLag;
1434 processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ false,
1435 cameraDeviceToTimeLag);
1436 processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ true,
1437 cameraDeviceToTimeLag);
1438}
1439
1440// Generate and verify a multi-camera capture request
1441TEST_P(CameraAidlTest, processMultiCaptureRequestPreview) {
1442 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1443 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
1444 static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
1445 int64_t bufferId = 1;
1446 uint32_t frameNumber = 1;
1447 std::vector<uint8_t> settings;
1448 std::vector<uint8_t> emptySettings;
1449 std::string invalidPhysicalId = "-1";
1450
1451 for (const auto& name : cameraDeviceNames) {
1452 std::string version, deviceId;
1453 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1454 CameraMetadata metadata;
1455
1456 std::shared_ptr<ICameraDevice> unusedDevice;
1457 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &metadata /*out*/,
1458 &unusedDevice /*out*/);
1459
1460 camera_metadata_t* staticMeta =
1461 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
1462 Status rc = isLogicalMultiCamera(staticMeta);
1463 if (Status::OPERATION_NOT_SUPPORTED == rc) {
1464 ndk::ScopedAStatus ret = mSession->close();
1465 mSession = nullptr;
1466 ASSERT_TRUE(ret.isOk());
1467 continue;
1468 }
1469
1470 std::unordered_set<std::string> physicalIds;
1471 rc = getPhysicalCameraIds(staticMeta, &physicalIds);
1472 ASSERT_TRUE(Status::OK == rc);
1473 ASSERT_TRUE(physicalIds.size() > 1);
1474
1475 std::unordered_set<int32_t> physicalRequestKeyIDs;
1476 rc = getSupportedKeys(staticMeta, ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS,
1477 &physicalRequestKeyIDs);
1478 ASSERT_TRUE(Status::OK == rc);
1479 if (physicalRequestKeyIDs.empty()) {
1480 ndk::ScopedAStatus ret = mSession->close();
1481 mSession = nullptr;
1482 ASSERT_TRUE(ret.isOk());
1483 // The logical camera doesn't support any individual physical requests.
1484 continue;
1485 }
1486
1487 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultPreviewSettings;
1488 android::hardware::camera::common::V1_0::helper::CameraMetadata filteredSettings;
1489 constructFilteredSettings(mSession, physicalRequestKeyIDs, RequestTemplate::PREVIEW,
1490 &defaultPreviewSettings, &filteredSettings);
1491 if (filteredSettings.isEmpty()) {
1492 // No physical device settings in default request.
1493 ndk::ScopedAStatus ret = mSession->close();
1494 mSession = nullptr;
1495 ASSERT_TRUE(ret.isOk());
1496 continue;
1497 }
1498
1499 const camera_metadata_t* settingsBuffer = defaultPreviewSettings.getAndLock();
1500 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1501 settings.assign(rawSettingsBuffer,
1502 rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1503 CameraMetadata settingsMetadata = {settings};
1504 overrideRotateAndCrop(&settingsMetadata);
1505
1506 ndk::ScopedAStatus ret = mSession->close();
1507 mSession = nullptr;
1508 ASSERT_TRUE(ret.isOk());
1509
1510 // Leave only 2 physical devices in the id set.
1511 auto it = physicalIds.begin();
1512 std::string physicalDeviceId = *it;
1513 it++;
1514 physicalIds.erase(++it, physicalIds.end());
1515 ASSERT_EQ(physicalIds.size(), 2u);
1516
1517 std::vector<HalStream> halStreams;
1518 bool supportsPartialResults = false;
1519 bool useHalBufManager = false;
1520 int32_t partialResultCount = 0;
1521 Stream previewStream;
1522 std::shared_ptr<DeviceCb> cb;
1523
1524 configurePreviewStreams(name, mProvider, &previewThreshold, physicalIds, &mSession,
1525 &previewStream, &halStreams /*out*/,
1526 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
1527 &useHalBufManager /*out*/, &cb /*out*/, 0 /*streamConfigCounter*/);
1528
1529 ::aidl::android::hardware::common::fmq::MQDescriptor<
1530 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1531 descriptor;
1532 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1533 ASSERT_TRUE(resultQueueRet.isOk());
1534 std::shared_ptr<ResultMetadataQueue> resultQueue =
1535 std::make_shared<ResultMetadataQueue>(descriptor);
1536 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1537 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
1538 resultQueue = nullptr;
1539 // Don't use the queue onwards.
1540 }
1541
1542 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1543 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1544 partialResultCount, physicalIds, resultQueue);
1545
1546 std::vector<CaptureRequest> requests(1);
1547 CaptureRequest& request = requests[0];
1548 request.frameNumber = frameNumber;
1549 request.fmqSettingsSize = 0;
1550 request.settings.metadata = settings;
1551
1552 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1553
1554 std::vector<buffer_handle_t> graphicBuffers;
1555 graphicBuffers.reserve(halStreams.size());
1556 outputBuffers.resize(halStreams.size());
1557 size_t k = 0;
1558 for (const auto& halStream : halStreams) {
1559 buffer_handle_t buffer_handle;
1560 if (useHalBufManager) {
1561 outputBuffers[k] = {halStream.id, /*bufferId*/ 0, NativeHandle(),
1562 BufferStatus::OK, NativeHandle(), NativeHandle()};
1563 } else {
1564 allocateGraphicBuffer(previewStream.width, previewStream.height,
1565 android_convertGralloc1To0Usage(
1566 static_cast<uint64_t>(halStream.producerUsage),
1567 static_cast<uint64_t>(halStream.consumerUsage)),
1568 halStream.overrideFormat, &buffer_handle);
1569 graphicBuffers.push_back(buffer_handle);
1570 outputBuffers[k] = {
1571 halStream.id, bufferId, ::android::makeToAidl(buffer_handle),
1572 BufferStatus::OK, NativeHandle(), NativeHandle()};
1573 bufferId++;
1574 }
1575 k++;
1576 }
1577
1578 std::vector<PhysicalCameraSetting> camSettings(1);
1579 const camera_metadata_t* filteredSettingsBuffer = filteredSettings.getAndLock();
1580 uint8_t* rawFilteredSettingsBuffer = (uint8_t*)filteredSettingsBuffer;
1581 camSettings[0].settings = {std::vector(
1582 rawFilteredSettingsBuffer,
1583 rawFilteredSettingsBuffer + get_camera_metadata_size(filteredSettingsBuffer))};
1584 overrideRotateAndCrop(&camSettings[0].settings);
1585 camSettings[0].fmqSettingsSize = 0;
1586 camSettings[0].physicalCameraId = physicalDeviceId;
1587
1588 request.inputBuffer = {
1589 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
1590 request.physicalCameraSettings = camSettings;
1591
1592 {
1593 std::unique_lock<std::mutex> l(mLock);
1594 mInflightMap.clear();
1595 mInflightMap[frameNumber] = inflightReq;
1596 }
1597
1598 int32_t numRequestProcessed = 0;
1599 std::vector<BufferCache> cachesToRemove;
1600 ndk::ScopedAStatus returnStatus =
1601 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1602 ASSERT_TRUE(returnStatus.isOk());
1603 ASSERT_EQ(numRequestProcessed, 1u);
1604
1605 {
1606 std::unique_lock<std::mutex> l(mLock);
1607 while (!inflightReq->errorCodeValid &&
1608 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1609 auto timeout = std::chrono::system_clock::now() +
1610 std::chrono::seconds(kStreamBufferTimeoutSec);
1611 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1612 }
1613
1614 ASSERT_FALSE(inflightReq->errorCodeValid);
1615 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1616
1617 request.frameNumber++;
1618 // Empty settings should be supported after the first call
1619 // for repeating requests.
1620 request.settings.metadata.clear();
1621 request.physicalCameraSettings[0].settings.metadata.clear();
1622 // The buffer has been registered to HAL by bufferId, so per
1623 // API contract we should send a null handle for this buffer
1624 request.outputBuffers[0].buffer = NativeHandle();
1625 mInflightMap.clear();
1626 inflightReq = std::make_shared<InFlightRequest>(
1627 static_cast<ssize_t>(physicalIds.size()), false, supportsPartialResults,
1628 partialResultCount, physicalIds, resultQueue);
1629 mInflightMap[request.frameNumber] = inflightReq;
1630 }
1631
1632 returnStatus =
1633 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1634 ASSERT_TRUE(returnStatus.isOk());
1635 ASSERT_EQ(numRequestProcessed, 1u);
1636
1637 {
1638 std::unique_lock<std::mutex> l(mLock);
1639 while (!inflightReq->errorCodeValid &&
1640 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1641 auto timeout = std::chrono::system_clock::now() +
1642 std::chrono::seconds(kStreamBufferTimeoutSec);
1643 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1644 }
1645
1646 ASSERT_FALSE(inflightReq->errorCodeValid);
1647 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1648 }
1649
1650 // Invalid physical camera id should fail process requests
1651 frameNumber++;
1652 camSettings[0].physicalCameraId = invalidPhysicalId;
1653 camSettings[0].settings.metadata = settings;
1654
1655 request.physicalCameraSettings = camSettings; // Invalid camera settings
1656 returnStatus =
1657 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1658 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
1659 returnStatus.getServiceSpecificError());
1660
1661 defaultPreviewSettings.unlock(settingsBuffer);
1662 filteredSettings.unlock(filteredSettingsBuffer);
1663
1664 if (useHalBufManager) {
1665 std::vector<int32_t> streamIds(halStreams.size());
1666 for (size_t i = 0; i < streamIds.size(); i++) {
1667 streamIds[i] = halStreams[i].id;
1668 }
1669 verifyBuffersReturned(mSession, streamIds, cb);
1670 }
1671
1672 ret = mSession->close();
1673 mSession = nullptr;
1674 ASSERT_TRUE(ret.isOk());
1675 }
1676}
1677
1678// Generate and verify an ultra high resolution capture request
TEST_P(CameraAidlTest, processUltraHighResolutionRequest) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    int64_t bufferId = 1;
    int32_t frameNumber = 1;
    CameraMetadata settings;

    for (const auto& name : cameraDeviceNames) {
        std::string version, deviceId;
        ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
        CameraMetadata meta;

        // Open a session only to fetch the static metadata; it is closed again
        // before the maximum-resolution streams are configured below.
        std::shared_ptr<ICameraDevice> unusedDevice;
        openEmptyDeviceSession(name, mProvider, &mSession, &meta, &unusedDevice);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
        if (!isUltraHighResolution(staticMeta)) {
            // Devices without ultra high resolution support skip this test.
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }
        CameraMetadata req;
        android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
        ndk::ScopedAStatus ret =
                mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE, &req);
        ASSERT_TRUE(ret.isOk());

        // Sanity-check the returned default settings buffer before editing it.
        const camera_metadata_t* metadata =
                reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
        size_t expectedSize = req.metadata.size();
        int result = validate_camera_metadata_structure(metadata, &expectedSize);
        ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));

        size_t entryCount = get_camera_metadata_entry_count(metadata);
        ASSERT_GT(entryCount, 0u);
        defaultSettings = metadata;
        // Ask the sensor to run in maximum (full pixel array) resolution mode.
        uint8_t sensorPixelMode =
                static_cast<uint8_t>(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
        ASSERT_EQ(::android::OK,
                  defaultSettings.update(ANDROID_SENSOR_PIXEL_MODE, &sensorPixelMode, 1));

        // Serialize the edited settings into the AIDL CameraMetadata payload.
        const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
        uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
        settings.metadata = std::vector(
                rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
        overrideRotateAndCrop(&settings);

        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());

        std::vector<HalStream> halStreams;
        bool supportsPartialResults = false;
        bool useHalBufManager = false;
        int32_t partialResultCount = 0;
        Stream previewStream;
        std::shared_ptr<DeviceCb> cb;

        // Run one capture per pixel format at maximum resolution.
        std::list<PixelFormat> pixelFormats = {PixelFormat::YCBCR_420_888, PixelFormat::RAW16};
        for (PixelFormat format : pixelFormats) {
            configureStreams(name, mProvider, format, &mSession, &previewStream, &halStreams,
                             &supportsPartialResults, &partialResultCount, &useHalBufManager, &cb,
                             0, /*maxResolution*/ true);
            ASSERT_NE(mSession, nullptr);

            ::aidl::android::hardware::common::fmq::MQDescriptor<
                    int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
                    descriptor;
            auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
            ASSERT_TRUE(resultQueueRet.isOk());

            std::shared_ptr<ResultMetadataQueue> resultQueue =
                    std::make_shared<ResultMetadataQueue>(descriptor);
            if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
                ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
                resultQueue = nullptr;
                // Don't use the queue onwards.
            }

            std::vector<buffer_handle_t> graphicBuffers;
            graphicBuffers.reserve(halStreams.size());
            // One in-flight request that expects a buffer per configured stream.
            std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
                    static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
                    partialResultCount, std::unordered_set<std::string>(), resultQueue);

            std::vector<CaptureRequest> requests(1);
            CaptureRequest& request = requests[0];
            std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
            outputBuffers.resize(halStreams.size());

            // Attach one output buffer per HAL stream. With the HAL buffer
            // manager enabled the HAL allocates buffers itself, so an empty
            // handle with bufferId 0 is sent instead of a locally-allocated one.
            size_t k = 0;
            for (const auto& halStream : halStreams) {
                buffer_handle_t buffer_handle;
                if (useHalBufManager) {
                    outputBuffers[k] = {halStream.id, 0,
                                        NativeHandle(), BufferStatus::OK,
                                        NativeHandle(), NativeHandle()};
                } else {
                    allocateGraphicBuffer(previewStream.width, previewStream.height,
                                          android_convertGralloc1To0Usage(
                                                  static_cast<uint64_t>(halStream.producerUsage),
                                                  static_cast<uint64_t>(halStream.consumerUsage)),
                                          halStream.overrideFormat, &buffer_handle);
                    graphicBuffers.push_back(buffer_handle);
                    outputBuffers[k] = {
                            halStream.id, bufferId, ::android::makeToAidl(buffer_handle),
                            BufferStatus::OK, NativeHandle(), NativeHandle()};
                    bufferId++;
                }
                k++;
            }

            // Capture-only request: no input buffer (streamId -1).
            request.inputBuffer = {
                    -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
            request.frameNumber = frameNumber;
            request.fmqSettingsSize = 0;
            request.settings = settings;
            request.inputWidth = 0;
            request.inputHeight = 0;

            {
                std::unique_lock<std::mutex> l(mLock);
                mInflightMap.clear();
                mInflightMap[frameNumber] = inflightReq;
            }

            int32_t numRequestProcessed = 0;
            std::vector<BufferCache> cachesToRemove;
            ndk::ScopedAStatus returnStatus =
                    mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
            ASSERT_TRUE(returnStatus.isOk());
            ASSERT_EQ(numRequestProcessed, 1u);

            {
                // Wait (bounded by kStreamBufferTimeoutSec per wakeup) until the
                // request either reports an error or returns all buffers and
                // the result metadata.
                std::unique_lock<std::mutex> l(mLock);
                while (!inflightReq->errorCodeValid &&
                       ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kStreamBufferTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
                }

                ASSERT_FALSE(inflightReq->errorCodeValid);
                ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
            }
            if (useHalBufManager) {
                // Ask the HAL to return all of its buffers before closing.
                std::vector<int32_t> streamIds(halStreams.size());
                for (size_t i = 0; i < streamIds.size(); i++) {
                    streamIds[i] = halStreams[i].id;
                }
                verifyBuffersReturned(mSession, streamIds, cb);
            }

            ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
        }
    }
}
1837
1838// Generate and verify 10-bit dynamic range request
TEST_P(CameraAidlTest, process10BitDynamicRangeRequest) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    int64_t bufferId = 1;
    int32_t frameNumber = 1;
    CameraMetadata settings;

    for (const auto& name : cameraDeviceNames) {
        std::string version, deviceId;
        ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> device;
        openEmptyDeviceSession(name, mProvider, &mSession, &meta, &device);
        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
        if (!is10BitDynamicRangeCapable(staticMeta)) {
            // Devices without 10-bit dynamic range support skip this test.
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }
        // Every advertised 10-bit dynamic range profile is exercised below.
        std::vector<RequestAvailableDynamicRangeProfilesMap> profileList;
        get10BitDynamicRangeProfiles(staticMeta, &profileList);
        ASSERT_FALSE(profileList.empty());

        CameraMetadata req;
        android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
        ndk::ScopedAStatus ret =
                mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE, &req);
        ASSERT_TRUE(ret.isOk());

        // Sanity-check the default settings buffer before reusing it as the
        // capture request settings.
        const camera_metadata_t* metadata =
                reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
        size_t expectedSize = req.metadata.size();
        int result = validate_camera_metadata_structure(metadata, &expectedSize);
        ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));

        size_t entryCount = get_camera_metadata_entry_count(metadata);
        ASSERT_GT(entryCount, 0u);
        defaultSettings = metadata;

        // Serialize the settings into the AIDL CameraMetadata payload.
        const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
        uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
        settings.metadata = std::vector(
                rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
        overrideRotateAndCrop(&settings);

        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());

        std::vector<HalStream> halStreams;
        bool supportsPartialResults = false;
        bool useHalBufManager = false;
        int32_t partialResultCount = 0;
        Stream previewStream;
        std::shared_ptr<DeviceCb> cb;
        // Run one full capture per supported dynamic range profile.
        for (const auto& profile : profileList) {
            configureStreams(name, mProvider, PixelFormat::IMPLEMENTATION_DEFINED, &mSession,
                             &previewStream, &halStreams, &supportsPartialResults,
                             &partialResultCount, &useHalBufManager, &cb, 0,
                             /*maxResolution*/ false, profile);
            ASSERT_NE(mSession, nullptr);

            ::aidl::android::hardware::common::fmq::MQDescriptor<
                    int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
                    descriptor;
            auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
            ASSERT_TRUE(resultQueueRet.isOk());

            std::shared_ptr<ResultMetadataQueue> resultQueue =
                    std::make_shared<ResultMetadataQueue>(descriptor);
            if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
                ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
                resultQueue = nullptr;
                // Don't use the queue onwards.
            }

            std::vector<buffer_handle_t> graphicBuffers;
            graphicBuffers.reserve(halStreams.size());

            // One in-flight request expecting a buffer from every configured stream.
            std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
                    static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
                    partialResultCount, std::unordered_set<std::string>(), resultQueue);

            std::vector<CaptureRequest> requests(1);
            CaptureRequest& request = requests[0];
            std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
            outputBuffers.resize(halStreams.size());

            size_t k = 0;
            for (const auto& halStream : halStreams) {
                buffer_handle_t buffer_handle;
                if (useHalBufManager) {
                    // HAL buffer manager allocates buffers itself; send an
                    // empty handle with bufferId 0.
                    outputBuffers[k] = {halStream.id, 0,
                                        NativeHandle(), BufferStatus::OK,
                                        NativeHandle(), NativeHandle()};
                } else {
                    allocateGraphicBuffer(previewStream.width, previewStream.height,
                                          android_convertGralloc1To0Usage(
                                                  static_cast<uint64_t>(halStream.producerUsage),
                                                  static_cast<uint64_t>(halStream.consumerUsage)),
                                          halStream.overrideFormat, &buffer_handle);

                    graphicBuffers.push_back(buffer_handle);
                    outputBuffers[k] = {
                            halStream.id, bufferId, android::makeToAidl(buffer_handle),
                            BufferStatus::OK, NativeHandle(), NativeHandle()};
                    bufferId++;
                }
                k++;
            }

            // Capture-only request: no input buffer (streamId -1).
            request.inputBuffer = {
                    -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
            request.frameNumber = frameNumber;
            request.fmqSettingsSize = 0;
            request.settings = settings;
            request.inputWidth = 0;
            request.inputHeight = 0;

            {
                std::unique_lock<std::mutex> l(mLock);
                mInflightMap.clear();
                mInflightMap[frameNumber] = inflightReq;
            }

            int32_t numRequestProcessed = 0;
            std::vector<BufferCache> cachesToRemove;
            ndk::ScopedAStatus returnStatus =
                    mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
            ASSERT_TRUE(returnStatus.isOk());
            ASSERT_EQ(numRequestProcessed, 1u);

            {
                // Wait until the request errors out or fully completes.
                std::unique_lock<std::mutex> l(mLock);
                while (!inflightReq->errorCodeValid &&
                       ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
                    auto timeout = std::chrono::system_clock::now() +
                                   std::chrono::seconds(kStreamBufferTimeoutSec);
                    ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
                }

                ASSERT_FALSE(inflightReq->errorCodeValid);
                ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
                // Validate 10-bit specific metadata of the returned buffers
                // against the requested dynamic range profile.
                verify10BitMetadata(mHandleImporter, *inflightReq, profile);
            }
            if (useHalBufManager) {
                // Flush the streams and wait until the HAL returns all of its
                // buffers before tearing the session down.
                std::vector<int32_t> streamIds(halStreams.size());
                for (size_t i = 0; i < streamIds.size(); i++) {
                    streamIds[i] = halStreams[i].id;
                }
                mSession->signalStreamFlush(streamIds, /*streamConfigCounter*/ 0);
                cb->waitForBuffersReturned();
            }

            ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
        }
    }
}
1999
2000// Generate and verify a burst containing alternating sensor sensitivity values
TEST_P(CameraAidlTest, processCaptureRequestBurstISO) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
                                        static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
    int64_t bufferId = 1;
    int32_t frameNumber = 1;
    // Allowed relative deviation between requested and reported sensitivity.
    float isoTol = .03f;
    CameraMetadata settings;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        settings.metadata.clear();
        std::shared_ptr<ICameraDevice> unusedDevice;
        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &unusedDevice /*out*/);
        camera_metadata_t* staticMetaBuffer =
                clone_camera_metadata(reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
        // The helper CameraMetadata takes ownership of the cloned buffer.
        ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta(
                staticMetaBuffer);

        camera_metadata_entry_t hwLevel = staticMeta.find(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL);
        ASSERT_TRUE(0 < hwLevel.count);
        if (ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED == hwLevel.data.u8[0] ||
            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL == hwLevel.data.u8[0]) {
            // Limited/External devices can skip this test
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        // The test alternates between both ends of the advertised
        // sensitivity range, so the range must contain exactly min and max.
        camera_metadata_entry_t isoRange = staticMeta.find(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE);
        ASSERT_EQ(isoRange.count, 2u);

        ndk::ScopedAStatus ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());

        bool supportsPartialResults = false;
        bool useHalBufManager = false;
        int32_t partialResultCount = 0;
        Stream previewStream;
        std::vector<HalStream> halStreams;
        std::shared_ptr<DeviceCb> cb;
        configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
                               &previewStream /*out*/, &halStreams /*out*/,
                               &supportsPartialResults /*out*/, &partialResultCount /*out*/,
                               &useHalBufManager /*out*/, &cb /*out*/);

        ::aidl::android::hardware::common::fmq::MQDescriptor<
                int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
                descriptor;
        auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
        std::shared_ptr<ResultMetadataQueue> resultQueue =
                std::make_shared<ResultMetadataQueue>(descriptor);
        ASSERT_TRUE(resultQueueRet.isOk());
        if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
            ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
            resultQueue = nullptr;
            // Don't use the queue onwards.
        }

        ret = mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &settings);
        ASSERT_TRUE(ret.isOk());

        ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
        std::vector<CaptureRequest> requests(kBurstFrameCount);
        std::vector<buffer_handle_t> buffers(kBurstFrameCount);
        std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount);
        std::vector<int32_t> isoValues(kBurstFrameCount);
        std::vector<CameraMetadata> requestSettings(kBurstFrameCount);

        // Build the burst: each frame alternates between minimum and maximum
        // sensitivity, with 3A disabled so the requested value takes effect.
        for (int32_t i = 0; i < kBurstFrameCount; i++) {
            std::unique_lock<std::mutex> l(mLock);
            CaptureRequest& request = requests[i];
            std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
            outputBuffers.resize(1);
            StreamBuffer& outputBuffer = outputBuffers[0];

            isoValues[i] = ((i % 2) == 0) ? isoRange.data.i32[0] : isoRange.data.i32[1];
            if (useHalBufManager) {
                // HAL buffer manager allocates buffers itself; send an empty
                // handle with bufferId 0.
                outputBuffer = {halStreams[0].id, 0,
                                NativeHandle(), BufferStatus::OK,
                                NativeHandle(), NativeHandle()};
            } else {
                allocateGraphicBuffer(previewStream.width, previewStream.height,
                                      android_convertGralloc1To0Usage(
                                              static_cast<uint64_t>(halStreams[0].producerUsage),
                                              static_cast<uint64_t>(halStreams[0].consumerUsage)),
                                      halStreams[0].overrideFormat, &buffers[i]);
                outputBuffer = {halStreams[0].id, bufferId + i, ::android::makeToAidl(buffers[i]),
                                BufferStatus::OK, NativeHandle(), NativeHandle()};
            }

            // requestMeta is emptied by release() below, so append() starts
            // from a clean buffer on every iteration.
            requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));

            // Disable all 3A routines
            uint8_t mode = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF);
            ASSERT_EQ(::android::OK, requestMeta.update(ANDROID_CONTROL_MODE, &mode, 1));
            ASSERT_EQ(::android::OK,
                      requestMeta.update(ANDROID_SENSOR_SENSITIVITY, &isoValues[i], 1));
            camera_metadata_t* metaBuffer = requestMeta.release();
            uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
            requestSettings[i].metadata = std::vector(
                    rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
            overrideRotateAndCrop(&(requestSettings[i]));

            request.frameNumber = frameNumber + i;
            request.fmqSettingsSize = 0;
            request.settings = requestSettings[i];
            // Capture-only request: no input buffer (streamId -1).
            request.inputBuffer = {
                    -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};

            inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
                                                                partialResultCount, resultQueue);
            mInflightMap[frameNumber + i] = inflightReqs[i];
        }

        int32_t numRequestProcessed = 0;
        std::vector<BufferCache> cachesToRemove;

        ndk::ScopedAStatus returnStatus =
                mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
        ASSERT_TRUE(returnStatus.isOk());
        ASSERT_EQ(numRequestProcessed, kBurstFrameCount);

        // Wait for every frame of the burst and check the reported
        // sensitivity matches the requested one within isoTol.
        for (size_t i = 0; i < kBurstFrameCount; i++) {
            std::unique_lock<std::mutex> l(mLock);
            while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
                                                        (!inflightReqs[i]->haveResultMetadata))) {
                auto timeout = std::chrono::system_clock::now() +
                               std::chrono::seconds(kStreamBufferTimeoutSec);
                ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
            }

            ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
            ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
            ASSERT_EQ(previewStream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
            ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
            ASSERT_TRUE(inflightReqs[i]->collectedResult.exists(ANDROID_SENSOR_SENSITIVITY));
            camera_metadata_entry_t isoResult =
                    inflightReqs[i]->collectedResult.find(ANDROID_SENSOR_SENSITIVITY);
            ASSERT_TRUE(std::abs(isoResult.data.i32[0] - isoValues[i]) <=
                        std::round(isoValues[i] * isoTol));
        }

        if (useHalBufManager) {
            verifyBuffersReturned(mSession, previewStream.id, cb);
        }
        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}
2155
2156// Test whether an incorrect capture request with missing settings will
2157// be reported correctly.
2158TEST_P(CameraAidlTest, processCaptureRequestInvalidSinglePreview) {
2159 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2160 std::vector<AvailableStream> outputPreviewStreams;
2161 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2162 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2163 int64_t bufferId = 1;
2164 int32_t frameNumber = 1;
2165 CameraMetadata settings;
2166
2167 for (const auto& name : cameraDeviceNames) {
2168 Stream previewStream;
2169 std::vector<HalStream> halStreams;
2170 std::shared_ptr<DeviceCb> cb;
2171 bool supportsPartialResults = false;
2172 bool useHalBufManager = false;
2173 int32_t partialResultCount = 0;
2174 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2175 &previewStream /*out*/, &halStreams /*out*/,
2176 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2177 &useHalBufManager /*out*/, &cb /*out*/);
2178 ASSERT_NE(mSession, nullptr);
2179 ASSERT_FALSE(halStreams.empty());
2180
2181 buffer_handle_t buffer_handle = nullptr;
2182
2183 if (useHalBufManager) {
2184 bufferId = 0;
2185 } else {
2186 allocateGraphicBuffer(previewStream.width, previewStream.height,
2187 android_convertGralloc1To0Usage(
2188 static_cast<uint64_t>(halStreams[0].producerUsage),
2189 static_cast<uint64_t>(halStreams[0].consumerUsage)),
2190 halStreams[0].overrideFormat, &buffer_handle);
2191 }
2192
2193 std::vector<CaptureRequest> requests(1);
2194 CaptureRequest& request = requests[0];
2195 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2196 outputBuffers.resize(1);
2197 StreamBuffer& outputBuffer = outputBuffers[0];
2198
2199 outputBuffer = {
2200 halStreams[0].id,
2201 bufferId,
2202 buffer_handle == nullptr ? NativeHandle() : ::android::makeToAidl(buffer_handle),
2203 BufferStatus::OK,
2204 NativeHandle(),
2205 NativeHandle()};
2206
2207 request.inputBuffer = {
2208 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2209 request.frameNumber = frameNumber;
2210 request.fmqSettingsSize = 0;
2211 request.settings = settings;
2212
2213 // Settings were not correctly initialized, we should fail here
2214 int32_t numRequestProcessed = 0;
2215 std::vector<BufferCache> cachesToRemove;
2216 ndk::ScopedAStatus ret =
2217 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2218 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
2219 ASSERT_EQ(numRequestProcessed, 0u);
2220
2221 ret = mSession->close();
2222 mSession = nullptr;
2223 ASSERT_TRUE(ret.isOk());
2224 }
2225}
2226
2227// Verify camera offline session behavior
2228TEST_P(CameraAidlTest, switchToOffline) {
2229 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2230 AvailableStream threshold = {kMaxStillWidth, kMaxStillHeight,
2231 static_cast<int32_t>(PixelFormat::BLOB)};
2232 int64_t bufferId = 1;
2233 int32_t frameNumber = 1;
2234 CameraMetadata settings;
2235
2236 for (const auto& name : cameraDeviceNames) {
2237 CameraMetadata meta;
2238 {
2239 std::shared_ptr<ICameraDevice> unusedDevice;
2240 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2241 &unusedDevice);
2242 camera_metadata_t* staticMetaBuffer = clone_camera_metadata(
2243 reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
2244 ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta(
2245 staticMetaBuffer);
2246
2247 if (isOfflineSessionSupported(staticMetaBuffer) != Status::OK) {
2248 ndk::ScopedAStatus ret = mSession->close();
2249 mSession = nullptr;
2250 ASSERT_TRUE(ret.isOk());
2251 continue;
2252 }
2253 ndk::ScopedAStatus ret = mSession->close();
2254 mSession = nullptr;
2255 ASSERT_TRUE(ret.isOk());
2256 }
2257
2258 bool supportsPartialResults = false;
2259 int32_t partialResultCount = 0;
2260 Stream stream;
2261 std::vector<HalStream> halStreams;
2262 std::shared_ptr<DeviceCb> cb;
2263 int32_t jpegBufferSize;
2264 bool useHalBufManager;
2265 configureOfflineStillStream(name, mProvider, &threshold, &mSession /*out*/, &stream /*out*/,
2266 &halStreams /*out*/, &supportsPartialResults /*out*/,
2267 &partialResultCount /*out*/, &cb /*out*/,
2268 &jpegBufferSize /*out*/, &useHalBufManager /*out*/);
2269
2270 auto ret = mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE,
2271 &settings);
2272 ASSERT_TRUE(ret.isOk());
2273
2274 ::aidl::android::hardware::common::fmq::MQDescriptor<
2275 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2276 descriptor;
2277
2278 ndk::ScopedAStatus resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2279 ASSERT_TRUE(resultQueueRet.isOk());
2280 std::shared_ptr<ResultMetadataQueue> resultQueue =
2281 std::make_shared<ResultMetadataQueue>(descriptor);
2282 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2283 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
2284 resultQueue = nullptr;
2285 // Don't use the queue onwards.
2286 }
2287
2288 ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
2289
2290 std::vector<buffer_handle_t> buffers(kBurstFrameCount);
2291 std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount);
2292 std::vector<CameraMetadata> requestSettings(kBurstFrameCount);
2293
2294 std::vector<CaptureRequest> requests(kBurstFrameCount);
2295
2296 HalStream halStream = halStreams[0];
2297 for (uint32_t i = 0; i < kBurstFrameCount; i++) {
2298 CaptureRequest& request = requests[i];
2299 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2300 outputBuffers.resize(1);
2301 StreamBuffer& outputBuffer = outputBuffers[0];
2302
2303 std::unique_lock<std::mutex> l(mLock);
2304 if (useHalBufManager) {
2305 outputBuffer = {halStream.id, 0, NativeHandle(), BufferStatus::OK, NativeHandle(),
2306 NativeHandle()};
2307 } else {
2308 // jpeg buffer (w,h) = (blobLen, 1)
2309 allocateGraphicBuffer(jpegBufferSize, /*height*/ 1,
2310 android_convertGralloc1To0Usage(
2311 static_cast<uint64_t>(halStream.producerUsage),
2312 static_cast<uint64_t>(halStream.consumerUsage)),
2313 halStream.overrideFormat, &buffers[i]);
2314 outputBuffer = {halStream.id, bufferId + i, ::android::makeToAidl(buffers[i]),
2315 BufferStatus::OK, NativeHandle(), NativeHandle()};
2316 }
2317
2318 requestMeta.clear();
2319 requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));
2320
2321 camera_metadata_t* metaBuffer = requestMeta.release();
2322 uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2323 requestSettings[i].metadata = std::vector(
2324 rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2325 overrideRotateAndCrop(&requestSettings[i]);
2326
2327 request.frameNumber = frameNumber + i;
2328 request.fmqSettingsSize = 0;
2329 request.settings = requestSettings[i];
2330 request.inputBuffer = {/*streamId*/ -1,
2331 /*bufferId*/ 0, NativeHandle(),
2332 BufferStatus::ERROR, NativeHandle(),
2333 NativeHandle()};
2334
2335 inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2336 partialResultCount, resultQueue);
2337 mInflightMap[frameNumber + i] = inflightReqs[i];
2338 }
2339
2340 int32_t numRequestProcessed = 0;
2341 std::vector<BufferCache> cachesToRemove;
2342
2343 ndk::ScopedAStatus returnStatus =
2344 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2345 ASSERT_TRUE(returnStatus.isOk());
2346 ASSERT_EQ(numRequestProcessed, kBurstFrameCount);
2347
2348 std::vector<int32_t> offlineStreamIds = {halStream.id};
2349 CameraOfflineSessionInfo offlineSessionInfo;
2350 std::shared_ptr<ICameraOfflineSession> offlineSession;
2351 returnStatus =
2352 mSession->switchToOffline(offlineStreamIds, &offlineSessionInfo, &offlineSession);
2353
2354 if (!halStreams[0].supportOffline) {
2355 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
2356 returnStatus.getServiceSpecificError());
2357 ret = mSession->close();
2358 mSession = nullptr;
2359 ASSERT_TRUE(ret.isOk());
2360 continue;
2361 }
2362
2363 ASSERT_TRUE(returnStatus.isOk());
2364 // Hal might be unable to find any requests qualified for offline mode.
2365 if (offlineSession == nullptr) {
2366 ret = mSession->close();
2367 mSession = nullptr;
2368 ASSERT_TRUE(ret.isOk());
2369 continue;
2370 }
2371
2372 ASSERT_EQ(offlineSessionInfo.offlineStreams.size(), 1u);
2373 ASSERT_EQ(offlineSessionInfo.offlineStreams[0].id, halStream.id);
2374 ASSERT_NE(offlineSessionInfo.offlineRequests.size(), 0u);
2375
2376 // close device session to make sure offline session does not rely on it
2377 ret = mSession->close();
2378 mSession = nullptr;
2379 ASSERT_TRUE(ret.isOk());
2380
2381 ::aidl::android::hardware::common::fmq::MQDescriptor<
2382 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2383 offlineResultDescriptor;
2384
2385 auto offlineResultQueueRet =
2386 offlineSession->getCaptureResultMetadataQueue(&offlineResultDescriptor);
2387 std::shared_ptr<ResultMetadataQueue> offlineResultQueue =
2388 std::make_shared<ResultMetadataQueue>(descriptor);
2389 if (!offlineResultQueue->isValid() || offlineResultQueue->availableToWrite() <= 0) {
2390 ALOGE("%s: offline session returns empty result metadata fmq, not use it", __func__);
2391 offlineResultQueue = nullptr;
2392 // Don't use the queue onwards.
2393 }
2394 ASSERT_TRUE(offlineResultQueueRet.isOk());
2395
2396 updateInflightResultQueue(offlineResultQueue);
2397
2398 ret = offlineSession->setCallback(cb);
2399 ASSERT_TRUE(ret.isOk());
2400
2401 for (size_t i = 0; i < kBurstFrameCount; i++) {
2402 std::unique_lock<std::mutex> l(mLock);
2403 while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
2404 (!inflightReqs[i]->haveResultMetadata))) {
2405 auto timeout = std::chrono::system_clock::now() +
2406 std::chrono::seconds(kStreamBufferTimeoutSec);
2407 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2408 }
2409
2410 ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
2411 ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
2412 ASSERT_EQ(stream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
2413 ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
2414 }
2415
2416 ret = offlineSession->close();
2417 ASSERT_TRUE(ret.isOk());
2418 }
2419}
2420
2421// Check whether an invalid capture request with missing output buffers
2422// will be reported correctly.
2423TEST_P(CameraAidlTest, processCaptureRequestInvalidBuffer) {
2424 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2425 std::vector<AvailableStream> outputBlobStreams;
2426 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2427 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2428 int32_t frameNumber = 1;
2429 CameraMetadata settings;
2430
2431 for (const auto& name : cameraDeviceNames) {
2432 Stream previewStream;
2433 std::vector<HalStream> halStreams;
2434 std::shared_ptr<DeviceCb> cb;
2435 bool supportsPartialResults = false;
2436 bool useHalBufManager = false;
2437 int32_t partialResultCount = 0;
2438 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2439 &previewStream /*out*/, &halStreams /*out*/,
2440 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2441 &useHalBufManager /*out*/, &cb /*out*/);
2442
2443 RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
2444 ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings);
2445 ASSERT_TRUE(ret.isOk());
2446 overrideRotateAndCrop(&settings);
2447
2448 std::vector<CaptureRequest> requests(1);
2449 CaptureRequest& request = requests[0];
2450 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2451 outputBuffers.resize(1);
2452 // Empty output buffer
2453 outputBuffers[0] = {
2454 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2455
2456 request.inputBuffer = {
2457 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2458 request.frameNumber = frameNumber;
2459 request.fmqSettingsSize = 0;
2460 request.settings = settings;
2461
2462 // Output buffers are missing, we should fail here
2463 int32_t numRequestProcessed = 0;
2464 std::vector<BufferCache> cachesToRemove;
2465 ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2466 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
2467 ASSERT_EQ(numRequestProcessed, 0u);
2468
2469 ret = mSession->close();
2470 mSession = nullptr;
2471 ASSERT_TRUE(ret.isOk());
2472 }
2473}
2474
2475// Generate, trigger and flush a preview request
2476TEST_P(CameraAidlTest, flushPreviewRequest) {
2477 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2478 std::vector<AvailableStream> outputPreviewStreams;
2479 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2480 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2481 int64_t bufferId = 1;
2482 int32_t frameNumber = 1;
2483 CameraMetadata settings;
2484
2485 for (const auto& name : cameraDeviceNames) {
2486 Stream previewStream;
2487 std::vector<HalStream> halStreams;
2488 std::shared_ptr<DeviceCb> cb;
2489 bool supportsPartialResults = false;
2490 bool useHalBufManager = false;
2491 int32_t partialResultCount = 0;
2492
2493 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2494 &previewStream /*out*/, &halStreams /*out*/,
2495 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2496 &useHalBufManager /*out*/, &cb /*out*/);
2497
2498 ASSERT_NE(mSession, nullptr);
2499 ASSERT_NE(cb, nullptr);
2500 ASSERT_FALSE(halStreams.empty());
2501
2502 ::aidl::android::hardware::common::fmq::MQDescriptor<
2503 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2504 descriptor;
2505
2506 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2507 std::shared_ptr<ResultMetadataQueue> resultQueue =
2508 std::make_shared<ResultMetadataQueue>(descriptor);
2509 ASSERT_TRUE(resultQueueRet.isOk());
2510 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2511 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
2512 resultQueue = nullptr;
2513 // Don't use the queue onwards.
2514 }
2515
2516 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
2517 1, false, supportsPartialResults, partialResultCount, resultQueue);
2518 RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
2519
2520 ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings);
2521 ASSERT_TRUE(ret.isOk());
2522 overrideRotateAndCrop(&settings);
2523
2524 buffer_handle_t buffer_handle;
2525 std::vector<CaptureRequest> requests(1);
2526 CaptureRequest& request = requests[0];
2527 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2528 outputBuffers.resize(1);
2529 StreamBuffer& outputBuffer = outputBuffers[0];
2530 if (useHalBufManager) {
2531 bufferId = 0;
2532 outputBuffer = {halStreams[0].id, bufferId, NativeHandle(),
2533 BufferStatus::OK, NativeHandle(), NativeHandle()};
2534 } else {
2535 allocateGraphicBuffer(previewStream.width, previewStream.height,
2536 android_convertGralloc1To0Usage(
2537 static_cast<uint64_t>(halStreams[0].producerUsage),
2538 static_cast<uint64_t>(halStreams[0].consumerUsage)),
2539 halStreams[0].overrideFormat, &buffer_handle);
2540 outputBuffer = {halStreams[0].id, bufferId, ::android::makeToAidl(buffer_handle),
2541 BufferStatus::OK, NativeHandle(), NativeHandle()};
2542 }
2543
2544 request.frameNumber = frameNumber;
2545 request.fmqSettingsSize = 0;
2546 request.settings = settings;
2547 request.inputBuffer = {
2548 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2549
2550 {
2551 std::unique_lock<std::mutex> l(mLock);
2552 mInflightMap.clear();
2553 mInflightMap[frameNumber] = inflightReq;
2554 }
2555
2556 int32_t numRequestProcessed = 0;
2557 std::vector<BufferCache> cachesToRemove;
2558 ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2559 ASSERT_TRUE(ret.isOk());
2560 ASSERT_EQ(numRequestProcessed, 1u);
2561
2562 // Flush before waiting for request to complete.
2563 ndk::ScopedAStatus returnStatus = mSession->flush();
2564 ASSERT_TRUE(returnStatus.isOk());
2565
2566 {
2567 std::unique_lock<std::mutex> l(mLock);
2568 while (!inflightReq->errorCodeValid &&
2569 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2570 auto timeout = std::chrono::system_clock::now() +
2571 std::chrono::seconds(kStreamBufferTimeoutSec);
2572 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2573 }
2574
2575 if (!inflightReq->errorCodeValid) {
2576 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2577 ASSERT_EQ(previewStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId);
2578 } else {
2579 switch (inflightReq->errorCode) {
2580 case ErrorCode::ERROR_REQUEST:
2581 case ErrorCode::ERROR_RESULT:
2582 case ErrorCode::ERROR_BUFFER:
2583 // Expected
2584 break;
2585 case ErrorCode::ERROR_DEVICE:
2586 default:
2587 FAIL() << "Unexpected error:"
2588 << static_cast<uint32_t>(inflightReq->errorCode);
2589 }
2590 }
2591 }
2592
2593 if (useHalBufManager) {
2594 verifyBuffersReturned(mSession, previewStream.id, cb);
2595 }
2596
2597 ret = mSession->close();
2598 mSession = nullptr;
2599 ASSERT_TRUE(ret.isOk());
2600 }
2601}
2602
2603// Verify that camera flushes correctly without any pending requests.
2604TEST_P(CameraAidlTest, flushEmpty) {
2605 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2606 std::vector<AvailableStream> outputPreviewStreams;
2607 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2608 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2609
2610 for (const auto& name : cameraDeviceNames) {
2611 Stream previewStream;
2612 std::vector<HalStream> halStreams;
2613 std::shared_ptr<DeviceCb> cb;
2614 bool supportsPartialResults = false;
2615 bool useHalBufManager = false;
2616
2617 int32_t partialResultCount = 0;
2618 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2619 &previewStream /*out*/, &halStreams /*out*/,
2620 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2621 &useHalBufManager /*out*/, &cb /*out*/);
2622
2623 ndk::ScopedAStatus returnStatus = mSession->flush();
2624 ASSERT_TRUE(returnStatus.isOk());
2625
2626 {
2627 std::unique_lock<std::mutex> l(mLock);
2628 auto timeout = std::chrono::system_clock::now() +
2629 std::chrono::milliseconds(kEmptyFlushTimeoutMSec);
2630 ASSERT_EQ(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2631 }
2632
2633 ndk::ScopedAStatus ret = mSession->close();
2634 mSession = nullptr;
2635 ASSERT_TRUE(ret.isOk());
2636 }
2637}
2638
// Test camera provider notify method
TEST_P(CameraAidlTest, providerDeviceStateNotification) {
    // Drive the provider through a folded/covered state and back to normal.
    // notifyDeviceState() is a CameraAidlTest helper — presumably it forwards
    // to ICameraProvider::notifyDeviceStateChange and checks the returned
    // status internally; verify against camera_aidl_test.cpp.
    notifyDeviceState(ICameraProvider::DEVICE_STATE_BACK_COVERED);
    notifyDeviceState(ICameraProvider::DEVICE_STATE_NORMAL);
}
2644
2645// Verify that all supported stream formats and sizes can be configured
2646// successfully for injection camera.
2647TEST_P(CameraAidlTest, configureInjectionStreamsAvailableOutputs) {
2648 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2649 std::vector<AvailableStream> outputStreams;
2650
2651 for (const auto& name : cameraDeviceNames) {
2652 CameraMetadata metadata;
2653
2654 std::shared_ptr<ICameraInjectionSession> injectionSession;
2655 std::shared_ptr<ICameraDevice> unusedDevice;
2656 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2657 &unusedDevice /*out*/);
2658 if (injectionSession == nullptr) {
2659 continue;
2660 }
2661
2662 camera_metadata_t* staticMetaBuffer =
2663 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2664 CameraMetadata chars;
2665 chars.metadata = metadata.metadata;
2666
2667 outputStreams.clear();
2668 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams));
2669 ASSERT_NE(0u, outputStreams.size());
2670
2671 int32_t jpegBufferSize = 0;
2672 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize));
2673 ASSERT_NE(0u, jpegBufferSize);
2674
2675 int32_t streamId = 0;
2676 int32_t streamConfigCounter = 0;
2677 for (auto& it : outputStreams) {
2678 Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format));
2679 Stream stream = {streamId,
2680 StreamType::OUTPUT,
2681 it.width,
2682 it.height,
2683 static_cast<PixelFormat>(it.format),
2684 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2685 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2686 dataspace,
2687 StreamRotation::ROTATION_0,
2688 std::string(),
2689 jpegBufferSize,
2690 0,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00002691 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2692 RequestAvailableDynamicRangeProfilesMap::
2693 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08002694
2695 std::vector<Stream> streams = {stream};
2696 StreamConfiguration config;
2697 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2698 jpegBufferSize);
2699
2700 config.streamConfigCounter = streamConfigCounter++;
2701 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
2702 ASSERT_TRUE(s.isOk());
2703 streamId++;
2704 }
2705
2706 std::shared_ptr<ICameraDeviceSession> session;
2707 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2708 ASSERT_TRUE(ret.isOk());
2709 ASSERT_NE(session, nullptr);
2710 ret = session->close();
2711 ASSERT_TRUE(ret.isOk());
2712 }
2713}
2714
// Check for correct handling of invalid/incorrect configuration parameters for injection camera.
TEST_P(CameraAidlTest, configureInjectionStreamsInvalidOutputs) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
    std::vector<AvailableStream> outputStreams;

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata metadata;
        std::shared_ptr<ICameraInjectionSession> injectionSession;
        std::shared_ptr<ICameraDevice> unusedDevice;
        // Injection sessions are optional; a null session means the device
        // does not support injection and is skipped.
        openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
                                  &unusedDevice);
        if (injectionSession == nullptr) {
            continue;
        }

        camera_metadata_t* staticMetaBuffer =
                reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
        std::shared_ptr<ICameraDeviceSession> session;
        ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
        ASSERT_TRUE(ret.isOk());
        ASSERT_NE(session, nullptr);

        CameraMetadata chars;
        chars.metadata = metadata.metadata;

        outputStreams.clear();
        ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams));
        ASSERT_NE(0u, outputStreams.size());

        int32_t jpegBufferSize = 0;
        ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize));
        ASSERT_NE(0u, jpegBufferSize);

        // Case 1: zero width/height. Accepted failure codes are either
        // ILLEGAL_ARGUMENT or INTERNAL_ERROR (see assertion below) —
        // presumably because some HALs only detect this during internal
        // configuration; confirm against the configureInjectionStreams spec.
        int32_t streamId = 0;
        Stream stream = {streamId++,
                         StreamType::OUTPUT,
                         0,
                         0,
                         static_cast<PixelFormat>(outputStreams[0].format),
                         static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                                 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                         Dataspace::UNKNOWN,
                         StreamRotation::ROTATION_0,
                         std::string(),
                         jpegBufferSize,
                         0,
                         {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                         RequestAvailableDynamicRangeProfilesMap::
                                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

        int32_t streamConfigCounter = 0;
        std::vector<Stream> streams = {stream};
        StreamConfiguration config;
        createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                  jpegBufferSize);

        config.streamConfigCounter = streamConfigCounter++;
        ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
        ASSERT_TRUE(
                (static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) == s.getServiceSpecificError()) ||
                (static_cast<int32_t>(Status::INTERNAL_ERROR) == s.getServiceSpecificError()));

        // Case 2: absurdly large (INT32_MAX x INT32_MAX) resolution must be
        // rejected with ILLEGAL_ARGUMENT.
        stream = {streamId++,
                  StreamType::OUTPUT,
                  INT32_MAX,
                  INT32_MAX,
                  static_cast<PixelFormat>(outputStreams[0].format),
                  static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                  Dataspace::UNKNOWN,
                  StreamRotation::ROTATION_0,
                  std::string(),
                  jpegBufferSize,
                  0,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

        streams[0] = stream;
        createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                  jpegBufferSize);
        config.streamConfigCounter = streamConfigCounter++;
        s = injectionSession->configureInjectionStreams(config, chars);
        ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());

        // For every advertised size, try two more invalid variants.
        for (auto& it : outputStreams) {
            // Case 3: invalid pixel format (INT32_MAX).
            stream = {streamId++,
                      StreamType::OUTPUT,
                      it.width,
                      it.height,
                      static_cast<PixelFormat>(INT32_MAX),
                      static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                              GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                      Dataspace::UNKNOWN,
                      StreamRotation::ROTATION_0,
                      std::string(),
                      jpegBufferSize,
                      0,
                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                      RequestAvailableDynamicRangeProfilesMap::
                              ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
            streams[0] = stream;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);
            config.streamConfigCounter = streamConfigCounter++;
            s = injectionSession->configureInjectionStreams(config, chars);
            ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());

            // Case 4: invalid stream rotation (INT32_MAX).
            stream = {streamId++,
                      StreamType::OUTPUT,
                      it.width,
                      it.height,
                      static_cast<PixelFormat>(it.format),
                      static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
                              GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
                      Dataspace::UNKNOWN,
                      static_cast<StreamRotation>(INT32_MAX),
                      std::string(),
                      jpegBufferSize,
                      0,
                      {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                      RequestAvailableDynamicRangeProfilesMap::
                              ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
            streams[0] = stream;
            createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
                                      jpegBufferSize);
            config.streamConfigCounter = streamConfigCounter++;
            s = injectionSession->configureInjectionStreams(config, chars);
            ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
        }

        ret = session->close();
        ASSERT_TRUE(ret.isOk());
    }
}
2850
2851// Check whether session parameters are supported for injection camera. If Hal support for them
2852// exist, then try to configure a preview stream using them.
2853TEST_P(CameraAidlTest, configureInjectionStreamsWithSessionParameters) {
2854 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2855 std::vector<AvailableStream> outputPreviewStreams;
2856 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2857 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2858
2859 for (const auto& name : cameraDeviceNames) {
2860 CameraMetadata metadata;
2861 std::shared_ptr<ICameraInjectionSession> injectionSession;
2862 std::shared_ptr<ICameraDevice> unusedDevice;
2863 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2864 &unusedDevice /*out*/);
2865 if (injectionSession == nullptr) {
2866 continue;
2867 }
2868
2869 std::shared_ptr<ICameraDeviceSession> session;
2870 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2871 ASSERT_TRUE(ret.isOk());
2872 ASSERT_NE(session, nullptr);
2873
2874 camera_metadata_t* staticMetaBuffer =
2875 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2876 CameraMetadata chars;
2877 chars.metadata = metadata.metadata;
2878
2879 std::unordered_set<int32_t> availableSessionKeys;
2880 Status rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
2881 &availableSessionKeys);
2882 ASSERT_EQ(Status::OK, rc);
2883 if (availableSessionKeys.empty()) {
2884 ret = session->close();
2885 ASSERT_TRUE(ret.isOk());
2886 continue;
2887 }
2888
2889 android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings;
2890 android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams,
2891 modifiedSessionParams;
2892 constructFilteredSettings(session, availableSessionKeys, RequestTemplate::PREVIEW,
2893 &previewRequestSettings, &sessionParams);
2894 if (sessionParams.isEmpty()) {
2895 ret = session->close();
2896 ASSERT_TRUE(ret.isOk());
2897 continue;
2898 }
2899
2900 outputPreviewStreams.clear();
2901
2902 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams,
2903 &previewThreshold));
2904 ASSERT_NE(0u, outputPreviewStreams.size());
2905
2906 Stream previewStream = {
2907 0,
2908 StreamType::OUTPUT,
2909 outputPreviewStreams[0].width,
2910 outputPreviewStreams[0].height,
2911 static_cast<PixelFormat>(outputPreviewStreams[0].format),
2912 static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
2913 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2914 Dataspace::UNKNOWN,
2915 StreamRotation::ROTATION_0,
2916 std::string(),
2917 0,
2918 -1,
Avichal Rakeshd3503a32022-02-25 06:23:14 +00002919 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2920 RequestAvailableDynamicRangeProfilesMap::
2921 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08002922 std::vector<Stream> streams = {previewStream};
2923 StreamConfiguration config;
2924 config.streams = streams;
2925 config.operationMode = StreamConfigurationMode::NORMAL_MODE;
2926
2927 modifiedSessionParams = sessionParams;
2928 camera_metadata_t* sessionParamsBuffer = sessionParams.release();
2929 uint8_t* rawSessionParamsBuffer = reinterpret_cast<uint8_t*>(sessionParamsBuffer);
2930 config.sessionParams.metadata =
2931 std::vector(rawSessionParamsBuffer,
2932 rawSessionParamsBuffer + get_camera_metadata_size(sessionParamsBuffer));
2933
2934 config.streamConfigCounter = 0;
2935 config.streamConfigCounter = 0;
2936 config.multiResolutionInputImage = false;
2937
2938 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
2939 ASSERT_TRUE(s.isOk());
2940
2941 sessionParams.acquire(sessionParamsBuffer);
2942 free_camera_metadata(staticMetaBuffer);
2943 ret = session->close();
2944 ASSERT_TRUE(ret.isOk());
2945 }
2946}
2947
2948// Verify that valid stream use cases can be configured successfully, and invalid use cases
2949// fail stream configuration.
2950TEST_P(CameraAidlTest, configureStreamsUseCases) {
2951 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2952
2953 for (const auto& name : cameraDeviceNames) {
2954 CameraMetadata meta;
2955 std::shared_ptr<ICameraDevice> cameraDevice;
2956
2957 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2958 &cameraDevice /*out*/);
2959
2960 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
2961 // Check if camera support depth only
2962 if (isDepthOnly(staticMeta)) {
2963 ndk::ScopedAStatus ret = mSession->close();
2964 mSession = nullptr;
2965 ASSERT_TRUE(ret.isOk());
2966 continue;
2967 }
2968
2969 std::vector<AvailableStream> outputPreviewStreams;
2970 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2971 static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
2972 ASSERT_EQ(Status::OK,
2973 getAvailableOutputStreams(staticMeta, outputPreviewStreams, &previewThreshold));
2974 ASSERT_NE(0u, outputPreviewStreams.size());
2975
2976 // Combine valid and invalid stream use cases
2977 std::vector<int32_t> useCases(kMandatoryUseCases);
2978 useCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL + 1);
2979
2980 std::vector<int32_t> supportedUseCases;
2981 camera_metadata_ro_entry entry;
2982 auto retcode = find_camera_metadata_ro_entry(
2983 staticMeta, ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES, &entry);
2984 if ((0 == retcode) && (entry.count > 0)) {
2985 supportedUseCases.insert(supportedUseCases.end(), entry.data.i32,
2986 entry.data.i32 + entry.count);
2987 } else {
2988 supportedUseCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
2989 }
2990
2991 std::vector<Stream> streams(1);
Avichal Rakeshd3503a32022-02-25 06:23:14 +00002992 streams[0] = {0,
2993 StreamType::OUTPUT,
2994 outputPreviewStreams[0].width,
2995 outputPreviewStreams[0].height,
2996 static_cast<PixelFormat>(outputPreviewStreams[0].format),
2997 static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
2998 GRALLOC1_CONSUMER_USAGE_CPU_READ),
2999 Dataspace::UNKNOWN,
3000 StreamRotation::ROTATION_0,
3001 std::string(),
3002 0,
3003 -1,
3004 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
3005 RequestAvailableDynamicRangeProfilesMap::
3006 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
Avichal Rakesh362242f2022-02-08 12:40:53 -08003007
3008 int32_t streamConfigCounter = 0;
3009 CameraMetadata req;
3010 StreamConfiguration config;
3011 RequestTemplate reqTemplate = RequestTemplate::STILL_CAPTURE;
3012 ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &req);
3013 ASSERT_TRUE(ret.isOk());
3014 config.sessionParams = req;
3015
3016 for (int32_t useCase : useCases) {
3017 bool useCaseSupported = std::find(supportedUseCases.begin(), supportedUseCases.end(),
3018 useCase) != supportedUseCases.end();
3019
3020 streams[0].useCase = static_cast<
3021 aidl::android::hardware::camera::metadata::ScalerAvailableStreamUseCases>(
3022 useCase);
3023 config.streams = streams;
3024 config.operationMode = StreamConfigurationMode::NORMAL_MODE;
3025 config.streamConfigCounter = streamConfigCounter;
3026 config.multiResolutionInputImage = false;
3027
3028 bool combSupported;
3029 ret = cameraDevice->isStreamCombinationSupported(config, &combSupported);
3030 ASSERT_TRUE((ret.isOk()) || (static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED) ==
3031 ret.getServiceSpecificError()));
3032 if (ret.isOk()) {
3033 ASSERT_EQ(combSupported, useCaseSupported);
3034 }
3035 ASSERT_TRUE(ret.isOk());
3036
3037 std::vector<HalStream> halStreams;
3038 ret = mSession->configureStreams(config, &halStreams);
3039 ALOGI("configureStreams returns status: %d", ret.getServiceSpecificError());
3040 if (useCaseSupported) {
3041 ASSERT_TRUE(ret.isOk());
3042 ASSERT_EQ(1u, halStreams.size());
3043 } else {
3044 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
3045 ret.getServiceSpecificError());
3046 }
3047 }
3048 ret = mSession->close();
3049 mSession = nullptr;
3050 ASSERT_TRUE(ret.isOk());
3051 }
3052}
3053
// Instantiate CameraAidlTest once per registered AIDL ICameraProvider
// instance on the device; allow zero instantiations on devices that register
// no AIDL camera provider.
GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(CameraAidlTest);
INSTANTIATE_TEST_SUITE_P(
        PerInstance, CameraAidlTest,
        testing::ValuesIn(android::getAidlHalInstanceNames(ICameraProvider::descriptor)),
        android::hardware::PrintInstanceNameToString);