1/*
2 * Copyright (C) 2022 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include <aidl/Vintf.h>
18#include <aidl/android/hardware/camera/common/VendorTagSection.h>
19#include <aidl/android/hardware/camera/device/ICameraDevice.h>
20#include <aidlcommonsupport/NativeHandle.h>
21#include <camera_aidl_test.h>
22#include <cutils/properties.h>
23#include <device_cb.h>
24#include <empty_device_cb.h>
25#include <grallocusage/GrallocUsageConversion.h>
26#include <gtest/gtest.h>
27#include <hardware/gralloc.h>
28#include <hardware/gralloc1.h>
29#include <hidl/GtestPrinter.h>
30#include <hidl/HidlSupport.h>
31#include <torch_provider_cb.h>
32#include <list>
33
34using ::aidl::android::hardware::camera::common::CameraDeviceStatus;
35using ::aidl::android::hardware::camera::common::CameraResourceCost;
36using ::aidl::android::hardware::camera::common::TorchModeStatus;
37using ::aidl::android::hardware::camera::common::VendorTagSection;
38using ::aidl::android::hardware::camera::device::ICameraDevice;
39using ::aidl::android::hardware::camera::metadata::SensorPixelMode;
40using ::aidl::android::hardware::camera::provider::CameraIdAndStreamCombination;
41using ::aidl::android::hardware::camera::provider::ICameraProviderCallbackDefault;
42
43using ::ndk::ScopedAStatus;
44
45namespace {
46const int32_t kBurstFrameCount = 10;
47const uint32_t kMaxStillWidth = 2048;
48const uint32_t kMaxStillHeight = 1536;
49
50const int64_t kEmptyFlushTimeoutMSec = 200;
51
52const static std::vector<int32_t> kMandatoryUseCases = {
53 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
54 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW,
55 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE,
56 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD,
57 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL,
58 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL};
59} // namespace
60
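// Test that ICameraProvider::getCameraIdList succeeds and log the advertised camera ids.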
61TEST_P(CameraAidlTest, getCameraIdList) {
62 std::vector<std::string> idList;
63 ScopedAStatus ret = mProvider->getCameraIdList(&idList);
64 ASSERT_TRUE(ret.isOk());
65
66 for (size_t i = 0; i < idList.size(); i++) {
67 ALOGI("Camera Id[%zu] is %s", i, idList[i].c_str());
68 }
69}
70
71// Test if ICameraProvider::getVendorTags returns Status::OK
72TEST_P(CameraAidlTest, getVendorTags) {
73 std::vector<VendorTagSection> vendorTags;
74 ScopedAStatus ret = mProvider->getVendorTags(&vendorTags);
75
76 ASSERT_TRUE(ret.isOk());
77 for (size_t i = 0; i < vendorTags.size(); i++) {
78 ALOGI("Vendor tag section %zu name %s", i, vendorTags[i].sectionName.c_str());
79 for (auto& tag : vendorTags[i].tags) {
80 ALOGI("Vendor tag id %u name %s type %d", tag.tagId, tag.tagName.c_str(),
81 (int)tag.tagType);
82 }
83 }
84}
85
86// Test if ICameraProvider::setCallback returns Status::OK
87TEST_P(CameraAidlTest, setCallback) {
88 struct ProviderCb : public ICameraProviderCallbackDefault {
89 ScopedAStatus cameraDeviceStatusChange(const std::string& cameraDeviceName,
90 CameraDeviceStatus newStatus) override {
91 ALOGI("camera device status callback name %s, status %d", cameraDeviceName.c_str(),
92 (int)newStatus);
93 return ScopedAStatus::ok();
94 }
95 ScopedAStatus torchModeStatusChange(const std::string& cameraDeviceName,
96 TorchModeStatus newStatus) override {
97 ALOGI("Torch mode status callback name %s, status %d", cameraDeviceName.c_str(),
98 (int)newStatus);
99 return ScopedAStatus::ok();
100 }
101 ScopedAStatus physicalCameraDeviceStatusChange(const std::string& cameraDeviceName,
102 const std::string& physicalCameraDeviceName,
103 CameraDeviceStatus newStatus) override {
104 ALOGI("physical camera device status callback name %s, physical camera name %s,"
105 " status %d",
106 cameraDeviceName.c_str(), physicalCameraDeviceName.c_str(), (int)newStatus);
107 return ScopedAStatus::ok();
108 }
109 };
110
111 std::shared_ptr<ProviderCb> cb = ProviderCb::make<ProviderCb>();
112 ScopedAStatus ret = mProvider->setCallback(cb);
113 ASSERT_TRUE(ret.isOk());
114 ret = mProvider->setCallback(nullptr);
115 ASSERT_TRUE(ret.isOk());
116}
117
118// Test if ICameraProvider::getCameraDeviceInterface returns Status::OK and non-null device
119TEST_P(CameraAidlTest, getCameraDeviceInterface) {
120 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
121
122 for (const auto& name : cameraDeviceNames) {
123 std::shared_ptr<ICameraDevice> cameraDevice;
124 ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &cameraDevice);
125 ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
126 ret.getServiceSpecificError());
127 ASSERT_TRUE(ret.isOk());
128 ASSERT_NE(cameraDevice, nullptr);
129 }
130}
131
132// Verify that the device resource cost can be retrieved and the values are
133// correct.
134TEST_P(CameraAidlTest, getResourceCost) {
135 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
136
137 for (const auto& deviceName : cameraDeviceNames) {
138 std::shared_ptr<ICameraDevice> cameraDevice;
139 ScopedAStatus ret = mProvider->getCameraDeviceInterface(deviceName, &cameraDevice);
140 ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
141 ret.getServiceSpecificError());
142 ASSERT_TRUE(ret.isOk());
143 ASSERT_NE(cameraDevice, nullptr);
144
145 CameraResourceCost resourceCost;
146 ret = cameraDevice->getResourceCost(&resourceCost);
147 ALOGI("getResourceCost returns: %d:%d", ret.getExceptionCode(),
148 ret.getServiceSpecificError());
149 ASSERT_TRUE(ret.isOk());
150
151 ALOGI(" Resource cost is %d", resourceCost.resourceCost);
152 ASSERT_LE(resourceCost.resourceCost, 100u);
153
154 for (const auto& name : resourceCost.conflictingDevices) {
155 ALOGI(" Conflicting device: %s", name.c_str());
156 }
157 }
158}
159
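// For logical multi-cameras, find physical camera ids that are not publicly advertised and
// verify that every logical camera containing such a hidden physical id reports the same
// system camera kind.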
160TEST_P(CameraAidlTest, systemCameraTest) {
161 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
162 std::map<std::string, std::vector<SystemCameraKind>> hiddenPhysicalIdToLogicalMap;
163 for (const auto& name : cameraDeviceNames) {
164 std::shared_ptr<ICameraDevice> device;
165 ALOGI("getCameraCharacteristics: Testing camera device %s", name.c_str());
166 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
167 ASSERT_TRUE(ret.isOk());
168 ASSERT_NE(device, nullptr);
169
170 CameraMetadata cameraCharacteristics;
171 ret = device->getCameraCharacteristics(&cameraCharacteristics);
172 ASSERT_TRUE(ret.isOk());
173
174 const camera_metadata_t* staticMeta =
175 reinterpret_cast<const camera_metadata_t*>(cameraCharacteristics.metadata.data());
176 Status rc = isLogicalMultiCamera(staticMeta);
177 if (rc == Status::OPERATION_NOT_SUPPORTED) {
178 return;
179 }
180
181 ASSERT_EQ(rc, Status::OK);
182 std::unordered_set<std::string> physicalIds;
183 ASSERT_EQ(getPhysicalCameraIds(staticMeta, &physicalIds), Status::OK);
184 SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
185 Status retStatus = getSystemCameraKind(staticMeta, &systemCameraKind);
186 ASSERT_EQ(retStatus, Status::OK);
187
188 for (auto physicalId : physicalIds) {
189 bool isPublicId = false;
190 for (auto& deviceName : cameraDeviceNames) {
191 std::string publicVersion, publicId;
192 ASSERT_TRUE(matchDeviceName(deviceName, mProviderType, &publicVersion, &publicId));
193 if (physicalId == publicId) {
194 isPublicId = true;
195 break;
196 }
197 }
198 // For hidden physical cameras, collect their associated logical cameras
199 // and store the system camera kind.
200 if (!isPublicId) {
201 auto it = hiddenPhysicalIdToLogicalMap.find(physicalId);
202 if (it == hiddenPhysicalIdToLogicalMap.end()) {
203 hiddenPhysicalIdToLogicalMap.insert(std::make_pair(
204 physicalId, std::vector<SystemCameraKind>({systemCameraKind})));
205 } else {
206 it->second.push_back(systemCameraKind);
207 }
208 }
209 }
210 }
211
212 // Check that the system camera kind of the logical cameras associated with
213 // each hidden physical camera is the same.
214 for (const auto& it : hiddenPhysicalIdToLogicalMap) {
215 SystemCameraKind neededSystemCameraKind = it.second.front();
216 for (auto foundSystemCamera : it.second) {
217 ASSERT_EQ(neededSystemCameraKind, foundSystemCamera);
218 }
219 }
220}
221
222// Verify that the static camera characteristics can be retrieved
223// successfully.
224TEST_P(CameraAidlTest, getCameraCharacteristics) {
225 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
226
227 for (const auto& name : cameraDeviceNames) {
228 std::shared_ptr<ICameraDevice> device;
229 ALOGI("getCameraCharacteristics: Testing camera device %s", name.c_str());
230 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
231 ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
232 ret.getServiceSpecificError());
233 ASSERT_TRUE(ret.isOk());
234 ASSERT_NE(device, nullptr);
235
236 CameraMetadata chars;
237 ret = device->getCameraCharacteristics(&chars);
238 ASSERT_TRUE(ret.isOk());
239 verifyCameraCharacteristics(chars);
240 verifyMonochromeCharacteristics(chars);
241 verifyRecommendedConfigs(chars);
242 verifyLogicalOrUltraHighResCameraMetadata(name, device, chars, cameraDeviceNames);
243
244 ASSERT_TRUE(ret.isOk());
245
246 // getPhysicalCameraCharacteristics will fail for publicly
247 // advertised camera IDs.
248 std::string version, cameraId;
249 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &cameraId));
250 CameraMetadata devChars;
251 ret = device->getPhysicalCameraCharacteristics(cameraId, &devChars);
252 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
253 ASSERT_EQ(0, devChars.metadata.size());
254 }
255}
256
257// Verify that the torch strength level can be set and retrieved successfully.
258TEST_P(CameraAidlTest, turnOnTorchWithStrengthLevel) {
259 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
260
261 std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this);
262 ndk::ScopedAStatus ret = mProvider->setCallback(cb);
263 ASSERT_TRUE(ret.isOk());
264
265 for (const auto& name : cameraDeviceNames) {
266 int32_t defaultLevel;
267 std::shared_ptr<ICameraDevice> device;
268 ALOGI("%s: Testing camera device %s", __FUNCTION__, name.c_str());
269
270 ret = mProvider->getCameraDeviceInterface(name, &device);
271 ASSERT_TRUE(ret.isOk());
272 ASSERT_NE(device, nullptr);
273
274 CameraMetadata chars;
275 ret = device->getCameraCharacteristics(&chars);
276 ASSERT_TRUE(ret.isOk());
277
278 const camera_metadata_t* staticMeta =
279 reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());
280 bool torchStrengthControlSupported = isTorchStrengthControlSupported(staticMeta);
281 camera_metadata_ro_entry entry;
282 int rc = find_camera_metadata_ro_entry(staticMeta,
283 ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL, &entry);
284 if (torchStrengthControlSupported) {
285 ASSERT_EQ(rc, 0);
286 ASSERT_GT(entry.count, 0);
287 defaultLevel = *entry.data.i32;
288 ALOGI("Default level is:%d", defaultLevel);
289 }
290
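    // Request a torch strength level of 2. HALs without strength control must fail with
    // OPERATION_NOT_SUPPORTED; otherwise wait for the torch status callback and verify the
    // level round-trips through getTorchStrengthLevel before turning the torch back off.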
291 mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
292 ret = device->turnOnTorchWithStrengthLevel(2);
293 ALOGI("turnOnTorchWithStrengthLevel returns status: %d", ret.getServiceSpecificError());
294 // OPERATION_NOT_SUPPORTED check
295 if (!torchStrengthControlSupported) {
296 ALOGI("Torch strength control not supported.");
297 ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED),
298 ret.getServiceSpecificError());
299 } else {
300 {
301 ASSERT_TRUE(ret.isOk());
302 std::unique_lock<std::mutex> l(mTorchLock);
303 while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
304 auto timeout = std::chrono::system_clock::now() +
305 std::chrono::seconds(kTorchTimeoutSec);
306 ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
307 }
308 ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
309 mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
310 }
311 ALOGI("getTorchStrengthLevel: Testing");
312 int32_t strengthLevel;
313 ret = device->getTorchStrengthLevel(&strengthLevel);
314 ASSERT_TRUE(ret.isOk());
315 ALOGI("Torch strength level is : %d", strengthLevel);
316 ASSERT_EQ(strengthLevel, 2);
317
318 // Turn OFF the torch and verify torch strength level is reset to default level.
319 ALOGI("Testing torch strength level reset after turning the torch OFF.");
320 ret = device->setTorchMode(false);
321 ASSERT_TRUE(ret.isOk());
322 {
323 std::unique_lock<std::mutex> l(mTorchLock);
324 while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
325 auto timeout = std::chrono::system_clock::now() +
326 std::chrono::seconds(kTorchTimeoutSec);
327 ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
328 }
329 ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
330 }
331
332 ret = device->getTorchStrengthLevel(&strengthLevel);
333 ASSERT_TRUE(ret.isOk());
334 ALOGI("Torch strength level after turning OFF torch is : %d", strengthLevel);
335 ASSERT_EQ(strengthLevel, defaultLevel);
336 }
337 }
338}
339
340// In case it is supported, verify that torch can be enabled.
341// Check for corresponding torch callbacks as well.
342TEST_P(CameraAidlTest, setTorchMode) {
343 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
344
345 std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this);
346 ndk::ScopedAStatus ret = mProvider->setCallback(cb);
347 ALOGI("setCallback returns status: %d", ret.getServiceSpecificError());
348 ASSERT_TRUE(ret.isOk());
349 ASSERT_NE(cb, nullptr);
350
351 for (const auto& name : cameraDeviceNames) {
352 std::shared_ptr<ICameraDevice> device;
353 ALOGI("setTorchMode: Testing camera device %s", name.c_str());
354 ret = mProvider->getCameraDeviceInterface(name, &device);
355 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
356 ret.getServiceSpecificError());
357 ASSERT_TRUE(ret.isOk());
358 ASSERT_NE(device, nullptr);
359
360 CameraMetadata metadata;
361 ret = device->getCameraCharacteristics(&metadata);
362 ALOGI("getCameraCharacteristics returns status:%d", ret.getServiceSpecificError());
363 ASSERT_TRUE(ret.isOk());
364 camera_metadata_t* staticMeta =
365 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
366 bool torchSupported = isTorchSupported(staticMeta);
367
368 mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
369 ret = device->setTorchMode(true);
370 ALOGI("setTorchMode returns status: %d", ret.getServiceSpecificError());
371 if (!torchSupported) {
372 ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED),
373 ret.getServiceSpecificError());
374 } else {
375 ASSERT_TRUE(ret.isOk());
376 {
377 std::unique_lock<std::mutex> l(mTorchLock);
378 while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
379 auto timeout = std::chrono::system_clock::now() +
380 std::chrono::seconds(kTorchTimeoutSec);
381 ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
382 }
383 ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
384 mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
385 }
386
387 ret = device->setTorchMode(false);
388 ASSERT_TRUE(ret.isOk());
389 {
390 std::unique_lock<std::mutex> l(mTorchLock);
391 while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
392 auto timeout = std::chrono::system_clock::now() +
393 std::chrono::seconds(kTorchTimeoutSec);
394 ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
395 }
396 ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
397 }
398 }
399 }
400
401 ret = mProvider->setCallback(nullptr);
402 ASSERT_TRUE(ret.isOk());
403}
404
405// Check dump functionality.
406TEST_P(CameraAidlTest, dump) {
407 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
408
409 for (const auto& name : cameraDeviceNames) {
410 std::shared_ptr<ICameraDevice> device;
411 ALOGI("dump: Testing camera device %s", name.c_str());
412
413 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
414 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
415 ret.getServiceSpecificError());
416 ASSERT_TRUE(ret.isOk());
417 ASSERT_NE(device, nullptr);
418
419 int raw_handle = open(kDumpOutput, O_RDWR);
420 ASSERT_GE(raw_handle, 0);
421
422 auto retStatus = device->dump(raw_handle, nullptr, 0);
423 ASSERT_EQ(retStatus, ::android::OK);
424 close(raw_handle);
425 }
426}
427
428// Open, dump, then close
429TEST_P(CameraAidlTest, openClose) {
430 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
431
432 for (const auto& name : cameraDeviceNames) {
433 std::shared_ptr<ICameraDevice> device;
434 ALOGI("openClose: Testing camera device %s", name.c_str());
435 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
436 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
437 ret.getServiceSpecificError());
438 ASSERT_TRUE(ret.isOk());
439 ASSERT_NE(device, nullptr);
440
441 std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
442
443 ret = device->open(cb, &mSession);
444 ASSERT_TRUE(ret.isOk());
445 ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
446 ret.getServiceSpecificError());
447 ASSERT_NE(mSession, nullptr);
448 int raw_handle = open(kDumpOutput, O_RDWR);
449 ASSERT_GE(raw_handle, 0);
450
451 auto retStatus = device->dump(raw_handle, nullptr, 0);
452 ASSERT_EQ(retStatus, ::android::OK);
453 close(raw_handle);
454
455 ret = mSession->close();
456 mSession = nullptr;
457 ASSERT_TRUE(ret.isOk());
458 // TODO: test all session API calls return INTERNAL_ERROR after close
459 // TODO: keep a wp copy here and verify session cannot be promoted out of this scope
460 }
461}
462
463// Check whether all common default request settings can be successfully
464// constructed.
465TEST_P(CameraAidlTest, constructDefaultRequestSettings) {
466 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
467
468 for (const auto& name : cameraDeviceNames) {
469 std::shared_ptr<ICameraDevice> device;
470 ALOGI("constructDefaultRequestSettings: Testing camera device %s", name.c_str());
471 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
472 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
473 ret.getServiceSpecificError());
474 ASSERT_TRUE(ret.isOk());
475 ASSERT_NE(device, nullptr);
476
477 std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
478 ret = device->open(cb, &mSession);
479 ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
480 ret.getServiceSpecificError());
481 ASSERT_TRUE(ret.isOk());
482 ASSERT_NE(mSession, nullptr);
483
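    // Walk through every request template from PREVIEW to MANUAL. ZERO_SHUTTER_LAG and
    // MANUAL are optional; all other templates must succeed and the returned metadata must
    // pass structural validation.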
484 for (int32_t t = (int32_t)RequestTemplate::PREVIEW; t <= (int32_t)RequestTemplate::MANUAL;
485 t++) {
486 RequestTemplate reqTemplate = (RequestTemplate)t;
487 CameraMetadata rawMetadata;
488 ret = mSession->constructDefaultRequestSettings(reqTemplate, &rawMetadata);
489 ALOGI("constructDefaultRequestSettings returns status:%d:%d", ret.getExceptionCode(),
490 ret.getServiceSpecificError());
491 ASSERT_TRUE(ret.isOk());
492 if (reqTemplate == RequestTemplate::ZERO_SHUTTER_LAG ||
493 reqTemplate == RequestTemplate::MANUAL) {
494 // optional templates
495 ASSERT_TRUE(ret.isOk() || static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
496 ret.getServiceSpecificError());
497 } else {
498 ASSERT_TRUE(ret.isOk());
499 }
500
501 if (ret.isOk()) {
502 const camera_metadata_t* metadata = (camera_metadata_t*)rawMetadata.metadata.data();
503 size_t expectedSize = rawMetadata.metadata.size();
504 int result = validate_camera_metadata_structure(metadata, &expectedSize);
505 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
506 verifyRequestTemplate(metadata, reqTemplate);
507 } else {
508 ASSERT_EQ(0u, rawMetadata.metadata.size());
509 }
510 }
511 ret = mSession->close();
512 mSession = nullptr;
513 ASSERT_TRUE(ret.isOk());
514 }
515}
516
517// Verify that all supported stream formats and sizes can be configured
518// successfully.
519TEST_P(CameraAidlTest, configureStreamsAvailableOutputs) {
520 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
521 std::vector<AvailableStream> outputStreams;
522
523 for (const auto& name : cameraDeviceNames) {
524 CameraMetadata meta;
525 std::shared_ptr<ICameraDevice> device;
526
527 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/, &device /*out*/);
528
529 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
530 outputStreams.clear();
531 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
532 ASSERT_NE(0u, outputStreams.size());
533
534 int32_t jpegBufferSize = 0;
535 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
536 ASSERT_NE(0u, jpegBufferSize);
537
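    // Configure each advertised output size/format as a single-stream configuration and
    // expect exactly one matching HalStream back.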
538 int32_t streamId = 0;
539 int32_t streamConfigCounter = 0;
540 for (auto& it : outputStreams) {
541 Stream stream;
542 Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format));
543 stream.id = streamId;
544 stream.streamType = StreamType::OUTPUT;
545 stream.width = it.width;
546 stream.height = it.height;
547 stream.format = static_cast<PixelFormat>(it.format);
548 stream.dataSpace = dataspace;
549 stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
550 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
551 stream.rotation = StreamRotation::ROTATION_0;
552
553 std::vector<Stream> streams = {stream};
554 StreamConfiguration config;
555 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
556 jpegBufferSize);
557
558 bool expectStreamCombQuery = (isLogicalMultiCamera(staticMeta) == Status::OK);
559 verifyStreamCombination(device, config, /*expectedStatus*/ true, expectStreamCombQuery);
560
561 config.streamConfigCounter = streamConfigCounter++;
562 std::vector<HalStream> halConfigs;
563 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
564 ASSERT_TRUE(ret.isOk());
565 ASSERT_EQ(halConfigs.size(), 1);
566 ASSERT_EQ(halConfigs[0].id, streamId);
567
568 streamId++;
569 }
570
571 ndk::ScopedAStatus ret = mSession->close();
572 mSession = nullptr;
573 ASSERT_TRUE(ret.isOk());
574 }
575}
576
577// Verify that mandatory concurrent streams and outputs are supported.
578TEST_P(CameraAidlTest, configureConcurrentStreamsAvailableOutputs) {
579 struct CameraTestInfo {
580 CameraMetadata staticMeta;
581 std::shared_ptr<ICameraDeviceSession> session;
582 std::shared_ptr<ICameraDevice> cameraDevice;
583 StreamConfiguration config;
584 };
585
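    // Build the mandatory concurrent stream configuration for every camera in each advertised
    // concurrent combination, verify the provider reports the combination as supported, and
    // then configure the streams on each session.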
586 std::map<std::string, std::string> idToNameMap = getCameraDeviceIdToNameMap(mProvider);
587 std::vector<ConcurrentCameraIdCombination> concurrentDeviceCombinations =
588 getConcurrentDeviceCombinations(mProvider);
589 std::vector<AvailableStream> outputStreams;
590 for (const auto& cameraDeviceIds : concurrentDeviceCombinations) {
591 std::vector<CameraIdAndStreamCombination> cameraIdsAndStreamCombinations;
592 std::vector<CameraTestInfo> cameraTestInfos;
593 size_t i = 0;
594 for (const auto& id : cameraDeviceIds.combination) {
595 CameraTestInfo cti;
596 auto it = idToNameMap.find(id);
597 ASSERT_TRUE(idToNameMap.end() != it);
598 std::string name = it->second;
599
600 openEmptyDeviceSession(name, mProvider, &cti.session /*out*/, &cti.staticMeta /*out*/,
601 &cti.cameraDevice /*out*/);
602
603 outputStreams.clear();
604 camera_metadata_t* staticMeta =
605 reinterpret_cast<camera_metadata_t*>(cti.staticMeta.metadata.data());
606 ASSERT_EQ(Status::OK, getMandatoryConcurrentStreams(staticMeta, &outputStreams));
607 ASSERT_NE(0u, outputStreams.size());
608
609 int32_t jpegBufferSize = 0;
610 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
611 ASSERT_NE(0u, jpegBufferSize);
612
613 int32_t streamId = 0;
614 std::vector<Stream> streams(outputStreams.size());
615 size_t j = 0;
616 for (const auto& s : outputStreams) {
617 Stream stream;
618 Dataspace dataspace = getDataspace(static_cast<PixelFormat>(s.format));
619 stream.id = streamId++;
620 stream.streamType = StreamType::OUTPUT;
621 stream.width = s.width;
622 stream.height = s.height;
623 stream.format = static_cast<PixelFormat>(s.format);
624 stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
625 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
626 stream.dataSpace = dataspace;
627 stream.rotation = StreamRotation::ROTATION_0;
628 stream.sensorPixelModesUsed = {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT};
629 streams[j] = stream;
630 j++;
631 }
632
633 // Add the created stream configs to cameraIdsAndStreamCombinations
634 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &cti.config,
635 jpegBufferSize);
636
637 cti.config.streamConfigCounter = outputStreams.size();
638 CameraIdAndStreamCombination cameraIdAndStreamCombination;
639 cameraIdAndStreamCombination.cameraId = id;
640 cameraIdAndStreamCombination.streamConfiguration = cti.config;
641 cameraIdsAndStreamCombinations.push_back(cameraIdAndStreamCombination);
642 i++;
643 cameraTestInfos.push_back(cti);
644 }
645 // Now verify that concurrent streams are supported
646 bool combinationSupported;
647 ndk::ScopedAStatus ret = mProvider->isConcurrentStreamCombinationSupported(
648 cameraIdsAndStreamCombinations, &combinationSupported);
649 ASSERT_TRUE(ret.isOk());
650 ASSERT_EQ(combinationSupported, true);
651
652 // Test that the streams can actually be configured
653 for (auto& cti : cameraTestInfos) {
654 if (cti.session != nullptr) {
655 camera_metadata_t* staticMeta =
656 reinterpret_cast<camera_metadata_t*>(cti.staticMeta.metadata.data());
657 bool expectStreamCombQuery = (isLogicalMultiCamera(staticMeta) == Status::OK);
658 verifyStreamCombination(cti.cameraDevice, cti.config, /*expectedStatus*/ true,
659 expectStreamCombQuery);
660 }
661
662 if (cti.session != nullptr) {
663 std::vector<HalStream> streamConfigs;
664 ret = cti.session->configureStreams(cti.config, &streamConfigs);
665 ASSERT_TRUE(ret.isOk());
666 ASSERT_EQ(cti.config.streams.size(), streamConfigs.size());
667 }
668 }
669
670 for (auto& cti : cameraTestInfos) {
671 ret = cti.session->close();
672 ASSERT_TRUE(ret.isOk());
673 }
674 }
675}
676
677// Check for correct handling of invalid/incorrect configuration parameters.
678TEST_P(CameraAidlTest, configureStreamsInvalidOutputs) {
679 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
680 std::vector<AvailableStream> outputStreams;
681
682 for (const auto& name : cameraDeviceNames) {
683 CameraMetadata meta;
684 std::shared_ptr<ICameraDevice> cameraDevice;
685
686 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
687 &cameraDevice /*out*/);
688 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
689 outputStreams.clear();
690
691 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
692 ASSERT_NE(0u, outputStreams.size());
693
694 int32_t jpegBufferSize = 0;
695 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
696 ASSERT_NE(0u, jpegBufferSize);
697
698 int32_t streamId = 0;
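    // A 0x0 stream is invalid; expect ILLEGAL_ARGUMENT or INTERNAL_ERROR.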
699 Stream stream = {streamId++,
700 StreamType::OUTPUT,
701 static_cast<uint32_t>(0),
702 static_cast<uint32_t>(0),
703 static_cast<PixelFormat>(outputStreams[0].format),
704 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
705 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
706 Dataspace::UNKNOWN,
707 StreamRotation::ROTATION_0,
708 std::string(),
709 jpegBufferSize,
710 -1,
711 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}};
712 int32_t streamConfigCounter = 0;
713 std::vector<Stream> streams = {stream};
714 StreamConfiguration config;
715 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
716 jpegBufferSize);
717
718 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ false,
719 /*expectStreamCombQuery*/ false);
720
721 config.streamConfigCounter = streamConfigCounter++;
722 std::vector<HalStream> halConfigs;
723 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
724 ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
725 ret.getServiceSpecificError() ||
726 static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError());
727
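    // A stream with INT32_MAX dimensions exceeds any supported size and must be rejected
    // with ILLEGAL_ARGUMENT.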
728 stream = {streamId++,
729 StreamType::OUTPUT,
730 /*width*/ INT32_MAX,
731 /*height*/ INT32_MAX,
732 static_cast<PixelFormat>(outputStreams[0].format),
733 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
734 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
735 Dataspace::UNKNOWN,
736 StreamRotation::ROTATION_0,
737 std::string(),
738 jpegBufferSize,
739 -1,
740 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}};
741
742 streams[0] = stream;
743 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
744 jpegBufferSize);
745
746 config.streamConfigCounter = streamConfigCounter++;
747 halConfigs.clear();
748 ret = mSession->configureStreams(config, &halConfigs);
749 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
750
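    // For each supported size, an undefined pixel format and an undefined rotation must both
    // be rejected with ILLEGAL_ARGUMENT.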
751 for (auto& it : outputStreams) {
752 stream = {streamId++,
753 StreamType::OUTPUT,
754 it.width,
755 it.height,
756 static_cast<PixelFormat>(UINT32_MAX),
757 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
758 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
759 Dataspace::UNKNOWN,
760 StreamRotation::ROTATION_0,
761 std::string(),
762 jpegBufferSize,
763 -1,
764 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}};
765
766 streams[0] = stream;
767 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
768 jpegBufferSize);
769 config.streamConfigCounter = streamConfigCounter++;
770 halConfigs.clear();
771 ret = mSession->configureStreams(config, &halConfigs);
772 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
773 ret.getServiceSpecificError());
774
775 stream = {streamId++,
776 StreamType::OUTPUT,
777 it.width,
778 it.height,
779 static_cast<PixelFormat>(it.format),
780 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
781 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
782 Dataspace::UNKNOWN,
783 static_cast<StreamRotation>(UINT32_MAX),
784 std::string(),
785 jpegBufferSize,
786 -1,
787 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}};
788
789 streams[0] = stream;
790 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
791 jpegBufferSize);
792
793 config.streamConfigCounter = streamConfigCounter++;
794 halConfigs.clear();
795 ret = mSession->configureStreams(config, &halConfigs);
796 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
797 ret.getServiceSpecificError());
798 }
799
800 ret = mSession->close();
801 mSession = nullptr;
802 ASSERT_TRUE(ret.isOk());
803 }
804}
805
806// Check whether all supported ZSL output stream combinations can be
807// configured successfully.
808TEST_P(CameraAidlTest, configureStreamsZSLInputOutputs) {
809 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
810 std::vector<AvailableStream> inputStreams;
811 std::vector<AvailableZSLInputOutput> inputOutputMap;
812
813 for (const auto& name : cameraDeviceNames) {
814 CameraMetadata meta;
815 std::shared_ptr<ICameraDevice> cameraDevice;
816
817 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
818 &cameraDevice /*out*/);
819 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
820
821 Status rc = isZSLModeAvailable(staticMeta);
822 if (Status::OPERATION_NOT_SUPPORTED == rc) {
823 ndk::ScopedAStatus ret = mSession->close();
824 mSession = nullptr;
825 ASSERT_TRUE(ret.isOk());
826 continue;
827 }
828 ASSERT_EQ(Status::OK, rc);
829
830 inputStreams.clear();
831 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, inputStreams));
832 ASSERT_NE(0u, inputStreams.size());
833
834 inputOutputMap.clear();
835 ASSERT_EQ(Status::OK, getZSLInputOutputMap(staticMeta, inputOutputMap));
836 ASSERT_NE(0u, inputOutputMap.size());
837
838 bool supportMonoY8 = false;
839 if (Status::OK == isMonochromeCamera(staticMeta)) {
840 for (auto& it : inputStreams) {
841 if (it.format == static_cast<uint32_t>(PixelFormat::Y8)) {
842 supportMonoY8 = true;
843 break;
844 }
845 }
846 }
847
848 int32_t jpegBufferSize = 0;
849 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
850 ASSERT_NE(0u, jpegBufferSize);
851
852 int32_t streamId = 0;
853 bool hasPrivToY8 = false, hasY8ToY8 = false, hasY8ToBlob = false;
854 uint32_t streamConfigCounter = 0;
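    // For every ZSL input/output format mapping, configure an input stream, a ZSL output
    // stream of the same size, and a processed output stream for each compatible output size,
    // and expect all three streams to be accepted.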
855 for (auto& inputIter : inputOutputMap) {
856 AvailableStream input;
857 ASSERT_EQ(Status::OK, findLargestSize(inputStreams, inputIter.inputFormat, input));
858 ASSERT_NE(0u, inputStreams.size());
859
860 if (inputIter.inputFormat ==
861 static_cast<uint32_t>(PixelFormat::IMPLEMENTATION_DEFINED) &&
862 inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
863 hasPrivToY8 = true;
864 } else if (inputIter.inputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
865 if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::BLOB)) {
866 hasY8ToBlob = true;
867 } else if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
868 hasY8ToY8 = true;
869 }
870 }
871 AvailableStream outputThreshold = {INT32_MAX, INT32_MAX, inputIter.outputFormat};
872 std::vector<AvailableStream> outputStreams;
873 ASSERT_EQ(Status::OK,
874 getAvailableOutputStreams(staticMeta, outputStreams, &outputThreshold));
875 for (auto& outputIter : outputStreams) {
876 Dataspace outputDataSpace =
877 getDataspace(static_cast<PixelFormat>(outputIter.format));
878 Stream zslStream = {
879 streamId++,
880 StreamType::OUTPUT,
881 input.width,
882 input.height,
883 static_cast<PixelFormat>(input.format),
884 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
885 GRALLOC_USAGE_HW_CAMERA_ZSL),
886 Dataspace::UNKNOWN,
887 StreamRotation::ROTATION_0,
888 std::string(),
889 jpegBufferSize,
890 -1,
891 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}};
892 Stream inputStream = {
893 streamId++,
894 StreamType::INPUT,
895 input.width,
896 input.height,
897 static_cast<PixelFormat>(input.format),
898 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(0),
899 Dataspace::UNKNOWN,
900 StreamRotation::ROTATION_0,
901 std::string(),
902 jpegBufferSize,
903 -1,
904 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}};
905 Stream outputStream = {
906 streamId++,
907 StreamType::OUTPUT,
908 outputIter.width,
909 outputIter.height,
910 static_cast<PixelFormat>(outputIter.format),
911 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
912 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
913 outputDataSpace,
914 StreamRotation::ROTATION_0,
915 std::string(),
916 jpegBufferSize,
917 -1,
918 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}};
919
920 std::vector<Stream> streams = {inputStream, zslStream, outputStream};
921
922 StreamConfiguration config;
923 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
924 jpegBufferSize);
925
926 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
927 /*expectStreamCombQuery*/ false);
928
929 config.streamConfigCounter = streamConfigCounter++;
930 std::vector<HalStream> halConfigs;
931 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
932 ASSERT_TRUE(ret.isOk());
933 ASSERT_EQ(3u, halConfigs.size());
934 }
935 }
936
937 if (supportMonoY8) {
938 if (Status::OK == isZSLModeAvailable(staticMeta, PRIV_REPROCESS)) {
939 ASSERT_TRUE(hasPrivToY8);
940 }
941 if (Status::OK == isZSLModeAvailable(staticMeta, YUV_REPROCESS)) {
942 ASSERT_TRUE(hasY8ToY8);
943 ASSERT_TRUE(hasY8ToBlob);
944 }
945 }
946
947 ndk::ScopedAStatus ret = mSession->close();
948 mSession = nullptr;
949 ASSERT_TRUE(ret.isOk());
950 }
951}
952
953// Check whether session parameters are supported. If HAL support for them
954// exists, then try to configure a preview stream using them.
955TEST_P(CameraAidlTest, configureStreamsWithSessionParameters) {
956 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
957 std::vector<AvailableStream> outputPreviewStreams;
958 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
959 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
960
961 for (const auto& name : cameraDeviceNames) {
962 CameraMetadata meta;
963
964 std::shared_ptr<ICameraDevice> unusedCameraDevice;
965 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
966 &unusedCameraDevice /*out*/);
967 camera_metadata_t* staticMetaBuffer =
968 reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
969
970 std::unordered_set<int32_t> availableSessionKeys;
971 auto rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
972 &availableSessionKeys);
973 ASSERT_TRUE(Status::OK == rc);
974 if (availableSessionKeys.empty()) {
975 ndk::ScopedAStatus ret = mSession->close();
976 mSession = nullptr;
977 ASSERT_TRUE(ret.isOk());
978 continue;
979 }
980
981 android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings;
982 android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams,
983 modifiedSessionParams;
984 constructFilteredSettings(mSession, availableSessionKeys, RequestTemplate::PREVIEW,
985 &previewRequestSettings, &sessionParams);
986 if (sessionParams.isEmpty()) {
987 ndk::ScopedAStatus ret = mSession->close();
988 mSession = nullptr;
989 ASSERT_TRUE(ret.isOk());
990 continue;
991 }
992
993 outputPreviewStreams.clear();
994
995 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams,
996 &previewThreshold));
997 ASSERT_NE(0u, outputPreviewStreams.size());
998
999 Stream previewStream = {0,
1000 StreamType::OUTPUT,
1001 outputPreviewStreams[0].width,
1002 outputPreviewStreams[0].height,
1003 static_cast<PixelFormat>(outputPreviewStreams[0].format),
1004 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1005 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
1006 Dataspace::UNKNOWN,
1007 StreamRotation::ROTATION_0,
1008 std::string(),
1009 /*bufferSize*/ 0,
1010 /*groupId*/ -1,
1011 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}};
1012
1013 std::vector<Stream> streams = {previewStream};
1014 StreamConfiguration config;
1015
1016 config.streams = streams;
1017 config.operationMode = StreamConfigurationMode::NORMAL_MODE;
1018 modifiedSessionParams = sessionParams;
1019 auto sessionParamsBuffer = sessionParams.release();
1020 std::vector<uint8_t> rawSessionParam =
1021 std::vector(reinterpret_cast<uint8_t*>(sessionParamsBuffer),
1022 reinterpret_cast<uint8_t*>(sessionParamsBuffer) +
1023 get_camera_metadata_size(sessionParamsBuffer));
1024
1025 config.sessionParams.metadata = rawSessionParam;
1026 config.streamConfigCounter = 0;
1027 config.streams = {previewStream};
1028 config.streamConfigCounter = 0;
1029 config.multiResolutionInputImage = false;
1030
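    // Drop one of the available session keys to build a modified parameter set and query
    // whether stream reconfiguration would be required for it.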
1031 bool newSessionParamsAvailable = false;
1032 for (const auto& it : availableSessionKeys) {
1033 if (modifiedSessionParams.exists(it)) {
1034 modifiedSessionParams.erase(it);
1035 newSessionParamsAvailable = true;
1036 break;
1037 }
1038 }
1039 if (newSessionParamsAvailable) {
1040 auto modifiedSessionParamsBuffer = modifiedSessionParams.release();
1041 verifySessionReconfigurationQuery(mSession, sessionParamsBuffer,
1042 modifiedSessionParamsBuffer);
1043 modifiedSessionParams.acquire(modifiedSessionParamsBuffer);
1044 }
1045
1046 std::vector<HalStream> halConfigs;
1047 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1048 ASSERT_TRUE(ret.isOk());
1049 ASSERT_EQ(1u, halConfigs.size());
1050
1051 sessionParams.acquire(sessionParamsBuffer);
1052 ret = mSession->close();
1053 mSession = nullptr;
1054 ASSERT_TRUE(ret.isOk());
1055 }
1056}
1057
1058// Verify that all supported preview + still capture stream combinations
1059// can be configured successfully.
1060TEST_P(CameraAidlTest, configureStreamsPreviewStillOutputs) {
1061 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1062 std::vector<AvailableStream> outputBlobStreams;
1063 std::vector<AvailableStream> outputPreviewStreams;
1064 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
1065 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
1066 AvailableStream blobThreshold = {INT32_MAX, INT32_MAX, static_cast<int32_t>(PixelFormat::BLOB)};
1067
1068 for (const auto& name : cameraDeviceNames) {
1069 CameraMetadata meta;
1070
1071 std::shared_ptr<ICameraDevice> cameraDevice;
1072 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
1073 &cameraDevice /*out*/);
1074
1075 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1076
1077 // Check if the camera supports depth only
1078 if (isDepthOnly(staticMeta)) {
1079 ndk::ScopedAStatus ret = mSession->close();
1080 mSession = nullptr;
1081 ASSERT_TRUE(ret.isOk());
1082 continue;
1083 }
1084
1085 outputBlobStreams.clear();
1086 ASSERT_EQ(Status::OK,
1087 getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold));
1088 ASSERT_NE(0u, outputBlobStreams.size());
1089
1090 outputPreviewStreams.clear();
1091 ASSERT_EQ(Status::OK,
1092 getAvailableOutputStreams(staticMeta, outputPreviewStreams, &previewThreshold));
1093 ASSERT_NE(0u, outputPreviewStreams.size());
1094
1095 int32_t jpegBufferSize = 0;
1096 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
1097 ASSERT_NE(0u, jpegBufferSize);
1098
1099 int32_t streamId = 0;
1100 uint32_t streamConfigCounter = 0;
1101
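    // Configure every supported JPEG (BLOB) + preview stream pairing and expect two
    // HalStreams back for each combination.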
1102 for (auto& blobIter : outputBlobStreams) {
1103 for (auto& previewIter : outputPreviewStreams) {
1104 Stream previewStream = {
1105 streamId++,
1106 StreamType::OUTPUT,
1107 previewIter.width,
1108 previewIter.height,
1109 static_cast<PixelFormat>(previewIter.format),
1110 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1111 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
1112 Dataspace::UNKNOWN,
1113 StreamRotation::ROTATION_0,
1114 std::string(),
1115 /*bufferSize*/ 0,
1116 /*groupId*/ -1,
1117 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}};
1118 Stream blobStream = {
1119 streamId++,
1120 StreamType::OUTPUT,
1121 blobIter.width,
1122 blobIter.height,
1123 static_cast<PixelFormat>(blobIter.format),
1124 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1125 GRALLOC1_CONSUMER_USAGE_CPU_READ),
1126 Dataspace::JFIF,
1127 StreamRotation::ROTATION_0,
1128 std::string(),
1129 /*bufferSize*/ 0,
1130 /*groupId*/ -1,
1131 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}};
1132 std::vector<Stream> streams = {previewStream, blobStream};
1133 StreamConfiguration config;
1134
1135 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
1136 jpegBufferSize);
1137 config.streamConfigCounter = streamConfigCounter++;
1138 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
1139 /*expectStreamCombQuery*/ false);
1140
1141 std::vector<HalStream> halConfigs;
1142 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1143 ASSERT_TRUE(ret.isOk());
1144 ASSERT_EQ(2u, halConfigs.size());
1145 }
1146 }
1147
1148 ndk::ScopedAStatus ret = mSession->close();
1149 mSession = nullptr;
1150 ASSERT_TRUE(ret.isOk());
1151 }
1152}
1153
1154// In case constrained mode is supported, test whether it can be
1155// configured. Additionally check for common invalid inputs when
1156// using this mode.
1157TEST_P(CameraAidlTest, configureStreamsConstrainedOutputs) {
1158 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1159
1160 for (const auto& name : cameraDeviceNames) {
1161 CameraMetadata meta;
1162 std::shared_ptr<ICameraDevice> cameraDevice;
1163
1164 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
1165 &cameraDevice /*out*/);
1166 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1167
1168 Status rc = isConstrainedModeAvailable(staticMeta);
1169 if (Status::OPERATION_NOT_SUPPORTED == rc) {
1170 ndk::ScopedAStatus ret = mSession->close();
1171 mSession = nullptr;
1172 ASSERT_TRUE(ret.isOk());
1173 continue;
1174 }
1175 ASSERT_EQ(Status::OK, rc);
1176
1177 AvailableStream hfrStream;
1178 rc = pickConstrainedModeSize(staticMeta, hfrStream);
1179 ASSERT_EQ(Status::OK, rc);
1180
1181 int32_t streamId = 0;
1182 uint32_t streamConfigCounter = 0;
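    // First configure the valid constrained high-speed stream, then verify that invalid
    // variants (0x0, INT32_MAX dimensions, undefined format) are rejected.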
1183 Stream stream = {streamId,
1184 StreamType::OUTPUT,
1185 hfrStream.width,
1186 hfrStream.height,
1187 static_cast<PixelFormat>(hfrStream.format),
1188 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1189 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1190 Dataspace::UNKNOWN,
1191 StreamRotation::ROTATION_0,
1192 std::string(),
1193 /*bufferSize*/ 0,
1194 /*groupId*/ -1,
1195 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}};
1196 std::vector<Stream> streams = {stream};
1197 StreamConfiguration config;
1198 createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
1199 &config);
1200
1201 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
1202 /*expectStreamCombQuery*/ false);
1203
1204 config.streamConfigCounter = streamConfigCounter++;
1205 std::vector<HalStream> halConfigs;
1206 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1207 ASSERT_TRUE(ret.isOk());
1208 ASSERT_EQ(1u, halConfigs.size());
1209 ASSERT_EQ(halConfigs[0].id, streamId);
1210
1211 stream = {streamId++,
1212 StreamType::OUTPUT,
1213 static_cast<uint32_t>(0),
1214 static_cast<uint32_t>(0),
1215 static_cast<PixelFormat>(hfrStream.format),
1216 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1217 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1218 Dataspace::UNKNOWN,
1219 StreamRotation::ROTATION_0,
1220 std::string(),
1221 /*bufferSize*/ 0,
1222 /*groupId*/ -1,
1223 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}};
1224 streams[0] = stream;
1225 createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
1226 &config);
1227
1228 config.streamConfigCounter = streamConfigCounter++;
1229 std::vector<HalStream> halConfig;
1230 ret = mSession->configureStreams(config, &halConfig);
1231 ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
1232 ret.getServiceSpecificError() ||
1233 static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError());
1234
1235 stream = {streamId++,
1236 StreamType::OUTPUT,
1237 INT32_MAX,
1238 INT32_MAX,
1239 static_cast<PixelFormat>(hfrStream.format),
1240 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1241 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1242 Dataspace::UNKNOWN,
1243 StreamRotation::ROTATION_0,
1244 std::string(),
1245 /*bufferSize*/ 0,
1246 /*groupId*/ -1,
1247 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}};
1248 streams[0] = stream;
1249 createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
1250 &config);
1251
1252 config.streamConfigCounter = streamConfigCounter++;
1253 halConfigs.clear();
1254 ret = mSession->configureStreams(config, &halConfigs);
1255 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
1256
1257 stream = {streamId++,
1258 StreamType::OUTPUT,
1259 hfrStream.width,
1260 hfrStream.height,
1261 static_cast<PixelFormat>(UINT32_MAX),
1262 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1263 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1264 Dataspace::UNKNOWN,
1265 StreamRotation::ROTATION_0,
1266 std::string(),
1267 /*bufferSize*/ 0,
1268 /*groupId*/ -1,
1269 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}};
1270 streams[0] = stream;
1271 createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
1272 &config);
1273
1274 config.streamConfigCounter = streamConfigCounter++;
1275 halConfigs.clear();
1276 ret = mSession->configureStreams(config, &halConfigs);
1277 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
1278
1279 ret = mSession->close();
1280 mSession = nullptr;
1281 ASSERT_TRUE(ret.isOk());
1282 }
1283}
1284
1285// Verify that all supported video + snapshot stream combinations can
1286// be configured successfully.
1287TEST_P(CameraAidlTest, configureStreamsVideoStillOutputs) {
1288 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1289 std::vector<AvailableStream> outputBlobStreams;
1290 std::vector<AvailableStream> outputVideoStreams;
1291 AvailableStream videoThreshold = {kMaxVideoWidth, kMaxVideoHeight,
1292 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
1293 AvailableStream blobThreshold = {kMaxVideoWidth, kMaxVideoHeight,
1294 static_cast<int32_t>(PixelFormat::BLOB)};
1295
1296 for (const auto& name : cameraDeviceNames) {
1297 CameraMetadata meta;
1298 std::shared_ptr<ICameraDevice> cameraDevice;
1299
1300 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
1301 &cameraDevice /*out*/);
1302
1303 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1304
1305 // Check if the camera supports depth only
1306 if (isDepthOnly(staticMeta)) {
1307 ndk::ScopedAStatus ret = mSession->close();
1308 mSession = nullptr;
1309 ASSERT_TRUE(ret.isOk());
1310 continue;
1311 }
1312
1313 outputBlobStreams.clear();
1314 ASSERT_EQ(Status::OK,
1315 getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold));
1316 ASSERT_NE(0u, outputBlobStreams.size());
1317
1318 outputVideoStreams.clear();
1319 ASSERT_EQ(Status::OK,
1320 getAvailableOutputStreams(staticMeta, outputVideoStreams, &videoThreshold));
1321 ASSERT_NE(0u, outputVideoStreams.size());
1322
1323 int32_t jpegBufferSize = 0;
1324 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
1325 ASSERT_NE(0u, jpegBufferSize);
1326
1327 int32_t streamId = 0;
1328 uint32_t streamConfigCounter = 0;
1329 for (auto& blobIter : outputBlobStreams) {
1330 for (auto& videoIter : outputVideoStreams) {
1331 Stream videoStream = {
1332 streamId++,
1333 StreamType::OUTPUT,
1334 videoIter.width,
1335 videoIter.height,
1336 static_cast<PixelFormat>(videoIter.format),
1337 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1338 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1339 Dataspace::UNKNOWN,
1340 StreamRotation::ROTATION_0,
1341 std::string(),
1342 jpegBufferSize,
1343 /*groupId*/ -1,
1344 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}};
1345 Stream blobStream = {
1346 streamId++,
1347 StreamType::OUTPUT,
1348 blobIter.width,
1349 blobIter.height,
1350 static_cast<PixelFormat>(blobIter.format),
1351 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1352 GRALLOC1_CONSUMER_USAGE_CPU_READ),
1353 Dataspace::JFIF,
1354 StreamRotation::ROTATION_0,
1355 std::string(),
1356 jpegBufferSize,
1357 /*groupId*/ -1,
1358 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}};
1359 std::vector<Stream> streams = {videoStream, blobStream};
1360 StreamConfiguration config;
1361
1362 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
1363 jpegBufferSize);
1364 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true,
1365 /*expectStreamCombQuery*/ false);
1366
1367 config.streamConfigCounter = streamConfigCounter++;
1368 std::vector<HalStream> halConfigs;
1369 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1370 ASSERT_TRUE(ret.isOk());
1371 ASSERT_EQ(2u, halConfigs.size());
1372 }
1373 }
1374
1375 ndk::ScopedAStatus ret = mSession->close();
1376 mSession = nullptr;
1377 ASSERT_TRUE(ret.isOk());
1378 }
1379}
1380
1381// Generate and verify a camera capture request
1382TEST_P(CameraAidlTest, processCaptureRequestPreview) {
1383 // TODO(b/220897574): Failing with BUFFER_ERROR
1384 processCaptureRequestInternal(GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, RequestTemplate::PREVIEW,
1385 false /*secureOnlyCameras*/);
1386}
1387
1388// Generate and verify a secure camera capture request
1389TEST_P(CameraAidlTest, processSecureCaptureRequest) {
1390 processCaptureRequestInternal(GRALLOC1_PRODUCER_USAGE_PROTECTED, RequestTemplate::STILL_CAPTURE,
1391 true /*secureOnlyCameras*/);
1392}
1393
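// Verify capture requests both with preview stabilization disabled and enabled.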
1394TEST_P(CameraAidlTest, processCaptureRequestPreviewStabilization) {
1395 std::unordered_map<std::string, nsecs_t> cameraDeviceToTimeLag;
1396 processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ false,
1397 cameraDeviceToTimeLag);
1398 processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ true,
1399 cameraDeviceToTimeLag);
1400}
1401
1402// Generate and verify a multi-camera capture request
1403TEST_P(CameraAidlTest, processMultiCaptureRequestPreview) {
1404 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1405 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
1406 static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
1407 int64_t bufferId = 1;
1408 uint32_t frameNumber = 1;
1409 std::vector<uint8_t> settings;
1410 std::vector<uint8_t> emptySettings;
1411 std::string invalidPhysicalId = "-1";
1412
1413 for (const auto& name : cameraDeviceNames) {
1414 std::string version, deviceId;
1415 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1416 CameraMetadata metadata;
1417
1418 std::shared_ptr<ICameraDevice> unusedDevice;
1419 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &metadata /*out*/,
1420 &unusedDevice /*out*/);
1421
1422 camera_metadata_t* staticMeta =
1423 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
1424 Status rc = isLogicalMultiCamera(staticMeta);
1425 if (Status::OPERATION_NOT_SUPPORTED == rc) {
1426 ndk::ScopedAStatus ret = mSession->close();
1427 mSession = nullptr;
1428 ASSERT_TRUE(ret.isOk());
1429 continue;
1430 }
1431
1432 std::unordered_set<std::string> physicalIds;
1433 rc = getPhysicalCameraIds(staticMeta, &physicalIds);
1434 ASSERT_TRUE(Status::OK == rc);
1435 ASSERT_TRUE(physicalIds.size() > 1);
1436
1437 std::unordered_set<int32_t> physicalRequestKeyIDs;
1438 rc = getSupportedKeys(staticMeta, ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS,
1439 &physicalRequestKeyIDs);
1440 ASSERT_TRUE(Status::OK == rc);
1441 if (physicalRequestKeyIDs.empty()) {
1442 ndk::ScopedAStatus ret = mSession->close();
1443 mSession = nullptr;
1444 ASSERT_TRUE(ret.isOk());
1445 // The logical camera doesn't support any individual physical requests.
1446 continue;
1447 }
1448
1449 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultPreviewSettings;
1450 android::hardware::camera::common::V1_0::helper::CameraMetadata filteredSettings;
1451 constructFilteredSettings(mSession, physicalRequestKeyIDs, RequestTemplate::PREVIEW,
1452 &defaultPreviewSettings, &filteredSettings);
1453 if (filteredSettings.isEmpty()) {
1454 // No physical device settings in default request.
1455 ndk::ScopedAStatus ret = mSession->close();
1456 mSession = nullptr;
1457 ASSERT_TRUE(ret.isOk());
1458 continue;
1459 }
1460
1461 const camera_metadata_t* settingsBuffer = defaultPreviewSettings.getAndLock();
1462 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1463 settings.assign(rawSettingsBuffer,
1464 rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1465 CameraMetadata settingsMetadata = {settings};
1466 overrideRotateAndCrop(&settingsMetadata);
1467
1468 ndk::ScopedAStatus ret = mSession->close();
1469 mSession = nullptr;
1470 ASSERT_TRUE(ret.isOk());
1471
1472 // Leave only 2 physical devices in the id set.
1473 auto it = physicalIds.begin();
1474 std::string physicalDeviceId = *it;
1475 it++;
1476 physicalIds.erase(++it, physicalIds.end());
1477 ASSERT_EQ(physicalIds.size(), 2u);
1478
1479 std::vector<HalStream> halStreams;
1480 bool supportsPartialResults = false;
1481 bool useHalBufManager = false;
1482 int32_t partialResultCount = 0;
1483 Stream previewStream;
1484 std::shared_ptr<DeviceCb> cb;
1485
1486 configurePreviewStreams(name, mProvider, &previewThreshold, physicalIds, &mSession,
1487 &previewStream, &halStreams /*out*/,
1488 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
1489 &useHalBufManager /*out*/, &cb /*out*/, 0 /*streamConfigCounter*/);
1490
1491 ::aidl::android::hardware::common::fmq::MQDescriptor<
1492 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1493 descriptor;
1494 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1495 ASSERT_TRUE(resultQueueRet.isOk());
1496 std::shared_ptr<ResultMetadataQueue> resultQueue =
1497 std::make_shared<ResultMetadataQueue>(descriptor);
1498 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1499             ALOGE("%s: HAL returned an empty result metadata fmq; not using it", __func__);
1500 resultQueue = nullptr;
1501 // Don't use the queue onwards.
1502 }
1503
1504 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1505 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1506 partialResultCount, physicalIds, resultQueue);
1507
1508 std::vector<CaptureRequest> requests(1);
1509 CaptureRequest& request = requests[0];
1510 request.frameNumber = frameNumber;
1511 request.fmqSettingsSize = 0;
1512 request.settings.metadata = settings;
1513
1514 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1515
1516 std::vector<buffer_handle_t> graphicBuffers;
1517 graphicBuffers.reserve(halStreams.size());
1518 outputBuffers.resize(halStreams.size());
1519 size_t k = 0;
1520 for (const auto& halStream : halStreams) {
1521 buffer_handle_t buffer_handle;
1522 if (useHalBufManager) {
1523 outputBuffers[k] = {halStream.id, /*bufferId*/ 0, NativeHandle(),
1524 BufferStatus::OK, NativeHandle(), NativeHandle()};
1525 } else {
1526 allocateGraphicBuffer(previewStream.width, previewStream.height,
1527 android_convertGralloc1To0Usage(
1528 static_cast<uint64_t>(halStream.producerUsage),
1529 static_cast<uint64_t>(halStream.consumerUsage)),
1530 halStream.overrideFormat, &buffer_handle);
1531 graphicBuffers.push_back(buffer_handle);
1532 outputBuffers[k] = {
1533 halStream.id, bufferId, ::android::makeToAidl(buffer_handle),
1534 BufferStatus::OK, NativeHandle(), NativeHandle()};
1535 bufferId++;
1536 }
1537 k++;
1538 }
1539
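        // Attach the filtered settings for the first physical camera to the capture request.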
1540 std::vector<PhysicalCameraSetting> camSettings(1);
1541 const camera_metadata_t* filteredSettingsBuffer = filteredSettings.getAndLock();
1542 uint8_t* rawFilteredSettingsBuffer = (uint8_t*)filteredSettingsBuffer;
1543 camSettings[0].settings = {std::vector(
1544 rawFilteredSettingsBuffer,
1545 rawFilteredSettingsBuffer + get_camera_metadata_size(filteredSettingsBuffer))};
1546 overrideRotateAndCrop(&camSettings[0].settings);
1547 camSettings[0].fmqSettingsSize = 0;
1548 camSettings[0].physicalCameraId = physicalDeviceId;
1549
1550 request.inputBuffer = {
1551 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
1552 request.physicalCameraSettings = camSettings;
1553
1554 {
1555 std::unique_lock<std::mutex> l(mLock);
1556 mInflightMap.clear();
1557 mInflightMap[frameNumber] = inflightReq;
1558 }
1559
1560 int32_t numRequestProcessed = 0;
1561 std::vector<BufferCache> cachesToRemove;
1562 ndk::ScopedAStatus returnStatus =
1563 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1564 ASSERT_TRUE(returnStatus.isOk());
1565 ASSERT_EQ(numRequestProcessed, 1u);
1566
1567 {
1568 std::unique_lock<std::mutex> l(mLock);
1569 while (!inflightReq->errorCodeValid &&
1570 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1571 auto timeout = std::chrono::system_clock::now() +
1572 std::chrono::seconds(kStreamBufferTimeoutSec);
1573 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1574 }
1575
1576 ASSERT_FALSE(inflightReq->errorCodeValid);
1577 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1578
1579 request.frameNumber++;
1580 // Empty settings should be supported after the first call
1581 // for repeating requests.
1582 request.settings.metadata.clear();
1583 request.physicalCameraSettings[0].settings.metadata.clear();
1584 // The buffer has been registered to HAL by bufferId, so per
1585 // API contract we should send a null handle for this buffer
1586 request.outputBuffers[0].buffer = NativeHandle();
1587 mInflightMap.clear();
1588 inflightReq = std::make_shared<InFlightRequest>(
1589 static_cast<ssize_t>(physicalIds.size()), false, supportsPartialResults,
1590 partialResultCount, physicalIds, resultQueue);
1591 mInflightMap[request.frameNumber] = inflightReq;
1592 }
1593
1594 returnStatus =
1595 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1596 ASSERT_TRUE(returnStatus.isOk());
1597 ASSERT_EQ(numRequestProcessed, 1u);
1598
1599 {
1600 std::unique_lock<std::mutex> l(mLock);
1601 while (!inflightReq->errorCodeValid &&
1602 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1603 auto timeout = std::chrono::system_clock::now() +
1604 std::chrono::seconds(kStreamBufferTimeoutSec);
1605 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1606 }
1607
1608 ASSERT_FALSE(inflightReq->errorCodeValid);
1609 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1610 }
1611
1612         // An invalid physical camera id must cause the capture request to fail
1613 frameNumber++;
1614 camSettings[0].physicalCameraId = invalidPhysicalId;
1615 camSettings[0].settings.metadata = settings;
1616
1617 request.physicalCameraSettings = camSettings; // Invalid camera settings
1618 returnStatus =
1619 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1620 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
1621 returnStatus.getServiceSpecificError());
1622
1623 defaultPreviewSettings.unlock(settingsBuffer);
1624 filteredSettings.unlock(filteredSettingsBuffer);
1625
1626 if (useHalBufManager) {
1627 std::vector<int32_t> streamIds(halStreams.size());
1628 for (size_t i = 0; i < streamIds.size(); i++) {
1629 streamIds[i] = halStreams[i].id;
1630 }
1631 verifyBuffersReturned(mSession, streamIds, cb);
1632 }
1633
1634 ret = mSession->close();
1635 mSession = nullptr;
1636 ASSERT_TRUE(ret.isOk());
1637 }
1638}
1639
1640// Generate and verify an ultra high resolution capture request
1641TEST_P(CameraAidlTest, processUltraHighResolutionRequest) {
1642 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1643 int64_t bufferId = 1;
1644 int32_t frameNumber = 1;
1645 CameraMetadata settings;
1646
1647 for (const auto& name : cameraDeviceNames) {
1648 std::string version, deviceId;
1649 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1650 CameraMetadata meta;
1651
1652 std::shared_ptr<ICameraDevice> unusedDevice;
1653 openEmptyDeviceSession(name, mProvider, &mSession, &meta, &unusedDevice);
1654 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1655 if (!isUltraHighResolution(staticMeta)) {
1656 ndk::ScopedAStatus ret = mSession->close();
1657 mSession = nullptr;
1658 ASSERT_TRUE(ret.isOk());
1659 continue;
1660 }
1661 CameraMetadata req;
1662 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
1663 ndk::ScopedAStatus ret =
1664 mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE, &req);
1665 ASSERT_TRUE(ret.isOk());
1666
1667 const camera_metadata_t* metadata =
1668 reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
1669 size_t expectedSize = req.metadata.size();
1670 int result = validate_camera_metadata_structure(metadata, &expectedSize);
1671 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
1672
1673 size_t entryCount = get_camera_metadata_entry_count(metadata);
1674 ASSERT_GT(entryCount, 0u);
1675 defaultSettings = metadata;
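        // Switch the capture settings to maximum resolution sensor pixel mode.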
1676 uint8_t sensorPixelMode =
1677 static_cast<uint8_t>(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
1678 ASSERT_EQ(::android::OK,
1679 defaultSettings.update(ANDROID_SENSOR_PIXEL_MODE, &sensorPixelMode, 1));
1680
1681 const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
1682 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1683 settings.metadata = std::vector(
1684 rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1685 overrideRotateAndCrop(&settings);
1686
1687 ret = mSession->close();
1688 mSession = nullptr;
1689 ASSERT_TRUE(ret.isOk());
1690
1691 std::vector<HalStream> halStreams;
1692 bool supportsPartialResults = false;
1693 bool useHalBufManager = false;
1694 int32_t partialResultCount = 0;
1695 Stream previewStream;
1696 std::shared_ptr<DeviceCb> cb;
1697
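        // Exercise both YUV and RAW16 outputs configured at maximum sensor resolution.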
1698 std::list<PixelFormat> pixelFormats = {PixelFormat::YCBCR_420_888, PixelFormat::RAW16};
1699 for (PixelFormat format : pixelFormats) {
1700 configureStreams(name, mProvider, format, &mSession, &previewStream, &halStreams,
1701 &supportsPartialResults, &partialResultCount, &useHalBufManager, &cb,
1702 0, /*maxResolution*/ true);
1703 ASSERT_NE(mSession, nullptr);
1704
1705 ::aidl::android::hardware::common::fmq::MQDescriptor<
1706 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1707 descriptor;
1708 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1709 ASSERT_TRUE(resultQueueRet.isOk());
1710
1711 std::shared_ptr<ResultMetadataQueue> resultQueue =
1712 std::make_shared<ResultMetadataQueue>(descriptor);
1713 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1714                 ALOGE("%s: HAL returned an empty result metadata fmq; not using it", __func__);
1715 resultQueue = nullptr;
1716 // Don't use the queue onwards.
1717 }
1718
1719 std::vector<buffer_handle_t> graphicBuffers;
1720 graphicBuffers.reserve(halStreams.size());
1721 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1722 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1723 partialResultCount, std::unordered_set<std::string>(), resultQueue);
1724
1725 std::vector<CaptureRequest> requests(1);
1726 CaptureRequest& request = requests[0];
1727 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1728 outputBuffers.resize(halStreams.size());
1729
1730 size_t k = 0;
1731 for (const auto& halStream : halStreams) {
1732 buffer_handle_t buffer_handle;
1733 if (useHalBufManager) {
1734 outputBuffers[k] = {halStream.id, 0,
1735 NativeHandle(), BufferStatus::OK,
1736 NativeHandle(), NativeHandle()};
1737 } else {
1738 allocateGraphicBuffer(previewStream.width, previewStream.height,
1739 android_convertGralloc1To0Usage(
1740 static_cast<uint64_t>(halStream.producerUsage),
1741 static_cast<uint64_t>(halStream.consumerUsage)),
1742 halStream.overrideFormat, &buffer_handle);
1743 graphicBuffers.push_back(buffer_handle);
1744 outputBuffers[k] = {
1745 halStream.id, bufferId, ::android::makeToAidl(buffer_handle),
1746 BufferStatus::OK, NativeHandle(), NativeHandle()};
1747 bufferId++;
1748 }
1749 k++;
1750 }
1751
1752 request.inputBuffer = {
1753 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
1754 request.frameNumber = frameNumber;
1755 request.fmqSettingsSize = 0;
1756 request.settings = settings;
1757 request.inputWidth = 0;
1758 request.inputHeight = 0;
1759
1760 {
1761 std::unique_lock<std::mutex> l(mLock);
1762 mInflightMap.clear();
1763 mInflightMap[frameNumber] = inflightReq;
1764 }
1765
1766 int32_t numRequestProcessed = 0;
1767 std::vector<BufferCache> cachesToRemove;
1768 ndk::ScopedAStatus returnStatus =
1769 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1770 ASSERT_TRUE(returnStatus.isOk());
1771 ASSERT_EQ(numRequestProcessed, 1u);
1772
1773 {
1774 std::unique_lock<std::mutex> l(mLock);
1775 while (!inflightReq->errorCodeValid &&
1776 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1777 auto timeout = std::chrono::system_clock::now() +
1778 std::chrono::seconds(kStreamBufferTimeoutSec);
1779 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1780 }
1781
1782 ASSERT_FALSE(inflightReq->errorCodeValid);
1783 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1784 }
1785 if (useHalBufManager) {
1786 std::vector<int32_t> streamIds(halStreams.size());
1787 for (size_t i = 0; i < streamIds.size(); i++) {
1788 streamIds[i] = halStreams[i].id;
1789 }
1790 verifyBuffersReturned(mSession, streamIds, cb);
1791 }
1792
1793 ret = mSession->close();
1794 mSession = nullptr;
1795 ASSERT_TRUE(ret.isOk());
1796 }
1797 }
1798}
1799
1800// Generate and verify 10-bit dynamic range request
1801TEST_P(CameraAidlTest, process10BitDynamicRangeRequest) {
1802 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1803 int64_t bufferId = 1;
1804 int32_t frameNumber = 1;
1805 CameraMetadata settings;
1806
1807 for (const auto& name : cameraDeviceNames) {
1808 std::string version, deviceId;
1809 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1810 CameraMetadata meta;
1811 std::shared_ptr<ICameraDevice> device;
1812 openEmptyDeviceSession(name, mProvider, &mSession, &meta, &device);
1813 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1814 if (!is10BitDynamicRangeCapable(staticMeta)) {
1815 ndk::ScopedAStatus ret = mSession->close();
1816 mSession = nullptr;
1817 ASSERT_TRUE(ret.isOk());
1818 continue;
1819 }
1820 std::vector<
1821 aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap>
1822 profileList;
1823 get10BitDynamicRangeProfiles(staticMeta, &profileList);
1824 ASSERT_FALSE(profileList.empty());
1825
1826 CameraMetadata req;
1827 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
1828 ndk::ScopedAStatus ret =
1829 mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE, &req);
1830 ASSERT_TRUE(ret.isOk());
1831
1832 const camera_metadata_t* metadata =
1833 reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
1834 size_t expectedSize = req.metadata.size();
1835 int result = validate_camera_metadata_structure(metadata, &expectedSize);
1836 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
1837
1838 size_t entryCount = get_camera_metadata_entry_count(metadata);
1839 ASSERT_GT(entryCount, 0u);
1840 defaultSettings = metadata;
1841
1842 const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
1843 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1844 settings.metadata = std::vector(
1845 rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1846 overrideRotateAndCrop(&settings);
1847
1848 ret = mSession->close();
1849 mSession = nullptr;
1850 ASSERT_TRUE(ret.isOk());
1851
1852 std::vector<HalStream> halStreams;
1853 bool supportsPartialResults = false;
1854 bool useHalBufManager = false;
1855 int32_t partialResultCount = 0;
1856 Stream previewStream;
1857 std::shared_ptr<DeviceCb> cb;
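        // Configure and capture once for every advertised 10-bit dynamic range profile.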
1858 for (const auto& profile : profileList) {
1859 configureStreams(name, mProvider, PixelFormat::IMPLEMENTATION_DEFINED, &mSession,
1860 &previewStream, &halStreams, &supportsPartialResults,
1861 &partialResultCount, &useHalBufManager, &cb, 0,
1862 /*maxResolution*/ false, profile);
1863 ASSERT_NE(mSession, nullptr);
1864
1865 ::aidl::android::hardware::common::fmq::MQDescriptor<
1866 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1867 descriptor;
1868 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1869 ASSERT_TRUE(resultQueueRet.isOk());
1870
1871 std::shared_ptr<ResultMetadataQueue> resultQueue =
1872 std::make_shared<ResultMetadataQueue>(descriptor);
1873 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1874                 ALOGE("%s: HAL returned an empty result metadata fmq; not using it", __func__);
1875 resultQueue = nullptr;
1876 // Don't use the queue onwards.
1877 }
1878
1879 std::vector<buffer_handle_t> graphicBuffers;
1880 graphicBuffers.reserve(halStreams.size());
1881
1882 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1883 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1884 partialResultCount, std::unordered_set<std::string>(), resultQueue);
1885
1886 std::vector<CaptureRequest> requests(1);
1887 CaptureRequest& request = requests[0];
1888 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1889 outputBuffers.resize(halStreams.size());
1890
1891 size_t k = 0;
1892 for (const auto& halStream : halStreams) {
1893 buffer_handle_t buffer_handle;
1894 if (useHalBufManager) {
1895 outputBuffers[k] = {halStream.id, 0,
1896 NativeHandle(), BufferStatus::OK,
1897 NativeHandle(), NativeHandle()};
1898 } else {
1899 allocateGraphicBuffer(previewStream.width, previewStream.height,
1900 android_convertGralloc1To0Usage(
1901 static_cast<uint64_t>(halStream.producerUsage),
1902 static_cast<uint64_t>(halStream.consumerUsage)),
1903 halStream.overrideFormat, &buffer_handle);
1904
1905 graphicBuffers.push_back(buffer_handle);
1906 outputBuffers[k] = {
1907 halStream.id, bufferId, android::makeToAidl(buffer_handle),
1908 BufferStatus::OK, NativeHandle(), NativeHandle()};
1909 bufferId++;
1910 }
1911 k++;
1912 }
1913
1914 request.inputBuffer = {
1915 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
1916 request.frameNumber = frameNumber;
1917 request.fmqSettingsSize = 0;
1918 request.settings = settings;
1919 request.inputWidth = 0;
1920 request.inputHeight = 0;
1921
1922 {
1923 std::unique_lock<std::mutex> l(mLock);
1924 mInflightMap.clear();
1925 mInflightMap[frameNumber] = inflightReq;
1926 }
1927
1928 int32_t numRequestProcessed = 0;
1929 std::vector<BufferCache> cachesToRemove;
1930 ndk::ScopedAStatus returnStatus =
1931 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1932 ASSERT_TRUE(returnStatus.isOk());
1933 ASSERT_EQ(numRequestProcessed, 1u);
1934
1935 {
1936 std::unique_lock<std::mutex> l(mLock);
1937 while (!inflightReq->errorCodeValid &&
1938 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1939 auto timeout = std::chrono::system_clock::now() +
1940 std::chrono::seconds(kStreamBufferTimeoutSec);
1941 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1942 }
1943
1944 ASSERT_FALSE(inflightReq->errorCodeValid);
1945 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1946 verify10BitMetadata(mHandleImporter, *inflightReq, profile);
1947 }
1948 if (useHalBufManager) {
1949 std::vector<int32_t> streamIds(halStreams.size());
1950 for (size_t i = 0; i < streamIds.size(); i++) {
1951 streamIds[i] = halStreams[i].id;
1952 }
1953 mSession->signalStreamFlush(streamIds, /*streamConfigCounter*/ 0);
1954 cb->waitForBuffersReturned();
1955 }
1956
1957 ret = mSession->close();
1958 mSession = nullptr;
1959 ASSERT_TRUE(ret.isOk());
1960 }
1961 }
1962}
1963
1964// Generate and verify a burst containing alternating sensor sensitivity values
1965TEST_P(CameraAidlTest, processCaptureRequestBurstISO) {
1966 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1967 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
1968 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
1969 int64_t bufferId = 1;
1970 int32_t frameNumber = 1;
1971 float isoTol = .03f;
1972 CameraMetadata settings;
1973
1974 for (const auto& name : cameraDeviceNames) {
1975 CameraMetadata meta;
1976 settings.metadata.clear();
1977 std::shared_ptr<ICameraDevice> unusedDevice;
1978 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
1979 &unusedDevice /*out*/);
1980 camera_metadata_t* staticMetaBuffer =
1981 clone_camera_metadata(reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
1982 ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta(
1983 staticMetaBuffer);
1984
1985 camera_metadata_entry_t hwLevel = staticMeta.find(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL);
1986 ASSERT_TRUE(0 < hwLevel.count);
1987 if (ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED == hwLevel.data.u8[0] ||
1988 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL == hwLevel.data.u8[0]) {
1989 // Limited/External devices can skip this test
1990 ndk::ScopedAStatus ret = mSession->close();
1991 mSession = nullptr;
1992 ASSERT_TRUE(ret.isOk());
1993 continue;
1994 }
1995
1996 camera_metadata_entry_t isoRange = staticMeta.find(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE);
1997 ASSERT_EQ(isoRange.count, 2u);
1998
1999 ndk::ScopedAStatus ret = mSession->close();
2000 mSession = nullptr;
2001 ASSERT_TRUE(ret.isOk());
2002
2003 bool supportsPartialResults = false;
2004 bool useHalBufManager = false;
2005 int32_t partialResultCount = 0;
2006 Stream previewStream;
2007 std::vector<HalStream> halStreams;
2008 std::shared_ptr<DeviceCb> cb;
2009 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2010 &previewStream /*out*/, &halStreams /*out*/,
2011 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2012 &useHalBufManager /*out*/, &cb /*out*/);
2013
2014 ::aidl::android::hardware::common::fmq::MQDescriptor<
2015 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2016 descriptor;
2017 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2018 std::shared_ptr<ResultMetadataQueue> resultQueue =
2019 std::make_shared<ResultMetadataQueue>(descriptor);
2020 ASSERT_TRUE(resultQueueRet.isOk());
2021 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2022             ALOGE("%s: HAL returned an empty result metadata fmq; not using it", __func__);
2023 resultQueue = nullptr;
2024 // Don't use the queue onwards.
2025 }
2026
2027 ret = mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &settings);
2028 ASSERT_TRUE(ret.isOk());
2029
2030 ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
2031 std::vector<CaptureRequest> requests(kBurstFrameCount);
2032 std::vector<buffer_handle_t> buffers(kBurstFrameCount);
2033 std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount);
2034 std::vector<int32_t> isoValues(kBurstFrameCount);
2035 std::vector<CameraMetadata> requestSettings(kBurstFrameCount);
2036
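        // Build a burst whose requests alternate between the minimum and maximum sensitivity.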
2037 for (int32_t i = 0; i < kBurstFrameCount; i++) {
2038 std::unique_lock<std::mutex> l(mLock);
2039 CaptureRequest& request = requests[i];
2040 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2041 outputBuffers.resize(1);
2042 StreamBuffer& outputBuffer = outputBuffers[0];
2043
2044 isoValues[i] = ((i % 2) == 0) ? isoRange.data.i32[0] : isoRange.data.i32[1];
2045 if (useHalBufManager) {
2046 outputBuffer = {halStreams[0].id, 0,
2047 NativeHandle(), BufferStatus::OK,
2048 NativeHandle(), NativeHandle()};
2049 } else {
2050 allocateGraphicBuffer(previewStream.width, previewStream.height,
2051 android_convertGralloc1To0Usage(
2052 static_cast<uint64_t>(halStreams[0].producerUsage),
2053 static_cast<uint64_t>(halStreams[0].consumerUsage)),
2054 halStreams[0].overrideFormat, &buffers[i]);
2055 outputBuffer = {halStreams[0].id, bufferId + i, ::android::makeToAidl(buffers[i]),
2056 BufferStatus::OK, NativeHandle(), NativeHandle()};
2057 }
2058
2059 requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));
2060
2061 // Disable all 3A routines
2062 uint8_t mode = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF);
2063 ASSERT_EQ(::android::OK, requestMeta.update(ANDROID_CONTROL_MODE, &mode, 1));
2064 ASSERT_EQ(::android::OK,
2065 requestMeta.update(ANDROID_SENSOR_SENSITIVITY, &isoValues[i], 1));
2066 camera_metadata_t* metaBuffer = requestMeta.release();
2067 uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2068 requestSettings[i].metadata = std::vector(
2069 rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2070 overrideRotateAndCrop(&(requestSettings[i]));
2071
2072 request.frameNumber = frameNumber + i;
2073 request.fmqSettingsSize = 0;
2074 request.settings = requestSettings[i];
2075 request.inputBuffer = {
2076 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2077
2078 inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2079 partialResultCount, resultQueue);
2080 mInflightMap[frameNumber + i] = inflightReqs[i];
2081 }
2082
2083 int32_t numRequestProcessed = 0;
2084 std::vector<BufferCache> cachesToRemove;
2085
2086 ndk::ScopedAStatus returnStatus =
2087 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2088 ASSERT_TRUE(returnStatus.isOk());
2089 ASSERT_EQ(numRequestProcessed, kBurstFrameCount);
2090
2091 for (size_t i = 0; i < kBurstFrameCount; i++) {
2092 std::unique_lock<std::mutex> l(mLock);
2093 while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
2094 (!inflightReqs[i]->haveResultMetadata))) {
2095 auto timeout = std::chrono::system_clock::now() +
2096 std::chrono::seconds(kStreamBufferTimeoutSec);
2097 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2098 }
2099
2100 ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
2101 ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
2102 ASSERT_EQ(previewStream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
2103 ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
2104 ASSERT_TRUE(inflightReqs[i]->collectedResult.exists(ANDROID_SENSOR_SENSITIVITY));
2105 camera_metadata_entry_t isoResult =
2106 inflightReqs[i]->collectedResult.find(ANDROID_SENSOR_SENSITIVITY);
2107 ASSERT_TRUE(std::abs(isoResult.data.i32[0] - isoValues[i]) <=
2108 std::round(isoValues[i] * isoTol));
2109 }
2110
2111 if (useHalBufManager) {
2112 verifyBuffersReturned(mSession, previewStream.id, cb);
2113 }
2114 ret = mSession->close();
2115 mSession = nullptr;
2116 ASSERT_TRUE(ret.isOk());
2117 }
2118}
2119
2120// Test whether an incorrect capture request with missing settings will
2121// be reported correctly.
2122TEST_P(CameraAidlTest, processCaptureRequestInvalidSinglePreview) {
2123 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2124 std::vector<AvailableStream> outputPreviewStreams;
2125 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2126 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2127 int64_t bufferId = 1;
2128 int32_t frameNumber = 1;
2129 CameraMetadata settings;
2130
2131 for (const auto& name : cameraDeviceNames) {
2132 Stream previewStream;
2133 std::vector<HalStream> halStreams;
2134 std::shared_ptr<DeviceCb> cb;
2135 bool supportsPartialResults = false;
2136 bool useHalBufManager = false;
2137 int32_t partialResultCount = 0;
2138 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2139 &previewStream /*out*/, &halStreams /*out*/,
2140 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2141 &useHalBufManager /*out*/, &cb /*out*/);
2142 ASSERT_NE(mSession, nullptr);
2143 ASSERT_FALSE(halStreams.empty());
2144
2145 buffer_handle_t buffer_handle = nullptr;
2146
2147 if (useHalBufManager) {
2148 bufferId = 0;
2149 } else {
2150 allocateGraphicBuffer(previewStream.width, previewStream.height,
2151 android_convertGralloc1To0Usage(
2152 static_cast<uint64_t>(halStreams[0].producerUsage),
2153 static_cast<uint64_t>(halStreams[0].consumerUsage)),
2154 halStreams[0].overrideFormat, &buffer_handle);
2155 }
2156
2157 std::vector<CaptureRequest> requests(1);
2158 CaptureRequest& request = requests[0];
2159 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2160 outputBuffers.resize(1);
2161 StreamBuffer& outputBuffer = outputBuffers[0];
2162
2163 outputBuffer = {
2164 halStreams[0].id,
2165 bufferId,
2166 buffer_handle == nullptr ? NativeHandle() : ::android::makeToAidl(buffer_handle),
2167 BufferStatus::OK,
2168 NativeHandle(),
2169 NativeHandle()};
2170
2171 request.inputBuffer = {
2172 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2173 request.frameNumber = frameNumber;
2174 request.fmqSettingsSize = 0;
2175 request.settings = settings;
2176
2177 // Settings were not correctly initialized, we should fail here
2178 int32_t numRequestProcessed = 0;
2179 std::vector<BufferCache> cachesToRemove;
2180 ndk::ScopedAStatus ret =
2181 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2182 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
2183 ASSERT_EQ(numRequestProcessed, 0u);
2184
2185 ret = mSession->close();
2186 mSession = nullptr;
2187 ASSERT_TRUE(ret.isOk());
2188 }
2189}
2190
2191// Verify camera offline session behavior
2192TEST_P(CameraAidlTest, switchToOffline) {
2193 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2194 AvailableStream threshold = {kMaxStillWidth, kMaxStillHeight,
2195 static_cast<int32_t>(PixelFormat::BLOB)};
2196 int64_t bufferId = 1;
2197 int32_t frameNumber = 1;
2198 CameraMetadata settings;
2199
2200 for (const auto& name : cameraDeviceNames) {
2201 CameraMetadata meta;
2202 {
2203 std::shared_ptr<ICameraDevice> unusedDevice;
2204 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2205 &unusedDevice);
2206 camera_metadata_t* staticMetaBuffer = clone_camera_metadata(
2207 reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
2208 ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta(
2209 staticMetaBuffer);
2210
2211 if (isOfflineSessionSupported(staticMetaBuffer) != Status::OK) {
2212 ndk::ScopedAStatus ret = mSession->close();
2213 mSession = nullptr;
2214 ASSERT_TRUE(ret.isOk());
2215 continue;
2216 }
2217 ndk::ScopedAStatus ret = mSession->close();
2218 mSession = nullptr;
2219 ASSERT_TRUE(ret.isOk());
2220 }
2221
2222 bool supportsPartialResults = false;
2223 int32_t partialResultCount = 0;
2224 Stream stream;
2225 std::vector<HalStream> halStreams;
2226 std::shared_ptr<DeviceCb> cb;
2227 int32_t jpegBufferSize;
2228 bool useHalBufManager;
2229 configureOfflineStillStream(name, mProvider, &threshold, &mSession /*out*/, &stream /*out*/,
2230 &halStreams /*out*/, &supportsPartialResults /*out*/,
2231 &partialResultCount /*out*/, &cb /*out*/,
2232 &jpegBufferSize /*out*/, &useHalBufManager /*out*/);
2233
2234 auto ret = mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE,
2235 &settings);
2236 ASSERT_TRUE(ret.isOk());
2237
2238 ::aidl::android::hardware::common::fmq::MQDescriptor<
2239 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2240 descriptor;
2241
2242 ndk::ScopedAStatus resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2243 ASSERT_TRUE(resultQueueRet.isOk());
2244 std::shared_ptr<ResultMetadataQueue> resultQueue =
2245 std::make_shared<ResultMetadataQueue>(descriptor);
2246 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2247             ALOGE("%s: HAL returned an empty result metadata fmq; not using it", __func__);
2248 resultQueue = nullptr;
2249 // Don't use the queue onwards.
2250 }
2251
2252 ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
2253
2254 std::vector<buffer_handle_t> buffers(kBurstFrameCount);
2255 std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount);
2256 std::vector<CameraMetadata> requestSettings(kBurstFrameCount);
2257
2258 std::vector<CaptureRequest> requests(kBurstFrameCount);
2259
2260 HalStream halStream = halStreams[0];
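        // Queue a burst of still capture requests before attempting the offline switch.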
2261 for (uint32_t i = 0; i < kBurstFrameCount; i++) {
2262 CaptureRequest& request = requests[i];
2263 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2264 outputBuffers.resize(1);
2265 StreamBuffer& outputBuffer = outputBuffers[0];
2266
2267 std::unique_lock<std::mutex> l(mLock);
2268 if (useHalBufManager) {
2269 outputBuffer = {halStream.id, 0, NativeHandle(), BufferStatus::OK, NativeHandle(),
2270 NativeHandle()};
2271 } else {
2272 // jpeg buffer (w,h) = (blobLen, 1)
2273 allocateGraphicBuffer(jpegBufferSize, /*height*/ 1,
2274 android_convertGralloc1To0Usage(
2275 static_cast<uint64_t>(halStream.producerUsage),
2276 static_cast<uint64_t>(halStream.consumerUsage)),
2277 halStream.overrideFormat, &buffers[i]);
2278 outputBuffer = {halStream.id, bufferId + i, ::android::makeToAidl(buffers[i]),
2279 BufferStatus::OK, NativeHandle(), NativeHandle()};
2280 }
2281
2282 requestMeta.clear();
2283 requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));
2284
2285 camera_metadata_t* metaBuffer = requestMeta.release();
2286 uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2287 requestSettings[i].metadata = std::vector(
2288 rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2289 overrideRotateAndCrop(&requestSettings[i]);
2290
2291 request.frameNumber = frameNumber + i;
2292 request.fmqSettingsSize = 0;
2293 request.settings = requestSettings[i];
2294 request.inputBuffer = {/*streamId*/ -1,
2295 /*bufferId*/ 0, NativeHandle(),
2296 BufferStatus::ERROR, NativeHandle(),
2297 NativeHandle()};
2298
2299 inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2300 partialResultCount, resultQueue);
2301 mInflightMap[frameNumber + i] = inflightReqs[i];
2302 }
2303
2304 int32_t numRequestProcessed = 0;
2305 std::vector<BufferCache> cachesToRemove;
2306
2307 ndk::ScopedAStatus returnStatus =
2308 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2309 ASSERT_TRUE(returnStatus.isOk());
2310 ASSERT_EQ(numRequestProcessed, kBurstFrameCount);
2311
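        // Try to move the configured still stream and its pending requests to an offline session.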
2312 std::vector<int32_t> offlineStreamIds = {halStream.id};
2313 CameraOfflineSessionInfo offlineSessionInfo;
2314 std::shared_ptr<ICameraOfflineSession> offlineSession;
2315 returnStatus =
2316 mSession->switchToOffline(offlineStreamIds, &offlineSessionInfo, &offlineSession);
2317
2318 if (!halStreams[0].supportOffline) {
2319 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
2320 returnStatus.getServiceSpecificError());
2321 ret = mSession->close();
2322 mSession = nullptr;
2323 ASSERT_TRUE(ret.isOk());
2324 continue;
2325 }
2326
2327 ASSERT_TRUE(returnStatus.isOk());
2328 // Hal might be unable to find any requests qualified for offline mode.
2329 if (offlineSession == nullptr) {
2330 ret = mSession->close();
2331 mSession = nullptr;
2332 ASSERT_TRUE(ret.isOk());
2333 continue;
2334 }
2335
2336 ASSERT_EQ(offlineSessionInfo.offlineStreams.size(), 1u);
2337 ASSERT_EQ(offlineSessionInfo.offlineStreams[0].id, halStream.id);
2338 ASSERT_NE(offlineSessionInfo.offlineRequests.size(), 0u);
2339
2340 // close device session to make sure offline session does not rely on it
2341 ret = mSession->close();
2342 mSession = nullptr;
2343 ASSERT_TRUE(ret.isOk());
2344
2345 ::aidl::android::hardware::common::fmq::MQDescriptor<
2346 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2347 offlineResultDescriptor;
2348
2349 auto offlineResultQueueRet =
2350 offlineSession->getCaptureResultMetadataQueue(&offlineResultDescriptor);
2351 std::shared_ptr<ResultMetadataQueue> offlineResultQueue =
2352                 std::make_shared<ResultMetadataQueue>(offlineResultDescriptor);
2353 if (!offlineResultQueue->isValid() || offlineResultQueue->availableToWrite() <= 0) {
2354             ALOGE("%s: offline session returned an empty result metadata fmq; not using it", __func__);
2355 offlineResultQueue = nullptr;
2356 // Don't use the queue onwards.
2357 }
2358 ASSERT_TRUE(offlineResultQueueRet.isOk());
2359
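        // Point the pending in-flight requests at the offline session's result metadata queue.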
2360 updateInflightResultQueue(offlineResultQueue);
2361
2362 ret = offlineSession->setCallback(cb);
2363 ASSERT_TRUE(ret.isOk());
2364
2365 for (size_t i = 0; i < kBurstFrameCount; i++) {
2366 std::unique_lock<std::mutex> l(mLock);
2367 while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
2368 (!inflightReqs[i]->haveResultMetadata))) {
2369 auto timeout = std::chrono::system_clock::now() +
2370 std::chrono::seconds(kStreamBufferTimeoutSec);
2371 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2372 }
2373
2374 ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
2375 ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
2376 ASSERT_EQ(stream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
2377 ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
2378 }
2379
2380 ret = offlineSession->close();
2381 ASSERT_TRUE(ret.isOk());
2382 }
2383}
2384
2385// Check whether an invalid capture request with missing output buffers
2386// will be reported correctly.
2387TEST_P(CameraAidlTest, processCaptureRequestInvalidBuffer) {
2388 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2389 std::vector<AvailableStream> outputBlobStreams;
2390 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2391 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2392 int32_t frameNumber = 1;
2393 CameraMetadata settings;
2394
2395 for (const auto& name : cameraDeviceNames) {
2396 Stream previewStream;
2397 std::vector<HalStream> halStreams;
2398 std::shared_ptr<DeviceCb> cb;
2399 bool supportsPartialResults = false;
2400 bool useHalBufManager = false;
2401 int32_t partialResultCount = 0;
2402 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2403 &previewStream /*out*/, &halStreams /*out*/,
2404 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2405 &useHalBufManager /*out*/, &cb /*out*/);
2406
2407 RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
2408 ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings);
2409 ASSERT_TRUE(ret.isOk());
2410 overrideRotateAndCrop(&settings);
2411
2412 std::vector<CaptureRequest> requests(1);
2413 CaptureRequest& request = requests[0];
2414 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2415 outputBuffers.resize(1);
2416 // Empty output buffer
2417 outputBuffers[0] = {
2418 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2419
2420 request.inputBuffer = {
2421 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2422 request.frameNumber = frameNumber;
2423 request.fmqSettingsSize = 0;
2424 request.settings = settings;
2425
2426 // Output buffers are missing, we should fail here
2427 int32_t numRequestProcessed = 0;
2428 std::vector<BufferCache> cachesToRemove;
2429 ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2430 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
2431 ASSERT_EQ(numRequestProcessed, 0u);
2432
2433 ret = mSession->close();
2434 mSession = nullptr;
2435 ASSERT_TRUE(ret.isOk());
2436 }
2437}
2438
2439// Generate, trigger and flush a preview request
2440TEST_P(CameraAidlTest, flushPreviewRequest) {
2441 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2442 std::vector<AvailableStream> outputPreviewStreams;
2443 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2444 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2445 int64_t bufferId = 1;
2446 int32_t frameNumber = 1;
2447 CameraMetadata settings;
2448
2449 for (const auto& name : cameraDeviceNames) {
2450 Stream previewStream;
2451 std::vector<HalStream> halStreams;
2452 std::shared_ptr<DeviceCb> cb;
2453 bool supportsPartialResults = false;
2454 bool useHalBufManager = false;
2455 int32_t partialResultCount = 0;
2456
2457 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2458 &previewStream /*out*/, &halStreams /*out*/,
2459 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2460 &useHalBufManager /*out*/, &cb /*out*/);
2461
2462 ASSERT_NE(mSession, nullptr);
2463 ASSERT_NE(cb, nullptr);
2464 ASSERT_FALSE(halStreams.empty());
2465
2466 ::aidl::android::hardware::common::fmq::MQDescriptor<
2467 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2468 descriptor;
2469
2470 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2471 std::shared_ptr<ResultMetadataQueue> resultQueue =
2472 std::make_shared<ResultMetadataQueue>(descriptor);
2473 ASSERT_TRUE(resultQueueRet.isOk());
2474 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2475             ALOGE("%s: HAL returned an empty result metadata fmq; not using it", __func__);
2476 resultQueue = nullptr;
2477 // Don't use the queue onwards.
2478 }
2479
2480 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
2481 1, false, supportsPartialResults, partialResultCount, resultQueue);
2482 RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
2483
2484 ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings);
2485 ASSERT_TRUE(ret.isOk());
2486 overrideRotateAndCrop(&settings);
2487
2488 buffer_handle_t buffer_handle;
2489 std::vector<CaptureRequest> requests(1);
2490 CaptureRequest& request = requests[0];
2491 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2492 outputBuffers.resize(1);
2493 StreamBuffer& outputBuffer = outputBuffers[0];
2494 if (useHalBufManager) {
2495 bufferId = 0;
2496 outputBuffer = {halStreams[0].id, bufferId, NativeHandle(),
2497 BufferStatus::OK, NativeHandle(), NativeHandle()};
2498 } else {
2499 allocateGraphicBuffer(previewStream.width, previewStream.height,
2500 android_convertGralloc1To0Usage(
2501 static_cast<uint64_t>(halStreams[0].producerUsage),
2502 static_cast<uint64_t>(halStreams[0].consumerUsage)),
2503 halStreams[0].overrideFormat, &buffer_handle);
2504 outputBuffer = {halStreams[0].id, bufferId, ::android::makeToAidl(buffer_handle),
2505 BufferStatus::OK, NativeHandle(), NativeHandle()};
2506 }
2507
2508 request.frameNumber = frameNumber;
2509 request.fmqSettingsSize = 0;
2510 request.settings = settings;
2511 request.inputBuffer = {
2512 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2513
2514 {
2515 std::unique_lock<std::mutex> l(mLock);
2516 mInflightMap.clear();
2517 mInflightMap[frameNumber] = inflightReq;
2518 }
2519
2520 int32_t numRequestProcessed = 0;
2521 std::vector<BufferCache> cachesToRemove;
2522 ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2523 ASSERT_TRUE(ret.isOk());
2524 ASSERT_EQ(numRequestProcessed, 1u);
2525
2526 // Flush before waiting for request to complete.
2527 ndk::ScopedAStatus returnStatus = mSession->flush();
2528 ASSERT_TRUE(returnStatus.isOk());
2529
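        // After a flush, either a valid result or a REQUEST/RESULT/BUFFER error is acceptable.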
2530 {
2531 std::unique_lock<std::mutex> l(mLock);
2532 while (!inflightReq->errorCodeValid &&
2533 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2534 auto timeout = std::chrono::system_clock::now() +
2535 std::chrono::seconds(kStreamBufferTimeoutSec);
2536 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2537 }
2538
2539 if (!inflightReq->errorCodeValid) {
2540 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2541 ASSERT_EQ(previewStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId);
2542 } else {
2543 switch (inflightReq->errorCode) {
2544 case ErrorCode::ERROR_REQUEST:
2545 case ErrorCode::ERROR_RESULT:
2546 case ErrorCode::ERROR_BUFFER:
2547 // Expected
2548 break;
2549 case ErrorCode::ERROR_DEVICE:
2550 default:
2551 FAIL() << "Unexpected error:"
2552 << static_cast<uint32_t>(inflightReq->errorCode);
2553 }
2554 }
2555 }
2556
2557 if (useHalBufManager) {
2558 verifyBuffersReturned(mSession, previewStream.id, cb);
2559 }
2560
2561 ret = mSession->close();
2562 mSession = nullptr;
2563 ASSERT_TRUE(ret.isOk());
2564 }
2565}
2566
2567// Verify that camera flushes correctly without any pending requests.
2568TEST_P(CameraAidlTest, flushEmpty) {
2569 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2570 std::vector<AvailableStream> outputPreviewStreams;
2571 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2572 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2573
2574 for (const auto& name : cameraDeviceNames) {
2575 Stream previewStream;
2576 std::vector<HalStream> halStreams;
2577 std::shared_ptr<DeviceCb> cb;
2578 bool supportsPartialResults = false;
2579 bool useHalBufManager = false;
2580
2581 int32_t partialResultCount = 0;
2582 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2583 &previewStream /*out*/, &halStreams /*out*/,
2584 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2585 &useHalBufManager /*out*/, &cb /*out*/);
2586
2587 ndk::ScopedAStatus returnStatus = mSession->flush();
2588 ASSERT_TRUE(returnStatus.isOk());
2589
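        // With no requests in flight, the flush must not trigger any result callbacks.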
2590 {
2591 std::unique_lock<std::mutex> l(mLock);
2592 auto timeout = std::chrono::system_clock::now() +
2593 std::chrono::milliseconds(kEmptyFlushTimeoutMSec);
2594 ASSERT_EQ(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2595 }
2596
2597 ndk::ScopedAStatus ret = mSession->close();
2598 mSession = nullptr;
2599 ASSERT_TRUE(ret.isOk());
2600 }
2601}
2602
2603// Test camera provider notify method
2604TEST_P(CameraAidlTest, providerDeviceStateNotification) {
2605 notifyDeviceState(ICameraProvider::DEVICE_STATE_BACK_COVERED);
2606 notifyDeviceState(ICameraProvider::DEVICE_STATE_NORMAL);
2607}
2608
2609// Verify that all supported stream formats and sizes can be configured
2610// successfully for injection camera.
2611TEST_P(CameraAidlTest, configureInjectionStreamsAvailableOutputs) {
2612 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2613 std::vector<AvailableStream> outputStreams;
2614
2615 for (const auto& name : cameraDeviceNames) {
2616 CameraMetadata metadata;
2617
2618 std::shared_ptr<ICameraInjectionSession> injectionSession;
2619 std::shared_ptr<ICameraDevice> unusedDevice;
2620 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2621 &unusedDevice /*out*/);
2622 if (injectionSession == nullptr) {
2623 continue;
2624 }
2625
2626 camera_metadata_t* staticMetaBuffer =
2627 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2628 CameraMetadata chars;
2629 chars.metadata = metadata.metadata;
2630
2631 outputStreams.clear();
2632 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams));
2633 ASSERT_NE(0u, outputStreams.size());
2634
2635 int32_t jpegBufferSize = 0;
2636 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize));
2637 ASSERT_NE(0u, jpegBufferSize);
2638
2639 int32_t streamId = 0;
2640 int32_t streamConfigCounter = 0;
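        // Configure every advertised output size and format as a single injection stream.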
2641 for (auto& it : outputStreams) {
2642 Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format));
2643 Stream stream = {streamId,
2644 StreamType::OUTPUT,
2645 it.width,
2646 it.height,
2647 static_cast<PixelFormat>(it.format),
2648 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2649 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2650 dataspace,
2651 StreamRotation::ROTATION_0,
2652 std::string(),
2653 jpegBufferSize,
2654 0,
2655 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}};
2656
2657 std::vector<Stream> streams = {stream};
2658 StreamConfiguration config;
2659 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2660 jpegBufferSize);
2661
2662 config.streamConfigCounter = streamConfigCounter++;
2663 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
2664 ASSERT_TRUE(s.isOk());
2665 streamId++;
2666 }
2667
2668 std::shared_ptr<ICameraDeviceSession> session;
2669 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2670 ASSERT_TRUE(ret.isOk());
2671 ASSERT_NE(session, nullptr);
2672 ret = session->close();
2673 ASSERT_TRUE(ret.isOk());
2674 }
2675}
2676
2677// Check for correct handling of invalid/incorrect configuration parameters for injection camera.
2678TEST_P(CameraAidlTest, configureInjectionStreamsInvalidOutputs) {
2679 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2680 std::vector<AvailableStream> outputStreams;
2681
2682 for (const auto& name : cameraDeviceNames) {
2683 CameraMetadata metadata;
2684 std::shared_ptr<ICameraInjectionSession> injectionSession;
2685 std::shared_ptr<ICameraDevice> unusedDevice;
2686 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2687 &unusedDevice);
2688 if (injectionSession == nullptr) {
2689 continue;
2690 }
2691
2692 camera_metadata_t* staticMetaBuffer =
2693 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2694 std::shared_ptr<ICameraDeviceSession> session;
2695 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2696 ASSERT_TRUE(ret.isOk());
2697 ASSERT_NE(session, nullptr);
2698
2699 CameraMetadata chars;
2700 chars.metadata = metadata.metadata;
2701
2702 outputStreams.clear();
2703 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams));
2704 ASSERT_NE(0u, outputStreams.size());
2705
2706 int32_t jpegBufferSize = 0;
2707 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize));
2708 ASSERT_NE(0u, jpegBufferSize);
2709
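        // A stream with zero width and height must be rejected.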
2710 int32_t streamId = 0;
2711 Stream stream = {streamId++,
2712 StreamType::OUTPUT,
2713 0,
2714 0,
2715 static_cast<PixelFormat>(outputStreams[0].format),
2716 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2717 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2718 Dataspace::UNKNOWN,
2719 StreamRotation::ROTATION_0,
2720 std::string(),
2721 jpegBufferSize,
2722 0,
2723 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}};
2724
2725 int32_t streamConfigCounter = 0;
2726 std::vector<Stream> streams = {stream};
2727 StreamConfiguration config;
2728 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2729 jpegBufferSize);
2730
2731 config.streamConfigCounter = streamConfigCounter++;
2732 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
2733 ASSERT_TRUE(
2734 (static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) == s.getServiceSpecificError()) ||
2735 (static_cast<int32_t>(Status::INTERNAL_ERROR) == s.getServiceSpecificError()));
2736
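        // A stream with out-of-range dimensions must also be rejected.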
2737 stream = {streamId++,
2738 StreamType::OUTPUT,
2739 INT32_MAX,
2740 INT32_MAX,
2741 static_cast<PixelFormat>(outputStreams[0].format),
2742 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2743 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2744 Dataspace::UNKNOWN,
2745 StreamRotation::ROTATION_0,
2746 std::string(),
2747 jpegBufferSize,
2748 0,
2749 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}};
2750 streams[0] = stream;
2751 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2752 jpegBufferSize);
2753 config.streamConfigCounter = streamConfigCounter++;
2754 s = injectionSession->configureInjectionStreams(config, chars);
2755 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
2756
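        // Invalid pixel formats and rotations must be rejected for every supported size.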
2757 for (auto& it : outputStreams) {
2758 stream = {streamId++,
2759 StreamType::OUTPUT,
2760 it.width,
2761 it.height,
2762 static_cast<PixelFormat>(INT32_MAX),
2763 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2764 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2765 Dataspace::UNKNOWN,
2766 StreamRotation::ROTATION_0,
2767 std::string(),
2768 jpegBufferSize,
2769 0,
2770 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}};
2771 streams[0] = stream;
2772 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2773 jpegBufferSize);
2774 config.streamConfigCounter = streamConfigCounter++;
2775 s = injectionSession->configureInjectionStreams(config, chars);
2776 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
2777
2778 stream = {streamId++,
2779 StreamType::OUTPUT,
2780 it.width,
2781 it.height,
2782 static_cast<PixelFormat>(it.format),
2783 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2784 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2785 Dataspace::UNKNOWN,
2786 static_cast<StreamRotation>(INT32_MAX),
2787 std::string(),
2788 jpegBufferSize,
2789 0,
2790 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}};
2791 streams[0] = stream;
2792 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2793 jpegBufferSize);
2794 config.streamConfigCounter = streamConfigCounter++;
2795 s = injectionSession->configureInjectionStreams(config, chars);
2796 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
2797 }
2798
2799 ret = session->close();
2800 ASSERT_TRUE(ret.isOk());
2801 }
2802}
2803
2804 // Check whether session parameters are supported for the injection camera. If the HAL
2805 // supports them, try to configure a preview stream using them.
2806TEST_P(CameraAidlTest, configureInjectionStreamsWithSessionParameters) {
2807 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2808 std::vector<AvailableStream> outputPreviewStreams;
2809 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2810 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2811
2812 for (const auto& name : cameraDeviceNames) {
2813 CameraMetadata metadata;
2814 std::shared_ptr<ICameraInjectionSession> injectionSession;
2815 std::shared_ptr<ICameraDevice> unusedDevice;
2816 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2817 &unusedDevice /*out*/);
2818 if (injectionSession == nullptr) {
2819 continue;
2820 }
2821
2822 std::shared_ptr<ICameraDeviceSession> session;
2823 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2824 ASSERT_TRUE(ret.isOk());
2825 ASSERT_NE(session, nullptr);
2826
2827 camera_metadata_t* staticMetaBuffer =
2828 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2829 CameraMetadata chars;
2830 chars.metadata = metadata.metadata;
2831
2832 std::unordered_set<int32_t> availableSessionKeys;
2833 Status rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
2834 &availableSessionKeys);
2835 ASSERT_EQ(Status::OK, rc);
2836 if (availableSessionKeys.empty()) {
2837 ret = session->close();
2838 ASSERT_TRUE(ret.isOk());
2839 continue;
2840 }
2841
2842 android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings;
2843 android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams,
2844 modifiedSessionParams;
2845 constructFilteredSettings(session, availableSessionKeys, RequestTemplate::PREVIEW,
2846 &previewRequestSettings, &sessionParams);
2847 if (sessionParams.isEmpty()) {
2848 ret = session->close();
2849 ASSERT_TRUE(ret.isOk());
2850 continue;
2851 }
2852
2853 outputPreviewStreams.clear();
2854
2855 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams,
2856 &previewThreshold));
2857 ASSERT_NE(0u, outputPreviewStreams.size());
2858
2859 Stream previewStream = {
2860 0,
2861 StreamType::OUTPUT,
2862 outputPreviewStreams[0].width,
2863 outputPreviewStreams[0].height,
2864 static_cast<PixelFormat>(outputPreviewStreams[0].format),
2865 static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
2866 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2867 Dataspace::UNKNOWN,
2868 StreamRotation::ROTATION_0,
2869 std::string(),
2870 0,
2871 -1,
2872 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}};
2873 std::vector<Stream> streams = {previewStream};
2874 StreamConfiguration config;
2875 config.streams = streams;
2876 config.operationMode = StreamConfigurationMode::NORMAL_MODE;
2877
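        // Pass the constructed session parameters along with the stream configuration.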
2878 modifiedSessionParams = sessionParams;
2879 camera_metadata_t* sessionParamsBuffer = sessionParams.release();
2880 uint8_t* rawSessionParamsBuffer = reinterpret_cast<uint8_t*>(sessionParamsBuffer);
2881 config.sessionParams.metadata =
2882 std::vector(rawSessionParamsBuffer,
2883 rawSessionParamsBuffer + get_camera_metadata_size(sessionParamsBuffer));
2884
2885         config.streamConfigCounter = 0;
2887 config.multiResolutionInputImage = false;
2888
2889 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
2890 ASSERT_TRUE(s.isOk());
2891
2892 sessionParams.acquire(sessionParamsBuffer);
2893         // staticMetaBuffer aliases metadata.metadata; it must not be freed here.
2894 ret = session->close();
2895 ASSERT_TRUE(ret.isOk());
2896 }
2897}
2898
// Verify that valid stream use cases can be configured successfully, and invalid use cases
// fail stream configuration.
TEST_P(CameraAidlTest, configureStreamsUseCases) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

    for (const auto& name : cameraDeviceNames) {
        CameraMetadata meta;
        std::shared_ptr<ICameraDevice> cameraDevice;

        openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
                               &cameraDevice /*out*/);

        camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
        // Skip devices that only support depth output.
        if (isDepthOnly(staticMeta)) {
            ndk::ScopedAStatus ret = mSession->close();
            mSession = nullptr;
            ASSERT_TRUE(ret.isOk());
            continue;
        }

        std::vector<AvailableStream> outputPreviewStreams;
        AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
                                            static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
        ASSERT_EQ(Status::OK,
                  getAvailableOutputStreams(staticMeta, outputPreviewStreams, &previewThreshold));
        ASSERT_NE(0u, outputPreviewStreams.size());

        // Combine the mandatory use cases with one invalid value (VIDEO_CALL + 1).
        std::vector<int32_t> useCases(kMandatoryUseCases);
        useCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL + 1);

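        // Query the use cases advertised by the device; when the tag is absent only DEFAULT
        // is considered supported.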
        std::vector<int32_t> supportedUseCases;
        camera_metadata_ro_entry entry;
        auto retcode = find_camera_metadata_ro_entry(
                staticMeta, ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES, &entry);
        if ((0 == retcode) && (entry.count > 0)) {
            supportedUseCases.insert(supportedUseCases.end(), entry.data.i32,
                                     entry.data.i32 + entry.count);
        } else {
            supportedUseCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
        }

        std::vector<Stream> streams(1);
        streams[0] = {
                0,
                StreamType::OUTPUT,
                outputPreviewStreams[0].width,
                outputPreviewStreams[0].height,
                static_cast<PixelFormat>(outputPreviewStreams[0].format),
                static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
                        GRALLOC1_CONSUMER_USAGE_CPU_READ),
                Dataspace::UNKNOWN,
                StreamRotation::ROTATION_0,
                std::string(),
                0,
                -1,
                {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap::
                        ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

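        // Use the STILL_CAPTURE default request settings as the session parameters for every
        // configuration attempt below.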
        int32_t streamConfigCounter = 0;
        CameraMetadata req;
        StreamConfiguration config;
        RequestTemplate reqTemplate = RequestTemplate::STILL_CAPTURE;
        ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &req);
        ASSERT_TRUE(ret.isOk());
        config.sessionParams = req;

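        // For every use case, isStreamCombinationSupported() must agree with the static
        // metadata, and configureStreams() must succeed only for the supported ones.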
        for (int32_t useCase : useCases) {
            bool useCaseSupported = std::find(supportedUseCases.begin(), supportedUseCases.end(),
                                              useCase) != supportedUseCases.end();

            streams[0].useCase = static_cast<
                    aidl::android::hardware::camera::metadata::ScalerAvailableStreamUseCases>(
                    useCase);
            config.streams = streams;
            config.operationMode = StreamConfigurationMode::NORMAL_MODE;
            config.streamConfigCounter = streamConfigCounter;
            config.multiResolutionInputImage = false;

            bool combSupported;
            ret = cameraDevice->isStreamCombinationSupported(config, &combSupported);
            ASSERT_TRUE((ret.isOk()) || (static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED) ==
                                         ret.getServiceSpecificError()));
            if (ret.isOk()) {
                ASSERT_EQ(combSupported, useCaseSupported);
            }

            std::vector<HalStream> halStreams;
            ret = mSession->configureStreams(config, &halStreams);
            ALOGI("configureStreams returns status: %d", ret.getServiceSpecificError());
            if (useCaseSupported) {
                ASSERT_TRUE(ret.isOk());
                ASSERT_EQ(1u, halStreams.size());
            } else {
                ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
                          ret.getServiceSpecificError());
            }
        }
        ret = mSession->close();
        mSession = nullptr;
        ASSERT_TRUE(ret.isOk());
    }
}

GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(CameraAidlTest);
INSTANTIATE_TEST_SUITE_P(
        PerInstance, CameraAidlTest,
        testing::ValuesIn(android::getAidlHalInstanceNames(ICameraProvider::descriptor)),
        android::hardware::PrintInstanceNameToString);