Avichal Rakesh | e1857f8 | 2022-06-08 17:47:23 -0700 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2022 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
| 17 | #define LOG_TAG "ExtCamDev" |
| 18 | // #define LOG_NDEBUG 0 |
| 19 | #include <log/log.h> |
| 20 | |
| 21 | #include "ExternalCameraDevice.h" |
| 22 | |
#include <aidl/android/hardware/camera/common/Status.h>
#include <convert.h>
#include <linux/videodev2.h>

#include <regex>
#include <set>
#include <utility>
| 28 | |
| 29 | namespace android { |
| 30 | namespace hardware { |
| 31 | namespace camera { |
| 32 | namespace device { |
| 33 | namespace implementation { |
| 34 | |
| 35 | using ::aidl::android::hardware::camera::common::Status; |
| 36 | |
namespace {
// Only support MJPEG for now as it seems to be the one supports higher fps
// Other formats to consider in the future:
// * V4L2_PIX_FMT_YVU420 (== YV12)
// * V4L2_PIX_FMT_YVYU (YVYU: can be converted to YV12 or other YUV420_888 formats)
// Z16 is additionally accepted for depth-only cameras (see initAvailableCapabilities).
const std::array<uint32_t, /*size*/ 2> kSupportedFourCCs{
        {V4L2_PIX_FMT_MJPEG, V4L2_PIX_FMT_Z16}};  // double braces required in C++11

constexpr int MAX_RETRY = 5;                  // Allow retry v4l2 open failures a few times.
constexpr int OPEN_RETRY_SLEEP_US = 100'000;  // 100ms * MAX_RETRY = 0.5 seconds

// Matches v4l2 video device nodes; capture group 1 is the numeric node index.
const std::regex kDevicePathRE("/dev/video([0-9]+)");
}  // namespace
| 50 | |
// Device-version string reported for this HAL implementation.
std::string ExternalCameraDevice::kDeviceVersion = "1.1";
| 52 | |
| 53 | ExternalCameraDevice::ExternalCameraDevice(const std::string& devicePath, |
| 54 | const ExternalCameraConfig& config) |
| 55 | : mCameraId("-1"), mDevicePath(devicePath), mCfg(config) { |
| 56 | std::smatch sm; |
| 57 | if (std::regex_match(mDevicePath, sm, kDevicePathRE)) { |
| 58 | mCameraId = std::to_string(mCfg.cameraIdOffset + std::stoi(sm[1])); |
| 59 | } else { |
| 60 | ALOGE("%s: device path match failed for %s", __FUNCTION__, mDevicePath.c_str()); |
| 61 | } |
| 62 | } |
| 63 | |
| 64 | ExternalCameraDevice::~ExternalCameraDevice() {} |
| 65 | |
| 66 | ndk::ScopedAStatus ExternalCameraDevice::getCameraCharacteristics(CameraMetadata* _aidl_return) { |
| 67 | Mutex::Autolock _l(mLock); |
| 68 | if (_aidl_return == nullptr) { |
| 69 | return fromStatus(Status::ILLEGAL_ARGUMENT); |
| 70 | } |
| 71 | |
| 72 | if (isInitFailedLocked()) { |
| 73 | return fromStatus(Status::INTERNAL_ERROR); |
| 74 | } |
| 75 | |
| 76 | const camera_metadata_t* rawMetadata = mCameraCharacteristics.getAndLock(); |
| 77 | convertToAidl(rawMetadata, _aidl_return); |
| 78 | mCameraCharacteristics.unlock(rawMetadata); |
| 79 | return fromStatus(Status::OK); |
| 80 | } |
| 81 | |
// External cameras are never part of a logical multi-camera, so physical
// camera queries are rejected.
ndk::ScopedAStatus ExternalCameraDevice::getPhysicalCameraCharacteristics(const std::string&,
                                                                          CameraMetadata*) {
    ALOGE("%s: Physical camera functions are not supported for external cameras.", __FUNCTION__);
    return fromStatus(Status::ILLEGAL_ARGUMENT);
}
| 87 | |
| 88 | ndk::ScopedAStatus ExternalCameraDevice::getResourceCost(CameraResourceCost* _aidl_return) { |
| 89 | if (_aidl_return == nullptr) { |
| 90 | return fromStatus(Status::ILLEGAL_ARGUMENT); |
| 91 | } |
| 92 | |
| 93 | _aidl_return->resourceCost = 100; |
| 94 | return fromStatus(Status::OK); |
| 95 | } |
| 96 | |
| 97 | ndk::ScopedAStatus ExternalCameraDevice::isStreamCombinationSupported( |
| 98 | const StreamConfiguration& in_streams, bool* _aidl_return) { |
| 99 | if (isInitFailed()) { |
| 100 | ALOGE("%s: camera %s. camera init failed!", __FUNCTION__, mCameraId.c_str()); |
| 101 | return fromStatus(Status::INTERNAL_ERROR); |
| 102 | } |
| 103 | Status s = ExternalCameraDeviceSession::isStreamCombinationSupported(in_streams, |
| 104 | mSupportedFormats, mCfg); |
| 105 | *_aidl_return = s == Status::OK; |
| 106 | return fromStatus(Status::OK); |
| 107 | } |
| 108 | |
| 109 | ndk::ScopedAStatus ExternalCameraDevice::open( |
| 110 | const std::shared_ptr<ICameraDeviceCallback>& in_callback, |
| 111 | std::shared_ptr<ICameraDeviceSession>* _aidl_return) { |
| 112 | if (_aidl_return == nullptr) { |
| 113 | ALOGE("%s: cannot open camera %s. return session ptr is null!", __FUNCTION__, |
| 114 | mCameraId.c_str()); |
| 115 | return fromStatus(Status::ILLEGAL_ARGUMENT); |
| 116 | } |
| 117 | |
| 118 | Mutex::Autolock _l(mLock); |
| 119 | if (isInitFailedLocked()) { |
| 120 | ALOGE("%s: cannot open camera %s. camera init failed!", __FUNCTION__, mCameraId.c_str()); |
| 121 | return fromStatus(Status::INTERNAL_ERROR); |
| 122 | } |
| 123 | |
| 124 | std::shared_ptr<ExternalCameraDeviceSession> session; |
| 125 | ALOGV("%s: Initializing device for camera %s", __FUNCTION__, mCameraId.c_str()); |
| 126 | session = mSession.lock(); |
| 127 | |
| 128 | if (session != nullptr && !session->isClosed()) { |
| 129 | ALOGE("%s: cannot open an already opened camera!", __FUNCTION__); |
| 130 | return fromStatus(Status::CAMERA_IN_USE); |
| 131 | } |
| 132 | |
| 133 | int numAttempt = 0; |
| 134 | unique_fd fd(::open(mDevicePath.c_str(), O_RDWR)); |
| 135 | while (fd.get() < 0 && numAttempt < MAX_RETRY) { |
| 136 | // Previous retry attempts failed. Retry opening the device at most MAX_RETRY times |
| 137 | ALOGW("%s: v4l2 device %s open failed, wait 33ms and try again", __FUNCTION__, |
| 138 | mDevicePath.c_str()); |
| 139 | usleep(OPEN_RETRY_SLEEP_US); // sleep and try again |
| 140 | fd.reset(::open(mDevicePath.c_str(), O_RDWR)); |
| 141 | numAttempt++; |
| 142 | } |
| 143 | |
| 144 | if (fd.get() < 0) { |
| 145 | ALOGE("%s: v4l2 device open %s failed: %s", __FUNCTION__, mDevicePath.c_str(), |
| 146 | strerror(errno)); |
| 147 | return fromStatus(Status::INTERNAL_ERROR); |
| 148 | } |
| 149 | |
| 150 | session = createSession(in_callback, mCfg, mSupportedFormats, mCroppingType, |
| 151 | mCameraCharacteristics, mCameraId, std::move(fd)); |
| 152 | if (session == nullptr) { |
| 153 | ALOGE("%s: camera device session allocation failed", __FUNCTION__); |
| 154 | return fromStatus(Status::INTERNAL_ERROR); |
| 155 | } |
| 156 | |
| 157 | if (session->isInitFailed()) { |
| 158 | ALOGE("%s: camera device session init failed", __FUNCTION__); |
| 159 | return fromStatus(Status::INTERNAL_ERROR); |
| 160 | } |
| 161 | |
| 162 | mSession = session; |
| 163 | *_aidl_return = session; |
| 164 | return fromStatus(Status::OK); |
| 165 | } |
| 166 | |
// Injection sessions (feeding external frames into a camera id) are not
// supported by this HAL.
ndk::ScopedAStatus ExternalCameraDevice::openInjectionSession(
        const std::shared_ptr<ICameraDeviceCallback>&, std::shared_ptr<ICameraInjectionSession>*) {
    return fromStatus(Status::OPERATION_NOT_SUPPORTED);
}
| 171 | |
// No torch support: this device reports ANDROID_FLASH_INFO_AVAILABLE_FALSE
// (see initDefaultCharsKeys).
ndk::ScopedAStatus ExternalCameraDevice::setTorchMode(bool) {
    return fromStatus(Status::OPERATION_NOT_SUPPORTED);
}
| 175 | |
// No torch support, hence no torch strength control either.
ndk::ScopedAStatus ExternalCameraDevice::turnOnTorchWithStrengthLevel(int32_t) {
    return fromStatus(Status::OPERATION_NOT_SUPPORTED);
}
| 179 | |
// No torch support; strength query is rejected.
ndk::ScopedAStatus ExternalCameraDevice::getTorchStrengthLevel(int32_t*) {
    return fromStatus(Status::OPERATION_NOT_SUPPORTED);
}
| 183 | |
// Constructs a new ExternalCameraDeviceSession, transferring ownership of the
// opened v4l2 fd to it. NOTE(review): likely a seam overridden by subclasses
// or tests — confirm whether this is declared virtual in the header.
std::shared_ptr<ExternalCameraDeviceSession> ExternalCameraDevice::createSession(
        const std::shared_ptr<ICameraDeviceCallback>& cb, const ExternalCameraConfig& cfg,
        const std::vector<SupportedV4L2Format>& sortedFormats, const CroppingType& croppingType,
        const common::V1_0::helper::CameraMetadata& chars, const std::string& cameraId,
        unique_fd v4l2Fd) {
    return ndk::SharedRefBase::make<ExternalCameraDeviceSession>(
            cb, cfg, sortedFormats, croppingType, chars, cameraId, std::move(v4l2Fd));
}
| 192 | |
// Thread-safe wrapper around isInitFailedLocked().
bool ExternalCameraDevice::isInitFailed() {
    Mutex::Autolock _l(mLock);
    return isInitFailedLocked();
}
| 197 | |
// Lazily initializes the camera characteristics on first call and reports
// whether that initialization failed. The outcome is sticky: once mInitialized
// is set, initCameraCharacteristics() is never attempted again, so a failure
// is permanent for this object's lifetime. Caller must hold mLock.
bool ExternalCameraDevice::isInitFailedLocked() {
    if (!mInitialized) {
        status_t ret = initCameraCharacteristics();
        if (ret != OK) {
            ALOGE("%s: init camera characteristics failed: errorno %d", __FUNCTION__, ret);
            mInitFailed = true;
        }
        mInitialized = true;
    }
    return mInitFailed;
}
| 209 | |
| 210 | void ExternalCameraDevice::initSupportedFormatsLocked(int fd) { |
| 211 | std::vector<SupportedV4L2Format> horizontalFmts = |
| 212 | getCandidateSupportedFormatsLocked(fd, HORIZONTAL, mCfg.fpsLimits, mCfg.depthFpsLimits, |
| 213 | mCfg.minStreamSize, mCfg.depthEnabled); |
| 214 | std::vector<SupportedV4L2Format> verticalFmts = |
| 215 | getCandidateSupportedFormatsLocked(fd, VERTICAL, mCfg.fpsLimits, mCfg.depthFpsLimits, |
| 216 | mCfg.minStreamSize, mCfg.depthEnabled); |
| 217 | |
| 218 | size_t horiSize = horizontalFmts.size(); |
| 219 | size_t vertSize = verticalFmts.size(); |
| 220 | |
| 221 | if (horiSize == 0 && vertSize == 0) { |
| 222 | ALOGE("%s: cannot find suitable cropping type!", __FUNCTION__); |
| 223 | return; |
| 224 | } |
| 225 | |
| 226 | if (horiSize == 0) { |
| 227 | mSupportedFormats = verticalFmts; |
| 228 | mCroppingType = VERTICAL; |
| 229 | return; |
| 230 | } else if (vertSize == 0) { |
| 231 | mSupportedFormats = horizontalFmts; |
| 232 | mCroppingType = HORIZONTAL; |
| 233 | return; |
| 234 | } |
| 235 | |
| 236 | const auto& maxHoriSize = horizontalFmts[horizontalFmts.size() - 1]; |
| 237 | const auto& maxVertSize = verticalFmts[verticalFmts.size() - 1]; |
| 238 | |
| 239 | // Try to keep the largest possible output size |
| 240 | // When they are the same or ambiguous, pick the one support more sizes |
| 241 | if (maxHoriSize.width == maxVertSize.width && maxHoriSize.height == maxVertSize.height) { |
| 242 | if (horiSize > vertSize) { |
| 243 | mSupportedFormats = horizontalFmts; |
| 244 | mCroppingType = HORIZONTAL; |
| 245 | } else { |
| 246 | mSupportedFormats = verticalFmts; |
| 247 | mCroppingType = VERTICAL; |
| 248 | } |
| 249 | } else if (maxHoriSize.width >= maxVertSize.width && maxHoriSize.height >= maxVertSize.height) { |
| 250 | mSupportedFormats = horizontalFmts; |
| 251 | mCroppingType = HORIZONTAL; |
| 252 | } else if (maxHoriSize.width <= maxVertSize.width && maxHoriSize.height <= maxVertSize.height) { |
| 253 | mSupportedFormats = verticalFmts; |
| 254 | mCroppingType = VERTICAL; |
| 255 | } else { |
| 256 | if (horiSize > vertSize) { |
| 257 | mSupportedFormats = horizontalFmts; |
| 258 | mCroppingType = HORIZONTAL; |
| 259 | } else { |
| 260 | mSupportedFormats = verticalFmts; |
| 261 | mCroppingType = VERTICAL; |
| 262 | } |
| 263 | } |
| 264 | } |
| 265 | |
// Builds mCameraCharacteristics by opening the v4l2 node and running the four
// sub-initializers in order (defaults, controls, output configs, available
// capabilities). Returns OK if already built or built successfully,
// DEAD_OBJECT if the node cannot be opened, otherwise the failing step's
// error. On failure the partially-built metadata is cleared so no stale
// partial state remains.
status_t ExternalCameraDevice::initCameraCharacteristics() {
    if (!mCameraCharacteristics.isEmpty()) {
        // Camera Characteristics previously initialized. Skip.
        return OK;
    }

    // init camera characteristics
    unique_fd fd(::open(mDevicePath.c_str(), O_RDWR));
    if (fd.get() < 0) {
        ALOGE("%s: v4l2 device open %s failed", __FUNCTION__, mDevicePath.c_str());
        return DEAD_OBJECT;
    }

    status_t ret;
    ret = initDefaultCharsKeys(&mCameraCharacteristics);
    if (ret != OK) {
        ALOGE("%s: init default characteristics key failed: errorno %d", __FUNCTION__, ret);
        mCameraCharacteristics.clear();
        return ret;
    }

    ret = initCameraControlsCharsKeys(fd.get(), &mCameraCharacteristics);
    if (ret != OK) {
        ALOGE("%s: init camera control characteristics key failed: errorno %d", __FUNCTION__, ret);
        mCameraCharacteristics.clear();
        return ret;
    }

    // Also populates mSupportedFormats/mCroppingType as a side effect.
    ret = initOutputCharsKeys(fd.get(), &mCameraCharacteristics);
    if (ret != OK) {
        ALOGE("%s: init output characteristics key failed: errorno %d", __FUNCTION__, ret);
        mCameraCharacteristics.clear();
        return ret;
    }

    // Must run after initOutputCharsKeys: depends on mSupportedFormats.
    ret = initAvailableCapabilities(&mCameraCharacteristics);
    if (ret != OK) {
        ALOGE("%s: init available capabilities key failed: errorno %d", __FUNCTION__, ret);
        mCameraCharacteristics.clear();
        return ret;
    }

    return OK;
}
| 310 | |
// Number of elements in a C array.
#define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0]))
// Writes one metadata tag via the local `metadata` pointer; on failure, logs
// the tag name and makes the *enclosing function* return -EINVAL. Only usable
// inside functions that take a `metadata` parameter and return status_t/int.
#define UPDATE(tag, data, size)                    \
    do {                                           \
        if (metadata->update((tag), (data), (size))) { \
            ALOGE("Update " #tag " failed!");      \
            return -EINVAL;                        \
        }                                          \
    } while (0)
| 319 | |
| 320 | status_t ExternalCameraDevice::initAvailableCapabilities( |
| 321 | ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) { |
| 322 | if (mSupportedFormats.empty()) { |
| 323 | ALOGE("%s: Supported formats list is empty", __FUNCTION__); |
| 324 | return UNKNOWN_ERROR; |
| 325 | } |
| 326 | |
| 327 | bool hasDepth = false; |
| 328 | bool hasColor = false; |
| 329 | for (const auto& fmt : mSupportedFormats) { |
| 330 | switch (fmt.fourcc) { |
| 331 | case V4L2_PIX_FMT_Z16: |
| 332 | hasDepth = true; |
| 333 | break; |
| 334 | case V4L2_PIX_FMT_MJPEG: |
| 335 | hasColor = true; |
| 336 | break; |
| 337 | default: |
| 338 | ALOGW("%s: Unsupported format found", __FUNCTION__); |
| 339 | } |
| 340 | } |
| 341 | |
| 342 | std::vector<uint8_t> availableCapabilities; |
| 343 | if (hasDepth) { |
| 344 | availableCapabilities.push_back(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT); |
| 345 | } |
| 346 | if (hasColor) { |
| 347 | availableCapabilities.push_back(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE); |
| 348 | } |
| 349 | if (!availableCapabilities.empty()) { |
| 350 | UPDATE(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, availableCapabilities.data(), |
| 351 | availableCapabilities.size()); |
| 352 | } |
| 353 | |
| 354 | return OK; |
| 355 | } |
| 356 | |
| 357 | status_t ExternalCameraDevice::initDefaultCharsKeys( |
| 358 | ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) { |
| 359 | const uint8_t hardware_level = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL; |
| 360 | UPDATE(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, &hardware_level, 1); |
| 361 | |
| 362 | // android.colorCorrection |
| 363 | const uint8_t availableAberrationModes[] = {ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF}; |
| 364 | UPDATE(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES, availableAberrationModes, |
| 365 | ARRAY_SIZE(availableAberrationModes)); |
| 366 | |
| 367 | // android.control |
| 368 | const uint8_t antibandingMode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO; |
| 369 | UPDATE(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, &antibandingMode, 1); |
| 370 | |
| 371 | const int32_t controlMaxRegions[] = {/*AE*/ 0, /*AWB*/ 0, /*AF*/ 0}; |
| 372 | UPDATE(ANDROID_CONTROL_MAX_REGIONS, controlMaxRegions, ARRAY_SIZE(controlMaxRegions)); |
| 373 | |
| 374 | const uint8_t videoStabilizationMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF; |
| 375 | UPDATE(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, &videoStabilizationMode, 1); |
| 376 | |
| 377 | const uint8_t awbAvailableMode = ANDROID_CONTROL_AWB_MODE_AUTO; |
| 378 | UPDATE(ANDROID_CONTROL_AWB_AVAILABLE_MODES, &awbAvailableMode, 1); |
| 379 | |
| 380 | const uint8_t aeAvailableMode = ANDROID_CONTROL_AE_MODE_ON; |
| 381 | UPDATE(ANDROID_CONTROL_AE_AVAILABLE_MODES, &aeAvailableMode, 1); |
| 382 | |
| 383 | const uint8_t availableFffect = ANDROID_CONTROL_EFFECT_MODE_OFF; |
| 384 | UPDATE(ANDROID_CONTROL_AVAILABLE_EFFECTS, &availableFffect, 1); |
| 385 | |
| 386 | const uint8_t controlAvailableModes[] = {ANDROID_CONTROL_MODE_OFF, ANDROID_CONTROL_MODE_AUTO}; |
| 387 | UPDATE(ANDROID_CONTROL_AVAILABLE_MODES, controlAvailableModes, |
| 388 | ARRAY_SIZE(controlAvailableModes)); |
| 389 | |
| 390 | // android.edge |
| 391 | const uint8_t edgeMode = ANDROID_EDGE_MODE_OFF; |
| 392 | UPDATE(ANDROID_EDGE_AVAILABLE_EDGE_MODES, &edgeMode, 1); |
| 393 | |
| 394 | // android.flash |
| 395 | const uint8_t flashInfo = ANDROID_FLASH_INFO_AVAILABLE_FALSE; |
| 396 | UPDATE(ANDROID_FLASH_INFO_AVAILABLE, &flashInfo, 1); |
| 397 | |
| 398 | // android.hotPixel |
| 399 | const uint8_t hotPixelMode = ANDROID_HOT_PIXEL_MODE_OFF; |
| 400 | UPDATE(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES, &hotPixelMode, 1); |
| 401 | |
Sangheum | aa9d8dc | 2023-12-20 16:48:03 +0900 | [diff] [blame] | 402 | // android.info |
| 403 | const uint8_t bufMgrVer = ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5; |
| 404 | UPDATE(ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &bufMgrVer, 1); |
| 405 | |
Avichal Rakesh | e1857f8 | 2022-06-08 17:47:23 -0700 | [diff] [blame] | 406 | // android.jpeg |
| 407 | const int32_t jpegAvailableThumbnailSizes[] = {0, 0, 176, 144, 240, 144, 256, |
| 408 | 144, 240, 160, 256, 154, 240, 180}; |
| 409 | UPDATE(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, jpegAvailableThumbnailSizes, |
| 410 | ARRAY_SIZE(jpegAvailableThumbnailSizes)); |
| 411 | |
| 412 | const int32_t jpegMaxSize = mCfg.maxJpegBufSize; |
| 413 | UPDATE(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1); |
| 414 | |
| 415 | // android.lens |
| 416 | const uint8_t focusDistanceCalibration = |
| 417 | ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED; |
| 418 | UPDATE(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION, &focusDistanceCalibration, 1); |
| 419 | |
| 420 | const uint8_t opticalStabilizationMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF; |
| 421 | UPDATE(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION, &opticalStabilizationMode, 1); |
| 422 | |
| 423 | const uint8_t facing = ANDROID_LENS_FACING_EXTERNAL; |
| 424 | UPDATE(ANDROID_LENS_FACING, &facing, 1); |
| 425 | |
| 426 | // android.noiseReduction |
| 427 | const uint8_t noiseReductionMode = ANDROID_NOISE_REDUCTION_MODE_OFF; |
| 428 | UPDATE(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES, &noiseReductionMode, 1); |
| 429 | UPDATE(ANDROID_NOISE_REDUCTION_MODE, &noiseReductionMode, 1); |
| 430 | |
| 431 | const int32_t partialResultCount = 1; |
| 432 | UPDATE(ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &partialResultCount, 1); |
| 433 | |
| 434 | // This means pipeline latency of X frame intervals. The maximum number is 4. |
| 435 | const uint8_t requestPipelineMaxDepth = 4; |
| 436 | UPDATE(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, &requestPipelineMaxDepth, 1); |
| 437 | |
| 438 | // Three numbers represent the maximum numbers of different types of output |
| 439 | // streams simultaneously. The types are raw sensor, processed (but not |
| 440 | // stalling), and processed (but stalling). For usb limited mode, raw sensor |
| 441 | // is not supported. Stalling stream is JPEG. Non-stalling streams are |
| 442 | // YUV_420_888 or YV12. |
| 443 | const int32_t requestMaxNumOutputStreams[] = { |
| 444 | /*RAW*/ 0, |
| 445 | /*Processed*/ ExternalCameraDeviceSession::kMaxProcessedStream, |
| 446 | /*Stall*/ ExternalCameraDeviceSession::kMaxStallStream}; |
| 447 | UPDATE(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, requestMaxNumOutputStreams, |
| 448 | ARRAY_SIZE(requestMaxNumOutputStreams)); |
| 449 | |
| 450 | // Limited mode doesn't support reprocessing. |
| 451 | const int32_t requestMaxNumInputStreams = 0; |
| 452 | UPDATE(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS, &requestMaxNumInputStreams, 1); |
| 453 | |
| 454 | // android.scaler |
| 455 | // TODO: b/72263447 V4L2_CID_ZOOM_* |
| 456 | const float scalerAvailableMaxDigitalZoom[] = {1}; |
| 457 | UPDATE(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, scalerAvailableMaxDigitalZoom, |
| 458 | ARRAY_SIZE(scalerAvailableMaxDigitalZoom)); |
| 459 | |
| 460 | const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY; |
| 461 | UPDATE(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1); |
| 462 | |
| 463 | const int32_t testPatternModes[] = {ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, |
| 464 | ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR}; |
| 465 | UPDATE(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, testPatternModes, |
| 466 | ARRAY_SIZE(testPatternModes)); |
| 467 | |
| 468 | const uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN; |
| 469 | UPDATE(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, ×tampSource, 1); |
| 470 | |
| 471 | // Orientation is a bit odd for external camera, but consider it as the orientation |
| 472 | // between the external camera sensor (which is usually landscape) and the device's |
| 473 | // natural display orientation. For devices with natural landscape display (ex: tablet/TV), the |
| 474 | // orientation should be 0. For devices with natural portrait display (phone), the orientation |
| 475 | // should be 270. |
| 476 | const int32_t orientation = mCfg.orientation; |
| 477 | UPDATE(ANDROID_SENSOR_ORIENTATION, &orientation, 1); |
| 478 | |
| 479 | // android.shading |
| 480 | const uint8_t availableMode = ANDROID_SHADING_MODE_OFF; |
| 481 | UPDATE(ANDROID_SHADING_AVAILABLE_MODES, &availableMode, 1); |
| 482 | |
| 483 | // android.statistics |
| 484 | const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF; |
| 485 | UPDATE(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, &faceDetectMode, 1); |
| 486 | |
| 487 | const int32_t maxFaceCount = 0; |
| 488 | UPDATE(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, &maxFaceCount, 1); |
| 489 | |
| 490 | const uint8_t availableHotpixelMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF; |
| 491 | UPDATE(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES, &availableHotpixelMode, 1); |
| 492 | |
| 493 | const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF; |
| 494 | UPDATE(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES, &lensShadingMapMode, 1); |
| 495 | |
| 496 | // android.sync |
| 497 | const int32_t maxLatency = ANDROID_SYNC_MAX_LATENCY_UNKNOWN; |
| 498 | UPDATE(ANDROID_SYNC_MAX_LATENCY, &maxLatency, 1); |
| 499 | |
Avichal Rakesh | 1fb9ba4 | 2023-12-13 13:09:57 -0800 | [diff] [blame] | 500 | const uint8_t sensorReadoutTimestamp = ANDROID_SENSOR_READOUT_TIMESTAMP_NOT_SUPPORTED; |
| 501 | UPDATE(ANDROID_SENSOR_READOUT_TIMESTAMP, &sensorReadoutTimestamp, 1); |
| 502 | |
Avichal Rakesh | e1857f8 | 2022-06-08 17:47:23 -0700 | [diff] [blame] | 503 | /* Other sensor/RAW related keys: |
| 504 | * android.sensor.info.colorFilterArrangement -> no need if we don't do RAW |
| 505 | * android.sensor.info.physicalSize -> not available |
| 506 | * android.sensor.info.whiteLevel -> not available/not needed |
| 507 | * android.sensor.info.lensShadingApplied -> not needed |
| 508 | * android.sensor.info.preCorrectionActiveArraySize -> not available/not needed |
| 509 | * android.sensor.blackLevelPattern -> not available/not needed |
| 510 | */ |
| 511 | |
| 512 | const int32_t availableRequestKeys[] = {ANDROID_COLOR_CORRECTION_ABERRATION_MODE, |
| 513 | ANDROID_CONTROL_AE_ANTIBANDING_MODE, |
| 514 | ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, |
| 515 | ANDROID_CONTROL_AE_LOCK, |
| 516 | ANDROID_CONTROL_AE_MODE, |
| 517 | ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, |
| 518 | ANDROID_CONTROL_AE_TARGET_FPS_RANGE, |
| 519 | ANDROID_CONTROL_AF_MODE, |
| 520 | ANDROID_CONTROL_AF_TRIGGER, |
| 521 | ANDROID_CONTROL_AWB_LOCK, |
| 522 | ANDROID_CONTROL_AWB_MODE, |
| 523 | ANDROID_CONTROL_CAPTURE_INTENT, |
| 524 | ANDROID_CONTROL_EFFECT_MODE, |
| 525 | ANDROID_CONTROL_MODE, |
| 526 | ANDROID_CONTROL_SCENE_MODE, |
| 527 | ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, |
| 528 | ANDROID_FLASH_MODE, |
| 529 | ANDROID_JPEG_ORIENTATION, |
| 530 | ANDROID_JPEG_QUALITY, |
| 531 | ANDROID_JPEG_THUMBNAIL_QUALITY, |
| 532 | ANDROID_JPEG_THUMBNAIL_SIZE, |
| 533 | ANDROID_LENS_OPTICAL_STABILIZATION_MODE, |
| 534 | ANDROID_NOISE_REDUCTION_MODE, |
| 535 | ANDROID_SCALER_CROP_REGION, |
| 536 | ANDROID_SENSOR_TEST_PATTERN_MODE, |
| 537 | ANDROID_STATISTICS_FACE_DETECT_MODE, |
| 538 | ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE}; |
| 539 | UPDATE(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, availableRequestKeys, |
| 540 | ARRAY_SIZE(availableRequestKeys)); |
| 541 | |
| 542 | const int32_t availableResultKeys[] = {ANDROID_COLOR_CORRECTION_ABERRATION_MODE, |
| 543 | ANDROID_CONTROL_AE_ANTIBANDING_MODE, |
| 544 | ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, |
| 545 | ANDROID_CONTROL_AE_LOCK, |
| 546 | ANDROID_CONTROL_AE_MODE, |
| 547 | ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, |
| 548 | ANDROID_CONTROL_AE_STATE, |
| 549 | ANDROID_CONTROL_AE_TARGET_FPS_RANGE, |
| 550 | ANDROID_CONTROL_AF_MODE, |
| 551 | ANDROID_CONTROL_AF_STATE, |
| 552 | ANDROID_CONTROL_AF_TRIGGER, |
| 553 | ANDROID_CONTROL_AWB_LOCK, |
| 554 | ANDROID_CONTROL_AWB_MODE, |
| 555 | ANDROID_CONTROL_AWB_STATE, |
| 556 | ANDROID_CONTROL_CAPTURE_INTENT, |
| 557 | ANDROID_CONTROL_EFFECT_MODE, |
| 558 | ANDROID_CONTROL_MODE, |
| 559 | ANDROID_CONTROL_SCENE_MODE, |
| 560 | ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, |
| 561 | ANDROID_FLASH_MODE, |
| 562 | ANDROID_FLASH_STATE, |
| 563 | ANDROID_JPEG_ORIENTATION, |
| 564 | ANDROID_JPEG_QUALITY, |
| 565 | ANDROID_JPEG_THUMBNAIL_QUALITY, |
| 566 | ANDROID_JPEG_THUMBNAIL_SIZE, |
| 567 | ANDROID_LENS_OPTICAL_STABILIZATION_MODE, |
| 568 | ANDROID_NOISE_REDUCTION_MODE, |
| 569 | ANDROID_REQUEST_PIPELINE_DEPTH, |
| 570 | ANDROID_SCALER_CROP_REGION, |
| 571 | ANDROID_SENSOR_TIMESTAMP, |
| 572 | ANDROID_STATISTICS_FACE_DETECT_MODE, |
| 573 | ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, |
| 574 | ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, |
| 575 | ANDROID_STATISTICS_SCENE_FLICKER}; |
| 576 | UPDATE(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, availableResultKeys, |
| 577 | ARRAY_SIZE(availableResultKeys)); |
| 578 | |
| 579 | UPDATE(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, AVAILABLE_CHARACTERISTICS_KEYS.data(), |
| 580 | AVAILABLE_CHARACTERISTICS_KEYS.size()); |
| 581 | |
| 582 | return OK; |
| 583 | } |
| 584 | |
// Populates the 3A/control characteristics. The v4l2 fd parameter is currently
// unused — all values are fixed defaults (no AE compensation, AF auto/off,
// scene disabled, 3A locks unavailable) until the V4L2 controls noted in the
// TODOs are queried.
status_t ExternalCameraDevice::initCameraControlsCharsKeys(
        int, ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
    // android.sensor.info.sensitivityRange -> V4L2_CID_ISO_SENSITIVITY
    // android.sensor.info.exposureTimeRange -> V4L2_CID_EXPOSURE_ABSOLUTE
    // android.sensor.info.maxFrameDuration -> TBD
    // android.lens.info.minimumFocusDistance -> V4L2_CID_FOCUS_ABSOLUTE
    // android.lens.info.hyperfocalDistance
    // android.lens.info.availableFocalLengths -> not available?

    // android.control
    // No AE compensation support for now.
    // TODO: V4L2_CID_EXPOSURE_BIAS
    const int32_t controlAeCompensationRange[] = {0, 0};
    UPDATE(ANDROID_CONTROL_AE_COMPENSATION_RANGE, controlAeCompensationRange,
           ARRAY_SIZE(controlAeCompensationRange));
    // Step {0, 1} (i.e. 0) matches the empty compensation range above.
    const camera_metadata_rational_t controlAeCompensationStep[] = {{0, 1}};
    UPDATE(ANDROID_CONTROL_AE_COMPENSATION_STEP, controlAeCompensationStep,
           ARRAY_SIZE(controlAeCompensationStep));

    // TODO: Check V4L2_CID_AUTO_FOCUS_*.
    const uint8_t afAvailableModes[] = {ANDROID_CONTROL_AF_MODE_AUTO, ANDROID_CONTROL_AF_MODE_OFF};
    UPDATE(ANDROID_CONTROL_AF_AVAILABLE_MODES, afAvailableModes, ARRAY_SIZE(afAvailableModes));

    // TODO: V4L2_CID_SCENE_MODE
    const uint8_t availableSceneMode = ANDROID_CONTROL_SCENE_MODE_DISABLED;
    UPDATE(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, &availableSceneMode, 1);

    // TODO: V4L2_CID_3A_LOCK
    const uint8_t aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
    UPDATE(ANDROID_CONTROL_AE_LOCK_AVAILABLE, &aeLockAvailable, 1);
    const uint8_t awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
    UPDATE(ANDROID_CONTROL_AWB_LOCK_AVAILABLE, &awbLockAvailable, 1);

    // TODO: V4L2_CID_ZOOM_*
    // NOTE(review): this tag is also written (with the same value) by
    // initDefaultCharsKeys — likely redundant; confirm before removing either.
    const float scalerAvailableMaxDigitalZoom[] = {1};
    UPDATE(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, scalerAvailableMaxDigitalZoom,
           ARRAY_SIZE(scalerAvailableMaxDigitalZoom));

    return OK;
}
| 625 | |
// Discovers the device's supported formats (populating mSupportedFormats and
// mCroppingType via initSupportedFormatsLocked), then writes the stream
// configuration, frame-duration, fps, and sensor array-size tags. MJPEG maps
// to the color HAL formats (BLOB/YUV/IMPLEMENTATION_DEFINED); Z16 maps to the
// depth format (Y16).
status_t ExternalCameraDevice::initOutputCharsKeys(
        int fd, ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
    initSupportedFormatsLocked(fd);
    if (mSupportedFormats.empty()) {
        ALOGE("%s: Init supported format list failed", __FUNCTION__);
        return UNKNOWN_ERROR;
    }

    bool hasDepth = false;
    bool hasColor = false;

    // For V4L2_PIX_FMT_Z16
    std::array<int, /*size*/ 1> halDepthFormats{{HAL_PIXEL_FORMAT_Y16}};
    // For V4L2_PIX_FMT_MJPEG
    std::array<int, /*size*/ 3> halFormats{{HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
                                            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}};

    // Determine which of the two known 4CCs this device actually produces.
    for (const auto& supportedFormat : mSupportedFormats) {
        switch (supportedFormat.fourcc) {
            case V4L2_PIX_FMT_Z16:
                hasDepth = true;
                break;
            case V4L2_PIX_FMT_MJPEG:
                hasColor = true;
                break;
            default:
                // Decode the 4CC bytes (little-endian) for the log.
                ALOGW("%s: format %c%c%c%c is not supported!", __FUNCTION__,
                      supportedFormat.fourcc & 0xFF, (supportedFormat.fourcc >> 8) & 0xFF,
                      (supportedFormat.fourcc >> 16) & 0xFF, (supportedFormat.fourcc >> 24) & 0xFF);
        }
    }

    if (hasDepth) {
        status_t ret = initOutputCharsKeysByFormat(
                metadata, V4L2_PIX_FMT_Z16, halDepthFormats,
                ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT,
                ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
                ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
                ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS);
        if (ret != OK) {
            ALOGE("%s: Unable to initialize depth format keys: %s", __FUNCTION__,
                  statusToString(ret).c_str());
            return ret;
        }
    }
    if (hasColor) {
        status_t ret =
                initOutputCharsKeysByFormat(metadata, V4L2_PIX_FMT_MJPEG, halFormats,
                                            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
                                            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
                                            ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
                                            ANDROID_SCALER_AVAILABLE_STALL_DURATIONS);
        if (ret != OK) {
            ALOGE("%s: Unable to initialize color format keys: %s", __FUNCTION__,
                  statusToString(ret).c_str());
            return ret;
        }
    }

    status_t ret = calculateMinFps(metadata);
    if (ret != OK) {
        ALOGE("%s: Unable to update fps metadata: %s", __FUNCTION__, statusToString(ret).c_str());
        return ret;
    }

    // Report the largest supported resolution as the (pre-correction) active
    // array and pixel array size, since a USB camera exposes no real sensor
    // geometry.
    SupportedV4L2Format maximumFormat{.width = 0, .height = 0};
    for (const auto& supportedFormat : mSupportedFormats) {
        if (supportedFormat.width >= maximumFormat.width &&
            supportedFormat.height >= maximumFormat.height) {
            maximumFormat = supportedFormat;
        }
    }
    int32_t activeArraySize[] = {0, 0, static_cast<int32_t>(maximumFormat.width),
                                 static_cast<int32_t>(maximumFormat.height)};
    UPDATE(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, activeArraySize,
           ARRAY_SIZE(activeArraySize));
    UPDATE(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, activeArraySize, ARRAY_SIZE(activeArraySize));

    int32_t pixelArraySize[] = {static_cast<int32_t>(maximumFormat.width),
                                static_cast<int32_t>(maximumFormat.height)};
    UPDATE(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, pixelArraySize, ARRAY_SIZE(pixelArraySize));
    return OK;
}
| 709 | |
| 710 | template <size_t SIZE> |
| 711 | status_t ExternalCameraDevice::initOutputCharsKeysByFormat( |
| 712 | ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata, |
| 713 | uint32_t fourcc, const std::array<int, SIZE>& halFormats, int streamConfigTag, |
| 714 | int streamConfigurationKey, int minFrameDurationKey, int stallDurationKey) { |
| 715 | if (mSupportedFormats.empty()) { |
| 716 | ALOGE("%s: Init supported format list failed", __FUNCTION__); |
| 717 | return UNKNOWN_ERROR; |
| 718 | } |
| 719 | |
| 720 | std::vector<int32_t> streamConfigurations; |
| 721 | std::vector<int64_t> minFrameDurations; |
| 722 | std::vector<int64_t> stallDurations; |
| 723 | |
| 724 | for (const auto& supportedFormat : mSupportedFormats) { |
| 725 | if (supportedFormat.fourcc != fourcc) { |
| 726 | // Skip 4CCs not meant for the halFormats |
| 727 | continue; |
| 728 | } |
| 729 | for (const auto& format : halFormats) { |
| 730 | streamConfigurations.push_back(format); |
| 731 | streamConfigurations.push_back(supportedFormat.width); |
| 732 | streamConfigurations.push_back(supportedFormat.height); |
| 733 | streamConfigurations.push_back(streamConfigTag); |
| 734 | } |
| 735 | |
| 736 | int64_t minFrameDuration = std::numeric_limits<int64_t>::max(); |
| 737 | for (const auto& fr : supportedFormat.frameRates) { |
| 738 | // 1000000000LL < (2^32 - 1) and |
| 739 | // fr.durationNumerator is uint32_t, so no overflow here |
| 740 | int64_t frameDuration = 1000000000LL * fr.durationNumerator / fr.durationDenominator; |
| 741 | if (frameDuration < minFrameDuration) { |
| 742 | minFrameDuration = frameDuration; |
| 743 | } |
| 744 | } |
| 745 | |
| 746 | for (const auto& format : halFormats) { |
| 747 | minFrameDurations.push_back(format); |
| 748 | minFrameDurations.push_back(supportedFormat.width); |
| 749 | minFrameDurations.push_back(supportedFormat.height); |
| 750 | minFrameDurations.push_back(minFrameDuration); |
| 751 | } |
| 752 | |
| 753 | // The stall duration is 0 for non-jpeg formats. For JPEG format, stall |
| 754 | // duration can be 0 if JPEG is small. Here we choose 1 sec for JPEG. |
| 755 | // TODO: b/72261675. Maybe set this dynamically |
| 756 | for (const auto& format : halFormats) { |
| 757 | const int64_t NS_TO_SECOND = 1E9; |
| 758 | int64_t stall_duration = (format == HAL_PIXEL_FORMAT_BLOB) ? NS_TO_SECOND : 0; |
| 759 | stallDurations.push_back(format); |
| 760 | stallDurations.push_back(supportedFormat.width); |
| 761 | stallDurations.push_back(supportedFormat.height); |
| 762 | stallDurations.push_back(stall_duration); |
| 763 | } |
| 764 | } |
| 765 | |
| 766 | UPDATE(streamConfigurationKey, streamConfigurations.data(), streamConfigurations.size()); |
| 767 | |
| 768 | UPDATE(minFrameDurationKey, minFrameDurations.data(), minFrameDurations.size()); |
| 769 | |
| 770 | UPDATE(stallDurationKey, stallDurations.data(), stallDurations.size()); |
| 771 | |
| 772 | return OK; |
| 773 | } |
| 774 | |
| 775 | status_t ExternalCameraDevice::calculateMinFps( |
| 776 | ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) { |
| 777 | std::set<int32_t> framerates; |
| 778 | int32_t minFps = std::numeric_limits<int32_t>::max(); |
| 779 | |
| 780 | for (const auto& supportedFormat : mSupportedFormats) { |
| 781 | for (const auto& fr : supportedFormat.frameRates) { |
| 782 | int32_t frameRateInt = static_cast<int32_t>(fr.getFramesPerSecond()); |
| 783 | if (minFps > frameRateInt) { |
| 784 | minFps = frameRateInt; |
| 785 | } |
| 786 | framerates.insert(frameRateInt); |
| 787 | } |
| 788 | } |
| 789 | |
| 790 | std::vector<int32_t> fpsRanges; |
| 791 | // FPS ranges |
| 792 | for (const auto& framerate : framerates) { |
| 793 | // Empirical: webcams often have close to 2x fps error and cannot support fixed fps range |
| 794 | fpsRanges.push_back(framerate / 2); |
| 795 | fpsRanges.push_back(framerate); |
| 796 | } |
| 797 | minFps /= 2; |
| 798 | int64_t maxFrameDuration = 1000000000LL / minFps; |
| 799 | |
| 800 | UPDATE(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, fpsRanges.data(), fpsRanges.size()); |
| 801 | |
| 802 | UPDATE(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, &maxFrameDuration, 1); |
| 803 | |
| 804 | return OK; |
| 805 | } |
| 806 | |
| 807 | #undef ARRAY_SIZE |
| 808 | #undef UPDATE |
| 809 | |
| 810 | void ExternalCameraDevice::getFrameRateList(int fd, double fpsUpperBound, |
| 811 | SupportedV4L2Format* format) { |
| 812 | format->frameRates.clear(); |
| 813 | |
| 814 | v4l2_frmivalenum frameInterval{ |
| 815 | .index = 0, |
| 816 | .pixel_format = format->fourcc, |
| 817 | .width = static_cast<__u32>(format->width), |
| 818 | .height = static_cast<__u32>(format->height), |
| 819 | }; |
| 820 | |
| 821 | for (frameInterval.index = 0; |
| 822 | TEMP_FAILURE_RETRY(ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &frameInterval)) == 0; |
| 823 | ++frameInterval.index) { |
| 824 | if (frameInterval.type == V4L2_FRMIVAL_TYPE_DISCRETE) { |
| 825 | if (frameInterval.discrete.numerator != 0) { |
| 826 | SupportedV4L2Format::FrameRate fr = {frameInterval.discrete.numerator, |
| 827 | frameInterval.discrete.denominator}; |
| 828 | double framerate = fr.getFramesPerSecond(); |
| 829 | if (framerate > fpsUpperBound) { |
| 830 | continue; |
| 831 | } |
| 832 | ALOGV("index:%d, format:%c%c%c%c, w %d, h %d, framerate %f", frameInterval.index, |
| 833 | frameInterval.pixel_format & 0xFF, (frameInterval.pixel_format >> 8) & 0xFF, |
| 834 | (frameInterval.pixel_format >> 16) & 0xFF, |
| 835 | (frameInterval.pixel_format >> 24) & 0xFF, frameInterval.width, |
| 836 | frameInterval.height, framerate); |
| 837 | format->frameRates.push_back(fr); |
| 838 | } |
| 839 | } |
| 840 | } |
| 841 | |
| 842 | if (format->frameRates.empty()) { |
| 843 | ALOGE("%s: failed to get supported frame rates for format:%c%c%c%c w %d h %d", __FUNCTION__, |
| 844 | frameInterval.pixel_format & 0xFF, (frameInterval.pixel_format >> 8) & 0xFF, |
| 845 | (frameInterval.pixel_format >> 16) & 0xFF, (frameInterval.pixel_format >> 24) & 0xFF, |
| 846 | frameInterval.width, frameInterval.height); |
| 847 | } |
| 848 | } |
| 849 | |
| 850 | void ExternalCameraDevice::updateFpsBounds( |
| 851 | int fd, CroppingType cropType, |
| 852 | const std::vector<ExternalCameraConfig::FpsLimitation>& fpsLimits, |
| 853 | SupportedV4L2Format format, std::vector<SupportedV4L2Format>& outFmts) { |
| 854 | double fpsUpperBound = -1.0; |
| 855 | for (const auto& limit : fpsLimits) { |
| 856 | if (cropType == VERTICAL) { |
| 857 | if (format.width <= limit.size.width) { |
| 858 | fpsUpperBound = limit.fpsUpperBound; |
| 859 | break; |
| 860 | } |
| 861 | } else { // HORIZONTAL |
| 862 | if (format.height <= limit.size.height) { |
| 863 | fpsUpperBound = limit.fpsUpperBound; |
| 864 | break; |
| 865 | } |
| 866 | } |
| 867 | } |
| 868 | if (fpsUpperBound < 0.f) { |
| 869 | return; |
| 870 | } |
| 871 | |
| 872 | getFrameRateList(fd, fpsUpperBound, &format); |
| 873 | if (!format.frameRates.empty()) { |
| 874 | outFmts.push_back(format); |
| 875 | } |
| 876 | } |
| 877 | |
// Enumerates the V4L2 formats/sizes the device at `fd` offers and returns those
// usable by the HAL: only fourccs in kSupportedFourCCs, only discrete sizes with
// width >= height and at least minStreamSize, each with fps bounds applied
// (depth formats use depthFpsLimits when depthEnabled). The result is trimmed to
// sizes croppable from the largest one.
std::vector<SupportedV4L2Format> ExternalCameraDevice::getCandidateSupportedFormatsLocked(
        int fd, CroppingType cropType,
        const std::vector<ExternalCameraConfig::FpsLimitation>& fpsLimits,
        const std::vector<ExternalCameraConfig::FpsLimitation>& depthFpsLimits,
        const Size& minStreamSize, bool depthEnabled) {
    std::vector<SupportedV4L2Format> outFmts;
    struct v4l2_fmtdesc fmtdesc {
        .index = 0, .type = V4L2_BUF_TYPE_VIDEO_CAPTURE
    };
    int ret = 0;
    // VIDIOC_ENUM_FMT is driven by fmtdesc.index; a non-zero return means the
    // index is past the last format, which ends the loop after the `continue`.
    while (ret == 0) {
        ret = TEMP_FAILURE_RETRY(ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc));
        ALOGV("index:%d,ret:%d, format:%c%c%c%c", fmtdesc.index, ret, fmtdesc.pixelformat & 0xFF,
              (fmtdesc.pixelformat >> 8) & 0xFF, (fmtdesc.pixelformat >> 16) & 0xFF,
              (fmtdesc.pixelformat >> 24) & 0xFF);

        if (ret != 0 || (fmtdesc.flags & V4L2_FMT_FLAG_EMULATED)) {
            // Skip if IOCTL failed, or if the format is emulated
            fmtdesc.index++;
            continue;
        }
        // Only consider fourccs this HAL knows how to consume.
        auto it =
                std::find(kSupportedFourCCs.begin(), kSupportedFourCCs.end(), fmtdesc.pixelformat);
        if (it == kSupportedFourCCs.end()) {
            fmtdesc.index++;
            continue;
        }

        // Found supported format
        v4l2_frmsizeenum frameSize{.index = 0, .pixel_format = fmtdesc.pixelformat};
        for (; TEMP_FAILURE_RETRY(ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frameSize)) == 0;
             ++frameSize.index) {
            // Only discrete sizes are supported; stepwise/continuous are ignored.
            if (frameSize.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
                ALOGV("index:%d, format:%c%c%c%c, w %d, h %d", frameSize.index,
                      fmtdesc.pixelformat & 0xFF, (fmtdesc.pixelformat >> 8) & 0xFF,
                      (fmtdesc.pixelformat >> 16) & 0xFF, (fmtdesc.pixelformat >> 24) & 0xFF,
                      frameSize.discrete.width, frameSize.discrete.height);

                // Disregard h > w formats so all aspect ratio (h/w) <= 1.0
                // This will simplify the crop/scaling logic down the road
                if (frameSize.discrete.height > frameSize.discrete.width) {
                    continue;
                }

                // Discard all formats which is smaller than minStreamSize
                if (frameSize.discrete.width < minStreamSize.width ||
                    frameSize.discrete.height < minStreamSize.height) {
                    continue;
                }

                SupportedV4L2Format format{
                        .width = static_cast<int32_t>(frameSize.discrete.width),
                        .height = static_cast<int32_t>(frameSize.discrete.height),
                        .fourcc = fmtdesc.pixelformat};

                // Depth (Z16) streams get their own fps limits; everything else
                // (currently MJPEG) uses the color limits.
                if (format.fourcc == V4L2_PIX_FMT_Z16 && depthEnabled) {
                    updateFpsBounds(fd, cropType, depthFpsLimits, format, outFmts);
                } else {
                    updateFpsBounds(fd, cropType, fpsLimits, format, outFmts);
                }
            }
        }
        fmtdesc.index++;
    }
    // Remove sizes whose aspect ratio cannot be cropped from the largest size.
    trimSupportedFormats(cropType, &outFmts);
    return outFmts;
}
| 945 | |
| 946 | void ExternalCameraDevice::trimSupportedFormats(CroppingType cropType, |
| 947 | std::vector<SupportedV4L2Format>* pFmts) { |
| 948 | std::vector<SupportedV4L2Format>& sortedFmts = *pFmts; |
| 949 | if (cropType == VERTICAL) { |
| 950 | std::sort(sortedFmts.begin(), sortedFmts.end(), |
| 951 | [](const SupportedV4L2Format& a, const SupportedV4L2Format& b) -> bool { |
| 952 | if (a.width == b.width) { |
| 953 | return a.height < b.height; |
| 954 | } |
| 955 | return a.width < b.width; |
| 956 | }); |
| 957 | } else { |
| 958 | std::sort(sortedFmts.begin(), sortedFmts.end(), |
| 959 | [](const SupportedV4L2Format& a, const SupportedV4L2Format& b) -> bool { |
| 960 | if (a.height == b.height) { |
| 961 | return a.width < b.width; |
| 962 | } |
| 963 | return a.height < b.height; |
| 964 | }); |
| 965 | } |
| 966 | |
| 967 | if (sortedFmts.empty()) { |
| 968 | ALOGE("%s: input format list is empty!", __FUNCTION__); |
| 969 | return; |
| 970 | } |
| 971 | |
| 972 | const auto& maxSize = sortedFmts[sortedFmts.size() - 1]; |
| 973 | float maxSizeAr = ASPECT_RATIO(maxSize); |
| 974 | |
| 975 | // Remove formats that has aspect ratio not croppable from largest size |
| 976 | std::vector<SupportedV4L2Format> out; |
| 977 | for (const auto& fmt : sortedFmts) { |
| 978 | float ar = ASPECT_RATIO(fmt); |
| 979 | if (isAspectRatioClose(ar, maxSizeAr)) { |
| 980 | out.push_back(fmt); |
| 981 | } else if (cropType == HORIZONTAL && ar < maxSizeAr) { |
| 982 | out.push_back(fmt); |
| 983 | } else if (cropType == VERTICAL && ar > maxSizeAr) { |
| 984 | out.push_back(fmt); |
| 985 | } else { |
| 986 | ALOGV("%s: size (%d,%d) is removed due to unable to crop %s from (%d,%d)", __FUNCTION__, |
| 987 | fmt.width, fmt.height, cropType == VERTICAL ? "vertically" : "horizontally", |
| 988 | maxSize.width, maxSize.height); |
| 989 | } |
| 990 | } |
| 991 | sortedFmts = out; |
| 992 | } |
| 993 | |
| 994 | binder_status_t ExternalCameraDevice::dump(int fd, const char** args, uint32_t numArgs) { |
| 995 | std::shared_ptr<ExternalCameraDeviceSession> session = mSession.lock(); |
| 996 | if (session == nullptr) { |
| 997 | dprintf(fd, "No active camera device session instance\n"); |
| 998 | return STATUS_OK; |
| 999 | } |
| 1000 | |
| 1001 | return session->dump(fd, args, numArgs); |
| 1002 | } |
| 1003 | |
| 1004 | } // namespace implementation |
| 1005 | } // namespace device |
| 1006 | } // namespace camera |
| 1007 | } // namespace hardware |
Avichal Rakesh | 1fb9ba4 | 2023-12-13 13:09:57 -0800 | [diff] [blame] | 1008 | } // namespace android |