/*
 * Copyright (C) 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "ExtCamDev@3.4"
//#define LOG_NDEBUG 0
#include <log/log.h>

#include <algorithm>
#include <array>
#include <linux/videodev2.h>
#include "android-base/macros.h"
#include "CameraMetadata.h"
#include "../../3.2/default/include/convert.h"
#include "ExternalCameraDevice_3_4.h"

namespace android {
namespace hardware {
namespace camera {
namespace device {
namespace V3_4 {
namespace implementation {

namespace {
// Only support MJPEG for now, as it seems to be the format that supports higher fps.
// Other formats to consider in the future:
// * V4L2_PIX_FMT_YVU420 (== YV12)
// * V4L2_PIX_FMT_YVYU (YVYU: can be converted to YV12 or other YUV420_888 formats)
const std::array<uint32_t, /*size*/1> kSupportedFourCCs {{
    V4L2_PIX_FMT_MJPEG
}}; // double braces required in C++11
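// Note: these entries are matched against the fourccs reported by
// VIDIOC_ENUM_FMT in getCandidateSupportedFormatsLocked() below. Listing an
// additional fourcc here would only advertise it; presumably the session code
// would also need to know how to convert that format.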

} // anonymous namespace

ExternalCameraDevice::ExternalCameraDevice(
        const std::string& cameraId, const ExternalCameraConfig& cfg) :
        mCameraId(cameraId),
        mCfg(cfg) {

    status_t ret = initCameraCharacteristics();
    if (ret != OK) {
        ALOGE("%s: init camera characteristics failed: error %d", __FUNCTION__, ret);
        mInitFailed = true;
    }
}

ExternalCameraDevice::~ExternalCameraDevice() {}

bool ExternalCameraDevice::isInitFailed() {
    return mInitFailed;
}

Return<void> ExternalCameraDevice::getResourceCost(getResourceCost_cb _hidl_cb) {
    CameraResourceCost resCost;
    resCost.resourceCost = 100;
    _hidl_cb(Status::OK, resCost);
    return Void();
}

Return<void> ExternalCameraDevice::getCameraCharacteristics(
        getCameraCharacteristics_cb _hidl_cb) {
    Mutex::Autolock _l(mLock);
    V3_2::CameraMetadata hidlChars;

    if (isInitFailed()) {
        _hidl_cb(Status::INTERNAL_ERROR, hidlChars);
        return Void();
    }

    const camera_metadata_t* rawMetadata = mCameraCharacteristics.getAndLock();
    V3_2::implementation::convertToHidl(rawMetadata, &hidlChars);
    _hidl_cb(Status::OK, hidlChars);
    mCameraCharacteristics.unlock(rawMetadata);
    return Void();
}

Return<Status> ExternalCameraDevice::setTorchMode(TorchMode) {
    return Status::METHOD_NOT_SUPPORTED;
}

Return<void> ExternalCameraDevice::open(
        const sp<ICameraDeviceCallback>& callback, open_cb _hidl_cb) {
    Status status = Status::OK;
    sp<ExternalCameraDeviceSession> session = nullptr;

    if (callback == nullptr) {
        ALOGE("%s: cannot open camera %s. callback is null!",
                __FUNCTION__, mCameraId.c_str());
        _hidl_cb(Status::ILLEGAL_ARGUMENT, nullptr);
        return Void();
    }

    if (isInitFailed()) {
        ALOGE("%s: cannot open camera %s. camera init failed!",
                __FUNCTION__, mCameraId.c_str());
        _hidl_cb(Status::INTERNAL_ERROR, nullptr);
        return Void();
    }

    mLock.lock();

    ALOGV("%s: Initializing device for camera %s", __FUNCTION__, mCameraId.c_str());
    session = mSession.promote();
    if (session != nullptr && !session->isClosed()) {
        ALOGE("%s: cannot open an already opened camera!", __FUNCTION__);
        mLock.unlock();
        _hidl_cb(Status::CAMERA_IN_USE, nullptr);
        return Void();
    }

    unique_fd fd(::open(mCameraId.c_str(), O_RDWR));
    if (fd.get() < 0) {
        ALOGE("%s: v4l2 device open %s failed: %s",
                __FUNCTION__, mCameraId.c_str(), strerror(errno));
        mLock.unlock();
        _hidl_cb(Status::INTERNAL_ERROR, nullptr);
        return Void();
    }

    session = new ExternalCameraDeviceSession(
            callback, mCfg, mSupportedFormats, mCroppingType,
            mCameraCharacteristics, mCameraId, std::move(fd));
    if (session == nullptr) {
        ALOGE("%s: camera device session allocation failed", __FUNCTION__);
        mLock.unlock();
        _hidl_cb(Status::INTERNAL_ERROR, nullptr);
        return Void();
    }
    if (session->isInitFailed()) {
        ALOGE("%s: camera device session init failed", __FUNCTION__);
        session = nullptr;
        mLock.unlock();
        _hidl_cb(Status::INTERNAL_ERROR, nullptr);
        return Void();
    }
    mSession = session;

    mLock.unlock();

    _hidl_cb(status, session->getInterface());
    return Void();
}

Return<void> ExternalCameraDevice::dumpState(const ::android::hardware::hidl_handle& handle) {
    Mutex::Autolock _l(mLock);
    if (handle.getNativeHandle() == nullptr) {
        ALOGE("%s: handle must not be null", __FUNCTION__);
        return Void();
    }
    if (handle->numFds != 1 || handle->numInts != 0) {
        ALOGE("%s: handle must contain 1 FD and 0 integers! Got %d FDs and %d ints",
                __FUNCTION__, handle->numFds, handle->numInts);
        return Void();
    }
    int fd = handle->data[0];
    if (mSession == nullptr) {
        dprintf(fd, "No active camera device session instance\n");
        return Void();
    }
    auto session = mSession.promote();
    if (session == nullptr) {
        dprintf(fd, "No active camera device session instance\n");
        return Void();
    }
    // Call into active session to dump states
    session->dumpState(handle);
    return Void();
}


status_t ExternalCameraDevice::initCameraCharacteristics() {
    if (mCameraCharacteristics.isEmpty()) {
        // init camera characteristics
        unique_fd fd(::open(mCameraId.c_str(), O_RDWR));
        if (fd.get() < 0) {
            ALOGE("%s: v4l2 device open %s failed", __FUNCTION__, mCameraId.c_str());
            return DEAD_OBJECT;
        }

        status_t ret;
        ret = initDefaultCharsKeys(&mCameraCharacteristics);
        if (ret != OK) {
            ALOGE("%s: init default characteristics key failed: error %d", __FUNCTION__, ret);
            mCameraCharacteristics.clear();
            return ret;
        }

        ret = initCameraControlsCharsKeys(fd.get(), &mCameraCharacteristics);
        if (ret != OK) {
            ALOGE("%s: init camera control characteristics key failed: error %d",
                    __FUNCTION__, ret);
            mCameraCharacteristics.clear();
            return ret;
        }

        ret = initOutputCharsKeys(fd.get(), &mCameraCharacteristics);
        if (ret != OK) {
            ALOGE("%s: init output characteristics key failed: error %d", __FUNCTION__, ret);
            mCameraCharacteristics.clear();
            return ret;
        }
    }
    return OK;
}

#define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0]))
#define UPDATE(tag, data, size)                        \
do {                                                   \
    if (metadata->update((tag), (data), (size))) {     \
        ALOGE("Update " #tag " failed!");              \
        return -EINVAL;                                \
    }                                                  \
} while (0)
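// For example, a call such as UPDATE(ANDROID_LENS_FACING, &facing, 1) below
// expands to a checked metadata->update(ANDROID_LENS_FACING, &facing, 1); on
// failure the tag name is logged and the enclosing init function returns
// -EINVAL, so every static key is added with uniform error handling.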

status_t ExternalCameraDevice::initDefaultCharsKeys(
        ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
    const uint8_t hardware_level = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL;
    UPDATE(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, &hardware_level, 1);

    // android.colorCorrection
    const uint8_t availableAberrationModes[] = {
        ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF};
    UPDATE(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
           availableAberrationModes, ARRAY_SIZE(availableAberrationModes));

    // android.control
    const uint8_t antibandingMode =
        ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
    UPDATE(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
           &antibandingMode, 1);

    const int32_t controlMaxRegions[] = {/*AE*/ 0, /*AWB*/ 0, /*AF*/ 0};
    UPDATE(ANDROID_CONTROL_MAX_REGIONS, controlMaxRegions,
           ARRAY_SIZE(controlMaxRegions));

    const uint8_t videoStabilizationMode =
        ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
    UPDATE(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
           &videoStabilizationMode, 1);

    const uint8_t awbAvailableMode = ANDROID_CONTROL_AWB_MODE_AUTO;
    UPDATE(ANDROID_CONTROL_AWB_AVAILABLE_MODES, &awbAvailableMode, 1);

    const uint8_t aeAvailableMode = ANDROID_CONTROL_AE_MODE_ON;
    UPDATE(ANDROID_CONTROL_AE_AVAILABLE_MODES, &aeAvailableMode, 1);

    const uint8_t availableEffect = ANDROID_CONTROL_EFFECT_MODE_OFF;
    UPDATE(ANDROID_CONTROL_AVAILABLE_EFFECTS, &availableEffect, 1);

    const uint8_t controlAvailableModes[] = {ANDROID_CONTROL_MODE_OFF,
                                             ANDROID_CONTROL_MODE_AUTO};
    UPDATE(ANDROID_CONTROL_AVAILABLE_MODES, controlAvailableModes,
           ARRAY_SIZE(controlAvailableModes));

    // android.edge
    const uint8_t edgeMode = ANDROID_EDGE_MODE_OFF;
    UPDATE(ANDROID_EDGE_AVAILABLE_EDGE_MODES, &edgeMode, 1);

    // android.flash
    const uint8_t flashInfo = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
    UPDATE(ANDROID_FLASH_INFO_AVAILABLE, &flashInfo, 1);

    // android.hotPixel
    const uint8_t hotPixelMode = ANDROID_HOT_PIXEL_MODE_OFF;
    UPDATE(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES, &hotPixelMode, 1);

    // android.jpeg
    // TODO: b/72261675 See if we can provide thumbnail size for all jpeg aspect ratios
    const int32_t jpegAvailableThumbnailSizes[] = {0, 0, 240, 180};
    UPDATE(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, jpegAvailableThumbnailSizes,
           ARRAY_SIZE(jpegAvailableThumbnailSizes));

    const int32_t jpegMaxSize = mCfg.maxJpegBufSize;
    UPDATE(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);

    const uint8_t jpegQuality = 90;
    UPDATE(ANDROID_JPEG_QUALITY, &jpegQuality, 1);
    UPDATE(ANDROID_JPEG_THUMBNAIL_QUALITY, &jpegQuality, 1);

    const int32_t jpegOrientation = 0;
    UPDATE(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);

    // android.lens
    const uint8_t focusDistanceCalibration =
        ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED;
    UPDATE(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION, &focusDistanceCalibration, 1);

    const uint8_t opticalStabilizationMode =
        ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
    UPDATE(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
           &opticalStabilizationMode, 1);

    const uint8_t facing = ANDROID_LENS_FACING_EXTERNAL;
    UPDATE(ANDROID_LENS_FACING, &facing, 1);

    // android.noiseReduction
    const uint8_t noiseReductionMode = ANDROID_NOISE_REDUCTION_MODE_OFF;
    UPDATE(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
           &noiseReductionMode, 1);
    UPDATE(ANDROID_NOISE_REDUCTION_MODE, &noiseReductionMode, 1);

    // android.request
    const uint8_t availableCapabilities[] = {
        ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE};
    UPDATE(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, availableCapabilities,
           ARRAY_SIZE(availableCapabilities));

    const int32_t partialResultCount = 1;
    UPDATE(ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &partialResultCount, 1);

    // A pipeline depth of X means the pipeline latency is X frame intervals.
    // The maximum allowed value is 4.
    const uint8_t requestPipelineMaxDepth = 4;
    UPDATE(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, &requestPipelineMaxDepth, 1);
    UPDATE(ANDROID_REQUEST_PIPELINE_DEPTH, &requestPipelineMaxDepth, 1);

    // The three numbers are the maximum numbers of different types of output
    // streams that can be used simultaneously. The types are raw sensor,
    // processed (non-stalling), and processed (stalling). A USB camera at
    // LIMITED level does not support raw sensor streams. The stalling stream
    // is JPEG; non-stalling streams are YUV_420_888 or YV12.
    const int32_t requestMaxNumOutputStreams[] = {
            /*RAW*/0,
            /*Processed*/ExternalCameraDeviceSession::kMaxProcessedStream,
            /*Stall*/ExternalCameraDeviceSession::kMaxStallStream};
    UPDATE(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, requestMaxNumOutputStreams,
           ARRAY_SIZE(requestMaxNumOutputStreams));

    // Limited mode doesn't support reprocessing.
    const int32_t requestMaxNumInputStreams = 0;
    UPDATE(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS, &requestMaxNumInputStreams,
           1);

    // android.scaler
    // TODO: b/72263447 V4L2_CID_ZOOM_*
    const float scalerAvailableMaxDigitalZoom[] = {1};
    UPDATE(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
           scalerAvailableMaxDigitalZoom,
           ARRAY_SIZE(scalerAvailableMaxDigitalZoom));

    const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
    UPDATE(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);

    const int32_t testPatternModes[] = {
        ANDROID_SENSOR_TEST_PATTERN_MODE_OFF};
    UPDATE(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, testPatternModes,
           ARRAY_SIZE(testPatternModes));
    UPDATE(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPatternModes[0], 1);

    const uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
    UPDATE(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, &timestampSource, 1);

    // Orientation probably isn't useful for an external facing camera?
    const int32_t orientation = 0;
    UPDATE(ANDROID_SENSOR_ORIENTATION, &orientation, 1);

    // android.shading
    const uint8_t availableMode = ANDROID_SHADING_MODE_OFF;
    UPDATE(ANDROID_SHADING_AVAILABLE_MODES, &availableMode, 1);

    // android.statistics
    const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
    UPDATE(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, &faceDetectMode,
           1);

    const int32_t maxFaceCount = 0;
    UPDATE(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, &maxFaceCount, 1);

    const uint8_t availableHotpixelMode =
        ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
    UPDATE(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
           &availableHotpixelMode, 1);

    const uint8_t lensShadingMapMode =
        ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
    UPDATE(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
           &lensShadingMapMode, 1);

    // android.sync
    const int32_t maxLatency = ANDROID_SYNC_MAX_LATENCY_UNKNOWN;
    UPDATE(ANDROID_SYNC_MAX_LATENCY, &maxLatency, 1);

    /* Other sensor/RAW related keys:
     * android.sensor.info.colorFilterArrangement -> no need if we don't do RAW
     * android.sensor.info.physicalSize -> not available
     * android.sensor.info.whiteLevel -> not available/not needed
     * android.sensor.info.lensShadingApplied -> not needed
     * android.sensor.info.preCorrectionActiveArraySize -> not available/not needed
     * android.sensor.blackLevelPattern -> not available/not needed
     */

    const int32_t availableRequestKeys[] = {
        ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
        ANDROID_CONTROL_AE_ANTIBANDING_MODE,
        ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
        ANDROID_CONTROL_AE_LOCK,
        ANDROID_CONTROL_AE_MODE,
        ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
        ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
        ANDROID_CONTROL_AF_MODE,
        ANDROID_CONTROL_AF_TRIGGER,
        ANDROID_CONTROL_AWB_LOCK,
        ANDROID_CONTROL_AWB_MODE,
        ANDROID_CONTROL_CAPTURE_INTENT,
        ANDROID_CONTROL_EFFECT_MODE,
        ANDROID_CONTROL_MODE,
        ANDROID_CONTROL_SCENE_MODE,
        ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
        ANDROID_FLASH_MODE,
        ANDROID_JPEG_ORIENTATION,
        ANDROID_JPEG_QUALITY,
        ANDROID_JPEG_THUMBNAIL_QUALITY,
        ANDROID_JPEG_THUMBNAIL_SIZE,
        ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
        ANDROID_NOISE_REDUCTION_MODE,
        ANDROID_SCALER_CROP_REGION,
        ANDROID_SENSOR_TEST_PATTERN_MODE,
        ANDROID_STATISTICS_FACE_DETECT_MODE,
        ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE};
    UPDATE(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, availableRequestKeys,
           ARRAY_SIZE(availableRequestKeys));

    const int32_t availableResultKeys[] = {
        ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
        ANDROID_CONTROL_AE_ANTIBANDING_MODE,
        ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
        ANDROID_CONTROL_AE_LOCK,
        ANDROID_CONTROL_AE_MODE,
        ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
        ANDROID_CONTROL_AE_STATE,
        ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
        ANDROID_CONTROL_AF_MODE,
        ANDROID_CONTROL_AF_STATE,
        ANDROID_CONTROL_AF_TRIGGER,
        ANDROID_CONTROL_AWB_LOCK,
        ANDROID_CONTROL_AWB_MODE,
        ANDROID_CONTROL_AWB_STATE,
        ANDROID_CONTROL_CAPTURE_INTENT,
        ANDROID_CONTROL_EFFECT_MODE,
        ANDROID_CONTROL_MODE,
        ANDROID_CONTROL_SCENE_MODE,
        ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
        ANDROID_FLASH_MODE,
        ANDROID_FLASH_STATE,
        ANDROID_JPEG_ORIENTATION,
        ANDROID_JPEG_QUALITY,
        ANDROID_JPEG_THUMBNAIL_QUALITY,
        ANDROID_JPEG_THUMBNAIL_SIZE,
        ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
        ANDROID_NOISE_REDUCTION_MODE,
        ANDROID_REQUEST_PIPELINE_DEPTH,
        ANDROID_SCALER_CROP_REGION,
        ANDROID_SENSOR_TIMESTAMP,
        ANDROID_STATISTICS_FACE_DETECT_MODE,
        ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
        ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
        ANDROID_STATISTICS_SCENE_FLICKER};
    UPDATE(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, availableResultKeys,
           ARRAY_SIZE(availableResultKeys));

    const int32_t availableCharacteristicsKeys[] = {
        ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
        ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
        ANDROID_CONTROL_AE_AVAILABLE_MODES,
        ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
        ANDROID_CONTROL_AE_COMPENSATION_RANGE,
        ANDROID_CONTROL_AE_COMPENSATION_STEP,
        ANDROID_CONTROL_AE_LOCK_AVAILABLE,
        ANDROID_CONTROL_AF_AVAILABLE_MODES,
        ANDROID_CONTROL_AVAILABLE_EFFECTS,
        ANDROID_CONTROL_AVAILABLE_MODES,
        ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
        ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
        ANDROID_CONTROL_AWB_AVAILABLE_MODES,
        ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
        ANDROID_CONTROL_MAX_REGIONS,
        ANDROID_FLASH_INFO_AVAILABLE,
        ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
        ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
        ANDROID_LENS_FACING,
        ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
        ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
        ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
        ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
        ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
        ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
        ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
        ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
        ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
        ANDROID_SCALER_CROPPING_TYPE,
        ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
        ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
        ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
        ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
        ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
        ANDROID_SENSOR_ORIENTATION,
        ANDROID_SHADING_AVAILABLE_MODES,
        ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
        ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
        ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
        ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
        ANDROID_SYNC_MAX_LATENCY};
    UPDATE(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
           availableCharacteristicsKeys,
           ARRAY_SIZE(availableCharacteristicsKeys));

    return OK;
}

status_t ExternalCameraDevice::initCameraControlsCharsKeys(int,
        ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
    /**
     * android.sensor.info.sensitivityRange -> V4L2_CID_ISO_SENSITIVITY
     * android.sensor.info.exposureTimeRange -> V4L2_CID_EXPOSURE_ABSOLUTE
     * android.sensor.info.maxFrameDuration -> TBD
     * android.lens.info.minimumFocusDistance -> V4L2_CID_FOCUS_ABSOLUTE
     * android.lens.info.hyperfocalDistance
     * android.lens.info.availableFocalLengths -> not available?
     */

    // android.control
    // No AE compensation support for now.
    // TODO: V4L2_CID_EXPOSURE_BIAS
    const int32_t controlAeCompensationRange[] = {0, 0};
    UPDATE(ANDROID_CONTROL_AE_COMPENSATION_RANGE, controlAeCompensationRange,
           ARRAY_SIZE(controlAeCompensationRange));
    const camera_metadata_rational_t controlAeCompensationStep[] = {{0, 1}};
    UPDATE(ANDROID_CONTROL_AE_COMPENSATION_STEP, controlAeCompensationStep,
           ARRAY_SIZE(controlAeCompensationStep));

    // TODO: Check V4L2_CID_AUTO_FOCUS_*.
    const uint8_t afAvailableModes[] = {ANDROID_CONTROL_AF_MODE_AUTO,
                                        ANDROID_CONTROL_AF_MODE_OFF};
    UPDATE(ANDROID_CONTROL_AF_AVAILABLE_MODES, afAvailableModes,
           ARRAY_SIZE(afAvailableModes));

    // TODO: V4L2_CID_SCENE_MODE
    const uint8_t availableSceneMode = ANDROID_CONTROL_SCENE_MODE_DISABLED;
    UPDATE(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, &availableSceneMode, 1);

    // TODO: V4L2_CID_3A_LOCK
    const uint8_t aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
    UPDATE(ANDROID_CONTROL_AE_LOCK_AVAILABLE, &aeLockAvailable, 1);
    const uint8_t awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
    UPDATE(ANDROID_CONTROL_AWB_LOCK_AVAILABLE, &awbLockAvailable, 1);

    // TODO: V4L2_CID_ZOOM_*
    const float scalerAvailableMaxDigitalZoom[] = {1};
    UPDATE(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
           scalerAvailableMaxDigitalZoom,
           ARRAY_SIZE(scalerAvailableMaxDigitalZoom));

    return OK;
}

status_t ExternalCameraDevice::initOutputCharsKeys(int fd,
        ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
    initSupportedFormatsLocked(fd);
    if (mSupportedFormats.empty()) {
        ALOGE("%s: Init supported format list failed", __FUNCTION__);
        return UNKNOWN_ERROR;
    }

    std::vector<int32_t> streamConfigurations;
    std::vector<int64_t> minFrameDurations;
    std::vector<int64_t> stallDurations;
    int64_t maxFrameDuration = 0;
    int32_t maxFps = std::numeric_limits<int32_t>::min();
    int32_t minFps = std::numeric_limits<int32_t>::max();
    std::set<int32_t> framerates;

    std::array<int, /*size*/3> halFormats{{
        HAL_PIXEL_FORMAT_BLOB,
        HAL_PIXEL_FORMAT_YCbCr_420_888,
        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}};

    for (const auto& supportedFormat : mSupportedFormats) {
        for (const auto& format : halFormats) {
            streamConfigurations.push_back(format);
            streamConfigurations.push_back(supportedFormat.width);
            streamConfigurations.push_back(supportedFormat.height);
            streamConfigurations.push_back(
                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
        }

        int64_t minFrameDuration = std::numeric_limits<int64_t>::max();
        for (const auto& fr : supportedFormat.frameRates) {
            // 1000000000LL fits in 30 bits and fr.durationNumerator is
            // uint32_t, so the product stays below 2^62 and cannot overflow
            // int64_t.
            int64_t frameDuration = 1000000000LL * fr.durationNumerator /
                    fr.durationDenominator;
            if (frameDuration < minFrameDuration) {
                minFrameDuration = frameDuration;
            }
            if (frameDuration > maxFrameDuration) {
                maxFrameDuration = frameDuration;
            }
            int32_t frameRateInt = static_cast<int32_t>(fr.getDouble());
            if (minFps > frameRateInt) {
                minFps = frameRateInt;
            }
            if (maxFps < frameRateInt) {
                maxFps = frameRateInt;
            }
            framerates.insert(frameRateInt);
        }

        for (const auto& format : halFormats) {
            minFrameDurations.push_back(format);
            minFrameDurations.push_back(supportedFormat.width);
            minFrameDurations.push_back(supportedFormat.height);
            minFrameDurations.push_back(minFrameDuration);
        }

        // The stall duration is 0 for non-jpeg formats. For JPEG format, stall
        // duration can be 0 if JPEG is small. Here we choose 1 sec for JPEG.
        // TODO: b/72261675. Maybe set this dynamically
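        // Each entry pushed below is a (format, width, height, duration in ns)
        // tuple; e.g. a hypothetical 1920x1080 size gets a 1000000000 ns stall
        // entry for BLOB and 0 ns entries for the YUV and
        // IMPLEMENTATION_DEFINED formats.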
        for (const auto& format : halFormats) {
            const int64_t NS_TO_SECOND = 1000000000;
            int64_t stall_duration =
                    (format == HAL_PIXEL_FORMAT_BLOB) ? NS_TO_SECOND : 0;
            stallDurations.push_back(format);
            stallDurations.push_back(supportedFormat.width);
            stallDurations.push_back(supportedFormat.height);
            stallDurations.push_back(stall_duration);
        }
    }

    std::vector<int32_t> fpsRanges;
    // FPS ranges
    for (const auto& framerate : framerates) {
        // Empirical: webcams often have close to 2x fps error and cannot support fixed fps range
        fpsRanges.push_back(framerate / 2);
        fpsRanges.push_back(framerate);
    }
    maxFrameDuration *= 2;
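    // Illustrative example (assuming the device reports a nominal 30 fps): the
    // advertised AE target range becomes [15, 30] and the max frame duration
    // is doubled accordingly, leaving room for the fps error noted above.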

    UPDATE(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, fpsRanges.data(),
           fpsRanges.size());

    UPDATE(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
           streamConfigurations.data(), streamConfigurations.size());

    UPDATE(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
           minFrameDurations.data(), minFrameDurations.size());

    UPDATE(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, stallDurations.data(),
           stallDurations.size());

    UPDATE(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, &maxFrameDuration, 1);

    SupportedV4L2Format maximumFormat {.width = 0, .height = 0};
    for (const auto& supportedFormat : mSupportedFormats) {
        if (supportedFormat.width >= maximumFormat.width &&
            supportedFormat.height >= maximumFormat.height) {
            maximumFormat = supportedFormat;
        }
    }
    int32_t activeArraySize[] = {0, 0,
                                 static_cast<int32_t>(maximumFormat.width),
                                 static_cast<int32_t>(maximumFormat.height)};
    UPDATE(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
           activeArraySize, ARRAY_SIZE(activeArraySize));
    UPDATE(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, activeArraySize,
           ARRAY_SIZE(activeArraySize));

    int32_t pixelArraySize[] = {static_cast<int32_t>(maximumFormat.width),
                                static_cast<int32_t>(maximumFormat.height)};
    UPDATE(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, pixelArraySize,
           ARRAY_SIZE(pixelArraySize));
    return OK;
}

#undef ARRAY_SIZE
#undef UPDATE

void ExternalCameraDevice::getFrameRateList(
        int fd, double fpsUpperBound, SupportedV4L2Format* format) {
    format->frameRates.clear();

    v4l2_frmivalenum frameInterval {
        .pixel_format = format->fourcc,
        .width = format->width,
        .height = format->height,
        .index = 0
    };

    for (frameInterval.index = 0;
            TEMP_FAILURE_RETRY(ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &frameInterval)) == 0;
            ++frameInterval.index) {
        if (frameInterval.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
            if (frameInterval.discrete.numerator != 0) {
                SupportedV4L2Format::FrameRate fr = {
                        frameInterval.discrete.numerator,
                        frameInterval.discrete.denominator};
                double framerate = fr.getDouble();
                if (framerate > fpsUpperBound) {
                    continue;
                }
                ALOGV("index:%d, format:%c%c%c%c, w %d, h %d, framerate %f",
                        frameInterval.index,
                        frameInterval.pixel_format & 0xFF,
                        (frameInterval.pixel_format >> 8) & 0xFF,
                        (frameInterval.pixel_format >> 16) & 0xFF,
                        (frameInterval.pixel_format >> 24) & 0xFF,
                        frameInterval.width, frameInterval.height, framerate);
                format->frameRates.push_back(fr);
            }
        }
    }

    if (format->frameRates.empty()) {
        ALOGE("%s: failed to get supported frame rates for format:%c%c%c%c w %d h %d",
                __FUNCTION__,
                frameInterval.pixel_format & 0xFF,
                (frameInterval.pixel_format >> 8) & 0xFF,
                (frameInterval.pixel_format >> 16) & 0xFF,
                (frameInterval.pixel_format >> 24) & 0xFF,
                frameInterval.width, frameInterval.height);
    }
}

void ExternalCameraDevice::trimSupportedFormats(
        CroppingType cropType,
        /*inout*/std::vector<SupportedV4L2Format>* pFmts) {
    std::vector<SupportedV4L2Format>& sortedFmts = *pFmts;
    if (cropType == VERTICAL) {
        std::sort(sortedFmts.begin(), sortedFmts.end(),
                [](const SupportedV4L2Format& a, const SupportedV4L2Format& b) -> bool {
                    if (a.width == b.width) {
                        return a.height < b.height;
                    }
                    return a.width < b.width;
                });
    } else {
        std::sort(sortedFmts.begin(), sortedFmts.end(),
                [](const SupportedV4L2Format& a, const SupportedV4L2Format& b) -> bool {
                    if (a.height == b.height) {
                        return a.width < b.width;
                    }
                    return a.height < b.height;
                });
    }

    if (sortedFmts.size() == 0) {
        ALOGE("%s: input format list is empty!", __FUNCTION__);
        return;
    }

    const auto& maxSize = sortedFmts[sortedFmts.size() - 1];
    float maxSizeAr = ASPECT_RATIO(maxSize);

    // Remove formats whose aspect ratio cannot be cropped out of the largest size
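    // Illustrative example (sizes are hypothetical): if the largest size is
    // 1920x1080 (16:9), HORIZONTAL cropping keeps narrower sizes such as
    // 640x480 (4:3) because they can be cut out of the 16:9 frame, while a
    // wider 21:9 size would be dropped; VERTICAL cropping keeps the wider
    // sizes instead.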
    std::vector<SupportedV4L2Format> out;
    for (const auto& fmt : sortedFmts) {
        float ar = ASPECT_RATIO(fmt);
        if (isAspectRatioClose(ar, maxSizeAr)) {
            out.push_back(fmt);
        } else if (cropType == HORIZONTAL && ar < maxSizeAr) {
            out.push_back(fmt);
        } else if (cropType == VERTICAL && ar > maxSizeAr) {
            out.push_back(fmt);
        } else {
            ALOGV("%s: size (%d,%d) is removed because it cannot be cropped %s from (%d,%d)",
                    __FUNCTION__, fmt.width, fmt.height,
                    cropType == VERTICAL ? "vertically" : "horizontally",
                    maxSize.width, maxSize.height);
        }
    }
    sortedFmts = out;
}

std::vector<SupportedV4L2Format>
ExternalCameraDevice::getCandidateSupportedFormatsLocked(
        int fd, CroppingType cropType,
        const std::vector<ExternalCameraConfig::FpsLimitation>& fpsLimits) {
    std::vector<SupportedV4L2Format> outFmts;
    struct v4l2_fmtdesc fmtdesc {
        .index = 0,
        .type = V4L2_BUF_TYPE_VIDEO_CAPTURE};
    int ret = 0;
    while (ret == 0) {
        ret = TEMP_FAILURE_RETRY(ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc));
        ALOGV("index:%d,ret:%d, format:%c%c%c%c", fmtdesc.index, ret,
                fmtdesc.pixelformat & 0xFF,
                (fmtdesc.pixelformat >> 8) & 0xFF,
                (fmtdesc.pixelformat >> 16) & 0xFF,
                (fmtdesc.pixelformat >> 24) & 0xFF);
        if (ret == 0 && !(fmtdesc.flags & V4L2_FMT_FLAG_EMULATED)) {
            auto it = std::find(
                    kSupportedFourCCs.begin(), kSupportedFourCCs.end(), fmtdesc.pixelformat);
            if (it != kSupportedFourCCs.end()) {
                // Found supported format
                v4l2_frmsizeenum frameSize {
                        .index = 0,
                        .pixel_format = fmtdesc.pixelformat};
                for (; TEMP_FAILURE_RETRY(ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frameSize)) == 0;
                        ++frameSize.index) {
                    if (frameSize.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
                        ALOGV("index:%d, format:%c%c%c%c, w %d, h %d", frameSize.index,
                                fmtdesc.pixelformat & 0xFF,
                                (fmtdesc.pixelformat >> 8) & 0xFF,
                                (fmtdesc.pixelformat >> 16) & 0xFF,
                                (fmtdesc.pixelformat >> 24) & 0xFF,
                                frameSize.discrete.width, frameSize.discrete.height);
                        // Disregard h > w formats so that all aspect ratios (h/w) are <= 1.0
                        // This will simplify the crop/scaling logic down the road
                        if (frameSize.discrete.height > frameSize.discrete.width) {
                            continue;
                        }
                        SupportedV4L2Format format {
                            .width = frameSize.discrete.width,
                            .height = frameSize.discrete.height,
                            .fourcc = fmtdesc.pixelformat
                        };

                        double fpsUpperBound = -1.0;
                        for (const auto& limit : fpsLimits) {
                            if (cropType == VERTICAL) {
                                if (format.width <= limit.size.width) {
                                    fpsUpperBound = limit.fpsUpperBound;
                                    break;
                                }
                            } else { // HORIZONTAL
                                if (format.height <= limit.size.height) {
                                    fpsUpperBound = limit.fpsUpperBound;
                                    break;
                                }
                            }
                        }
                        if (fpsUpperBound < 0.f) {
                            continue;
                        }

                        getFrameRateList(fd, fpsUpperBound, &format);
                        if (!format.frameRates.empty()) {
                            outFmts.push_back(format);
                        }
                    }
                }
            }
        }
        fmtdesc.index++;
    }
    trimSupportedFormats(cropType, &outFmts);
    return outFmts;
}

void ExternalCameraDevice::initSupportedFormatsLocked(int fd) {

    std::vector<SupportedV4L2Format> horizontalFmts =
            getCandidateSupportedFormatsLocked(fd, HORIZONTAL, mCfg.fpsLimits);
    std::vector<SupportedV4L2Format> verticalFmts =
            getCandidateSupportedFormatsLocked(fd, VERTICAL, mCfg.fpsLimits);

    size_t horiSize = horizontalFmts.size();
    size_t vertSize = verticalFmts.size();

    if (horiSize == 0 && vertSize == 0) {
        ALOGE("%s: cannot find suitable cropping type!", __FUNCTION__);
        return;
    }

    if (horiSize == 0) {
        mSupportedFormats = verticalFmts;
        mCroppingType = VERTICAL;
        return;
    } else if (vertSize == 0) {
        mSupportedFormats = horizontalFmts;
        mCroppingType = HORIZONTAL;
        return;
    }

    const auto& maxHoriSize = horizontalFmts[horizontalFmts.size() - 1];
    const auto& maxVertSize = verticalFmts[verticalFmts.size() - 1];

    // Try to keep largest possible output size
    // When they are the same or ambiguous, pick the one that supports more sizes
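    // Illustrative example (sizes are hypothetical): if horizontal cropping
    // yields a 1920x1080 maximum but vertical cropping only reaches 1280x720,
    // the horizontal candidate list wins; if neither maximum dominates the
    // other, fall back to whichever list has more entries.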
    if (maxHoriSize.width == maxVertSize.width &&
            maxHoriSize.height == maxVertSize.height) {
        if (horiSize > vertSize) {
            mSupportedFormats = horizontalFmts;
            mCroppingType = HORIZONTAL;
        } else {
            mSupportedFormats = verticalFmts;
            mCroppingType = VERTICAL;
        }
    } else if (maxHoriSize.width >= maxVertSize.width &&
            maxHoriSize.height >= maxVertSize.height) {
        mSupportedFormats = horizontalFmts;
        mCroppingType = HORIZONTAL;
    } else if (maxHoriSize.width <= maxVertSize.width &&
            maxHoriSize.height <= maxVertSize.height) {
        mSupportedFormats = verticalFmts;
        mCroppingType = VERTICAL;
    } else {
        if (horiSize > vertSize) {
            mSupportedFormats = horizontalFmts;
            mCroppingType = HORIZONTAL;
        } else {
            mSupportedFormats = verticalFmts;
            mCroppingType = VERTICAL;
        }
    }
}

} // namespace implementation
} // namespace V3_4
} // namespace device
} // namespace camera
} // namespace hardware
} // namespace android