Merge "Y16 format enablement for external provider"
diff --git a/camera/device/3.4/default/ExternalCameraDevice.cpp b/camera/device/3.4/default/ExternalCameraDevice.cpp
index e7361dd..38a78e0 100644
--- a/camera/device/3.4/default/ExternalCameraDevice.cpp
+++ b/camera/device/3.4/default/ExternalCameraDevice.cpp
@@ -38,9 +38,8 @@
// Other formats to consider in the future:
// * V4L2_PIX_FMT_YVU420 (== YV12)
// * V4L2_PIX_FMT_YVYU (YVYU: can be converted to YV12 or other YUV420_888 formats)
-const std::array<uint32_t, /*size*/1> kSupportedFourCCs {{
- V4L2_PIX_FMT_MJPEG
-}}; // double braces required in C++11
+const std::array<uint32_t, /*size*/ 2> kSupportedFourCCs{
+ {V4L2_PIX_FMT_MJPEG, V4L2_PIX_FMT_Z16}}; // double braces required in C++11
constexpr int MAX_RETRY = 5; // Allow retry v4l2 open failures a few times.
constexpr int OPEN_RETRY_SLEEP_US = 100000; // 100ms * MAX_RETRY = 0.5 seconds
@@ -224,6 +223,13 @@
mCameraCharacteristics.clear();
return ret;
}
+
+ ret = initAvailableCapabilities(&mCameraCharacteristics);
+ if (ret != OK) {
+ ALOGE("%s: init available capabilities key failed: errorno %d", __FUNCTION__, ret);
+ mCameraCharacteristics.clear();
+ return ret;
+ }
}
return OK;
}
@@ -237,6 +243,39 @@
} \
} while (0)
+status_t ExternalCameraDevice::initAvailableCapabilities(
+ ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
+
+ if (mSupportedFormats.empty()) {
+ ALOGE("%s: Supported formats list is empty", __FUNCTION__);
+ return UNKNOWN_ERROR;
+ }
+
+ bool hasDepth = false;
+ bool hasColor = false;
+ for (const auto& fmt : mSupportedFormats) {
+ switch (fmt.fourcc) {
+ case V4L2_PIX_FMT_Z16: hasDepth = true; break;
+ case V4L2_PIX_FMT_MJPEG: hasColor = true; break;
+ default: ALOGW("%s: Unsupported format found", __FUNCTION__);
+ }
+ }
+
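+ // Advertise only capabilities backed by formats the device actually reported:
+ // DEPTH_OUTPUT for Z16, BACKWARD_COMPATIBLE for MJPEG.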
+ std::vector<uint8_t> availableCapabilities;
+ if (hasDepth) {
+ availableCapabilities.push_back(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT);
+ }
+ if (hasColor) {
+ availableCapabilities.push_back(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
+ }
+ if (!availableCapabilities.empty()) {
+ UPDATE(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, availableCapabilities.data(),
+ availableCapabilities.size());
+ }
+
+ return OK;
+}
+
status_t ExternalCameraDevice::initDefaultCharsKeys(
::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
const uint8_t hardware_level = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL;
@@ -323,12 +362,6 @@
&noiseReductionMode, 1);
UPDATE(ANDROID_NOISE_REDUCTION_MODE, &noiseReductionMode, 1);
- // android.request
- const uint8_t availableCapabilities[] = {
- ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE};
- UPDATE(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, availableCapabilities,
- ARRAY_SIZE(availableCapabilities));
-
const int32_t partialResultCount = 1;
UPDATE(ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &partialResultCount, 1);
@@ -576,9 +609,11 @@
return OK;
}
-status_t ExternalCameraDevice::initOutputCharsKeys(int fd,
- ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
- initSupportedFormatsLocked(fd);
+template <size_t SIZE>
+status_t ExternalCameraDevice::initOutputCharskeysByFormat(
+ ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata,
+ uint32_t fourcc, const std::array<int, SIZE>& halFormats,
+ int streamConfigTag, int streamConfiguration, int minFrameDuration, int stallDuration) {
if (mSupportedFormats.empty()) {
ALOGE("%s: Init supported format list failed", __FUNCTION__);
return UNKNOWN_ERROR;
@@ -587,22 +622,17 @@
std::vector<int32_t> streamConfigurations;
std::vector<int64_t> minFrameDurations;
std::vector<int64_t> stallDurations;
- int32_t maxFps = std::numeric_limits<int32_t>::min();
- int32_t minFps = std::numeric_limits<int32_t>::max();
- std::set<int32_t> framerates;
-
- std::array<int, /*size*/3> halFormats{{
- HAL_PIXEL_FORMAT_BLOB,
- HAL_PIXEL_FORMAT_YCbCr_420_888,
- HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}};
for (const auto& supportedFormat : mSupportedFormats) {
+ if (supportedFormat.fourcc != fourcc) {
+ // Skip 4CCs not meant for the halFormats
+ continue;
+ }
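+ // Each stream configuration entry is a (format, width, height, direction) quadruple;
+ // min frame durations and stall durations use matching (format, width, height, ns) quadruples.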
for (const auto& format : halFormats) {
streamConfigurations.push_back(format);
streamConfigurations.push_back(supportedFormat.width);
streamConfigurations.push_back(supportedFormat.height);
- streamConfigurations.push_back(
- ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
+ streamConfigurations.push_back(streamConfigTag);
}
int64_t minFrameDuration = std::numeric_limits<int64_t>::max();
@@ -614,14 +644,6 @@
if (frameDuration < minFrameDuration) {
minFrameDuration = frameDuration;
}
- int32_t frameRateInt = static_cast<int32_t>(fr.getDouble());
- if (minFps > frameRateInt) {
- minFps = frameRateInt;
- }
- if (maxFps < frameRateInt) {
- maxFps = frameRateInt;
- }
- framerates.insert(frameRateInt);
}
for (const auto& format : halFormats) {
@@ -645,6 +667,30 @@
}
}
+ UPDATE(streamConfiguration, streamConfigurations.data(), streamConfigurations.size());
+
+ UPDATE(minFrameDuration, minFrameDurations.data(), minFrameDurations.size());
+
+ UPDATE(stallDuration, stallDurations.data(), stallDurations.size());
+
+ return OK;
+}
+
+bool ExternalCameraDevice::calculateMinFps(
+ ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
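+ // Aggregate integer frame rates across every supported format (color and depth alike)
+ // to derive the AE target FPS ranges and the sensor max frame duration.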
+ std::set<int32_t> framerates;
+ int32_t minFps = std::numeric_limits<int32_t>::max();
+
+ for (const auto& supportedFormat : mSupportedFormats) {
+ for (const auto& fr : supportedFormat.frameRates) {
+ int32_t frameRateInt = static_cast<int32_t>(fr.getDouble());
+ if (minFps > frameRateInt) {
+ minFps = frameRateInt;
+ }
+ framerates.insert(frameRateInt);
+ }
+ }
+
std::vector<int32_t> fpsRanges;
// FPS ranges
for (const auto& framerate : framerates) {
@@ -658,17 +704,60 @@
UPDATE(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, fpsRanges.data(),
fpsRanges.size());
- UPDATE(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
- streamConfigurations.data(), streamConfigurations.size());
-
- UPDATE(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
- minFrameDurations.data(), minFrameDurations.size());
-
- UPDATE(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, stallDurations.data(),
- stallDurations.size());
-
UPDATE(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, &maxFrameDuration, 1);
+ return true;
+}
+
+status_t ExternalCameraDevice::initOutputCharsKeys(
+ int fd, ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
+ initSupportedFormatsLocked(fd);
+ if (mSupportedFormats.empty()) {
+ ALOGE("%s: Init supported format list failed", __FUNCTION__);
+ return UNKNOWN_ERROR;
+ }
+
+ bool hasDepth = false;
+ bool hasColor = false;
+
+ // For V4L2_PIX_FMT_Z16
+ std::array<int, /*size*/ 1> halDepthFormats{{HAL_PIXEL_FORMAT_Y16}};
+ // For V4L2_PIX_FMT_MJPEG
+ std::array<int, /*size*/ 3> halFormats{{HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
+ HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}};
+
+ for (const auto& supportedFormat : mSupportedFormats) {
+ switch (supportedFormat.fourcc) {
+ case V4L2_PIX_FMT_Z16:
+ hasDepth = true;
+ break;
+ case V4L2_PIX_FMT_MJPEG:
+ hasColor = true;
+ break;
+ default:
+ ALOGW("%s: format %c%c%c%c is not supported!", __FUNCTION__,
+ supportedFormat.fourcc & 0xFF, (supportedFormat.fourcc >> 8) & 0xFF,
+ (supportedFormat.fourcc >> 16) & 0xFF, (supportedFormat.fourcc >> 24) & 0xFF);
+ }
+ }
+
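+ // Depth (Z16 -> Y16) streams are published under the ANDROID_DEPTH_* keys,
+ // color (MJPEG-backed) streams under the ANDROID_SCALER_* keys.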
+ if (hasDepth) {
+ initOutputCharskeysByFormat(metadata, V4L2_PIX_FMT_Z16, halDepthFormats,
+ ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT,
+ ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
+ ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
+ ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS);
+ }
+ if (hasColor) {
+ initOutputCharskeysByFormat(metadata, V4L2_PIX_FMT_MJPEG, halFormats,
+ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
+ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
+ ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
+ ANDROID_SCALER_AVAILABLE_STALL_DURATIONS);
+ }
+
+ calculateMinFps(metadata);
+
SupportedV4L2Format maximumFormat {.width = 0, .height = 0};
for (const auto& supportedFormat : mSupportedFormats) {
if (supportedFormat.width >= maximumFormat.width &&
@@ -790,11 +879,12 @@
sortedFmts = out;
}
-std::vector<SupportedV4L2Format>
-ExternalCameraDevice::getCandidateSupportedFormatsLocked(
- int fd, CroppingType cropType,
- const std::vector<ExternalCameraConfig::FpsLimitation>& fpsLimits,
- const Size& minStreamSize) {
+std::vector<SupportedV4L2Format> ExternalCameraDevice::getCandidateSupportedFormatsLocked(
+ int fd, CroppingType cropType,
+ const std::vector<ExternalCameraConfig::FpsLimitation>& fpsLimits,
+ const std::vector<ExternalCameraConfig::FpsLimitation>& depthFpsLimits,
+ const Size& minStreamSize,
+ bool depthEnabled) {
std::vector<SupportedV4L2Format> outFmts;
struct v4l2_fmtdesc fmtdesc {
.index = 0,
@@ -840,28 +930,10 @@
.fourcc = fmtdesc.pixelformat
};
- double fpsUpperBound = -1.0;
- for (const auto& limit : fpsLimits) {
- if (cropType == VERTICAL) {
- if (format.width <= limit.size.width) {
- fpsUpperBound = limit.fpsUpperBound;
- break;
- }
- } else { // HORIZONTAL
- if (format.height <= limit.size.height) {
- fpsUpperBound = limit.fpsUpperBound;
- break;
- }
- }
-
- }
- if (fpsUpperBound < 0.f) {
- continue;
- }
-
- getFrameRateList(fd, fpsUpperBound, &format);
- if (!format.frameRates.empty()) {
- outFmts.push_back(format);
+ if (format.fourcc == V4L2_PIX_FMT_Z16 && depthEnabled) {
+ updateFpsBounds(fd, cropType, depthFpsLimits, format, outFmts);
+ } else {
+ updateFpsBounds(fd, cropType, fpsLimits, format, outFmts);
}
}
}
@@ -873,12 +945,39 @@
return outFmts;
}
-void ExternalCameraDevice::initSupportedFormatsLocked(int fd) {
+void ExternalCameraDevice::updateFpsBounds(
+ int fd, CroppingType cropType,
+ const std::vector<ExternalCameraConfig::FpsLimitation>& fpsLimits, SupportedV4L2Format format,
+ std::vector<SupportedV4L2Format>& outFmts) {
+ double fpsUpperBound = -1.0;
+ for (const auto& limit : fpsLimits) {
+ if (cropType == VERTICAL) {
+ if (format.width <= limit.size.width) {
+ fpsUpperBound = limit.fpsUpperBound;
+ break;
+ }
+ } else { // HORIZONTAL
+ if (format.height <= limit.size.height) {
+ fpsUpperBound = limit.fpsUpperBound;
+ break;
+ }
+ }
+ }
+ if (fpsUpperBound < 0.f) {
+ return;
+ }
- std::vector<SupportedV4L2Format> horizontalFmts =
- getCandidateSupportedFormatsLocked(fd, HORIZONTAL, mCfg.fpsLimits, mCfg.minStreamSize);
- std::vector<SupportedV4L2Format> verticalFmts =
- getCandidateSupportedFormatsLocked(fd, VERTICAL, mCfg.fpsLimits, mCfg.minStreamSize);
+ getFrameRateList(fd, fpsUpperBound, &format);
+ if (!format.frameRates.empty()) {
+ outFmts.push_back(format);
+ }
+}
+
+void ExternalCameraDevice::initSupportedFormatsLocked(int fd) {
+ std::vector<SupportedV4L2Format> horizontalFmts = getCandidateSupportedFormatsLocked(
+ fd, HORIZONTAL, mCfg.fpsLimits, mCfg.depthFpsLimits, mCfg.minStreamSize, mCfg.depthEnabled);
+ std::vector<SupportedV4L2Format> verticalFmts = getCandidateSupportedFormatsLocked(
+ fd, VERTICAL, mCfg.fpsLimits, mCfg.depthFpsLimits, mCfg.minStreamSize, mCfg.depthEnabled);
size_t horiSize = horizontalFmts.size();
size_t vertSize = verticalFmts.size();
diff --git a/camera/device/3.4/default/ExternalCameraDeviceSession.cpp b/camera/device/3.4/default/ExternalCameraDeviceSession.cpp
index dce40ff..a12d8e4 100644
--- a/camera/device/3.4/default/ExternalCameraDeviceSession.cpp
+++ b/camera/device/3.4/default/ExternalCameraDeviceSession.cpp
@@ -1724,7 +1724,7 @@
return false;
};
- if (req->frameIn->mFourcc != V4L2_PIX_FMT_MJPEG) {
+ if (req->frameIn->mFourcc != V4L2_PIX_FMT_MJPEG && req->frameIn->mFourcc != V4L2_PIX_FMT_Z16) {
return onDeviceError("%s: do not support V4L2 format %c%c%c%c", __FUNCTION__,
req->frameIn->mFourcc & 0xFF,
(req->frameIn->mFourcc >> 8) & 0xFF,
@@ -1743,29 +1743,26 @@
}
// TODO: in some special case maybe we can decode jpg directly to gralloc output?
- ATRACE_BEGIN("MJPGtoI420");
- int res = libyuv::MJPGToI420(
- inData, inDataSize,
- static_cast<uint8_t*>(mYu12FrameLayout.y),
- mYu12FrameLayout.yStride,
- static_cast<uint8_t*>(mYu12FrameLayout.cb),
- mYu12FrameLayout.cStride,
- static_cast<uint8_t*>(mYu12FrameLayout.cr),
- mYu12FrameLayout.cStride,
- mYu12Frame->mWidth, mYu12Frame->mHeight,
- mYu12Frame->mWidth, mYu12Frame->mHeight);
- ATRACE_END();
+ if (req->frameIn->mFourcc == V4L2_PIX_FMT_MJPEG) {
+ ATRACE_BEGIN("MJPGtoI420");
+ int res = libyuv::MJPGToI420(
+ inData, inDataSize, static_cast<uint8_t*>(mYu12FrameLayout.y), mYu12FrameLayout.yStride,
+ static_cast<uint8_t*>(mYu12FrameLayout.cb), mYu12FrameLayout.cStride,
+ static_cast<uint8_t*>(mYu12FrameLayout.cr), mYu12FrameLayout.cStride,
+ mYu12Frame->mWidth, mYu12Frame->mHeight, mYu12Frame->mWidth, mYu12Frame->mHeight);
+ ATRACE_END();
- if (res != 0) {
- // For some webcam, the first few V4L2 frames might be malformed...
- ALOGE("%s: Convert V4L2 frame to YU12 failed! res %d", __FUNCTION__, res);
- lk.unlock();
- Status st = parent->processCaptureRequestError(req);
- if (st != Status::OK) {
- return onDeviceError("%s: failed to process capture request error!", __FUNCTION__);
+ if (res != 0) {
+ // For some webcam, the first few V4L2 frames might be malformed...
+ ALOGE("%s: Convert V4L2 frame to YU12 failed! res %d", __FUNCTION__, res);
+ lk.unlock();
+ Status st = parent->processCaptureRequestError(req);
+ if (st != Status::OK) {
+ return onDeviceError("%s: failed to process capture request error!", __FUNCTION__);
+ }
+ signalRequestDone();
+ return true;
}
- signalRequestDone();
- return true;
}
ALOGV("%s processing new request", __FUNCTION__);
@@ -1796,6 +1793,16 @@
__FUNCTION__, ret);
}
} break;
+ case PixelFormat::Y16: {
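+ // V4L2 Z16 and HAL Y16 are both 16-bit-per-pixel with the same layout,
+ // so the depth frame is copied into the gralloc buffer without conversion.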
+ void* outLayout = sHandleImporter.lock(*(halBuf.bufPtr), halBuf.usage, inDataSize);
+
+ std::memcpy(outLayout, inData, inDataSize);
+
+ int relFence = sHandleImporter.unlock(*(halBuf.bufPtr));
+ if (relFence >= 0) {
+ halBuf.acquireFence = relFence;
+ }
+ } break;
case PixelFormat::YCBCR_420_888:
case PixelFormat::YV12: {
IMapper::Rect outRect {0, 0,
@@ -2063,11 +2070,6 @@
return false;
}
- if (ds & Dataspace::DEPTH) {
- ALOGI("%s: does not support depth output", __FUNCTION__);
- return false;
- }
-
switch (fmt) {
case PixelFormat::BLOB:
if (ds != static_cast<int32_t>(Dataspace::V0_JFIF)) {
@@ -2081,6 +2083,16 @@
// TODO: check what dataspace we can support here.
// intentional no-ops.
break;
+ case PixelFormat::Y16:
+ if (!mCfg.depthEnabled) {
+ ALOGI("%s: Depth is not Enabled", __FUNCTION__);
+ return false;
+ }
+ if (!(ds & Dataspace::DEPTH)) {
+ ALOGI("%s: Y16 supports only dataSpace DEPTH", __FUNCTION__);
+ return false;
+ }
+ break;
default:
ALOGI("%s: does not support format %x", __FUNCTION__, fmt);
return false;
@@ -2609,6 +2621,7 @@
case PixelFormat::BLOB:
case PixelFormat::YCBCR_420_888:
case PixelFormat::YV12: // Used by SurfaceTexture
+ case PixelFormat::Y16:
// No override
out->streams[i].v3_2.overrideFormat = config.streams[i].format;
break;
diff --git a/camera/device/3.4/default/ExternalCameraUtils.cpp b/camera/device/3.4/default/ExternalCameraUtils.cpp
index 680c95a..a07c629 100644
--- a/camera/device/3.4/default/ExternalCameraUtils.cpp
+++ b/camera/device/3.4/default/ExternalCameraUtils.cpp
@@ -21,7 +21,6 @@
#include <sys/mman.h>
#include <linux/videodev2.h>
#include "ExternalCameraUtils.h"
-#include "tinyxml2.h" // XML parsing
namespace android {
namespace hardware {
@@ -243,28 +242,28 @@
if (fpsList == nullptr) {
ALOGI("%s: no fps list specified", __FUNCTION__);
} else {
- std::vector<FpsLimitation> limits;
- XMLElement *row = fpsList->FirstChildElement("Limit");
- while (row != nullptr) {
- FpsLimitation prevLimit {{0, 0}, 1000.0};
- FpsLimitation limit;
- limit.size = {
- row->UnsignedAttribute("width", /*Default*/0),
- row->UnsignedAttribute("height", /*Default*/0)};
- limit.fpsUpperBound = row->DoubleAttribute("fpsBound", /*Default*/1000.0);
- if (limit.size.width <= prevLimit.size.width ||
- limit.size.height <= prevLimit.size.height ||
- limit.fpsUpperBound >= prevLimit.fpsUpperBound) {
- ALOGE("%s: FPS limit list must have increasing size and decreasing fps!"
- " Prev %dx%d@%f, Current %dx%d@%f", __FUNCTION__,
- prevLimit.size.width, prevLimit.size.height, prevLimit.fpsUpperBound,
- limit.size.width, limit.size.height, limit.fpsUpperBound);
+ if (!updateFpsList(fpsList, ret.fpsLimits)) {
+ return ret;
+ }
+ }
+
+ XMLElement *depth = deviceCfg->FirstChildElement("Depth16Supported");
+ if (depth == nullptr) {
+ ret.depthEnabled = false;
+ ALOGI("%s: depth output is not enabled", __FUNCTION__);
+ } else {
+ ret.depthEnabled = depth->BoolAttribute("enabled", false);
+ }
+
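+ // Depth streams read their own <DepthFpsList>, so depth fps caps can differ from the color limits.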
+ if (ret.depthEnabled) {
+ XMLElement *depthFpsList = deviceCfg->FirstChildElement("DepthFpsList");
+ if (depthFpsList == nullptr) {
+ ALOGW("%s: no depth fps list specified", __FUNCTION__);
+ } else {
+ if (!updateFpsList(depthFpsList, ret.depthFpsLimits)) {
return ret;
}
- limits.push_back(limit);
- row = row->NextSiblingElement("Limit");
}
- ret.fpsLimits = limits;
}
XMLElement *minStreamSize = deviceCfg->FirstChildElement("MinimumStreamSize");
@@ -284,15 +283,48 @@
ALOGI("%s: fpsLimitList: %dx%d@%f", __FUNCTION__,
limit.size.width, limit.size.height, limit.fpsUpperBound);
}
+ for (const auto& limit : ret.depthFpsLimits) {
+ ALOGI("%s: depthFpsLimitList: %dx%d@%f", __FUNCTION__, limit.size.width, limit.size.height,
+ limit.fpsUpperBound);
+ }
ALOGI("%s: minStreamSize: %dx%d" , __FUNCTION__,
ret.minStreamSize.width, ret.minStreamSize.height);
return ret;
}
+bool ExternalCameraConfig::updateFpsList(tinyxml2::XMLElement* fpsList,
+ std::vector<FpsLimitation>& fpsLimits) {
+ using namespace tinyxml2;
+ std::vector<FpsLimitation> limits;
+ XMLElement* row = fpsList->FirstChildElement("Limit");
+ // Track the previous entry so each limit is validated against the one before it.
+ FpsLimitation prevLimit{{0, 0}, 1000.0};
+ while (row != nullptr) {
+ FpsLimitation limit;
+ limit.size = {row->UnsignedAttribute("width", /*Default*/ 0),
+ row->UnsignedAttribute("height", /*Default*/ 0)};
+ limit.fpsUpperBound = row->DoubleAttribute("fpsBound", /*Default*/ 1000.0);
+ if (limit.size.width <= prevLimit.size.width ||
+ limit.size.height <= prevLimit.size.height ||
+ limit.fpsUpperBound >= prevLimit.fpsUpperBound) {
+ ALOGE(
+ "%s: FPS limit list must have increasing size and decreasing fps!"
+ " Prev %dx%d@%f, Current %dx%d@%f",
+ __FUNCTION__, prevLimit.size.width, prevLimit.size.height, prevLimit.fpsUpperBound,
+ limit.size.width, limit.size.height, limit.fpsUpperBound);
+ return false;
+ }
+ limits.push_back(limit);
+ prevLimit = limit;
+ row = row->NextSiblingElement("Limit");
+ }
+ fpsLimits = limits;
+ return true;
+}
+
ExternalCameraConfig::ExternalCameraConfig() :
maxJpegBufSize(kDefaultJpegBufSize),
numVideoBuffers(kDefaultNumVideoBuffer),
- numStillBuffers(kDefaultNumStillBuffer) {
+ numStillBuffers(kDefaultNumStillBuffer),
+ depthEnabled(false) {
fpsLimits.push_back({/*Size*/{ 640, 480}, /*FPS upper bound*/30.0});
fpsLimits.push_back({/*Size*/{1280, 720}, /*FPS upper bound*/7.5});
fpsLimits.push_back({/*Size*/{1920, 1080}, /*FPS upper bound*/5.0});
diff --git a/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDevice_3_4.h b/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDevice_3_4.h
index ff0cfb3..28b9cef 100644
--- a/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDevice_3_4.h
+++ b/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDevice_3_4.h
@@ -82,6 +82,9 @@
void initSupportedFormatsLocked(int fd);
status_t initCameraCharacteristics();
+ // Init available capabilities keys
+ status_t initAvailableCapabilities(
+ ::android::hardware::camera::common::V1_0::helper::CameraMetadata*);
// Init non-device dependent keys
status_t initDefaultCharsKeys(::android::hardware::camera::common::V1_0::helper::CameraMetadata*);
// Init camera control chars keys. Caller still owns fd
@@ -91,13 +94,30 @@
status_t initOutputCharsKeys(int fd,
::android::hardware::camera::common::V1_0::helper::CameraMetadata*);
+ // Helper for initOutputCharsKeys: publishes stream configurations, min frame durations
+ // and stall durations for a single fourcc under the given metadata tags
+ template <size_t SIZE>
+ status_t initOutputCharskeysByFormat(
+ ::android::hardware::camera::common::V1_0::helper::CameraMetadata*,
+ uint32_t fourcc, const std::array<int, SIZE>& halFormats,
+ int streamConfigTag, int streamConfiguration, int minFrameDuration, int stallDuration);
+
+ bool calculateMinFps(::android::hardware::camera::common::V1_0::helper::CameraMetadata*);
+
static void getFrameRateList(int fd, double fpsUpperBound, SupportedV4L2Format* format);
+ static void updateFpsBounds(int fd, CroppingType cropType,
+ const std::vector<ExternalCameraConfig::FpsLimitation>& fpsLimits,
+ SupportedV4L2Format format,
+ std::vector<SupportedV4L2Format>& outFmts);
+
// Get candidate supported formats list of input cropping type.
static std::vector<SupportedV4L2Format> getCandidateSupportedFormatsLocked(
int fd, CroppingType cropType,
const std::vector<ExternalCameraConfig::FpsLimitation>& fpsLimits,
- const Size& minStreamSize);
+ const std::vector<ExternalCameraConfig::FpsLimitation>& depthFpsLimits,
+ const Size& minStreamSize,
+ bool depthEnabled);
// Trim supported format list by the cropping type. Also sort output formats by width/height
static void trimSupportedFormats(CroppingType cropType,
/*inout*/std::vector<SupportedV4L2Format>* pFmts);
diff --git a/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraUtils.h b/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraUtils.h
index 5754ccb..f696057 100644
--- a/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraUtils.h
+++ b/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraUtils.h
@@ -17,12 +17,13 @@
#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMUTIL_H
#define ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMUTIL_H
-#include <inttypes.h>
-#include "utils/LightRefBase.h"
-#include <mutex>
-#include <vector>
-#include <unordered_set>
#include <android/hardware/graphics/mapper/2.0/IMapper.h>
+#include <inttypes.h>
+#include <mutex>
+#include <unordered_set>
+#include <vector>
+#include "tinyxml2.h" // XML parsing
+#include "utils/LightRefBase.h"
using android::hardware::graphics::mapper::V2_0::IMapper;
using android::hardware::graphics::mapper::V2_0::YCbCrLayout;
@@ -71,17 +72,22 @@
// Size of v4l2 buffer queue when streaming > kMaxVideoSize
uint32_t numStillBuffers;
+ // Indication that the device connected supports depth output
+ bool depthEnabled;
+
struct FpsLimitation {
Size size;
double fpsUpperBound;
};
std::vector<FpsLimitation> fpsLimits;
+ std::vector<FpsLimitation> depthFpsLimits;
// Minimum output stream size
Size minStreamSize;
private:
ExternalCameraConfig();
+ static bool updateFpsList(tinyxml2::XMLElement* fpsList, std::vector<FpsLimitation>& fpsLimits);
};
} // common