Y16 format enablement for the external camera provider

Added Y16 (depth) support to the external camera provider.
Refactored initOutputCharsKeys to populate both depth and color static metadata
according to the formats reported by the connected camera.
The corresponding VTS changes will be pushed to pie-vts-dev.
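
Depth output is enabled per device via the external camera config: the provider now
reads a Depth16Supported element and an optional DepthFpsList. A minimal illustrative
config snippet (element and attribute names follow the parser added in
ExternalCameraUtils.cpp; the size and fps bound are placeholder values):

  <Depth16Supported enabled="true"/>
  <DepthFpsList>
      <Limit width="640" height="480" fpsBound="30.0"/>
  </DepthFpsList>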

Test: Tested manually on an Intel D415 depth camera, in addition to the VTS tests.

Change-Id: I358686e7c4330bb180dec4a9cce3bc1cf5475260
Signed-off-by: Emil Jahshan <emil.jahshan@intel.com>
diff --git a/camera/device/3.4/default/ExternalCameraDevice.cpp b/camera/device/3.4/default/ExternalCameraDevice.cpp
index 0f23657..3f04751 100644
--- a/camera/device/3.4/default/ExternalCameraDevice.cpp
+++ b/camera/device/3.4/default/ExternalCameraDevice.cpp
@@ -38,9 +38,8 @@
 // Other formats to consider in the future:
 // * V4L2_PIX_FMT_YVU420 (== YV12)
 // * V4L2_PIX_FMT_YVYU (YVYU: can be converted to YV12 or other YUV420_888 formats)
-const std::array<uint32_t, /*size*/1> kSupportedFourCCs {{
-    V4L2_PIX_FMT_MJPEG
-}}; // double braces required in C++11
+const std::array<uint32_t, /*size*/ 2> kSupportedFourCCs{
+    {V4L2_PIX_FMT_MJPEG, V4L2_PIX_FMT_Z16}};  // double braces required in C++11
 
 constexpr int MAX_RETRY = 5; // Allow retry v4l2 open failures a few times.
 constexpr int OPEN_RETRY_SLEEP_US = 100000; // 100ms * MAX_RETRY = 0.5 seconds
@@ -231,6 +230,13 @@
             mCameraCharacteristics.clear();
             return ret;
         }
+
+        ret = initAvailableCapabilities(&mCameraCharacteristics);
+        if (ret != OK) {
+            ALOGE("%s: init available capabilities key failed: errorno %d", __FUNCTION__, ret);
+            mCameraCharacteristics.clear();
+            return ret;
+        }
     }
     return OK;
 }
@@ -244,6 +250,39 @@
   }                                                \
 } while (0)
 
+status_t ExternalCameraDevice::initAvailableCapabilities(
+        ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
+
+    if (mSupportedFormats.empty()) {
+        ALOGE("%s: Supported formats list is empty", __FUNCTION__);
+        return UNKNOWN_ERROR;
+    }
+
+    bool hasDepth = false;
+    bool hasColor = false;
+    for (const auto& fmt : mSupportedFormats) {
+        switch (fmt.fourcc) {
+            case V4L2_PIX_FMT_Z16: hasDepth = true; break;
+            case V4L2_PIX_FMT_MJPEG: hasColor = true; break;
+            default: ALOGW("%s: Unsupported format found", __FUNCTION__);
+        }
+    }
+
+    std::vector<uint8_t> availableCapabilities;
+    if (hasDepth) {
+        availableCapabilities.push_back(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT);
+    }
+    if (hasColor) {
+        availableCapabilities.push_back(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
+    }
+    if (!availableCapabilities.empty()) {
+        UPDATE(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, availableCapabilities.data(),
+            availableCapabilities.size());
+    }
+
+    return OK;
+}
+
 status_t ExternalCameraDevice::initDefaultCharsKeys(
         ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
     const uint8_t hardware_level = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL;
@@ -330,12 +369,6 @@
            &noiseReductionMode, 1);
     UPDATE(ANDROID_NOISE_REDUCTION_MODE, &noiseReductionMode, 1);
 
-    // android.request
-    const uint8_t availableCapabilities[] = {
-        ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE};
-    UPDATE(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, availableCapabilities,
-           ARRAY_SIZE(availableCapabilities));
-
     const int32_t partialResultCount = 1;
     UPDATE(ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &partialResultCount, 1);
 
@@ -544,9 +577,11 @@
     return OK;
 }
 
-status_t ExternalCameraDevice::initOutputCharsKeys(int fd,
-        ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
-    initSupportedFormatsLocked(fd);
+template <size_t SIZE>
+status_t ExternalCameraDevice::initOutputCharskeysByFormat(
+        ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata,
+        uint32_t fourcc, const std::array<int, SIZE>& halFormats,
+        int streamConfigTag, int streamConfiguration, int minFrameDuration, int stallDuration) {
     if (mSupportedFormats.empty()) {
         ALOGE("%s: Init supported format list failed", __FUNCTION__);
         return UNKNOWN_ERROR;
@@ -555,22 +590,17 @@
     std::vector<int32_t> streamConfigurations;
     std::vector<int64_t> minFrameDurations;
     std::vector<int64_t> stallDurations;
-    int32_t maxFps = std::numeric_limits<int32_t>::min();
-    int32_t minFps = std::numeric_limits<int32_t>::max();
-    std::set<int32_t> framerates;
-
-    std::array<int, /*size*/3> halFormats{{
-        HAL_PIXEL_FORMAT_BLOB,
-        HAL_PIXEL_FORMAT_YCbCr_420_888,
-        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}};
 
     for (const auto& supportedFormat : mSupportedFormats) {
+        if (supportedFormat.fourcc != fourcc) {
+            // Skip 4CCs not meant for the halFormats
+            continue;
+        }
         for (const auto& format : halFormats) {
             streamConfigurations.push_back(format);
             streamConfigurations.push_back(supportedFormat.width);
             streamConfigurations.push_back(supportedFormat.height);
-            streamConfigurations.push_back(
-                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
+            streamConfigurations.push_back(streamConfigTag);
         }
 
         int64_t minFrameDuration = std::numeric_limits<int64_t>::max();
@@ -582,14 +612,6 @@
             if (frameDuration < minFrameDuration) {
                 minFrameDuration = frameDuration;
             }
-            int32_t frameRateInt = static_cast<int32_t>(fr.getDouble());
-            if (minFps > frameRateInt) {
-                minFps = frameRateInt;
-            }
-            if (maxFps < frameRateInt) {
-                maxFps = frameRateInt;
-            }
-            framerates.insert(frameRateInt);
         }
 
         for (const auto& format : halFormats) {
@@ -613,6 +635,30 @@
         }
     }
 
+    UPDATE(streamConfiguration, streamConfigurations.data(), streamConfigurations.size());
+
+    UPDATE(minFrameDuration, minFrameDurations.data(), minFrameDurations.size());
+
+    UPDATE(stallDuration, stallDurations.data(), stallDurations.size());
+
+    return OK;
+}
+
+bool ExternalCameraDevice::calculateMinFps(
+    ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
+    std::set<int32_t> framerates;
+    int32_t minFps = std::numeric_limits<int32_t>::max();
+
+    for (const auto& supportedFormat : mSupportedFormats) {
+        for (const auto& fr : supportedFormat.frameRates) {
+            int32_t frameRateInt = static_cast<int32_t>(fr.getDouble());
+            if (minFps > frameRateInt) {
+                minFps = frameRateInt;
+            }
+            framerates.insert(frameRateInt);
+        }
+    }
+
     std::vector<int32_t> fpsRanges;
     // FPS ranges
     for (const auto& framerate : framerates) {
@@ -626,17 +672,60 @@
     UPDATE(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, fpsRanges.data(),
            fpsRanges.size());
 
-    UPDATE(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
-           streamConfigurations.data(), streamConfigurations.size());
-
-    UPDATE(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
-           minFrameDurations.data(), minFrameDurations.size());
-
-    UPDATE(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, stallDurations.data(),
-           stallDurations.size());
-
     UPDATE(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, &maxFrameDuration, 1);
 
+    return true;
+}
+
+status_t ExternalCameraDevice::initOutputCharsKeys(
+    int fd, ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
+    initSupportedFormatsLocked(fd);
+    if (mSupportedFormats.empty()) {
+        ALOGE("%s: Init supported format list failed", __FUNCTION__);
+        return UNKNOWN_ERROR;
+    }
+
+    bool hasDepth = false;
+    bool hasColor = false;
+
+    // For V4L2_PIX_FMT_Z16
+    std::array<int, /*size*/ 1> halDepthFormats{{HAL_PIXEL_FORMAT_Y16}};
+    // For V4L2_PIX_FMT_MJPEG
+    std::array<int, /*size*/ 3> halFormats{{HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
+                                            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}};
+
+    for (const auto& supportedFormat : mSupportedFormats) {
+        switch (supportedFormat.fourcc) {
+            case V4L2_PIX_FMT_Z16:
+                hasDepth = true;
+                break;
+            case V4L2_PIX_FMT_MJPEG:
+                hasColor = true;
+                break;
+            default:
+                ALOGW("%s: format %c%c%c%c is not supported!", __FUNCTION__,
+                      supportedFormat.fourcc & 0xFF, (supportedFormat.fourcc >> 8) & 0xFF,
+                      (supportedFormat.fourcc >> 16) & 0xFF, (supportedFormat.fourcc >> 24) & 0xFF);
+        }
+    }
+
+    if (hasDepth) {
+        initOutputCharskeysByFormat(metadata, V4L2_PIX_FMT_Z16, halDepthFormats,
+                ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT,
+                ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
+                ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
+                ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS);
+    }
+    if (hasColor) {
+        initOutputCharskeysByFormat(metadata, V4L2_PIX_FMT_MJPEG, halFormats,
+                ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
+                ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
+                ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
+                ANDROID_SCALER_AVAILABLE_STALL_DURATIONS);
+    }
+
+    calculateMinFps(metadata);
+
     SupportedV4L2Format maximumFormat {.width = 0, .height = 0};
     for (const auto& supportedFormat : mSupportedFormats) {
         if (supportedFormat.width >= maximumFormat.width &&
@@ -758,11 +847,12 @@
     sortedFmts = out;
 }
 
-std::vector<SupportedV4L2Format>
-ExternalCameraDevice::getCandidateSupportedFormatsLocked(
-        int fd, CroppingType cropType,
-        const std::vector<ExternalCameraConfig::FpsLimitation>& fpsLimits,
-        const Size& minStreamSize) {
+std::vector<SupportedV4L2Format> ExternalCameraDevice::getCandidateSupportedFormatsLocked(
+    int fd, CroppingType cropType,
+    const std::vector<ExternalCameraConfig::FpsLimitation>& fpsLimits,
+    const std::vector<ExternalCameraConfig::FpsLimitation>& depthFpsLimits,
+    const Size& minStreamSize,
+    bool depthEnabled) {
     std::vector<SupportedV4L2Format> outFmts;
     struct v4l2_fmtdesc fmtdesc {
         .index = 0,
@@ -808,28 +898,10 @@
                             .fourcc = fmtdesc.pixelformat
                         };
 
-                        double fpsUpperBound = -1.0;
-                        for (const auto& limit : fpsLimits) {
-                            if (cropType == VERTICAL) {
-                                if (format.width <= limit.size.width) {
-                                    fpsUpperBound = limit.fpsUpperBound;
-                                    break;
-                                }
-                            } else { // HORIZONTAL
-                                if (format.height <= limit.size.height) {
-                                    fpsUpperBound = limit.fpsUpperBound;
-                                    break;
-                                }
-                            }
-
-                        }
-                        if (fpsUpperBound < 0.f) {
-                            continue;
-                        }
-
-                        getFrameRateList(fd, fpsUpperBound, &format);
-                        if (!format.frameRates.empty()) {
-                            outFmts.push_back(format);
+                        if (format.fourcc == V4L2_PIX_FMT_Z16 && depthEnabled) {
+                            updateFpsBounds(fd, cropType, depthFpsLimits, format, outFmts);
+                        } else {
+                            updateFpsBounds(fd, cropType, fpsLimits, format, outFmts);
                         }
                     }
                 }
@@ -841,12 +913,39 @@
     return outFmts;
 }
 
-void ExternalCameraDevice::initSupportedFormatsLocked(int fd) {
+void ExternalCameraDevice::updateFpsBounds(
+    int fd, CroppingType cropType,
+    const std::vector<ExternalCameraConfig::FpsLimitation>& fpsLimits, SupportedV4L2Format format,
+    std::vector<SupportedV4L2Format>& outFmts) {
+    double fpsUpperBound = -1.0;
+    for (const auto& limit : fpsLimits) {
+        if (cropType == VERTICAL) {
+            if (format.width <= limit.size.width) {
+                fpsUpperBound = limit.fpsUpperBound;
+                break;
+            }
+        } else {  // HORIZONTAL
+            if (format.height <= limit.size.height) {
+                fpsUpperBound = limit.fpsUpperBound;
+                break;
+            }
+        }
+    }
+    if (fpsUpperBound < 0.f) {
+        return;
+    }
 
-    std::vector<SupportedV4L2Format> horizontalFmts =
-            getCandidateSupportedFormatsLocked(fd, HORIZONTAL, mCfg.fpsLimits, mCfg.minStreamSize);
-    std::vector<SupportedV4L2Format> verticalFmts =
-            getCandidateSupportedFormatsLocked(fd, VERTICAL, mCfg.fpsLimits, mCfg.minStreamSize);
+    getFrameRateList(fd, fpsUpperBound, &format);
+    if (!format.frameRates.empty()) {
+        outFmts.push_back(format);
+    }
+}
+
+void ExternalCameraDevice::initSupportedFormatsLocked(int fd) {
+    std::vector<SupportedV4L2Format> horizontalFmts = getCandidateSupportedFormatsLocked(
+        fd, HORIZONTAL, mCfg.fpsLimits, mCfg.depthFpsLimits, mCfg.minStreamSize, mCfg.depthEnabled);
+    std::vector<SupportedV4L2Format> verticalFmts = getCandidateSupportedFormatsLocked(
+        fd, VERTICAL, mCfg.fpsLimits, mCfg.depthFpsLimits, mCfg.minStreamSize, mCfg.depthEnabled);
 
     size_t horiSize = horizontalFmts.size();
     size_t vertSize = verticalFmts.size();
diff --git a/camera/device/3.4/default/ExternalCameraDeviceSession.cpp b/camera/device/3.4/default/ExternalCameraDeviceSession.cpp
index 66b17db..32da968 100644
--- a/camera/device/3.4/default/ExternalCameraDeviceSession.cpp
+++ b/camera/device/3.4/default/ExternalCameraDeviceSession.cpp
@@ -1819,7 +1819,7 @@
         return false;
     };
 
-    if (req->frameIn->mFourcc != V4L2_PIX_FMT_MJPEG) {
+    if (req->frameIn->mFourcc != V4L2_PIX_FMT_MJPEG && req->frameIn->mFourcc != V4L2_PIX_FMT_Z16) {
         return onDeviceError("%s: do not support V4L2 format %c%c%c%c", __FUNCTION__,
                 req->frameIn->mFourcc & 0xFF,
                 (req->frameIn->mFourcc >> 8) & 0xFF,
@@ -1844,29 +1844,26 @@
     }
 
     // TODO: in some special case maybe we can decode jpg directly to gralloc output?
-    ATRACE_BEGIN("MJPGtoI420");
-    res = libyuv::MJPGToI420(
-            inData, inDataSize,
-            static_cast<uint8_t*>(mYu12FrameLayout.y),
-            mYu12FrameLayout.yStride,
-            static_cast<uint8_t*>(mYu12FrameLayout.cb),
-            mYu12FrameLayout.cStride,
-            static_cast<uint8_t*>(mYu12FrameLayout.cr),
-            mYu12FrameLayout.cStride,
-            mYu12Frame->mWidth, mYu12Frame->mHeight,
-            mYu12Frame->mWidth, mYu12Frame->mHeight);
-    ATRACE_END();
+    if (req->frameIn->mFourcc == V4L2_PIX_FMT_MJPEG) {
+        ATRACE_BEGIN("MJPGtoI420");
+        int res = libyuv::MJPGToI420(
+            inData, inDataSize, static_cast<uint8_t*>(mYu12FrameLayout.y), mYu12FrameLayout.yStride,
+            static_cast<uint8_t*>(mYu12FrameLayout.cb), mYu12FrameLayout.cStride,
+            static_cast<uint8_t*>(mYu12FrameLayout.cr), mYu12FrameLayout.cStride,
+            mYu12Frame->mWidth, mYu12Frame->mHeight, mYu12Frame->mWidth, mYu12Frame->mHeight);
+        ATRACE_END();
 
-    if (res != 0) {
-        // For some webcam, the first few V4L2 frames might be malformed...
-        ALOGE("%s: Convert V4L2 frame to YU12 failed! res %d", __FUNCTION__, res);
-        lk.unlock();
-        Status st = parent->processCaptureRequestError(req);
-        if (st != Status::OK) {
-            return onDeviceError("%s: failed to process capture request error!", __FUNCTION__);
+        if (res != 0) {
+            // For some webcam, the first few V4L2 frames might be malformed...
+            ALOGE("%s: Convert V4L2 frame to YU12 failed! res %d", __FUNCTION__, res);
+            lk.unlock();
+            Status st = parent->processCaptureRequestError(req);
+            if (st != Status::OK) {
+                return onDeviceError("%s: failed to process capture request error!", __FUNCTION__);
+            }
+            signalRequestDone();
+            return true;
         }
-        signalRequestDone();
-        return true;
     }
 
     ATRACE_BEGIN("Wait for BufferRequest done");
@@ -1910,6 +1907,16 @@
                           __FUNCTION__, ret);
                 }
             } break;
+            case PixelFormat::Y16: {
+                void* outLayout = sHandleImporter.lock(*(halBuf.bufPtr), halBuf.usage, inDataSize);
+
+                std::memcpy(outLayout, inData, inDataSize);
+
+                int relFence = sHandleImporter.unlock(*(halBuf.bufPtr));
+                if (relFence >= 0) {
+                    halBuf.acquireFence = relFence;
+                }
+            } break;
             case PixelFormat::YCBCR_420_888:
             case PixelFormat::YV12: {
                 IMapper::Rect outRect {0, 0,
@@ -2164,7 +2171,8 @@
 }
 
 bool ExternalCameraDeviceSession::isSupported(const Stream& stream,
-        const std::vector<SupportedV4L2Format>& supportedFormats) {
+        const std::vector<SupportedV4L2Format>& supportedFormats,
+        const ExternalCameraConfig& devCfg) {
     int32_t ds = static_cast<int32_t>(stream.dataSpace);
     PixelFormat fmt = stream.format;
     uint32_t width = stream.width;
@@ -2181,11 +2189,6 @@
         return false;
     }
 
-    if (ds & Dataspace::DEPTH) {
-        ALOGI("%s: does not support depth output", __FUNCTION__);
-        return false;
-    }
-
     switch (fmt) {
         case PixelFormat::BLOB:
             if (ds != static_cast<int32_t>(Dataspace::V0_JFIF)) {
@@ -2199,6 +2202,16 @@
             // TODO: check what dataspace we can support here.
             // intentional no-ops.
             break;
+        case PixelFormat::Y16:
+            if (!devCfg.depthEnabled) {
+                ALOGI("%s: depth output is not enabled", __FUNCTION__);
+                return false;
+            }
+            if (!(ds & Dataspace::DEPTH)) {
+                ALOGI("%s: Y16 supports only dataSpace DEPTH", __FUNCTION__);
+                return false;
+            }
+            break;
         default:
             ALOGI("%s: does not support format %x", __FUNCTION__, fmt);
             return false;
@@ -2544,7 +2557,8 @@
 
 Status ExternalCameraDeviceSession::isStreamCombinationSupported(
         const V3_2::StreamConfiguration& config,
-        const std::vector<SupportedV4L2Format>& supportedFormats) {
+        const std::vector<SupportedV4L2Format>& supportedFormats,
+        const ExternalCameraConfig& devCfg) {
     if (config.operationMode != StreamConfigurationMode::NORMAL_MODE) {
         ALOGE("%s: unsupported operation mode: %d", __FUNCTION__, config.operationMode);
         return Status::ILLEGAL_ARGUMENT;
@@ -2559,7 +2573,7 @@
     int numStallStream = 0;
     for (const auto& stream : config.streams) {
         // Check if the format/width/height combo is supported
-        if (!isSupported(stream, supportedFormats)) {
+        if (!isSupported(stream, supportedFormats, devCfg)) {
             return Status::ILLEGAL_ARGUMENT;
         }
         if (stream.format == PixelFormat::BLOB) {
@@ -2590,7 +2604,7 @@
         uint32_t blobBufferSize) {
     ATRACE_CALL();
 
-    Status status = isStreamCombinationSupported(config, mSupportedFormats);
+    Status status = isStreamCombinationSupported(config, mSupportedFormats, mCfg);
     if (status != Status::OK) {
         return status;
     }
@@ -2744,6 +2758,7 @@
             case PixelFormat::BLOB:
             case PixelFormat::YCBCR_420_888:
             case PixelFormat::YV12: // Used by SurfaceTexture
+            case PixelFormat::Y16:
                 // No override
                 out->streams[i].v3_2.overrideFormat = config.streams[i].format;
                 break;
diff --git a/camera/device/3.4/default/ExternalCameraUtils.cpp b/camera/device/3.4/default/ExternalCameraUtils.cpp
index 0941052..e25deff 100644
--- a/camera/device/3.4/default/ExternalCameraUtils.cpp
+++ b/camera/device/3.4/default/ExternalCameraUtils.cpp
@@ -21,7 +21,6 @@
 #include <sys/mman.h>
 #include <linux/videodev2.h>
 #include "ExternalCameraUtils.h"
-#include "tinyxml2.h" // XML parsing
 
 namespace android {
 namespace hardware {
@@ -245,28 +244,28 @@
     if (fpsList == nullptr) {
         ALOGI("%s: no fps list specified", __FUNCTION__);
     } else {
-        std::vector<FpsLimitation> limits;
-        XMLElement *row = fpsList->FirstChildElement("Limit");
-        while (row != nullptr) {
-            FpsLimitation prevLimit {{0, 0}, 1000.0};
-            FpsLimitation limit;
-            limit.size = {
-                row->UnsignedAttribute("width", /*Default*/0),
-                row->UnsignedAttribute("height", /*Default*/0)};
-            limit.fpsUpperBound = row->DoubleAttribute("fpsBound", /*Default*/1000.0);
-            if (limit.size.width <= prevLimit.size.width ||
-                    limit.size.height <= prevLimit.size.height ||
-                    limit.fpsUpperBound >= prevLimit.fpsUpperBound) {
-                ALOGE("%s: FPS limit list must have increasing size and decreasing fps!"
-                        " Prev %dx%d@%f, Current %dx%d@%f", __FUNCTION__,
-                        prevLimit.size.width, prevLimit.size.height, prevLimit.fpsUpperBound,
-                        limit.size.width, limit.size.height, limit.fpsUpperBound);
+        if (!updateFpsList(fpsList, ret.fpsLimits)) {
+            return ret;
+        }
+    }
+
+    XMLElement *depth = deviceCfg->FirstChildElement("Depth16Supported");
+    if (depth == nullptr) {
+        ret.depthEnabled = false;
+        ALOGI("%s: depth output is not enabled", __FUNCTION__);
+    } else {
+        ret.depthEnabled = depth->BoolAttribute("enabled", false);
+    }
+
+    if (ret.depthEnabled) {
+        XMLElement *depthFpsList = deviceCfg->FirstChildElement("DepthFpsList");
+        if (depthFpsList == nullptr) {
+            ALOGW("%s: no depth fps list specified", __FUNCTION__);
+        } else {
+            if (!updateFpsList(depthFpsList, ret.depthFpsLimits)) {
                 return ret;
             }
-            limits.push_back(limit);
-            row = row->NextSiblingElement("Limit");
         }
-        ret.fpsLimits = limits;
     }
 
     XMLElement *minStreamSize = deviceCfg->FirstChildElement("MinimumStreamSize");
@@ -293,15 +292,48 @@
         ALOGI("%s: fpsLimitList: %dx%d@%f", __FUNCTION__,
                 limit.size.width, limit.size.height, limit.fpsUpperBound);
     }
+    for (const auto& limit : ret.depthFpsLimits) {
+        ALOGI("%s: depthFpsLimitList: %dx%d@%f", __FUNCTION__, limit.size.width, limit.size.height,
+              limit.fpsUpperBound);
+    }
     ALOGI("%s: minStreamSize: %dx%d" , __FUNCTION__,
          ret.minStreamSize.width, ret.minStreamSize.height);
     return ret;
 }
 
+bool ExternalCameraConfig::updateFpsList(tinyxml2::XMLElement* fpsList,
+        std::vector<FpsLimitation>& fpsLimits) {
+    using namespace tinyxml2;
+    std::vector<FpsLimitation> limits;
+    XMLElement* row = fpsList->FirstChildElement("Limit");
+    while (row != nullptr) {
+        FpsLimitation prevLimit{{0, 0}, 1000.0};
+        FpsLimitation limit;
+        limit.size = {row->UnsignedAttribute("width", /*Default*/ 0),
+                      row->UnsignedAttribute("height", /*Default*/ 0)};
+        limit.fpsUpperBound = row->DoubleAttribute("fpsBound", /*Default*/ 1000.0);
+        if (limit.size.width <= prevLimit.size.width ||
+            limit.size.height <= prevLimit.size.height ||
+            limit.fpsUpperBound >= prevLimit.fpsUpperBound) {
+            ALOGE(
+                "%s: FPS limit list must have increasing size and decreasing fps!"
+                " Prev %dx%d@%f, Current %dx%d@%f",
+                __FUNCTION__, prevLimit.size.width, prevLimit.size.height, prevLimit.fpsUpperBound,
+                limit.size.width, limit.size.height, limit.fpsUpperBound);
+            return false;
+        }
+        limits.push_back(limit);
+        row = row->NextSiblingElement("Limit");
+    }
+    fpsLimits = limits;
+    return true;
+}
+
 ExternalCameraConfig::ExternalCameraConfig() :
         maxJpegBufSize(kDefaultJpegBufSize),
         numVideoBuffers(kDefaultNumVideoBuffer),
         numStillBuffers(kDefaultNumStillBuffer),
+        depthEnabled(false),
         orientation(kDefaultOrientation) {
     fpsLimits.push_back({/*Size*/{ 640,  480}, /*FPS upper bound*/30.0});
     fpsLimits.push_back({/*Size*/{1280,  720}, /*FPS upper bound*/7.5});
diff --git a/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDeviceSession.h b/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDeviceSession.h
index 9cc55cb..71b7c17 100644
--- a/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDeviceSession.h
+++ b/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDeviceSession.h
@@ -194,7 +194,8 @@
     int v4l2StreamOffLocked();
     int setV4l2FpsLocked(double fps);
     static Status isStreamCombinationSupported(const V3_2::StreamConfiguration& config,
-            const std::vector<SupportedV4L2Format>& supportedFormats);
+            const std::vector<SupportedV4L2Format>& supportedFormats,
+            const ExternalCameraConfig& devCfg);
 
     // TODO: change to unique_ptr for better tracking
     sp<V4L2Frame> dequeueV4l2FrameLocked(/*out*/nsecs_t* shutterTs); // Called with mLock hold
@@ -202,7 +203,8 @@
 
     // Check if input Stream is one of supported stream setting on this device
     static bool isSupported(const Stream& stream,
-            const std::vector<SupportedV4L2Format>& supportedFormats);
+            const std::vector<SupportedV4L2Format>& supportedFormats,
+            const ExternalCameraConfig& cfg);
 
     // Validate and import request's output buffers and acquire fence
     virtual Status importRequestLocked(
diff --git a/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDevice_3_4.h b/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDevice_3_4.h
index 719a3ed..bd79807 100644
--- a/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDevice_3_4.h
+++ b/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDevice_3_4.h
@@ -104,6 +104,9 @@
 
     // Calls into virtual member function. Do not use it in constructor
     status_t initCameraCharacteristics();
+    // Init available capabilities keys
+    status_t initAvailableCapabilities(
+            ::android::hardware::camera::common::V1_0::helper::CameraMetadata*);
     // Init non-device dependent keys
     virtual status_t initDefaultCharsKeys(
             ::android::hardware::camera::common::V1_0::helper::CameraMetadata*);
@@ -114,13 +117,30 @@
     status_t initOutputCharsKeys(int fd,
             ::android::hardware::camera::common::V1_0::helper::CameraMetadata*);
 
+    // Helper function for initOutputCharsKeys
+    template <size_t SIZE>
+    status_t initOutputCharskeysByFormat(
+            ::android::hardware::camera::common::V1_0::helper::CameraMetadata*,
+            uint32_t fourcc, const std::array<int, SIZE>& formats,
+            int scaler_stream_config_tag,
+            int stream_configuration, int min_frame_duration, int stall_duration);
+
+    bool calculateMinFps(::android::hardware::camera::common::V1_0::helper::CameraMetadata*);
+
     static void getFrameRateList(int fd, double fpsUpperBound, SupportedV4L2Format* format);
 
+    static void updateFpsBounds(int fd, CroppingType cropType,
+            const std::vector<ExternalCameraConfig::FpsLimitation>& fpsLimits,
+            SupportedV4L2Format format,
+            std::vector<SupportedV4L2Format>& outFmts);
+
     // Get candidate supported formats list of input cropping type.
     static std::vector<SupportedV4L2Format> getCandidateSupportedFormatsLocked(
             int fd, CroppingType cropType,
             const std::vector<ExternalCameraConfig::FpsLimitation>& fpsLimits,
-            const Size& minStreamSize);
+            const std::vector<ExternalCameraConfig::FpsLimitation>& depthFpsLimits,
+            const Size& minStreamSize,
+            bool depthEnabled);
     // Trim supported format list by the cropping type. Also sort output formats by width/height
     static void trimSupportedFormats(CroppingType cropType,
             /*inout*/std::vector<SupportedV4L2Format>* pFmts);
diff --git a/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraUtils.h b/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraUtils.h
index 3b1ac96..341c622 100644
--- a/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraUtils.h
+++ b/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraUtils.h
@@ -17,12 +17,13 @@
 #ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMUTIL_H
 #define ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMUTIL_H
 
-#include <inttypes.h>
-#include "utils/LightRefBase.h"
-#include <mutex>
-#include <vector>
-#include <unordered_set>
 #include <android/hardware/graphics/mapper/2.0/IMapper.h>
+#include <inttypes.h>
+#include <mutex>
+#include <unordered_set>
+#include <vector>
+#include "tinyxml2.h"  // XML parsing
+#include "utils/LightRefBase.h"
 
 using android::hardware::graphics::mapper::V2_0::IMapper;
 using android::hardware::graphics::mapper::V2_0::YCbCrLayout;
@@ -71,11 +72,15 @@
     // Size of v4l2 buffer queue when streaming > kMaxVideoSize
     uint32_t numStillBuffers;
 
+    // Whether depth output is enabled for the connected device
+    bool depthEnabled;
+
     struct FpsLimitation {
         Size size;
         double fpsUpperBound;
     };
     std::vector<FpsLimitation> fpsLimits;
+    std::vector<FpsLimitation> depthFpsLimits;
 
     // Minimum output stream size
     Size minStreamSize;
@@ -85,6 +90,7 @@
 
 private:
     ExternalCameraConfig();
+    static bool updateFpsList(tinyxml2::XMLElement* fpsList, std::vector<FpsLimitation>& fpsLimits);
 };
 
 } // common
diff --git a/camera/device/3.5/default/ExternalCameraDevice.cpp b/camera/device/3.5/default/ExternalCameraDevice.cpp
index 6a0b51e..d0de1a4 100644
--- a/camera/device/3.5/default/ExternalCameraDevice.cpp
+++ b/camera/device/3.5/default/ExternalCameraDevice.cpp
@@ -103,7 +103,7 @@
     }
     V3_2::StreamConfiguration streamConfig = {streamsV3_2, streams.operationMode};
     auto status = ExternalCameraDeviceSession::isStreamCombinationSupported(streamConfig,
-            mSupportedFormats);
+            mSupportedFormats, mCfg);
     _hidl_cb(Status::OK, Status::OK == status);
     return Void();
 }
diff --git a/camera/device/3.5/default/include/ext_device_v3_5_impl/ExternalCameraDeviceSession.h b/camera/device/3.5/default/include/ext_device_v3_5_impl/ExternalCameraDeviceSession.h
index d2b5e89..281f93a 100644
--- a/camera/device/3.5/default/include/ext_device_v3_5_impl/ExternalCameraDeviceSession.h
+++ b/camera/device/3.5/default/include/ext_device_v3_5_impl/ExternalCameraDeviceSession.h
@@ -91,9 +91,10 @@
     }
 
     static Status isStreamCombinationSupported(const V3_2::StreamConfiguration& config,
-            const std::vector<SupportedV4L2Format>& supportedFormats) {
+            const std::vector<SupportedV4L2Format>& supportedFormats,
+            const ExternalCameraConfig& devCfg) {
         return V3_4::implementation::ExternalCameraDeviceSession::isStreamCombinationSupported(
-                config, supportedFormats);
+                config, supportedFormats, devCfg);
     }
 
 protected: