Migrate reference external HAL implementation to AIDL

Android T migrated the camera HAL interface to AIDL, but the reference
HAL implementation was never updated to use it. This CL migrates the
reference HAL implementation for external cameras to AIDL as well. The
external HAL uses the V4L2 standard to expose USB cameras to the
cameraserver.
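
A minimal sketch (illustrative only, not part of this change) of how a
V4L2 capture node can be probed before being exposed as an external
camera; the helper below is hypothetical:

    #include <fcntl.h>
    #include <linux/videodev2.h>
    #include <sys/ioctl.h>
    #include <unistd.h>

    // Returns true if |path| is a V4L2 video capture device (e.g. a USB
    // webcam) that could be exposed to the cameraserver.
    static bool isV4l2CaptureDevice(const char* path) {
        int fd = ::open(path, O_RDWR);
        if (fd < 0) {
            return false;
        }
        v4l2_capability cap = {};
        bool isCapture = (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0) &&
                         (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE);
        ::close(fd);
        return isCapture;
    }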

The reference HAL implementation for internal cameras was dropped
because it is not possible to write a generic HAL that works with a
large percentage of internal cameras.

Bug: 219974678
Test: Existing CTS tests pass with external camera connected.
Change-Id: I35f3dc32c16670eca7735a4ac00fed3daf36aa65
diff --git a/camera/device/default/Android.bp b/camera/device/default/Android.bp
new file mode 100644
index 0000000..b577597
--- /dev/null
+++ b/camera/device/default/Android.bp
@@ -0,0 +1,71 @@
+//
+// Copyright (C) 2020 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "hardware_interfaces_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["hardware_interfaces_license"],
+}
+
+cc_library_shared {
+    name: "camera.device-external-impl",
+    defaults: ["hidl_defaults"],
+    proprietary: true,
+    srcs: [
+        "ExternalCameraDevice.cpp",
+        "ExternalCameraDeviceSession.cpp",
+        "ExternalCameraOfflineSession.cpp",
+        "ExternalCameraUtils.cpp",
+        "convert.cpp",
+    ],
+    shared_libs: [
+        "android.hardware.camera.common-V1-ndk",
+        "android.hardware.camera.device-V1-ndk",
+        "android.hardware.graphics.allocator-V1-ndk",
+        "android.hardware.graphics.common-V4-ndk",
+        "android.hardware.graphics.mapper@2.0",
+        "android.hardware.graphics.mapper@3.0",
+        "android.hardware.graphics.mapper@4.0",
+        "android.hidl.allocator@1.0",
+        "android.hidl.memory@1.0",
+        "libbinder_ndk",
+        "libcamera_metadata",
+        "libcutils",
+        "libexif",
+        "libfmq",
+        "libgralloctypes",
+        "libhardware",
+        "libhidlbase",
+        "libhidlmemory",
+        "libjpeg",
+        "liblog",
+        "libsync",
+        "libtinyxml2",
+        "libutils",
+        "libyuv",
+    ],
+    static_libs: [
+        "android.hardware.camera.common@1.0-helper",
+        "libaidlcommonsupport",
+    ],
+    header_libs: [
+        "media_plugin_headers",
+    ],
+    export_include_dirs: ["."],
+}
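+
+// Illustrative note (assumption, not required by this change): a camera
+// provider HAL binary that serves external cameras is expected to pull this
+// implementation in via shared_libs: ["camera.device-external-impl"].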
diff --git a/camera/device/default/ExternalCameraDevice.cpp b/camera/device/default/ExternalCameraDevice.cpp
new file mode 100644
index 0000000..677fb42
--- /dev/null
+++ b/camera/device/default/ExternalCameraDevice.cpp
@@ -0,0 +1,1001 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "ExtCamDev"
+// #define LOG_NDEBUG 0
+#include <log/log.h>
+
+#include "ExternalCameraDevice.h"
+
+#include <aidl/android/hardware/camera/common/Status.h>
+#include <convert.h>
+#include <linux/videodev2.h>
+#include <regex>
+#include <set>
+
+namespace android {
+namespace hardware {
+namespace camera {
+namespace device {
+namespace implementation {
+
+using ::aidl::android::hardware::camera::common::Status;
+
+namespace {
+// Only support MJPEG for now as it seems to be the one that supports higher fps.
+// Other formats to consider in the future:
+// * V4L2_PIX_FMT_YVU420 (== YV12)
+// * V4L2_PIX_FMT_YVYU (YVYU: can be converted to YV12 or other YUV420_888 formats)
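+// V4L2_PIX_FMT_Z16 carries 16-bit depth samples and is used to support USB depth cameras.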
+const std::array<uint32_t, /*size*/ 2> kSupportedFourCCs{
+        {V4L2_PIX_FMT_MJPEG, V4L2_PIX_FMT_Z16}};  // double braces required in C++11
+
+constexpr int MAX_RETRY = 5;                  // Allow retrying v4l2 open failures a few times.
+constexpr int OPEN_RETRY_SLEEP_US = 100'000;  // 100ms * MAX_RETRY = 0.5 seconds
+
+const std::regex kDevicePathRE("/dev/video([0-9]+)");
+}  // namespace
+
+std::string ExternalCameraDevice::kDeviceVersion = "1.1";
+
+ExternalCameraDevice::ExternalCameraDevice(const std::string& devicePath,
+                                           const ExternalCameraConfig& config)
+    : mCameraId("-1"), mDevicePath(devicePath), mCfg(config) {
+    std::smatch sm;
+    if (std::regex_match(mDevicePath, sm, kDevicePathRE)) {
+        mCameraId = std::to_string(mCfg.cameraIdOffset + std::stoi(sm[1]));
+    } else {
+        ALOGE("%s: device path match failed for %s", __FUNCTION__, mDevicePath.c_str());
+    }
+}
+
+ExternalCameraDevice::~ExternalCameraDevice() {}
+
+ndk::ScopedAStatus ExternalCameraDevice::getCameraCharacteristics(CameraMetadata* _aidl_return) {
+    Mutex::Autolock _l(mLock);
+    if (_aidl_return == nullptr) {
+        return fromStatus(Status::ILLEGAL_ARGUMENT);
+    }
+
+    if (isInitFailedLocked()) {
+        return fromStatus(Status::INTERNAL_ERROR);
+    }
+
+    const camera_metadata_t* rawMetadata = mCameraCharacteristics.getAndLock();
+    convertToAidl(rawMetadata, _aidl_return);
+    mCameraCharacteristics.unlock(rawMetadata);
+    return fromStatus(Status::OK);
+}
+
+ndk::ScopedAStatus ExternalCameraDevice::getPhysicalCameraCharacteristics(const std::string&,
+                                                                          CameraMetadata*) {
+    ALOGE("%s: Physical camera functions are not supported for external cameras.", __FUNCTION__);
+    return fromStatus(Status::ILLEGAL_ARGUMENT);
+}
+
+ndk::ScopedAStatus ExternalCameraDevice::getResourceCost(CameraResourceCost* _aidl_return) {
+    if (_aidl_return == nullptr) {
+        return fromStatus(Status::ILLEGAL_ARGUMENT);
+    }
+
+    _aidl_return->resourceCost = 100;
+    return fromStatus(Status::OK);
+}
+
+ndk::ScopedAStatus ExternalCameraDevice::isStreamCombinationSupported(
+        const StreamConfiguration& in_streams, bool* _aidl_return) {
+    if (isInitFailed()) {
+        ALOGE("%s: camera %s. camera init failed!", __FUNCTION__, mCameraId.c_str());
+        return fromStatus(Status::INTERNAL_ERROR);
+    }
+    Status s = ExternalCameraDeviceSession::isStreamCombinationSupported(in_streams,
+                                                                         mSupportedFormats, mCfg);
+    *_aidl_return = s == Status::OK;
+    return fromStatus(Status::OK);
+}
+
+ndk::ScopedAStatus ExternalCameraDevice::open(
+        const std::shared_ptr<ICameraDeviceCallback>& in_callback,
+        std::shared_ptr<ICameraDeviceSession>* _aidl_return) {
+    if (_aidl_return == nullptr) {
+        ALOGE("%s: cannot open camera %s. return session ptr is null!", __FUNCTION__,
+              mCameraId.c_str());
+        return fromStatus(Status::ILLEGAL_ARGUMENT);
+    }
+
+    Mutex::Autolock _l(mLock);
+    if (isInitFailedLocked()) {
+        ALOGE("%s: cannot open camera %s. camera init failed!", __FUNCTION__, mCameraId.c_str());
+        return fromStatus(Status::INTERNAL_ERROR);
+    }
+
+    std::shared_ptr<ExternalCameraDeviceSession> session;
+    ALOGV("%s: Initializing device for camera %s", __FUNCTION__, mCameraId.c_str());
+    session = mSession.lock();
+
+    if (session != nullptr && !session->isClosed()) {
+        ALOGE("%s: cannot open an already opened camera!", __FUNCTION__);
+        return fromStatus(Status::CAMERA_IN_USE);
+    }
+
+    int numAttempt = 0;
+    unique_fd fd(::open(mDevicePath.c_str(), O_RDWR));
+    while (fd.get() < 0 && numAttempt < MAX_RETRY) {
+        // Previous retry attempts failed. Retry opening the device at most MAX_RETRY times
+        ALOGW("%s: v4l2 device %s open failed, wait 100ms and try again", __FUNCTION__,
+              mDevicePath.c_str());
+        usleep(OPEN_RETRY_SLEEP_US);  // sleep and try again
+        fd.reset(::open(mDevicePath.c_str(), O_RDWR));
+        numAttempt++;
+    }
+
+    if (fd.get() < 0) {
+        ALOGE("%s: v4l2 device open %s failed: %s", __FUNCTION__, mDevicePath.c_str(),
+              strerror(errno));
+        return fromStatus(Status::INTERNAL_ERROR);
+    }
+
+    session = createSession(in_callback, mCfg, mSupportedFormats, mCroppingType,
+                            mCameraCharacteristics, mCameraId, std::move(fd));
+    if (session == nullptr) {
+        ALOGE("%s: camera device session allocation failed", __FUNCTION__);
+        return fromStatus(Status::INTERNAL_ERROR);
+    }
+
+    if (session->isInitFailed()) {
+        ALOGE("%s: camera device session init failed", __FUNCTION__);
+        return fromStatus(Status::INTERNAL_ERROR);
+    }
+
+    mSession = session;
+    *_aidl_return = session;
+    return fromStatus(Status::OK);
+}
+
+ndk::ScopedAStatus ExternalCameraDevice::openInjectionSession(
+        const std::shared_ptr<ICameraDeviceCallback>&, std::shared_ptr<ICameraInjectionSession>*) {
+    return fromStatus(Status::OPERATION_NOT_SUPPORTED);
+}
+
+ndk::ScopedAStatus ExternalCameraDevice::setTorchMode(bool) {
+    return fromStatus(Status::OPERATION_NOT_SUPPORTED);
+}
+
+ndk::ScopedAStatus ExternalCameraDevice::turnOnTorchWithStrengthLevel(int32_t) {
+    return fromStatus(Status::OPERATION_NOT_SUPPORTED);
+}
+
+ndk::ScopedAStatus ExternalCameraDevice::getTorchStrengthLevel(int32_t*) {
+    return fromStatus(Status::OPERATION_NOT_SUPPORTED);
+}
+
+std::shared_ptr<ExternalCameraDeviceSession> ExternalCameraDevice::createSession(
+        const std::shared_ptr<ICameraDeviceCallback>& cb, const ExternalCameraConfig& cfg,
+        const std::vector<SupportedV4L2Format>& sortedFormats, const CroppingType& croppingType,
+        const common::V1_0::helper::CameraMetadata& chars, const std::string& cameraId,
+        unique_fd v4l2Fd) {
+    return ndk::SharedRefBase::make<ExternalCameraDeviceSession>(
+            cb, cfg, sortedFormats, croppingType, chars, cameraId, std::move(v4l2Fd));
+}
+
+bool ExternalCameraDevice::isInitFailed() {
+    Mutex::Autolock _l(mLock);
+    return isInitFailedLocked();
+}
+
+bool ExternalCameraDevice::isInitFailedLocked() {
+    if (!mInitialized) {
+        status_t ret = initCameraCharacteristics();
+        if (ret != OK) {
+            ALOGE("%s: init camera characteristics failed: errorno %d", __FUNCTION__, ret);
+            mInitFailed = true;
+        }
+        mInitialized = true;
+    }
+    return mInitFailed;
+}
+
+void ExternalCameraDevice::initSupportedFormatsLocked(int fd) {
+    std::vector<SupportedV4L2Format> horizontalFmts =
+            getCandidateSupportedFormatsLocked(fd, HORIZONTAL, mCfg.fpsLimits, mCfg.depthFpsLimits,
+                                               mCfg.minStreamSize, mCfg.depthEnabled);
+    std::vector<SupportedV4L2Format> verticalFmts =
+            getCandidateSupportedFormatsLocked(fd, VERTICAL, mCfg.fpsLimits, mCfg.depthFpsLimits,
+                                               mCfg.minStreamSize, mCfg.depthEnabled);
+
+    size_t horiSize = horizontalFmts.size();
+    size_t vertSize = verticalFmts.size();
+
+    if (horiSize == 0 && vertSize == 0) {
+        ALOGE("%s: cannot find suitable cropping type!", __FUNCTION__);
+        return;
+    }
+
+    if (horiSize == 0) {
+        mSupportedFormats = verticalFmts;
+        mCroppingType = VERTICAL;
+        return;
+    } else if (vertSize == 0) {
+        mSupportedFormats = horizontalFmts;
+        mCroppingType = HORIZONTAL;
+        return;
+    }
+
+    const auto& maxHoriSize = horizontalFmts[horizontalFmts.size() - 1];
+    const auto& maxVertSize = verticalFmts[verticalFmts.size() - 1];
+
+    // Try to keep the largest possible output size.
+    // When they are the same or ambiguous, pick the one that supports more sizes.
+    if (maxHoriSize.width == maxVertSize.width && maxHoriSize.height == maxVertSize.height) {
+        if (horiSize > vertSize) {
+            mSupportedFormats = horizontalFmts;
+            mCroppingType = HORIZONTAL;
+        } else {
+            mSupportedFormats = verticalFmts;
+            mCroppingType = VERTICAL;
+        }
+    } else if (maxHoriSize.width >= maxVertSize.width && maxHoriSize.height >= maxVertSize.height) {
+        mSupportedFormats = horizontalFmts;
+        mCroppingType = HORIZONTAL;
+    } else if (maxHoriSize.width <= maxVertSize.width && maxHoriSize.height <= maxVertSize.height) {
+        mSupportedFormats = verticalFmts;
+        mCroppingType = VERTICAL;
+    } else {
+        if (horiSize > vertSize) {
+            mSupportedFormats = horizontalFmts;
+            mCroppingType = HORIZONTAL;
+        } else {
+            mSupportedFormats = verticalFmts;
+            mCroppingType = VERTICAL;
+        }
+    }
+}
+
+status_t ExternalCameraDevice::initCameraCharacteristics() {
+    if (!mCameraCharacteristics.isEmpty()) {
+        // Camera Characteristics previously initialized. Skip.
+        return OK;
+    }
+
+    // init camera characteristics
+    unique_fd fd(::open(mDevicePath.c_str(), O_RDWR));
+    if (fd.get() < 0) {
+        ALOGE("%s: v4l2 device open %s failed", __FUNCTION__, mDevicePath.c_str());
+        return DEAD_OBJECT;
+    }
+
+    status_t ret;
+    ret = initDefaultCharsKeys(&mCameraCharacteristics);
+    if (ret != OK) {
+        ALOGE("%s: init default characteristics key failed: errorno %d", __FUNCTION__, ret);
+        mCameraCharacteristics.clear();
+        return ret;
+    }
+
+    ret = initCameraControlsCharsKeys(fd.get(), &mCameraCharacteristics);
+    if (ret != OK) {
+        ALOGE("%s: init camera control characteristics key failed: errorno %d", __FUNCTION__, ret);
+        mCameraCharacteristics.clear();
+        return ret;
+    }
+
+    ret = initOutputCharsKeys(fd.get(), &mCameraCharacteristics);
+    if (ret != OK) {
+        ALOGE("%s: init output characteristics key failed: errorno %d", __FUNCTION__, ret);
+        mCameraCharacteristics.clear();
+        return ret;
+    }
+
+    ret = initAvailableCapabilities(&mCameraCharacteristics);
+    if (ret != OK) {
+        ALOGE("%s: init available capabilities key failed: errorno %d", __FUNCTION__, ret);
+        mCameraCharacteristics.clear();
+        return ret;
+    }
+
+    return OK;
+}
+
+#define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0]))
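+// UPDATE appends a metadata entry via CameraMetadata::update() and makes the
+// enclosing function return -EINVAL if the update fails.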
+#define UPDATE(tag, data, size)                        \
+    do {                                               \
+        if (metadata->update((tag), (data), (size))) { \
+            ALOGE("Update " #tag " failed!");          \
+            return -EINVAL;                            \
+        }                                              \
+    } while (0)
+
+status_t ExternalCameraDevice::initAvailableCapabilities(
+        ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
+    if (mSupportedFormats.empty()) {
+        ALOGE("%s: Supported formats list is empty", __FUNCTION__);
+        return UNKNOWN_ERROR;
+    }
+
+    bool hasDepth = false;
+    bool hasColor = false;
+    for (const auto& fmt : mSupportedFormats) {
+        switch (fmt.fourcc) {
+            case V4L2_PIX_FMT_Z16:
+                hasDepth = true;
+                break;
+            case V4L2_PIX_FMT_MJPEG:
+                hasColor = true;
+                break;
+            default:
+                ALOGW("%s: Unsupported format found", __FUNCTION__);
+        }
+    }
+
+    std::vector<uint8_t> availableCapabilities;
+    if (hasDepth) {
+        availableCapabilities.push_back(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT);
+    }
+    if (hasColor) {
+        availableCapabilities.push_back(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
+    }
+    if (!availableCapabilities.empty()) {
+        UPDATE(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, availableCapabilities.data(),
+               availableCapabilities.size());
+    }
+
+    return OK;
+}
+
+status_t ExternalCameraDevice::initDefaultCharsKeys(
+        ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
+    const uint8_t hardware_level = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL;
+    UPDATE(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, &hardware_level, 1);
+
+    // android.colorCorrection
+    const uint8_t availableAberrationModes[] = {ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF};
+    UPDATE(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES, availableAberrationModes,
+           ARRAY_SIZE(availableAberrationModes));
+
+    // android.control
+    const uint8_t antibandingMode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
+    UPDATE(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, &antibandingMode, 1);
+
+    const int32_t controlMaxRegions[] = {/*AE*/ 0, /*AWB*/ 0, /*AF*/ 0};
+    UPDATE(ANDROID_CONTROL_MAX_REGIONS, controlMaxRegions, ARRAY_SIZE(controlMaxRegions));
+
+    const uint8_t videoStabilizationMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
+    UPDATE(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, &videoStabilizationMode, 1);
+
+    const uint8_t awbAvailableMode = ANDROID_CONTROL_AWB_MODE_AUTO;
+    UPDATE(ANDROID_CONTROL_AWB_AVAILABLE_MODES, &awbAvailableMode, 1);
+
+    const uint8_t aeAvailableMode = ANDROID_CONTROL_AE_MODE_ON;
+    UPDATE(ANDROID_CONTROL_AE_AVAILABLE_MODES, &aeAvailableMode, 1);
+
+    const uint8_t availableEffect = ANDROID_CONTROL_EFFECT_MODE_OFF;
+    UPDATE(ANDROID_CONTROL_AVAILABLE_EFFECTS, &availableEffect, 1);
+
+    const uint8_t controlAvailableModes[] = {ANDROID_CONTROL_MODE_OFF, ANDROID_CONTROL_MODE_AUTO};
+    UPDATE(ANDROID_CONTROL_AVAILABLE_MODES, controlAvailableModes,
+           ARRAY_SIZE(controlAvailableModes));
+
+    // android.edge
+    const uint8_t edgeMode = ANDROID_EDGE_MODE_OFF;
+    UPDATE(ANDROID_EDGE_AVAILABLE_EDGE_MODES, &edgeMode, 1);
+
+    // android.flash
+    const uint8_t flashInfo = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
+    UPDATE(ANDROID_FLASH_INFO_AVAILABLE, &flashInfo, 1);
+
+    // android.hotPixel
+    const uint8_t hotPixelMode = ANDROID_HOT_PIXEL_MODE_OFF;
+    UPDATE(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES, &hotPixelMode, 1);
+
+    // android.jpeg
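+    // Flattened (width, height) pairs; the (0, 0) entry indicates that thumbnail
+    // generation can be disabled.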
+    const int32_t jpegAvailableThumbnailSizes[] = {0,   0,   176, 144, 240, 144, 256,
+                                                   144, 240, 160, 256, 154, 240, 180};
+    UPDATE(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, jpegAvailableThumbnailSizes,
+           ARRAY_SIZE(jpegAvailableThumbnailSizes));
+
+    const int32_t jpegMaxSize = mCfg.maxJpegBufSize;
+    UPDATE(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);
+
+    // android.lens
+    const uint8_t focusDistanceCalibration =
+            ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED;
+    UPDATE(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION, &focusDistanceCalibration, 1);
+
+    const uint8_t opticalStabilizationMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
+    UPDATE(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION, &opticalStabilizationMode, 1);
+
+    const uint8_t facing = ANDROID_LENS_FACING_EXTERNAL;
+    UPDATE(ANDROID_LENS_FACING, &facing, 1);
+
+    // android.noiseReduction
+    const uint8_t noiseReductionMode = ANDROID_NOISE_REDUCTION_MODE_OFF;
+    UPDATE(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES, &noiseReductionMode, 1);
+    UPDATE(ANDROID_NOISE_REDUCTION_MODE, &noiseReductionMode, 1);
+
+    const int32_t partialResultCount = 1;
+    UPDATE(ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &partialResultCount, 1);
+
+    // This means a pipeline latency of X frame intervals. The maximum allowed value is 4.
+    const uint8_t requestPipelineMaxDepth = 4;
+    UPDATE(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, &requestPipelineMaxDepth, 1);
+
+    // These three numbers are the maximum numbers of each type of output stream
+    // that can be used simultaneously: raw sensor, processed (but not stalling),
+    // and processed (but stalling). For USB limited mode, raw sensor is not
+    // supported. The stalling stream is JPEG; the non-stalling streams are
+    // YUV_420_888 or YV12.
+    const int32_t requestMaxNumOutputStreams[] = {
+            /*RAW*/ 0,
+            /*Processed*/ ExternalCameraDeviceSession::kMaxProcessedStream,
+            /*Stall*/ ExternalCameraDeviceSession::kMaxStallStream};
+    UPDATE(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, requestMaxNumOutputStreams,
+           ARRAY_SIZE(requestMaxNumOutputStreams));
+
+    // Limited mode doesn't support reprocessing.
+    const int32_t requestMaxNumInputStreams = 0;
+    UPDATE(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS, &requestMaxNumInputStreams, 1);
+
+    // android.scaler
+    // TODO: b/72263447 V4L2_CID_ZOOM_*
+    const float scalerAvailableMaxDigitalZoom[] = {1};
+    UPDATE(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, scalerAvailableMaxDigitalZoom,
+           ARRAY_SIZE(scalerAvailableMaxDigitalZoom));
+
+    const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
+    UPDATE(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
+
+    const int32_t testPatternModes[] = {ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,
+                                        ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR};
+    UPDATE(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, testPatternModes,
+           ARRAY_SIZE(testPatternModes));
+
+    const uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
+    UPDATE(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, &timestampSource, 1);
+
+    // Orientation is a bit odd for an external camera, but treat it as the orientation
+    // between the external camera sensor (which is usually landscape) and the device's
+    // natural display orientation. For devices with a natural landscape display (e.g. tablet/TV),
+    // the orientation should be 0. For devices with a natural portrait display (phone), the
+    // orientation should be 270.
+    const int32_t orientation = mCfg.orientation;
+    UPDATE(ANDROID_SENSOR_ORIENTATION, &orientation, 1);
+
+    // android.shading
+    const uint8_t availableMode = ANDROID_SHADING_MODE_OFF;
+    UPDATE(ANDROID_SHADING_AVAILABLE_MODES, &availableMode, 1);
+
+    // android.statistics
+    const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
+    UPDATE(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, &faceDetectMode, 1);
+
+    const int32_t maxFaceCount = 0;
+    UPDATE(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, &maxFaceCount, 1);
+
+    const uint8_t availableHotpixelMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
+    UPDATE(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES, &availableHotpixelMode, 1);
+
+    const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
+    UPDATE(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES, &lensShadingMapMode, 1);
+
+    // android.sync
+    const int32_t maxLatency = ANDROID_SYNC_MAX_LATENCY_UNKNOWN;
+    UPDATE(ANDROID_SYNC_MAX_LATENCY, &maxLatency, 1);
+
+    /* Other sensor/RAW related keys:
+     * android.sensor.info.colorFilterArrangement -> no need if we don't do RAW
+     * android.sensor.info.physicalSize           -> not available
+     * android.sensor.info.whiteLevel             -> not available/not needed
+     * android.sensor.info.lensShadingApplied     -> not needed
+     * android.sensor.info.preCorrectionActiveArraySize -> not available/not needed
+     * android.sensor.blackLevelPattern           -> not available/not needed
+     */
+
+    const int32_t availableRequestKeys[] = {ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
+                                            ANDROID_CONTROL_AE_ANTIBANDING_MODE,
+                                            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
+                                            ANDROID_CONTROL_AE_LOCK,
+                                            ANDROID_CONTROL_AE_MODE,
+                                            ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
+                                            ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+                                            ANDROID_CONTROL_AF_MODE,
+                                            ANDROID_CONTROL_AF_TRIGGER,
+                                            ANDROID_CONTROL_AWB_LOCK,
+                                            ANDROID_CONTROL_AWB_MODE,
+                                            ANDROID_CONTROL_CAPTURE_INTENT,
+                                            ANDROID_CONTROL_EFFECT_MODE,
+                                            ANDROID_CONTROL_MODE,
+                                            ANDROID_CONTROL_SCENE_MODE,
+                                            ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
+                                            ANDROID_FLASH_MODE,
+                                            ANDROID_JPEG_ORIENTATION,
+                                            ANDROID_JPEG_QUALITY,
+                                            ANDROID_JPEG_THUMBNAIL_QUALITY,
+                                            ANDROID_JPEG_THUMBNAIL_SIZE,
+                                            ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
+                                            ANDROID_NOISE_REDUCTION_MODE,
+                                            ANDROID_SCALER_CROP_REGION,
+                                            ANDROID_SENSOR_TEST_PATTERN_MODE,
+                                            ANDROID_STATISTICS_FACE_DETECT_MODE,
+                                            ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE};
+    UPDATE(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, availableRequestKeys,
+           ARRAY_SIZE(availableRequestKeys));
+
+    const int32_t availableResultKeys[] = {ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
+                                           ANDROID_CONTROL_AE_ANTIBANDING_MODE,
+                                           ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
+                                           ANDROID_CONTROL_AE_LOCK,
+                                           ANDROID_CONTROL_AE_MODE,
+                                           ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
+                                           ANDROID_CONTROL_AE_STATE,
+                                           ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+                                           ANDROID_CONTROL_AF_MODE,
+                                           ANDROID_CONTROL_AF_STATE,
+                                           ANDROID_CONTROL_AF_TRIGGER,
+                                           ANDROID_CONTROL_AWB_LOCK,
+                                           ANDROID_CONTROL_AWB_MODE,
+                                           ANDROID_CONTROL_AWB_STATE,
+                                           ANDROID_CONTROL_CAPTURE_INTENT,
+                                           ANDROID_CONTROL_EFFECT_MODE,
+                                           ANDROID_CONTROL_MODE,
+                                           ANDROID_CONTROL_SCENE_MODE,
+                                           ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
+                                           ANDROID_FLASH_MODE,
+                                           ANDROID_FLASH_STATE,
+                                           ANDROID_JPEG_ORIENTATION,
+                                           ANDROID_JPEG_QUALITY,
+                                           ANDROID_JPEG_THUMBNAIL_QUALITY,
+                                           ANDROID_JPEG_THUMBNAIL_SIZE,
+                                           ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
+                                           ANDROID_NOISE_REDUCTION_MODE,
+                                           ANDROID_REQUEST_PIPELINE_DEPTH,
+                                           ANDROID_SCALER_CROP_REGION,
+                                           ANDROID_SENSOR_TIMESTAMP,
+                                           ANDROID_STATISTICS_FACE_DETECT_MODE,
+                                           ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
+                                           ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
+                                           ANDROID_STATISTICS_SCENE_FLICKER};
+    UPDATE(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, availableResultKeys,
+           ARRAY_SIZE(availableResultKeys));
+
+    UPDATE(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, AVAILABLE_CHARACTERISTICS_KEYS.data(),
+           AVAILABLE_CHARACTERISTICS_KEYS.size());
+
+    return OK;
+}
+
+status_t ExternalCameraDevice::initCameraControlsCharsKeys(
+        int, ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
+    // android.sensor.info.sensitivityRange   -> V4L2_CID_ISO_SENSITIVITY
+    // android.sensor.info.exposureTimeRange  -> V4L2_CID_EXPOSURE_ABSOLUTE
+    // android.sensor.info.maxFrameDuration   -> TBD
+    // android.lens.info.minimumFocusDistance -> V4L2_CID_FOCUS_ABSOLUTE
+    // android.lens.info.hyperfocalDistance
+    // android.lens.info.availableFocalLengths -> not available?
+
+    // android.control
+    // No AE compensation support for now.
+    // TODO: V4L2_CID_EXPOSURE_BIAS
+    const int32_t controlAeCompensationRange[] = {0, 0};
+    UPDATE(ANDROID_CONTROL_AE_COMPENSATION_RANGE, controlAeCompensationRange,
+           ARRAY_SIZE(controlAeCompensationRange));
+    const camera_metadata_rational_t controlAeCompensationStep[] = {{0, 1}};
+    UPDATE(ANDROID_CONTROL_AE_COMPENSATION_STEP, controlAeCompensationStep,
+           ARRAY_SIZE(controlAeCompensationStep));
+
+    // TODO: Check V4L2_CID_AUTO_FOCUS_*.
+    const uint8_t afAvailableModes[] = {ANDROID_CONTROL_AF_MODE_AUTO, ANDROID_CONTROL_AF_MODE_OFF};
+    UPDATE(ANDROID_CONTROL_AF_AVAILABLE_MODES, afAvailableModes, ARRAY_SIZE(afAvailableModes));
+
+    // TODO: V4L2_CID_SCENE_MODE
+    const uint8_t availableSceneMode = ANDROID_CONTROL_SCENE_MODE_DISABLED;
+    UPDATE(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, &availableSceneMode, 1);
+
+    // TODO: V4L2_CID_3A_LOCK
+    const uint8_t aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
+    UPDATE(ANDROID_CONTROL_AE_LOCK_AVAILABLE, &aeLockAvailable, 1);
+    const uint8_t awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
+    UPDATE(ANDROID_CONTROL_AWB_LOCK_AVAILABLE, &awbLockAvailable, 1);
+
+    // TODO: V4L2_CID_ZOOM_*
+    const float scalerAvailableMaxDigitalZoom[] = {1};
+    UPDATE(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, scalerAvailableMaxDigitalZoom,
+           ARRAY_SIZE(scalerAvailableMaxDigitalZoom));
+
+    return OK;
+}
+
+status_t ExternalCameraDevice::initOutputCharsKeys(
+        int fd, ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
+    initSupportedFormatsLocked(fd);
+    if (mSupportedFormats.empty()) {
+        ALOGE("%s: Init supported format list failed", __FUNCTION__);
+        return UNKNOWN_ERROR;
+    }
+
+    bool hasDepth = false;
+    bool hasColor = false;
+
+    // For V4L2_PIX_FMT_Z16
+    std::array<int, /*size*/ 1> halDepthFormats{{HAL_PIXEL_FORMAT_Y16}};
+    // For V4L2_PIX_FMT_MJPEG
+    std::array<int, /*size*/ 3> halFormats{{HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
+                                            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}};
+
+    for (const auto& supportedFormat : mSupportedFormats) {
+        switch (supportedFormat.fourcc) {
+            case V4L2_PIX_FMT_Z16:
+                hasDepth = true;
+                break;
+            case V4L2_PIX_FMT_MJPEG:
+                hasColor = true;
+                break;
+            default:
+                ALOGW("%s: format %c%c%c%c is not supported!", __FUNCTION__,
+                      supportedFormat.fourcc & 0xFF, (supportedFormat.fourcc >> 8) & 0xFF,
+                      (supportedFormat.fourcc >> 16) & 0xFF, (supportedFormat.fourcc >> 24) & 0xFF);
+        }
+    }
+
+    if (hasDepth) {
+        status_t ret = initOutputCharsKeysByFormat(
+                metadata, V4L2_PIX_FMT_Z16, halDepthFormats,
+                ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT,
+                ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
+                ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
+                ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS);
+        if (ret != OK) {
+            ALOGE("%s: Unable to initialize depth format keys: %s", __FUNCTION__,
+                  statusToString(ret).c_str());
+            return ret;
+        }
+    }
+    if (hasColor) {
+        status_t ret =
+                initOutputCharsKeysByFormat(metadata, V4L2_PIX_FMT_MJPEG, halFormats,
+                                            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
+                                            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
+                                            ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
+                                            ANDROID_SCALER_AVAILABLE_STALL_DURATIONS);
+        if (ret != OK) {
+            ALOGE("%s: Unable to initialize color format keys: %s", __FUNCTION__,
+                  statusToString(ret).c_str());
+            return ret;
+        }
+    }
+
+    status_t ret = calculateMinFps(metadata);
+    if (ret != OK) {
+        ALOGE("%s: Unable to update fps metadata: %s", __FUNCTION__, statusToString(ret).c_str());
+        return ret;
+    }
+
+    SupportedV4L2Format maximumFormat{.width = 0, .height = 0};
+    for (const auto& supportedFormat : mSupportedFormats) {
+        if (supportedFormat.width >= maximumFormat.width &&
+            supportedFormat.height >= maximumFormat.height) {
+            maximumFormat = supportedFormat;
+        }
+    }
+    int32_t activeArraySize[] = {0, 0, static_cast<int32_t>(maximumFormat.width),
+                                 static_cast<int32_t>(maximumFormat.height)};
+    UPDATE(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, activeArraySize,
+           ARRAY_SIZE(activeArraySize));
+    UPDATE(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, activeArraySize, ARRAY_SIZE(activeArraySize));
+
+    int32_t pixelArraySize[] = {static_cast<int32_t>(maximumFormat.width),
+                                static_cast<int32_t>(maximumFormat.height)};
+    UPDATE(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, pixelArraySize, ARRAY_SIZE(pixelArraySize));
+    return OK;
+}
+
+template <size_t SIZE>
+status_t ExternalCameraDevice::initOutputCharsKeysByFormat(
+        ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata,
+        uint32_t fourcc, const std::array<int, SIZE>& halFormats, int streamConfigTag,
+        int streamConfigurationKey, int minFrameDurationKey, int stallDurationKey) {
+    if (mSupportedFormats.empty()) {
+        ALOGE("%s: Init supported format list failed", __FUNCTION__);
+        return UNKNOWN_ERROR;
+    }
+
+    std::vector<int32_t> streamConfigurations;
+    std::vector<int64_t> minFrameDurations;
+    std::vector<int64_t> stallDurations;
+
+    for (const auto& supportedFormat : mSupportedFormats) {
+        if (supportedFormat.fourcc != fourcc) {
+            // Skip 4CCs not meant for the halFormats
+            continue;
+        }
+        for (const auto& format : halFormats) {
+            streamConfigurations.push_back(format);
+            streamConfigurations.push_back(supportedFormat.width);
+            streamConfigurations.push_back(supportedFormat.height);
+            streamConfigurations.push_back(streamConfigTag);
+        }
+
+        int64_t minFrameDuration = std::numeric_limits<int64_t>::max();
+        for (const auto& fr : supportedFormat.frameRates) {
+            // 1000000000LL < (2^32 - 1) and
+            // fr.durationNumerator is uint32_t, so no overflow here
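+            // e.g. a 30 fps entry reported as 1/30 s gives 1000000000 * 1 / 30 ≈ 33,333,333 ns.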
+            int64_t frameDuration = 1000000000LL * fr.durationNumerator / fr.durationDenominator;
+            if (frameDuration < minFrameDuration) {
+                minFrameDuration = frameDuration;
+            }
+        }
+
+        for (const auto& format : halFormats) {
+            minFrameDurations.push_back(format);
+            minFrameDurations.push_back(supportedFormat.width);
+            minFrameDurations.push_back(supportedFormat.height);
+            minFrameDurations.push_back(minFrameDuration);
+        }
+
+        // The stall duration is 0 for non-jpeg formats. For JPEG format, stall
+        // duration can be 0 if JPEG is small. Here we choose 1 sec for JPEG.
+        // TODO: b/72261675. Maybe set this dynamically
+        for (const auto& format : halFormats) {
+            const int64_t NS_TO_SECOND = 1E9;
+            int64_t stall_duration = (format == HAL_PIXEL_FORMAT_BLOB) ? NS_TO_SECOND : 0;
+            stallDurations.push_back(format);
+            stallDurations.push_back(supportedFormat.width);
+            stallDurations.push_back(supportedFormat.height);
+            stallDurations.push_back(stall_duration);
+        }
+    }
+
+    UPDATE(streamConfigurationKey, streamConfigurations.data(), streamConfigurations.size());
+
+    UPDATE(minFrameDurationKey, minFrameDurations.data(), minFrameDurations.size());
+
+    UPDATE(stallDurationKey, stallDurations.data(), stallDurations.size());
+
+    return OK;
+}
+
+status_t ExternalCameraDevice::calculateMinFps(
+        ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
+    std::set<int32_t> framerates;
+    int32_t minFps = std::numeric_limits<int32_t>::max();
+
+    for (const auto& supportedFormat : mSupportedFormats) {
+        for (const auto& fr : supportedFormat.frameRates) {
+            int32_t frameRateInt = static_cast<int32_t>(fr.getFramesPerSecond());
+            if (minFps > frameRateInt) {
+                minFps = frameRateInt;
+            }
+            framerates.insert(frameRateInt);
+        }
+    }
+
+    std::vector<int32_t> fpsRanges;
+    // FPS ranges
+    for (const auto& framerate : framerates) {
+        // Empirical: webcams often have close to 2x fps error and cannot support fixed fps range
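+        // e.g. a format listing 30 fps produces the target FPS range [15, 30].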
+        fpsRanges.push_back(framerate / 2);
+        fpsRanges.push_back(framerate);
+    }
+    minFps /= 2;
+    int64_t maxFrameDuration = 1000000000LL / minFps;
+
+    UPDATE(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, fpsRanges.data(), fpsRanges.size());
+
+    UPDATE(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, &maxFrameDuration, 1);
+
+    return OK;
+}
+
+#undef ARRAY_SIZE
+#undef UPDATE
+
+void ExternalCameraDevice::getFrameRateList(int fd, double fpsUpperBound,
+                                            SupportedV4L2Format* format) {
+    format->frameRates.clear();
+
+    v4l2_frmivalenum frameInterval{
+            .index = 0,
+            .pixel_format = format->fourcc,
+            .width = static_cast<__u32>(format->width),
+            .height = static_cast<__u32>(format->height),
+    };
+
+    for (frameInterval.index = 0;
+         TEMP_FAILURE_RETRY(ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &frameInterval)) == 0;
+         ++frameInterval.index) {
+        if (frameInterval.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
+            if (frameInterval.discrete.numerator != 0) {
+                SupportedV4L2Format::FrameRate fr = {frameInterval.discrete.numerator,
+                                                     frameInterval.discrete.denominator};
+                double framerate = fr.getFramesPerSecond();
+                if (framerate > fpsUpperBound) {
+                    continue;
+                }
+                ALOGV("index:%d, format:%c%c%c%c, w %d, h %d, framerate %f", frameInterval.index,
+                      frameInterval.pixel_format & 0xFF, (frameInterval.pixel_format >> 8) & 0xFF,
+                      (frameInterval.pixel_format >> 16) & 0xFF,
+                      (frameInterval.pixel_format >> 24) & 0xFF, frameInterval.width,
+                      frameInterval.height, framerate);
+                format->frameRates.push_back(fr);
+            }
+        }
+    }
+
+    if (format->frameRates.empty()) {
+        ALOGE("%s: failed to get supported frame rates for format:%c%c%c%c w %d h %d", __FUNCTION__,
+              frameInterval.pixel_format & 0xFF, (frameInterval.pixel_format >> 8) & 0xFF,
+              (frameInterval.pixel_format >> 16) & 0xFF, (frameInterval.pixel_format >> 24) & 0xFF,
+              frameInterval.width, frameInterval.height);
+    }
+}
+
+void ExternalCameraDevice::updateFpsBounds(
+        int fd, CroppingType cropType,
+        const std::vector<ExternalCameraConfig::FpsLimitation>& fpsLimits,
+        SupportedV4L2Format format, std::vector<SupportedV4L2Format>& outFmts) {
+    double fpsUpperBound = -1.0;
+    for (const auto& limit : fpsLimits) {
+        if (cropType == VERTICAL) {
+            if (format.width <= limit.size.width) {
+                fpsUpperBound = limit.fpsUpperBound;
+                break;
+            }
+        } else {  // HORIZONTAL
+            if (format.height <= limit.size.height) {
+                fpsUpperBound = limit.fpsUpperBound;
+                break;
+            }
+        }
+    }
+    if (fpsUpperBound < 0.f) {
+        return;
+    }
+
+    getFrameRateList(fd, fpsUpperBound, &format);
+    if (!format.frameRates.empty()) {
+        outFmts.push_back(format);
+    }
+}
+
+std::vector<SupportedV4L2Format> ExternalCameraDevice::getCandidateSupportedFormatsLocked(
+        int fd, CroppingType cropType,
+        const std::vector<ExternalCameraConfig::FpsLimitation>& fpsLimits,
+        const std::vector<ExternalCameraConfig::FpsLimitation>& depthFpsLimits,
+        const Size& minStreamSize, bool depthEnabled) {
+    std::vector<SupportedV4L2Format> outFmts;
+    struct v4l2_fmtdesc fmtdesc {
+        .index = 0, .type = V4L2_BUF_TYPE_VIDEO_CAPTURE
+    };
+    int ret = 0;
+    while (ret == 0) {
+        ret = TEMP_FAILURE_RETRY(ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc));
+        ALOGV("index:%d,ret:%d, format:%c%c%c%c", fmtdesc.index, ret, fmtdesc.pixelformat & 0xFF,
+              (fmtdesc.pixelformat >> 8) & 0xFF, (fmtdesc.pixelformat >> 16) & 0xFF,
+              (fmtdesc.pixelformat >> 24) & 0xFF);
+
+        if (ret != 0 || (fmtdesc.flags & V4L2_FMT_FLAG_EMULATED)) {
+            // Skip if IOCTL failed, or if the format is emulated
+            fmtdesc.index++;
+            continue;
+        }
+        auto it =
+                std::find(kSupportedFourCCs.begin(), kSupportedFourCCs.end(), fmtdesc.pixelformat);
+        if (it == kSupportedFourCCs.end()) {
+            fmtdesc.index++;
+            continue;
+        }
+
+        // Found supported format
+        v4l2_frmsizeenum frameSize{.index = 0, .pixel_format = fmtdesc.pixelformat};
+        for (; TEMP_FAILURE_RETRY(ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frameSize)) == 0;
+             ++frameSize.index) {
+            if (frameSize.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
+                ALOGV("index:%d, format:%c%c%c%c, w %d, h %d", frameSize.index,
+                      fmtdesc.pixelformat & 0xFF, (fmtdesc.pixelformat >> 8) & 0xFF,
+                      (fmtdesc.pixelformat >> 16) & 0xFF, (fmtdesc.pixelformat >> 24) & 0xFF,
+                      frameSize.discrete.width, frameSize.discrete.height);
+
+                // Disregard h > w formats so all aspect ratios (h/w) are <= 1.0.
+                // This will simplify the crop/scaling logic down the road.
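+                // e.g. a 480x640 (portrait) frame size is skipped while 640x480 is kept.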
+                if (frameSize.discrete.height > frameSize.discrete.width) {
+                    continue;
+                }
+
+                // Discard all formats that are smaller than minStreamSize
+                if (frameSize.discrete.width < minStreamSize.width ||
+                    frameSize.discrete.height < minStreamSize.height) {
+                    continue;
+                }
+
+                SupportedV4L2Format format{
+                        .width = static_cast<int32_t>(frameSize.discrete.width),
+                        .height = static_cast<int32_t>(frameSize.discrete.height),
+                        .fourcc = fmtdesc.pixelformat};
+
+                if (format.fourcc == V4L2_PIX_FMT_Z16 && depthEnabled) {
+                    updateFpsBounds(fd, cropType, depthFpsLimits, format, outFmts);
+                } else {
+                    updateFpsBounds(fd, cropType, fpsLimits, format, outFmts);
+                }
+            }
+        }
+        fmtdesc.index++;
+    }
+    trimSupportedFormats(cropType, &outFmts);
+    return outFmts;
+}
+
+void ExternalCameraDevice::trimSupportedFormats(CroppingType cropType,
+                                                std::vector<SupportedV4L2Format>* pFmts) {
+    std::vector<SupportedV4L2Format>& sortedFmts = *pFmts;
+    if (cropType == VERTICAL) {
+        std::sort(sortedFmts.begin(), sortedFmts.end(),
+                  [](const SupportedV4L2Format& a, const SupportedV4L2Format& b) -> bool {
+                      if (a.width == b.width) {
+                          return a.height < b.height;
+                      }
+                      return a.width < b.width;
+                  });
+    } else {
+        std::sort(sortedFmts.begin(), sortedFmts.end(),
+                  [](const SupportedV4L2Format& a, const SupportedV4L2Format& b) -> bool {
+                      if (a.height == b.height) {
+                          return a.width < b.width;
+                      }
+                      return a.height < b.height;
+                  });
+    }
+
+    if (sortedFmts.empty()) {
+        ALOGE("%s: input format list is empty!", __FUNCTION__);
+        return;
+    }
+
+    const auto& maxSize = sortedFmts[sortedFmts.size() - 1];
+    float maxSizeAr = ASPECT_RATIO(maxSize);
+
+    // Remove formats whose aspect ratio cannot be cropped from the largest size
+    std::vector<SupportedV4L2Format> out;
+    for (const auto& fmt : sortedFmts) {
+        float ar = ASPECT_RATIO(fmt);
+        if (isAspectRatioClose(ar, maxSizeAr)) {
+            out.push_back(fmt);
+        } else if (cropType == HORIZONTAL && ar < maxSizeAr) {
+            out.push_back(fmt);
+        } else if (cropType == VERTICAL && ar > maxSizeAr) {
+            out.push_back(fmt);
+        } else {
+            ALOGV("%s: size (%d,%d) is removed due to unable to crop %s from (%d,%d)", __FUNCTION__,
+                  fmt.width, fmt.height, cropType == VERTICAL ? "vertically" : "horizontally",
+                  maxSize.width, maxSize.height);
+        }
+    }
+    sortedFmts = out;
+}
+
+binder_status_t ExternalCameraDevice::dump(int fd, const char** args, uint32_t numArgs) {
+    std::shared_ptr<ExternalCameraDeviceSession> session = mSession.lock();
+    if (session == nullptr) {
+        dprintf(fd, "No active camera device session instance\n");
+        return STATUS_OK;
+    }
+
+    return session->dump(fd, args, numArgs);
+}
+
+}  // namespace implementation
+}  // namespace device
+}  // namespace camera
+}  // namespace hardware
+}  // namespace android
\ No newline at end of file
diff --git a/camera/device/default/ExternalCameraDevice.h b/camera/device/default/ExternalCameraDevice.h
new file mode 100644
index 0000000..bcae194
--- /dev/null
+++ b/camera/device/default/ExternalCameraDevice.h
@@ -0,0 +1,191 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef HARDWARE_INTERFACES_CAMERA_DEVICE_DEFAULT_EXTERNALCAMERADEVICE_H_
+#define HARDWARE_INTERFACES_CAMERA_DEVICE_DEFAULT_EXTERNALCAMERADEVICE_H_
+
+#include <ExternalCameraDeviceSession.h>
+#include <ExternalCameraUtils.h>
+#include <aidl/android/hardware/camera/device/BnCameraDevice.h>
+
+namespace android {
+namespace hardware {
+namespace camera {
+namespace device {
+namespace implementation {
+
+using ::aidl::android::hardware::camera::common::CameraResourceCost;
+using ::aidl::android::hardware::camera::device::BnCameraDevice;
+using ::aidl::android::hardware::camera::device::CameraMetadata;
+using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
+using ::aidl::android::hardware::camera::device::ICameraDeviceSession;
+using ::aidl::android::hardware::camera::device::ICameraInjectionSession;
+using ::aidl::android::hardware::camera::device::StreamConfiguration;
+using ::android::hardware::camera::external::common::ExternalCameraConfig;
+
+class ExternalCameraDevice : public BnCameraDevice {
+  public:
+    // Called by the external camera provider HAL.
+    // The provider HAL must ensure that CameraDevice objects are unique per cameraId; otherwise
+    // multiple CameraDevice instances could try to access the same physical camera. The provider
+    // also has to keep track of all CameraDevice objects so that it can notify them when the
+    // underlying camera is detached.
+    ExternalCameraDevice(const std::string& devicePath, const ExternalCameraConfig& config);
+    ~ExternalCameraDevice() override;
+
+    ndk::ScopedAStatus getCameraCharacteristics(CameraMetadata* _aidl_return) override;
+    ndk::ScopedAStatus getPhysicalCameraCharacteristics(const std::string& in_physicalCameraId,
+                                                        CameraMetadata* _aidl_return) override;
+    ndk::ScopedAStatus getResourceCost(CameraResourceCost* _aidl_return) override;
+    ndk::ScopedAStatus isStreamCombinationSupported(const StreamConfiguration& in_streams,
+                                                    bool* _aidl_return) override;
+    ndk::ScopedAStatus open(const std::shared_ptr<ICameraDeviceCallback>& in_callback,
+                            std::shared_ptr<ICameraDeviceSession>* _aidl_return) override;
+    ndk::ScopedAStatus openInjectionSession(
+            const std::shared_ptr<ICameraDeviceCallback>& in_callback,
+            std::shared_ptr<ICameraInjectionSession>* _aidl_return) override;
+    ndk::ScopedAStatus setTorchMode(bool in_on) override;
+    ndk::ScopedAStatus turnOnTorchWithStrengthLevel(int32_t in_torchStrength) override;
+    ndk::ScopedAStatus getTorchStrengthLevel(int32_t* _aidl_return) override;
+
+    binder_status_t dump(int fd, const char** args, uint32_t numArgs) override;
+
+    // Caller must use this method to check if CameraDevice ctor failed
+    bool isInitFailed();
+
+    // Device version to be used by the external camera provider.
+    // Should be of the form <major>.<minor>
+    static std::string kDeviceVersion;
+
+  private:
+    virtual std::shared_ptr<ExternalCameraDeviceSession> createSession(
+            const std::shared_ptr<ICameraDeviceCallback>&, const ExternalCameraConfig& cfg,
+            const std::vector<SupportedV4L2Format>& sortedFormats, const CroppingType& croppingType,
+            const common::V1_0::helper::CameraMetadata& chars, const std::string& cameraId,
+            unique_fd v4l2Fd);
+
+    bool isInitFailedLocked();
+
+    // Init supported w/h/format/fps in mSupportedFormats. Caller still owns fd
+    void initSupportedFormatsLocked(int fd);
+
+    // Calls into virtual member function. Do not use it in constructor
+    status_t initCameraCharacteristics();
+    // Init available capabilities keys
+    virtual status_t initAvailableCapabilities(
+            ::android::hardware::camera::common::V1_0::helper::CameraMetadata*);
+    // Init non-device dependent keys
+    virtual status_t initDefaultCharsKeys(
+            ::android::hardware::camera::common::V1_0::helper::CameraMetadata*);
+    // Init camera control chars keys. Caller still owns fd
+    status_t initCameraControlsCharsKeys(
+            int fd, ::android::hardware::camera::common::V1_0::helper::CameraMetadata*);
+    // Init camera output configuration related keys.  Caller still owns fd
+    status_t initOutputCharsKeys(
+            int fd, ::android::hardware::camera::common::V1_0::helper::CameraMetadata*);
+
+    // Helper function for initOutputCharsKeys
+    template <size_t SIZE>
+    status_t initOutputCharsKeysByFormat(
+            ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata,
+            uint32_t fourcc, const std::array<int, SIZE>& halFormats, int streamConfigTag,
+            int streamConfiguration, int minFrameDuration, int stallDuration);
+
+    status_t calculateMinFps(::android::hardware::camera::common::V1_0::helper::CameraMetadata*);
+
+    static void getFrameRateList(int fd, double fpsUpperBound, SupportedV4L2Format* format);
+
+    static void updateFpsBounds(int fd, CroppingType cropType,
+                                const std::vector<ExternalCameraConfig::FpsLimitation>& fpsLimits,
+                                SupportedV4L2Format format,
+                                std::vector<SupportedV4L2Format>& outFmts);
+
+    // Get candidate supported formats list of input cropping type.
+    static std::vector<SupportedV4L2Format> getCandidateSupportedFormatsLocked(
+            int fd, CroppingType cropType,
+            const std::vector<ExternalCameraConfig::FpsLimitation>& fpsLimits,
+            const std::vector<ExternalCameraConfig::FpsLimitation>& depthFpsLimits,
+            const Size& minStreamSize, bool depthEnabled);
+    // Trim supported format list by the cropping type. Also sort output formats by width/height
+    static void trimSupportedFormats(CroppingType cropType,
+                                     /*inout*/ std::vector<SupportedV4L2Format>* pFmts);
+
+    Mutex mLock;
+    bool mInitialized = false;
+    bool mInitFailed = false;
+    std::string mCameraId;
+    std::string mDevicePath;
+    const ExternalCameraConfig& mCfg;
+    std::vector<SupportedV4L2Format> mSupportedFormats;
+    CroppingType mCroppingType;
+
+    std::weak_ptr<ExternalCameraDeviceSession> mSession =
+            std::weak_ptr<ExternalCameraDeviceSession>();
+
+    ::android::hardware::camera::common::V1_0::helper::CameraMetadata mCameraCharacteristics;
+
+    const std::vector<int32_t> AVAILABLE_CHARACTERISTICS_KEYS = {
+            ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
+            ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
+            ANDROID_CONTROL_AE_AVAILABLE_MODES,
+            ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
+            ANDROID_CONTROL_AE_COMPENSATION_RANGE,
+            ANDROID_CONTROL_AE_COMPENSATION_STEP,
+            ANDROID_CONTROL_AE_LOCK_AVAILABLE,
+            ANDROID_CONTROL_AF_AVAILABLE_MODES,
+            ANDROID_CONTROL_AVAILABLE_EFFECTS,
+            ANDROID_CONTROL_AVAILABLE_MODES,
+            ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
+            ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
+            ANDROID_CONTROL_AWB_AVAILABLE_MODES,
+            ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
+            ANDROID_CONTROL_MAX_REGIONS,
+            ANDROID_FLASH_INFO_AVAILABLE,
+            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
+            ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
+            ANDROID_LENS_FACING,
+            ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
+            ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
+            ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
+            ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
+            ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
+            ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
+            ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
+            ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
+            ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
+            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
+            ANDROID_SCALER_CROPPING_TYPE,
+            ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
+            ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
+            ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
+            ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
+            ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
+            ANDROID_SENSOR_ORIENTATION,
+            ANDROID_SHADING_AVAILABLE_MODES,
+            ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
+            ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
+            ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
+            ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
+            ANDROID_SYNC_MAX_LATENCY};
+};
+
+}  // namespace implementation
+}  // namespace device
+}  // namespace camera
+}  // namespace hardware
+}  // namespace android
+
+#endif  // HARDWARE_INTERFACES_CAMERA_DEVICE_DEFAULT_EXTERNALCAMERADEVICE_H_
diff --git a/camera/device/default/ExternalCameraDeviceSession.cpp b/camera/device/default/ExternalCameraDeviceSession.cpp
new file mode 100644
index 0000000..68c6d2e
--- /dev/null
+++ b/camera/device/default/ExternalCameraDeviceSession.cpp
@@ -0,0 +1,2947 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "ExtCamDevSsn"
+// #define LOG_NDEBUG 0
+#include <log/log.h>
+
+#include "ExternalCameraDeviceSession.h"
+
+#include <Exif.h>
+#include <ExternalCameraOfflineSession.h>
+#include <aidl/android/hardware/camera/device/CameraBlob.h>
+#include <aidl/android/hardware/camera/device/CameraBlobId.h>
+#include <aidl/android/hardware/camera/device/ErrorMsg.h>
+#include <aidl/android/hardware/camera/device/ShutterMsg.h>
+#include <aidl/android/hardware/camera/device/StreamBufferRet.h>
+#include <aidl/android/hardware/camera/device/StreamBuffersVal.h>
+#include <aidl/android/hardware/camera/device/StreamConfigurationMode.h>
+#include <aidl/android/hardware/camera/device/StreamRotation.h>
+#include <aidl/android/hardware/camera/device/StreamType.h>
+#include <aidl/android/hardware/graphics/common/Dataspace.h>
+#include <aidlcommonsupport/NativeHandle.h>
+#include <convert.h>
+#include <linux/videodev2.h>
+#include <sync/sync.h>
+#include <utils/Trace.h>
+#include <deque>
+
+#define HAVE_JPEG  // required for libyuv.h to export MJPEG decode APIs
+#include <libyuv.h>
+#include <libyuv/convert.h>
+
+namespace android {
+namespace hardware {
+namespace camera {
+namespace device {
+namespace implementation {
+
+namespace {
+
+// Size of request/result metadata fast message queue. Change to 0 to always use hwbinder buffer.
+static constexpr size_t kMetadataMsgQueueSize = 1 << 18 /* 256kB */;
+
+const int kBadFramesAfterStreamOn = 1;  // drop this many frames after streamOn to get rid of
+                                        // some initial bad frames. TODO: develop a better
+                                        // bad-frame detection method
+constexpr int MAX_RETRY = 15;  // Retry failing ioctls a few times to account for webcams
+                               // that show transient ioctl failures.
+constexpr int IOCTL_RETRY_SLEEP_US = 33000;  // 33ms * MAX_RETRY ~= 0.5 seconds
+
+// Constants for tryLock during dumpstate
+static constexpr int kDumpLockRetries = 50;
+static constexpr int kDumpLockSleep = 60000;
+
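+// Both tryLock() helpers bound the wait to kDumpLockRetries * kDumpLockSleep
+// (about 3 seconds) so a wedged session cannot block dumpstate indefinitely.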
+bool tryLock(Mutex& mutex) {
+    bool locked = false;
+    for (int i = 0; i < kDumpLockRetries; ++i) {
+        if (mutex.tryLock() == NO_ERROR) {
+            locked = true;
+            break;
+        }
+        usleep(kDumpLockSleep);
+    }
+    return locked;
+}
+
+bool tryLock(std::mutex& mutex) {
+    bool locked = false;
+    for (int i = 0; i < kDumpLockRetries; ++i) {
+        if (mutex.try_lock()) {
+            locked = true;
+            break;
+        }
+        usleep(kDumpLockSleep);
+    }
+    return locked;
+}
+
+}  // anonymous namespace
+
+using ::aidl::android::hardware::camera::device::BufferRequestStatus;
+using ::aidl::android::hardware::camera::device::CameraBlob;
+using ::aidl::android::hardware::camera::device::CameraBlobId;
+using ::aidl::android::hardware::camera::device::ErrorMsg;
+using ::aidl::android::hardware::camera::device::ShutterMsg;
+using ::aidl::android::hardware::camera::device::StreamBuffer;
+using ::aidl::android::hardware::camera::device::StreamBufferRet;
+using ::aidl::android::hardware::camera::device::StreamBuffersVal;
+using ::aidl::android::hardware::camera::device::StreamConfigurationMode;
+using ::aidl::android::hardware::camera::device::StreamRotation;
+using ::aidl::android::hardware::camera::device::StreamType;
+using ::aidl::android::hardware::graphics::common::Dataspace;
+using ::android::hardware::camera::common::V1_0::helper::ExifUtils;
+
+// Static instances
+const int ExternalCameraDeviceSession::kMaxProcessedStream;
+const int ExternalCameraDeviceSession::kMaxStallStream;
+HandleImporter ExternalCameraDeviceSession::sHandleImporter;
+
+ExternalCameraDeviceSession::ExternalCameraDeviceSession(
+        const std::shared_ptr<ICameraDeviceCallback>& callback, const ExternalCameraConfig& cfg,
+        const std::vector<SupportedV4L2Format>& sortedFormats, const CroppingType& croppingType,
+        const common::V1_0::helper::CameraMetadata& chars, const std::string& cameraId,
+        unique_fd v4l2Fd)
+    : mCallback(callback),
+      mCfg(cfg),
+      mCameraCharacteristics(chars),
+      mSupportedFormats(sortedFormats),
+      mCroppingType(croppingType),
+      mCameraId(cameraId),
+      mV4l2Fd(std::move(v4l2Fd)),
+      mMaxThumbResolution(getMaxThumbResolution()),
+      mMaxJpegResolution(getMaxJpegResolution()) {}
+
+Size ExternalCameraDeviceSession::getMaxThumbResolution() const {
+    return getMaxThumbnailResolution(mCameraCharacteristics);
+}
+
+Size ExternalCameraDeviceSession::getMaxJpegResolution() const {
+    Size ret{0, 0};
+    for (auto& fmt : mSupportedFormats) {
+        if (fmt.width * fmt.height > ret.width * ret.height) {
+            ret = Size{fmt.width, fmt.height};
+        }
+    }
+    return ret;
+}
+
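+// Returns true on failure; the result is cached in mInitFail by isInitFailed().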
+bool ExternalCameraDeviceSession::initialize() {
+    if (mV4l2Fd.get() < 0) {
+        ALOGE("%s: invalid v4l2 device fd %d!", __FUNCTION__, mV4l2Fd.get());
+        return true;
+    }
+
+    struct v4l2_capability capability;
+    int ret = ioctl(mV4l2Fd.get(), VIDIOC_QUERYCAP, &capability);
+    std::string make, model;
+    if (ret < 0) {
+        ALOGW("%s v4l2 QUERYCAP failed", __FUNCTION__);
+        mExifMake = "Generic UVC webcam";
+        mExifModel = "Generic UVC webcam";
+    } else {
+        // capability.card is UTF-8 encoded; keep only the ASCII bytes and fall back to a
+        // generic name if the result is empty or not NUL-terminated.
+        char card[32];
+        int j = 0;
+        for (int i = 0; i < 32; i++) {
+            if (capability.card[i] < 128) {
+                card[j++] = capability.card[i];
+            }
+            if (capability.card[i] == '\0') {
+                break;
+            }
+        }
+        if (j == 0 || card[j - 1] != '\0') {
+            mExifMake = "Generic UVC webcam";
+            mExifModel = "Generic UVC webcam";
+        } else {
+            mExifMake = card;
+            mExifModel = card;
+        }
+    }
+
+    initOutputThread();
+    if (mOutputThread == nullptr) {
+        ALOGE("%s: init OutputThread failed!", __FUNCTION__);
+        return true;
+    }
+    mOutputThread->setExifMakeModel(mExifMake, mExifModel);
+
+    status_t status = initDefaultRequests();
+    if (status != OK) {
+        ALOGE("%s: init default requests failed!", __FUNCTION__);
+        return true;
+    }
+
+    mRequestMetadataQueue =
+            std::make_unique<RequestMetadataQueue>(kMetadataMsgQueueSize, false /* non blocking */);
+    if (!mRequestMetadataQueue->isValid()) {
+        ALOGE("%s: invalid request fmq", __FUNCTION__);
+        return true;
+    }
+
+    mResultMetadataQueue =
+            std::make_shared<ResultMetadataQueue>(kMetadataMsgQueueSize, false /* non blocking */);
+    if (!mResultMetadataQueue->isValid()) {
+        ALOGE("%s: invalid result fmq", __FUNCTION__);
+        return true;
+    }
+
+    mOutputThread->run();
+    return false;
+}
+
+bool ExternalCameraDeviceSession::isInitFailed() {
+    Mutex::Autolock _l(mLock);
+    if (!mInitialized) {
+        mInitFail = initialize();
+        mInitialized = true;
+    }
+    return mInitFail;
+}
+
+void ExternalCameraDeviceSession::initOutputThread() {
+    // Grab a shared_ptr to 'this' from ndk::SharedRefBase::ref()
+    std::shared_ptr<ExternalCameraDeviceSession> thiz = ref<ExternalCameraDeviceSession>();
+
+    if (mSupportBufMgr) {
+        mBufferRequestThread = std::make_shared<BufferRequestThread>(/*parent=*/thiz, mCallback);
+        mBufferRequestThread->run();
+    }
+    mOutputThread = std::make_shared<OutputThread>(/*parent=*/thiz, mCroppingType,
+                                                   mCameraCharacteristics, mBufferRequestThread);
+}
+
+void ExternalCameraDeviceSession::closeOutputThread() {
+    closeOutputThreadImpl();
+}
+
+void ExternalCameraDeviceSession::closeOutputThreadImpl() {
+    if (mOutputThread != nullptr) {
+        mOutputThread->flush();
+        mOutputThread->requestExitAndWait();
+        mOutputThread.reset();
+    }
+}
+
+Status ExternalCameraDeviceSession::initStatus() const {
+    Mutex::Autolock _l(mLock);
+    Status status = Status::OK;
+    if (mInitFail || mClosed) {
+        ALOGI("%s: session initFailed %d closed %d", __FUNCTION__, mInitFail, mClosed);
+        status = Status::INTERNAL_ERROR;
+    }
+    return status;
+}
+
+ExternalCameraDeviceSession::~ExternalCameraDeviceSession() {
+    if (!isClosed()) {
+        ALOGE("ExternalCameraDeviceSession deleted before close!");
+        close(/*callerIsDtor*/ true);
+    }
+}
+
+ScopedAStatus ExternalCameraDeviceSession::constructDefaultRequestSettings(
+        RequestTemplate in_type, CameraMetadata* _aidl_return) {
+    CameraMetadata emptyMetadata;
+    Status status = initStatus();
+    if (status != Status::OK) {
+        return fromStatus(status);
+    }
+    switch (in_type) {
+        case RequestTemplate::PREVIEW:
+        case RequestTemplate::STILL_CAPTURE:
+        case RequestTemplate::VIDEO_RECORD:
+        case RequestTemplate::VIDEO_SNAPSHOT: {
+            *_aidl_return = mDefaultRequests[in_type];
+            break;
+        }
+        case RequestTemplate::MANUAL:
+        case RequestTemplate::ZERO_SHUTTER_LAG:
+            // Don't support MANUAL, ZSL templates
+            status = Status::ILLEGAL_ARGUMENT;
+            break;
+        default:
+            ALOGE("%s: unknown request template type %d", __FUNCTION__, static_cast<int>(in_type));
+            status = Status::ILLEGAL_ARGUMENT;
+            break;
+    }
+    return fromStatus(status);
+}
+
+ScopedAStatus ExternalCameraDeviceSession::configureStreams(
+        const StreamConfiguration& in_requestedConfiguration,
+        std::vector<HalStream>* _aidl_return) {
+    uint32_t blobBufferSize = 0;
+    _aidl_return->clear();
+    Mutex::Autolock _il(mInterfaceLock);
+
+    Status status =
+            isStreamCombinationSupported(in_requestedConfiguration, mSupportedFormats, mCfg);
+    if (status != Status::OK) {
+        return fromStatus(status);
+    }
+
+    status = initStatus();
+    if (status != Status::OK) {
+        return fromStatus(status);
+    }
+
+    {
+        std::lock_guard<std::mutex> lk(mInflightFramesLock);
+        if (!mInflightFrames.empty()) {
+            ALOGE("%s: trying to configureStreams while there are still %zu inflight frames!",
+                  __FUNCTION__, mInflightFrames.size());
+            return fromStatus(Status::INTERNAL_ERROR);
+        }
+    }
+
+    Mutex::Autolock _l(mLock);
+    {
+        Mutex::Autolock _cl(mCbsLock);
+        // Add new streams
+        for (const auto& stream : in_requestedConfiguration.streams) {
+            if (mStreamMap.count(stream.id) == 0) {
+                mStreamMap[stream.id] = stream;
+                mCirculatingBuffers.emplace(stream.id, CirculatingBuffers{});
+            }
+        }
+
+        // Cleanup removed streams
+        for (auto it = mStreamMap.begin(); it != mStreamMap.end();) {
+            int id = it->first;
+            bool found = false;
+            for (const auto& stream : in_requestedConfiguration.streams) {
+                if (id == stream.id) {
+                    found = true;
+                    break;
+                }
+            }
+            if (!found) {
+                // Unmap all buffers of deleted stream
+                cleanupBuffersLocked(id);
+                it = mStreamMap.erase(it);
+            } else {
+                ++it;
+            }
+        }
+    }
+
+    // Now select a V4L2 format to produce all output streams
+    float desiredAr = (mCroppingType == VERTICAL) ? kMaxAspectRatio : kMinAspectRatio;
+    uint32_t maxDim = 0;
+    for (const auto& stream : in_requestedConfiguration.streams) {
+        float aspectRatio = ASPECT_RATIO(stream);
+        ALOGI("%s: request stream %dx%d", __FUNCTION__, stream.width, stream.height);
+        if ((mCroppingType == VERTICAL && aspectRatio < desiredAr) ||
+            (mCroppingType == HORIZONTAL && aspectRatio > desiredAr)) {
+            desiredAr = aspectRatio;
+        }
+
+        // The dimension that's not cropped
+        uint32_t dim = (mCroppingType == VERTICAL) ? stream.width : stream.height;
+        if (dim > maxDim) {
+            maxDim = dim;
+        }
+    }
+
+    // Find the smallest format that matches the desired aspect ratio and is wide/high enough
+    SupportedV4L2Format v4l2Fmt{.width = 0, .height = 0};
+    for (const auto& fmt : mSupportedFormats) {
+        uint32_t dim = (mCroppingType == VERTICAL) ? fmt.width : fmt.height;
+        if (dim >= maxDim) {
+            float aspectRatio = ASPECT_RATIO(fmt);
+            if (isAspectRatioClose(aspectRatio, desiredAr)) {
+                v4l2Fmt = fmt;
+                // since mSupportedFormats is sorted by width then height, the first matching fmt
+                // will be the smallest one with matching aspect ratio
+                break;
+            }
+        }
+    }
+
+    if (v4l2Fmt.width == 0) {
+        // Could not find a candidate with a matching aspect ratio; try to find a close one
+        for (const auto& fmt : mSupportedFormats) {
+            uint32_t dim = (mCroppingType == VERTICAL) ? fmt.width : fmt.height;
+            if (dim >= maxDim) {
+                float aspectRatio = ASPECT_RATIO(fmt);
+                if ((mCroppingType == VERTICAL && aspectRatio < desiredAr) ||
+                    (mCroppingType == HORIZONTAL && aspectRatio > desiredAr)) {
+                    v4l2Fmt = fmt;
+                    break;
+                }
+            }
+        }
+    }
+
+    if (v4l2Fmt.width == 0) {
+        ALOGE("%s: unable to find a resolution matching (%s at least %d, aspect ratio %f)",
+              __FUNCTION__, (mCroppingType == VERTICAL) ? "width" : "height", maxDim, desiredAr);
+        return fromStatus(Status::ILLEGAL_ARGUMENT);
+    }
+
+    if (configureV4l2StreamLocked(v4l2Fmt) != 0) {
+        ALOGE("V4L configuration failed!, format:%c%c%c%c, w %d, h %d", v4l2Fmt.fourcc & 0xFF,
+              (v4l2Fmt.fourcc >> 8) & 0xFF, (v4l2Fmt.fourcc >> 16) & 0xFF,
+              (v4l2Fmt.fourcc >> 24) & 0xFF, v4l2Fmt.width, v4l2Fmt.height);
+        return fromStatus(Status::INTERNAL_ERROR);
+    }
+
+    Size v4lSize = {v4l2Fmt.width, v4l2Fmt.height};
+    Size thumbSize{0, 0};
+    camera_metadata_ro_entry entry =
+            mCameraCharacteristics.find(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES);
+    for (uint32_t i = 0; i < entry.count; i += 2) {
+        Size sz{entry.data.i32[i], entry.data.i32[i + 1]};
+        if (sz.width * sz.height > thumbSize.width * thumbSize.height) {
+            thumbSize = sz;
+        }
+    }
+
+    if (thumbSize.width * thumbSize.height == 0) {
+        ALOGE("%s: non-zero thumbnail size not available", __FUNCTION__);
+        return fromStatus(Status::INTERNAL_ERROR);
+    }
+
+    mBlobBufferSize = blobBufferSize;
+    status = mOutputThread->allocateIntermediateBuffers(
+            v4lSize, mMaxThumbResolution, in_requestedConfiguration.streams, blobBufferSize);
+    if (status != Status::OK) {
+        ALOGE("%s: allocating intermediate buffers failed!", __FUNCTION__);
+        return fromStatus(status);
+    }
+
+    std::vector<HalStream>& out = *_aidl_return;
+    out.resize(in_requestedConfiguration.streams.size());
+    for (size_t i = 0; i < in_requestedConfiguration.streams.size(); i++) {
+        out[i].overrideDataSpace = in_requestedConfiguration.streams[i].dataSpace;
+        out[i].id = in_requestedConfiguration.streams[i].id;
+        // TODO: double check whether we should add these CAMERA flags
+        mStreamMap[in_requestedConfiguration.streams[i].id].usage = out[i].producerUsage =
+                static_cast<BufferUsage>(((int64_t)in_requestedConfiguration.streams[i].usage) |
+                                         ((int64_t)BufferUsage::CPU_WRITE_OFTEN) |
+                                         ((int64_t)BufferUsage::CAMERA_OUTPUT));
+        out[i].consumerUsage = static_cast<BufferUsage>(0);
+        out[i].maxBuffers = static_cast<int32_t>(mV4L2BufferCount);
+
+        switch (in_requestedConfiguration.streams[i].format) {
+            case PixelFormat::BLOB:
+            case PixelFormat::YCBCR_420_888:
+            case PixelFormat::YV12:  // Used by SurfaceTexture
+            case PixelFormat::Y16:
+                // No override
+                out[i].overrideFormat = in_requestedConfiguration.streams[i].format;
+                break;
+            case PixelFormat::IMPLEMENTATION_DEFINED:
+                // Implementation Defined
+                // This should look at the Stream's dataspace flag to determine the format or leave
+                // it as is if the rest of the system knows how to handle a private format. To keep
+                // this HAL generic, this is being overridden to YUV420
+                out[i].overrideFormat = PixelFormat::YCBCR_420_888;
+                // Save overridden format in mStreamMap
+                mStreamMap[in_requestedConfiguration.streams[i].id].format = out[i].overrideFormat;
+                break;
+            default:
+                ALOGE("%s: unsupported format 0x%x", __FUNCTION__,
+                      in_requestedConfiguration.streams[i].format);
+                return fromStatus(Status::ILLEGAL_ARGUMENT);
+        }
+    }
+
+    mFirstRequest = true;
+    mLastStreamConfigCounter = in_requestedConfiguration.streamConfigCounter;
+    return fromStatus(Status::OK);
+}
+
+ScopedAStatus ExternalCameraDeviceSession::flush() {
+    ATRACE_CALL();
+    Mutex::Autolock _il(mInterfaceLock);
+    Status status = initStatus();
+    if (status != Status::OK) {
+        return fromStatus(status);
+    }
+    mOutputThread->flush();
+    return fromStatus(Status::OK);
+}
+
+ScopedAStatus ExternalCameraDeviceSession::getCaptureRequestMetadataQueue(
+        MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) {
+    Mutex::Autolock _il(mInterfaceLock);
+    *_aidl_return = mRequestMetadataQueue->dupeDesc();
+    return fromStatus(Status::OK);
+}
+
+ScopedAStatus ExternalCameraDeviceSession::getCaptureResultMetadataQueue(
+        MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) {
+    Mutex::Autolock _il(mInterfaceLock);
+    *_aidl_return = mResultMetadataQueue->dupeDesc();
+    return fromStatus(Status::OK);
+}
+
+ScopedAStatus ExternalCameraDeviceSession::isReconfigurationRequired(
+        const CameraMetadata& in_oldSessionParams, const CameraMetadata& in_newSessionParams,
+        bool* _aidl_return) {
+    // reconfiguration required if there is any change in the session params
+    *_aidl_return = in_oldSessionParams != in_newSessionParams;
+    return fromStatus(Status::OK);
+}
+
+ScopedAStatus ExternalCameraDeviceSession::processCaptureRequest(
+        const std::vector<CaptureRequest>& in_requests,
+        const std::vector<BufferCache>& in_cachesToRemove, int32_t* _aidl_return) {
+    Mutex::Autolock _il(mInterfaceLock);
+    updateBufferCaches(in_cachesToRemove);
+
+    int32_t& numRequestProcessed = *_aidl_return;
+    numRequestProcessed = 0;
+    Status s = Status::OK;
+    for (size_t i = 0; i < in_requests.size(); i++, numRequestProcessed++) {
+        s = processOneCaptureRequest(in_requests[i]);
+        if (s != Status::OK) {
+            break;
+        }
+    }
+
+    return fromStatus(s);
+}
+
+Status ExternalCameraDeviceSession::processOneCaptureRequest(const CaptureRequest& request) {
+    ATRACE_CALL();
+    Status status = initStatus();
+    if (status != Status::OK) {
+        return status;
+    }
+
+    if (request.inputBuffer.streamId != -1) {
+        ALOGE("%s: external camera does not support reprocessing!", __FUNCTION__);
+        return Status::ILLEGAL_ARGUMENT;
+    }
+
+    Mutex::Autolock _l(mLock);
+    if (!mV4l2Streaming) {
+        ALOGE("%s: cannot process request in streamOff state!", __FUNCTION__);
+        return Status::INTERNAL_ERROR;
+    }
+
+    const camera_metadata_t* rawSettings = nullptr;
+    bool converted;
+    CameraMetadata settingsFmq;  // settings from FMQ
+
+    if (request.fmqSettingsSize > 0) {
+        // non-blocking read; client must write metadata before calling
+        // processOneCaptureRequest
+        settingsFmq.metadata.resize(request.fmqSettingsSize);
+        bool read = mRequestMetadataQueue->read(
+                reinterpret_cast<int8_t*>(settingsFmq.metadata.data()), request.fmqSettingsSize);
+        if (read) {
+            converted = convertFromAidl(settingsFmq, &rawSettings);
+        } else {
+            ALOGE("%s: capture request settings metadata couldn't be read from fmq!", __FUNCTION__);
+            converted = false;
+        }
+    } else {
+        converted = convertFromAidl(request.settings, &rawSettings);
+    }
+
+    if (converted && rawSettings != nullptr) {
+        mLatestReqSetting = rawSettings;
+    }
+
+    if (!converted) {
+        ALOGE("%s: capture request settings metadata is corrupt!", __FUNCTION__);
+        return Status::ILLEGAL_ARGUMENT;
+    }
+
+    if (mFirstRequest && rawSettings == nullptr) {
+        ALOGE("%s: capture request settings must not be null for first request!", __FUNCTION__);
+        return Status::ILLEGAL_ARGUMENT;
+    }
+
+    std::vector<buffer_handle_t*> allBufPtrs;
+    std::vector<int> allFences;
+    size_t numOutputBufs = request.outputBuffers.size();
+
+    if (numOutputBufs == 0) {
+        ALOGE("%s: capture request must have at least one output buffer!", __FUNCTION__);
+        return Status::ILLEGAL_ARGUMENT;
+    }
+
+    camera_metadata_entry fpsRange = mLatestReqSetting.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE);
+    if (fpsRange.count == 2) {
+        double requestFpsMax = fpsRange.data.i32[1];
+        double closestFps = 0.0;
+        double fpsError = 1000.0;
+        bool fpsSupported = false;
+        for (const auto& fr : mV4l2StreamingFmt.frameRates) {
+            double f = fr.getFramesPerSecond();
+            if (std::fabs(requestFpsMax - f) < 1.0) {
+                fpsSupported = true;
+                break;
+            }
+            if (std::fabs(requestFpsMax - f) < fpsError) {
+                fpsError = std::fabs(requestFpsMax - f);
+                closestFps = f;
+            }
+        }
+        if (!fpsSupported) {
+            /* This can happen in a few scenarios:
+             * 1. The application is sending an FPS range not supported by the configured outputs.
+             * 2. The application is sending a valid FPS range for all configured outputs, but
+             *    the selected V4L2 size can only run at slower speed. This should be very rare
+             *    though: for this to happen a sensor needs to support at least 3 different aspect
+             *    ratio outputs, and when (at least) two outputs are both not the main aspect ratio
+             *    of the webcam, a third size that's larger might be picked and runs into this
+             *    issue.
+             */
+            ALOGW("%s: cannot reach fps %d! Will do %f instead", __FUNCTION__, fpsRange.data.i32[1],
+                  closestFps);
+            requestFpsMax = closestFps;
+        }
+
+        if (requestFpsMax != mV4l2StreamingFps) {
+            {
+                std::unique_lock<std::mutex> lk(mV4l2BufferLock);
+                while (mNumDequeuedV4l2Buffers != 0) {
+                    // Wait until pipeline is idle before reconfigure stream
+                    int waitRet = waitForV4L2BufferReturnLocked(lk);
+                    if (waitRet != 0) {
+                        ALOGE("%s: wait for pipeline idle failed!", __FUNCTION__);
+                        return Status::INTERNAL_ERROR;
+                    }
+                }
+            }
+            configureV4l2StreamLocked(mV4l2StreamingFmt, requestFpsMax);
+        }
+    }
+
+    status = importRequestLocked(request, allBufPtrs, allFences);
+    if (status != Status::OK) {
+        return status;
+    }
+
+    nsecs_t shutterTs = 0;
+    std::unique_ptr<V4L2Frame> frameIn = dequeueV4l2FrameLocked(&shutterTs);
+    if (frameIn == nullptr) {
+        ALOGE("%s: V4L2 deque frame failed!", __FUNCTION__);
+        return Status::INTERNAL_ERROR;
+    }
+
+    std::shared_ptr<HalRequest> halReq = std::make_shared<HalRequest>();
+    halReq->frameNumber = request.frameNumber;
+    halReq->setting = mLatestReqSetting;
+    halReq->frameIn = std::move(frameIn);
+    halReq->shutterTs = shutterTs;
+    halReq->buffers.resize(numOutputBufs);
+    for (size_t i = 0; i < numOutputBufs; i++) {
+        HalStreamBuffer& halBuf = halReq->buffers[i];
+        int streamId = halBuf.streamId = request.outputBuffers[i].streamId;
+        halBuf.bufferId = request.outputBuffers[i].bufferId;
+        const Stream& stream = mStreamMap[streamId];
+        halBuf.width = stream.width;
+        halBuf.height = stream.height;
+        halBuf.format = stream.format;
+        halBuf.usage = stream.usage;
+        halBuf.bufPtr = allBufPtrs[i];
+        halBuf.acquireFence = allFences[i];
+        halBuf.fenceTimeout = false;
+    }
+    {
+        std::lock_guard<std::mutex> lk(mInflightFramesLock);
+        mInflightFrames.insert(halReq->frameNumber);
+    }
+    // Send request to OutputThread for the rest of processing
+    mOutputThread->submitRequest(halReq);
+    mFirstRequest = false;
+    return Status::OK;
+}
+
+ScopedAStatus ExternalCameraDeviceSession::signalStreamFlush(
+        const std::vector<int32_t>& /*in_streamIds*/, int32_t in_streamConfigCounter) {
+    {
+        Mutex::Autolock _l(mLock);
+        if (in_streamConfigCounter < mLastStreamConfigCounter) {
+            // Stale call: new streams have been configured since this call was issued.
+            // Do nothing.
+            return fromStatus(Status::OK);
+        }
+    }
+
+    // TODO: implement if needed.
+    return fromStatus(Status::OK);
+}
+
+ScopedAStatus ExternalCameraDeviceSession::switchToOffline(
+        const std::vector<int32_t>& in_streamsToKeep,
+        CameraOfflineSessionInfo* out_offlineSessionInfo,
+        std::shared_ptr<ICameraOfflineSession>* _aidl_return) {
+    std::vector<NotifyMsg> msgs;
+    std::vector<CaptureResult> results;
+    CameraOfflineSessionInfo info;
+    std::shared_ptr<ICameraOfflineSession> session;
+    Status st = switchToOffline(in_streamsToKeep, &msgs, &results, &info, &session);
+
+    mCallback->notify(msgs);
+    invokeProcessCaptureResultCallback(results, /* tryWriteFmq= */ true);
+    freeReleaseFences(results);
+
+    // setup return values
+    *out_offlineSessionInfo = info;
+    *_aidl_return = session;
+    return fromStatus(st);
+}
+
+Status ExternalCameraDeviceSession::switchToOffline(
+        const std::vector<int32_t>& offlineStreams, std::vector<NotifyMsg>* msgs,
+        std::vector<CaptureResult>* results, CameraOfflineSessionInfo* info,
+        std::shared_ptr<ICameraOfflineSession>* session) {
+    ATRACE_CALL();
+    if (offlineStreams.size() > 1) {
+        ALOGE("%s: more than one offline stream is not supported", __FUNCTION__);
+        return Status::ILLEGAL_ARGUMENT;
+    }
+
+    if (msgs == nullptr || results == nullptr || info == nullptr || session == nullptr) {
+        ALOGE("%s: output arguments (%p, %p, %p, %p) must not be null", __FUNCTION__, msgs, results,
+              info, session);
+        return Status::ILLEGAL_ARGUMENT;
+    }
+
+    Mutex::Autolock _il(mInterfaceLock);
+    Status status = initStatus();
+    if (status != Status::OK) {
+        return status;
+    }
+
+    Mutex::Autolock _l(mLock);
+    for (auto streamId : offlineStreams) {
+        if (!supportOfflineLocked(streamId)) {
+            return Status::ILLEGAL_ARGUMENT;
+        }
+    }
+
+    // pause output thread and get all remaining inflight requests
+    auto remainingReqs = mOutputThread->switchToOffline();
+    std::vector<std::shared_ptr<HalRequest>> halReqs;
+
+    // Send out buffer/request error for remaining requests and filter requests
+    // to be handled in offline mode
+    for (auto& halReq : remainingReqs) {
+        bool dropReq = canDropRequest(offlineStreams, halReq);
+        if (dropReq) {
+            // Request is dropped completely. Just send request error and
+            // there is no need to send the request to offline session
+            processCaptureRequestError(halReq, msgs, results);
+            continue;
+        }
+
+        // All requests that reach here must have at least one offline stream output
+        NotifyMsg shutter;
+        aidl::android::hardware::camera::device::ShutterMsg shutterMsg = {
+                .frameNumber = static_cast<int32_t>(halReq->frameNumber),
+                .timestamp = halReq->shutterTs};
+        shutter.set<NotifyMsg::Tag::shutter>(shutterMsg);
+        msgs->push_back(shutter);
+
+        std::vector<HalStreamBuffer> offlineBuffers;
+        for (const auto& buffer : halReq->buffers) {
+            bool dropBuffer = true;
+            for (auto offlineStreamId : offlineStreams) {
+                if (buffer.streamId == offlineStreamId) {
+                    dropBuffer = false;
+                    break;
+                }
+            }
+            if (dropBuffer) {
+                aidl::android::hardware::camera::device::ErrorMsg errorMsg = {
+                        .frameNumber = static_cast<int32_t>(halReq->frameNumber),
+                        .errorStreamId = buffer.streamId,
+                        .errorCode = ErrorCode::ERROR_BUFFER};
+
+                NotifyMsg error;
+                error.set<NotifyMsg::Tag::error>(errorMsg);
+                msgs->push_back(error);
+
+                results->push_back({
+                        .frameNumber = static_cast<int32_t>(halReq->frameNumber),
+                        .outputBuffers = {},
+                        .inputBuffer = {.streamId = -1},
+                        .partialResult = 0,  // buffer only result
+                });
+
+                CaptureResult& result = results->back();
+                result.outputBuffers.resize(1);
+                StreamBuffer& outputBuffer = result.outputBuffers[0];
+                outputBuffer.streamId = buffer.streamId;
+                outputBuffer.bufferId = buffer.bufferId;
+                outputBuffer.status = BufferStatus::ERROR;
+                if (buffer.acquireFence >= 0) {
+                    native_handle_t* handle = native_handle_create(/*numFds*/ 1, /*numInts*/ 0);
+                    handle->data[0] = buffer.acquireFence;
+                    outputBuffer.releaseFence = android::makeToAidl(handle);
+                }
+            } else {
+                offlineBuffers.push_back(buffer);
+            }
+        }
+        halReq->buffers = offlineBuffers;
+        halReqs.push_back(halReq);
+    }
+
+    // Convert HAL requests to offline requests
+    std::deque<std::shared_ptr<HalRequest>> offlineReqs(halReqs.size());
+    size_t i = 0;
+    for (auto& v4lReq : halReqs) {
+        offlineReqs[i] = std::make_shared<HalRequest>();
+        offlineReqs[i]->frameNumber = v4lReq->frameNumber;
+        offlineReqs[i]->setting = v4lReq->setting;
+        offlineReqs[i]->shutterTs = v4lReq->shutterTs;
+        offlineReqs[i]->buffers = v4lReq->buffers;
+        std::shared_ptr<V4L2Frame> v4l2Frame(static_cast<V4L2Frame*>(v4lReq->frameIn.get()));
+        offlineReqs[i]->frameIn = std::make_shared<AllocatedV4L2Frame>(v4l2Frame);
+        i++;
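+        // AllocatedV4L2Frame copied the frame data out of the V4L2 mmap buffer above, so the
+        // underlying buffer can now be returned to the driver.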
+        // enqueue V4L2 frame
+        enqueueV4l2Frame(v4l2Frame);
+    }
+
+    // Collect buffer caches/streams
+    // reserve() rather than pre-sizing: streams are appended with push_back() below.
+    std::vector<Stream> streamInfos;
+    streamInfos.reserve(offlineStreams.size());
+    std::map<int, CirculatingBuffers> circulatingBuffers;
+    {
+        Mutex::Autolock _cbsl(mCbsLock);
+        for (auto streamId : offlineStreams) {
+            circulatingBuffers[streamId] = mCirculatingBuffers.at(streamId);
+            mCirculatingBuffers.erase(streamId);
+            streamInfos.push_back(mStreamMap.at(streamId));
+            mStreamMap.erase(streamId);
+        }
+    }
+
+    fillOfflineSessionInfo(offlineStreams, offlineReqs, circulatingBuffers, info);
+    // create the offline session object
+    bool afTrigger;
+    {
+        std::lock_guard<std::mutex> _lk(mAfTriggerLock);
+        afTrigger = mAfTrigger;
+    }
+
+    std::shared_ptr<ExternalCameraOfflineSession> sessionImpl =
+            ndk::SharedRefBase::make<ExternalCameraOfflineSession>(
+                    mCroppingType, mCameraCharacteristics, mCameraId, mExifMake, mExifModel,
+                    mBlobBufferSize, afTrigger, streamInfos, offlineReqs, circulatingBuffers);
+
+    bool initFailed = sessionImpl->initialize();
+    if (initFailed) {
+        ALOGE("%s: offline session initialize failed!", __FUNCTION__);
+        return Status::INTERNAL_ERROR;
+    }
+
+    // cleanup stream and buffer caches
+    {
+        Mutex::Autolock _cbsl(mCbsLock);
+        for (auto pair : mStreamMap) {
+            cleanupBuffersLocked(/*Stream ID*/ pair.first);
+        }
+        mCirculatingBuffers.clear();
+    }
+    mStreamMap.clear();
+
+    // update inflight records
+    {
+        std::lock_guard<std::mutex> _lk(mInflightFramesLock);
+        mInflightFrames.clear();
+    }
+
+    // stop v4l2 streaming
+    if (v4l2StreamOffLocked() != 0) {
+        ALOGE("%s: stop V4L2 streaming failed!", __FUNCTION__);
+        return Status::INTERNAL_ERROR;
+    }
+
+    // No need to return a session if there are no offline requests left
+    if (!offlineReqs.empty()) {
+        *session = sessionImpl;
+    } else {
+        *session = nullptr;
+    }
+
+    return Status::OK;
+}
+
+#define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0]))
+#define UPDATE(md, tag, data, size)               \
+    do {                                          \
+        if ((md).update((tag), (data), (size))) { \
+            ALOGE("Update " #tag " failed!");     \
+            return BAD_VALUE;                     \
+        }                                         \
+    } while (0)
+
+status_t ExternalCameraDeviceSession::initDefaultRequests() {
+    common::V1_0::helper::CameraMetadata md;
+
+    const uint8_t aberrationMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
+    UPDATE(md, ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &aberrationMode, 1);
+
+    const int32_t exposureCompensation = 0;
+    UPDATE(md, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &exposureCompensation, 1);
+
+    const uint8_t videoStabilizationMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
+    UPDATE(md, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &videoStabilizationMode, 1);
+
+    const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
+    UPDATE(md, ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
+
+    const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
+    UPDATE(md, ANDROID_CONTROL_AE_MODE, &aeMode, 1);
+
+    const uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
+    UPDATE(md, ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger, 1);
+
+    const uint8_t afMode = ANDROID_CONTROL_AF_MODE_AUTO;
+    UPDATE(md, ANDROID_CONTROL_AF_MODE, &afMode, 1);
+
+    const uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
+    UPDATE(md, ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1);
+
+    const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_DISABLED;
+    UPDATE(md, ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
+
+    const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
+    UPDATE(md, ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
+
+    const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
+    UPDATE(md, ANDROID_FLASH_MODE, &flashMode, 1);
+
+    const int32_t thumbnailSize[] = {240, 180};
+    UPDATE(md, ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);
+
+    const uint8_t jpegQuality = 90;
+    UPDATE(md, ANDROID_JPEG_QUALITY, &jpegQuality, 1);
+    UPDATE(md, ANDROID_JPEG_THUMBNAIL_QUALITY, &jpegQuality, 1);
+
+    const int32_t jpegOrientation = 0;
+    UPDATE(md, ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);
+
+    const uint8_t oisMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
+    UPDATE(md, ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &oisMode, 1);
+
+    const uint8_t nrMode = ANDROID_NOISE_REDUCTION_MODE_OFF;
+    UPDATE(md, ANDROID_NOISE_REDUCTION_MODE, &nrMode, 1);
+
+    const int32_t testPatternModes = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
+    UPDATE(md, ANDROID_SENSOR_TEST_PATTERN_MODE, &testPatternModes, 1);
+
+    const uint8_t fdMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
+    UPDATE(md, ANDROID_STATISTICS_FACE_DETECT_MODE, &fdMode, 1);
+
+    const uint8_t hotpixelMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
+    UPDATE(md, ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotpixelMode, 1);
+
+    bool support30Fps = false;
+    int32_t maxFps = std::numeric_limits<int32_t>::min();
+    for (const auto& supportedFormat : mSupportedFormats) {
+        for (const auto& fr : supportedFormat.frameRates) {
+            int32_t framerateInt = static_cast<int32_t>(fr.getFramesPerSecond());
+            if (maxFps < framerateInt) {
+                maxFps = framerateInt;
+            }
+            if (framerateInt == 30) {
+                support30Fps = true;
+                break;
+            }
+        }
+        if (support30Fps) {
+            break;
+        }
+    }
+
+    int32_t defaultFramerate = support30Fps ? 30 : maxFps;
+    int32_t defaultFpsRange[] = {defaultFramerate / 2, defaultFramerate};
+    UPDATE(md, ANDROID_CONTROL_AE_TARGET_FPS_RANGE, defaultFpsRange, ARRAY_SIZE(defaultFpsRange));
+
+    uint8_t antibandingMode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
+    UPDATE(md, ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibandingMode, 1);
+
+    const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
+    UPDATE(md, ANDROID_CONTROL_MODE, &controlMode, 1);
+
+    for (const auto& type : ndk::enum_range<RequestTemplate>()) {
+        common::V1_0::helper::CameraMetadata mdCopy = md;
+        uint8_t intent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
+        switch (type) {
+            case RequestTemplate::PREVIEW:
+                intent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
+                break;
+            case RequestTemplate::STILL_CAPTURE:
+                intent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
+                break;
+            case RequestTemplate::VIDEO_RECORD:
+                intent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
+                break;
+            case RequestTemplate::VIDEO_SNAPSHOT:
+                intent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
+                break;
+            default:
+                ALOGV("%s: unsupported RequestTemplate type %d", __FUNCTION__, type);
+                continue;
+        }
+        UPDATE(mdCopy, ANDROID_CONTROL_CAPTURE_INTENT, &intent, 1);
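+        // Serialize the helper CameraMetadata into the AIDL byte vector. release() hands over
+        // ownership of the raw camera_metadata_t, so it must be freed explicitly afterwards.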
+        camera_metadata_t* mdPtr = mdCopy.release();
+        uint8_t* rawMd = reinterpret_cast<uint8_t*>(mdPtr);
+        CameraMetadata aidlMd;
+        aidlMd.metadata.assign(rawMd, rawMd + get_camera_metadata_size(mdPtr));
+        mDefaultRequests[type] = aidlMd;
+        free_camera_metadata(mdPtr);
+    }
+    return OK;
+}
+
+status_t ExternalCameraDeviceSession::fillCaptureResult(common::V1_0::helper::CameraMetadata& md,
+                                                        nsecs_t timestamp) {
+    bool afTrigger = false;
+    {
+        std::lock_guard<std::mutex> lk(mAfTriggerLock);
+        afTrigger = mAfTrigger;
+        if (md.exists(ANDROID_CONTROL_AF_TRIGGER)) {
+            camera_metadata_entry entry = md.find(ANDROID_CONTROL_AF_TRIGGER);
+            if (entry.data.u8[0] == ANDROID_CONTROL_AF_TRIGGER_START) {
+                mAfTrigger = afTrigger = true;
+            } else if (entry.data.u8[0] == ANDROID_CONTROL_AF_TRIGGER_CANCEL) {
+                mAfTrigger = afTrigger = false;
+            }
+        }
+    }
+
+    // For USB cameras, the device handles everything and we don't have control
+    // over AF. We simply fake the AF metadata based on the request received here.
+    uint8_t afState;
+    if (afTrigger) {
+        afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
+    } else {
+        afState = ANDROID_CONTROL_AF_STATE_INACTIVE;
+    }
+    UPDATE(md, ANDROID_CONTROL_AF_STATE, &afState, 1);
+
+    camera_metadata_ro_entry activeArraySize =
+            mCameraCharacteristics.find(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+
+    return fillCaptureResultCommon(md, timestamp, activeArraySize);
+}
+
+int ExternalCameraDeviceSession::configureV4l2StreamLocked(const SupportedV4L2Format& v4l2Fmt,
+                                                           double requestFps) {
+    ATRACE_CALL();
+    int ret = v4l2StreamOffLocked();
+    if (ret != OK) {
+        ALOGE("%s: stop v4l2 streaming failed: ret %d", __FUNCTION__, ret);
+        return ret;
+    }
+
+    // VIDIOC_S_FMT w/h/fmt
+    v4l2_format fmt;
+    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    fmt.fmt.pix.width = v4l2Fmt.width;
+    fmt.fmt.pix.height = v4l2Fmt.height;
+    fmt.fmt.pix.pixelformat = v4l2Fmt.fourcc;
+
+    {
+        int numAttempt = 0;
+        do {
+            ret = TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_S_FMT, &fmt));
+            if (numAttempt == MAX_RETRY) {
+                break;
+            }
+            numAttempt++;
+            if (ret < 0) {
+                ALOGW("%s: VIDIOC_S_FMT failed, wait 33ms and try again", __FUNCTION__);
+                usleep(IOCTL_RETRY_SLEEP_US);  // sleep and try again
+            }
+        } while (ret < 0);
+        if (ret < 0) {
+            ALOGE("%s: S_FMT ioctl failed: %s", __FUNCTION__, strerror(errno));
+            return -errno;
+        }
+    }
+
+    if (v4l2Fmt.width != fmt.fmt.pix.width || v4l2Fmt.height != fmt.fmt.pix.height ||
+        v4l2Fmt.fourcc != fmt.fmt.pix.pixelformat) {
+        ALOGE("%s: S_FMT expect %c%c%c%c %dx%d, got %c%c%c%c %dx%d instead!", __FUNCTION__,
+              v4l2Fmt.fourcc & 0xFF, (v4l2Fmt.fourcc >> 8) & 0xFF, (v4l2Fmt.fourcc >> 16) & 0xFF,
+              (v4l2Fmt.fourcc >> 24) & 0xFF, v4l2Fmt.width, v4l2Fmt.height,
+              fmt.fmt.pix.pixelformat & 0xFF, (fmt.fmt.pix.pixelformat >> 8) & 0xFF,
+              (fmt.fmt.pix.pixelformat >> 16) & 0xFF, (fmt.fmt.pix.pixelformat >> 24) & 0xFF,
+              fmt.fmt.pix.width, fmt.fmt.pix.height);
+        return -EINVAL;
+    }
+
+    uint32_t bufferSize = fmt.fmt.pix.sizeimage;
+    ALOGI("%s: V4L2 buffer size is %d", __FUNCTION__, bufferSize);
+    uint32_t expectedMaxBufferSize = kMaxBytesPerPixel * fmt.fmt.pix.width * fmt.fmt.pix.height;
+    if ((bufferSize == 0) || (bufferSize > expectedMaxBufferSize)) {
+        ALOGE("%s: V4L2 buffer size: %u looks invalid. Expected maximum size: %u", __FUNCTION__,
+              bufferSize, expectedMaxBufferSize);
+        return -EINVAL;
+    }
+    mMaxV4L2BufferSize = bufferSize;
+
+    const double kDefaultFps = 30.0;
+    double fps = std::numeric_limits<double>::max();
+    if (requestFps != 0.0) {
+        fps = requestFps;
+    } else {
+        double maxFps = -1.0;
+        // Try to pick the slowest fps that is at least 30
+        for (const auto& fr : v4l2Fmt.frameRates) {
+            double f = fr.getFramesPerSecond();
+            if (maxFps < f) {
+                maxFps = f;
+            }
+            if (f >= kDefaultFps && f < fps) {
+                fps = f;
+            }
+        }
+        // No fps >= kDefaultFps found; use the highest fps available among the supported formats.
+        if (fps == std::numeric_limits<double>::max()) {
+            fps = maxFps;
+        }
+    }
+
+    int fpsRet = setV4l2FpsLocked(fps);
+    if (fpsRet != 0 && fpsRet != -EINVAL) {
+        ALOGE("%s: set fps failed: %s", __FUNCTION__, strerror(fpsRet));
+        return fpsRet;
+    }
+
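+    // Pick the buffer count based on frame rate: the video buffer count for rates at or above
+    // kDefaultFps, the still-capture count otherwise.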
+    uint32_t v4lBufferCount = (fps >= kDefaultFps) ? mCfg.numVideoBuffers : mCfg.numStillBuffers;
+
+    // VIDIOC_REQBUFS: create buffers
+    v4l2_requestbuffers req_buffers{};
+    req_buffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    req_buffers.memory = V4L2_MEMORY_MMAP;
+    req_buffers.count = v4lBufferCount;
+    if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_REQBUFS, &req_buffers)) < 0) {
+        ALOGE("%s: VIDIOC_REQBUFS failed: %s", __FUNCTION__, strerror(errno));
+        return -errno;
+    }
+
+    // The driver may return more buffers than requested if it needs more to operate
+    if (req_buffers.count < v4lBufferCount) {
+        ALOGE("%s: VIDIOC_REQBUFS expected %d buffers, got %d instead", __FUNCTION__,
+              v4lBufferCount, req_buffers.count);
+        return NO_MEMORY;
+    }
+
+    // VIDIOC_QUERYBUF:  get buffer offset in the V4L2 fd
+    // VIDIOC_QBUF: send buffer to driver
+    mV4L2BufferCount = req_buffers.count;
+    for (uint32_t i = 0; i < req_buffers.count; i++) {
+        v4l2_buffer buffer = {
+                .index = i, .type = V4L2_BUF_TYPE_VIDEO_CAPTURE, .memory = V4L2_MEMORY_MMAP};
+
+        if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QUERYBUF, &buffer)) < 0) {
+            ALOGE("%s: QUERYBUF %d failed: %s", __FUNCTION__, i, strerror(errno));
+            return -errno;
+        }
+
+        if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QBUF, &buffer)) < 0) {
+            ALOGE("%s: QBUF %d failed: %s", __FUNCTION__, i, strerror(errno));
+            return -errno;
+        }
+    }
+
+    {
+        // VIDIOC_STREAMON: start streaming
+        v4l2_buf_type capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        int numAttempt = 0;
+        do {
+            ret = TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_STREAMON, &capture_type));
+            if (numAttempt == MAX_RETRY) {
+                break;
+            }
+            if (ret < 0) {
+                ALOGW("%s: VIDIOC_STREAMON failed, wait 33ms and try again", __FUNCTION__);
+                usleep(IOCTL_RETRY_SLEEP_US);  // sleep 33 ms and try again
+            }
+        } while (ret < 0);
+
+        if (ret < 0) {
+            ALOGE("%s: VIDIOC_STREAMON ioctl failed: %s", __FUNCTION__, strerror(errno));
+            return -errno;
+        }
+    }
+
+    // Swallow first few frames after streamOn to account for bad frames from some devices
+    for (int i = 0; i < kBadFramesAfterStreamOn; i++) {
+        v4l2_buffer buffer{};
+        buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        buffer.memory = V4L2_MEMORY_MMAP;
+        if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_DQBUF, &buffer)) < 0) {
+            ALOGE("%s: DQBUF fails: %s", __FUNCTION__, strerror(errno));
+            return -errno;
+        }
+
+        if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QBUF, &buffer)) < 0) {
+            ALOGE("%s: QBUF index %d fails: %s", __FUNCTION__, buffer.index, strerror(errno));
+            return -errno;
+        }
+    }
+
+    ALOGI("%s: start V4L2 streaming %dx%d@%ffps", __FUNCTION__, v4l2Fmt.width, v4l2Fmt.height, fps);
+    mV4l2StreamingFmt = v4l2Fmt;
+    mV4l2Streaming = true;
+    return OK;
+}
+
+std::unique_ptr<V4L2Frame> ExternalCameraDeviceSession::dequeueV4l2FrameLocked(nsecs_t* shutterTs) {
+    ATRACE_CALL();
+    std::unique_ptr<V4L2Frame> ret = nullptr;
+    if (shutterTs == nullptr) {
+        ALOGE("%s: shutterTs must not be null!", __FUNCTION__);
+        return ret;
+    }
+
+    {
+        std::unique_lock<std::mutex> lk(mV4l2BufferLock);
+        if (mNumDequeuedV4l2Buffers == mV4L2BufferCount) {
+            int waitRet = waitForV4L2BufferReturnLocked(lk);
+            if (waitRet != 0) {
+                return ret;
+            }
+        }
+    }
+
+    ATRACE_BEGIN("VIDIOC_DQBUF");
+    v4l2_buffer buffer{};
+    buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    buffer.memory = V4L2_MEMORY_MMAP;
+    if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_DQBUF, &buffer)) < 0) {
+        ALOGE("%s: DQBUF fails: %s", __FUNCTION__, strerror(errno));
+        return ret;
+    }
+    ATRACE_END();
+
+    if (buffer.index >= mV4L2BufferCount) {
+        ALOGE("%s: Invalid buffer id: %d", __FUNCTION__, buffer.index);
+        return ret;
+    }
+
+    if (buffer.flags & V4L2_BUF_FLAG_ERROR) {
+        ALOGE("%s: v4l2 buf error! buf flag 0x%x", __FUNCTION__, buffer.flags);
+        // TODO: try to dequeue again
+    }
+
+    if (buffer.bytesused > mMaxV4L2BufferSize) {
+        ALOGE("%s: v4l2 buffer bytes used: %u maximum %u", __FUNCTION__, buffer.bytesused,
+              mMaxV4L2BufferSize);
+        return ret;
+    }
+
+    if (buffer.flags & V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC) {
+        // Ideally we should also check for V4L2_BUF_FLAG_TSTAMP_SRC_SOE, but even
+        // V4L2_BUF_FLAG_TSTAMP_SRC_EOF is better than capturing a timestamp now
+        *shutterTs = static_cast<nsecs_t>(buffer.timestamp.tv_sec) * 1000000000LL +
+                     buffer.timestamp.tv_usec * 1000LL;
+    } else {
+        *shutterTs = systemTime(SYSTEM_TIME_MONOTONIC);
+    }
+
+    {
+        std::lock_guard<std::mutex> lk(mV4l2BufferLock);
+        mNumDequeuedV4l2Buffers++;
+    }
+
+    return std::make_unique<V4L2Frame>(mV4l2StreamingFmt.width, mV4l2StreamingFmt.height,
+                                       mV4l2StreamingFmt.fourcc, buffer.index, mV4l2Fd.get(),
+                                       buffer.bytesused, buffer.m.offset);
+}
+
+void ExternalCameraDeviceSession::enqueueV4l2Frame(const std::shared_ptr<V4L2Frame>& frame) {
+    ATRACE_CALL();
+    frame->unmap();
+    ATRACE_BEGIN("VIDIOC_QBUF");
+    v4l2_buffer buffer{};
+    buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    buffer.memory = V4L2_MEMORY_MMAP;
+    buffer.index = frame->mBufferIndex;
+    if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QBUF, &buffer)) < 0) {
+        ALOGE("%s: QBUF index %d fails: %s", __FUNCTION__, frame->mBufferIndex, strerror(errno));
+        return;
+    }
+    ATRACE_END();
+
+    {
+        std::lock_guard<std::mutex> lk(mV4l2BufferLock);
+        mNumDequeuedV4l2Buffers--;
+    }
+    mV4L2BufferReturned.notify_one();
+}
+
+bool ExternalCameraDeviceSession::isSupported(
+        const Stream& stream, const std::vector<SupportedV4L2Format>& supportedFormats,
+        const ExternalCameraConfig& devCfg) {
+    Dataspace ds = stream.dataSpace;
+    PixelFormat fmt = stream.format;
+    uint32_t width = stream.width;
+    uint32_t height = stream.height;
+    // TODO: check usage flags
+
+    if (stream.streamType != StreamType::OUTPUT) {
+        ALOGE("%s: does not support non-output stream type", __FUNCTION__);
+        return false;
+    }
+
+    if (stream.rotation != StreamRotation::ROTATION_0) {
+        ALOGE("%s: does not support stream rotation", __FUNCTION__);
+        return false;
+    }
+
+    switch (fmt) {
+        case PixelFormat::BLOB:
+            if (ds != Dataspace::JFIF) {
+                ALOGI("%s: BLOB format does not support dataSpace %x", __FUNCTION__, ds);
+                return false;
+            }
+            break;
+        case PixelFormat::IMPLEMENTATION_DEFINED:
+        case PixelFormat::YCBCR_420_888:
+        case PixelFormat::YV12:
+            // TODO: check what dataspace we can support here.
+            // intentional no-ops.
+            break;
+        case PixelFormat::Y16:
+            if (!devCfg.depthEnabled) {
+                ALOGI("%s: Depth is not Enabled", __FUNCTION__);
+                return false;
+            }
+            if (!(static_cast<int32_t>(ds) & static_cast<int32_t>(Dataspace::DEPTH))) {
+                ALOGI("%s: Y16 supports only dataSpace DEPTH", __FUNCTION__);
+                return false;
+            }
+            break;
+        default:
+            ALOGI("%s: does not support format %x", __FUNCTION__, fmt);
+            return false;
+    }
+
+    // Assume we can convert any V4L2 format to any of the supported output formats for now,
+    // i.e. ignore v4l2Fmt.fourcc. A more subtle check may be needed if more V4L2 formats are
+    // supported in the future.
+    for (const auto& v4l2Fmt : supportedFormats) {
+        if (width == v4l2Fmt.width && height == v4l2Fmt.height) {
+            return true;
+        }
+    }
+    ALOGI("%s: resolution %dx%d is not supported", __FUNCTION__, width, height);
+    return false;
+}
+
+Status ExternalCameraDeviceSession::importRequestLocked(const CaptureRequest& request,
+                                                        std::vector<buffer_handle_t*>& allBufPtrs,
+                                                        std::vector<int>& allFences) {
+    return importRequestLockedImpl(request, allBufPtrs, allFences);
+}
+
+Status ExternalCameraDeviceSession::importRequestLockedImpl(
+        const CaptureRequest& request, std::vector<buffer_handle_t*>& allBufPtrs,
+        std::vector<int>& allFences) {
+    size_t numOutputBufs = request.outputBuffers.size();
+    size_t numBufs = numOutputBufs;
+    // Validate all I/O buffers
+    std::vector<buffer_handle_t> allBufs;
+    std::vector<uint64_t> allBufIds;
+    allBufs.resize(numBufs);
+    allBufIds.resize(numBufs);
+    allBufPtrs.resize(numBufs);
+    allFences.resize(numBufs);
+    std::vector<int32_t> streamIds(numBufs);
+
+    for (size_t i = 0; i < numOutputBufs; i++) {
+        allBufs[i] = ::android::makeFromAidl(request.outputBuffers[i].buffer);
+        allBufIds[i] = request.outputBuffers[i].bufferId;
+        allBufPtrs[i] = &allBufs[i];
+        streamIds[i] = request.outputBuffers[i].streamId;
+    }
+
+    {
+        Mutex::Autolock _l(mCbsLock);
+        for (size_t i = 0; i < numBufs; i++) {
+            Status st = importBufferLocked(streamIds[i], allBufIds[i], allBufs[i], &allBufPtrs[i]);
+            if (st != Status::OK) {
+                // Detailed error logs printed in importBuffer
+                return st;
+            }
+        }
+    }
+
+    // All buffers are imported. Now validate output buffer acquire fences
+    for (size_t i = 0; i < numOutputBufs; i++) {
+        if (!sHandleImporter.importFence(
+                    ::android::makeFromAidl(request.outputBuffers[i].acquireFence), allFences[i])) {
+            ALOGE("%s: output buffer %zu acquire fence is invalid", __FUNCTION__, i);
+            cleanupInflightFences(allFences, i);
+            return Status::INTERNAL_ERROR;
+        }
+    }
+    return Status::OK;
+}
+
+Status ExternalCameraDeviceSession::importBuffer(int32_t streamId, uint64_t bufId,
+                                                 buffer_handle_t buf,
+                                                 /*out*/ buffer_handle_t** outBufPtr) {
+    Mutex::Autolock _l(mCbsLock);
+    return importBufferLocked(streamId, bufId, buf, outBufPtr);
+}
+
+Status ExternalCameraDeviceSession::importBufferLocked(int32_t streamId, uint64_t bufId,
+                                                       buffer_handle_t buf,
+                                                       buffer_handle_t** outBufPtr) {
+    return importBufferImpl(mCirculatingBuffers, sHandleImporter, streamId, bufId, buf, outBufPtr);
+}
+
+ScopedAStatus ExternalCameraDeviceSession::close() {
+    close(false);
+    return fromStatus(Status::OK);
+}
+
+void ExternalCameraDeviceSession::close(bool callerIsDtor) {
+    Mutex::Autolock _il(mInterfaceLock);
+    bool closed = isClosed();
+    if (!closed) {
+        if (callerIsDtor) {
+            closeOutputThreadImpl();
+        } else {
+            closeOutputThread();
+        }
+
+        Mutex::Autolock _l(mLock);
+        // free all buffers
+        {
+            Mutex::Autolock _cbsl(mCbsLock);
+            for (auto pair : mStreamMap) {
+                cleanupBuffersLocked(/*Stream ID*/ pair.first);
+            }
+        }
+        v4l2StreamOffLocked();
+        ALOGV("%s: closing V4L2 camera FD %d", __FUNCTION__, mV4l2Fd.get());
+        mV4l2Fd.reset();
+        mClosed = true;
+    }
+}
+
+bool ExternalCameraDeviceSession::isClosed() {
+    Mutex::Autolock _l(mLock);
+    return mClosed;
+}
+
+ScopedAStatus ExternalCameraDeviceSession::repeatingRequestEnd(
+        int32_t /*in_frameNumber*/, const std::vector<int32_t>& /*in_streamIds*/) {
+    // TODO: Figure this one out.
+    return fromStatus(Status::OK);
+}
+
+int ExternalCameraDeviceSession::v4l2StreamOffLocked() {
+    if (!mV4l2Streaming) {
+        return OK;
+    }
+
+    {
+        std::lock_guard<std::mutex> lk(mV4l2BufferLock);
+        if (mNumDequeuedV4l2Buffers != 0) {
+            ALOGE("%s: there are %zu inflight V4L buffers", __FUNCTION__, mNumDequeuedV4l2Buffers);
+            return -1;
+        }
+    }
+    mV4L2BufferCount = 0;
+
+    // VIDIOC_STREAMOFF
+    v4l2_buf_type capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_STREAMOFF, &capture_type)) < 0) {
+        ALOGE("%s: STREAMOFF failed: %s", __FUNCTION__, strerror(errno));
+        return -errno;
+    }
+
+    // VIDIOC_REQBUFS: clear buffers
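+    // Requesting a count of 0 releases the MMAP buffers the driver allocated at stream-on time.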
+    v4l2_requestbuffers req_buffers{};
+    req_buffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    req_buffers.memory = V4L2_MEMORY_MMAP;
+    req_buffers.count = 0;
+    if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_REQBUFS, &req_buffers)) < 0) {
+        ALOGE("%s: REQBUFS failed: %s", __FUNCTION__, strerror(errno));
+        return -errno;
+    }
+
+    mV4l2Streaming = false;
+    return OK;
+}
+
+int ExternalCameraDeviceSession::setV4l2FpsLocked(double fps) {
+    // VIDIOC_G_PARM/VIDIOC_S_PARM: set fps
+    v4l2_streamparm streamparm = {.type = V4L2_BUF_TYPE_VIDEO_CAPTURE};
+    // The following line checks that the driver knows about framerate get/set.
+    int ret = TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_G_PARM, &streamparm));
+    if (ret != 0) {
+        if (errno == EINVAL) {
+            ALOGW("%s: device does not support VIDIOC_G_PARM", __FUNCTION__);
+        }
+        return -errno;
+    }
+    // Now check if the device is able to accept a capture framerate set.
+    if (!(streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME)) {
+        ALOGW("%s: device does not support V4L2_CAP_TIMEPERFRAME", __FUNCTION__);
+        return -EINVAL;
+    }
+
+    // fps is float, approximate by a fraction.
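+    // timeperframe is the frame interval (1/fps) expressed as numerator/denominator. With
+    // kFrameRatePrecision = 10000, e.g. fps = 30 becomes 10000/300000 = 1/30 s per frame.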
+    const int kFrameRatePrecision = 10000;
+    streamparm.parm.capture.timeperframe.numerator = kFrameRatePrecision;
+    streamparm.parm.capture.timeperframe.denominator = (fps * kFrameRatePrecision);
+
+    if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_S_PARM, &streamparm)) < 0) {
+        ALOGE("%s: failed to set framerate to %f: %s", __FUNCTION__, fps, strerror(errno));
+        return -1;
+    }
+
+    double retFps = streamparm.parm.capture.timeperframe.denominator /
+                    static_cast<double>(streamparm.parm.capture.timeperframe.numerator);
+    if (std::fabs(fps - retFps) > 1.0) {
+        ALOGE("%s: expect fps %f, got %f instead", __FUNCTION__, fps, retFps);
+        return -1;
+    }
+    mV4l2StreamingFps = fps;
+    return 0;
+}
+
+void ExternalCameraDeviceSession::cleanupInflightFences(std::vector<int>& allFences,
+                                                        size_t numFences) {
+    for (size_t j = 0; j < numFences; j++) {
+        sHandleImporter.closeFence(allFences[j]);
+    }
+}
+
+void ExternalCameraDeviceSession::cleanupBuffersLocked(int id) {
+    for (auto& pair : mCirculatingBuffers.at(id)) {
+        sHandleImporter.freeBuffer(pair.second);
+    }
+    mCirculatingBuffers[id].clear();
+    mCirculatingBuffers.erase(id);
+}
+
+void ExternalCameraDeviceSession::notifyShutter(int32_t frameNumber, nsecs_t shutterTs) {
+    NotifyMsg msg;
+    msg.set<NotifyMsg::Tag::shutter>(ShutterMsg{
+            .frameNumber = frameNumber,
+            .timestamp = shutterTs,
+    });
+    mCallback->notify({msg});
+}
+
+void ExternalCameraDeviceSession::notifyError(int32_t frameNumber, int32_t streamId, ErrorCode ec) {
+    NotifyMsg msg;
+    msg.set<NotifyMsg::Tag::error>(ErrorMsg{
+            .frameNumber = frameNumber,
+            .errorStreamId = streamId,
+            .errorCode = ec,
+    });
+    mCallback->notify({msg});
+}
+
+void ExternalCameraDeviceSession::invokeProcessCaptureResultCallback(
+        std::vector<CaptureResult>& results, bool tryWriteFmq) {
+    if (mProcessCaptureResultLock.tryLock() != OK) {
+        const nsecs_t NS_TO_SECOND = 1000000000;
+        ALOGV("%s: previous call is not finished! waiting 1s...", __FUNCTION__);
+        if (mProcessCaptureResultLock.timedLock(/* 1s */ NS_TO_SECOND) != OK) {
+            ALOGE("%s: cannot acquire lock in 1s, cannot proceed", __FUNCTION__);
+            return;
+        }
+    }
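+    // Fast path: when the result metadata FMQ has room, write the metadata into the queue and
+    // report its size via fmqResultSize so the framework reads it from the FMQ instead of the
+    // binder parcel; the in-parcel metadata is then cleared.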
+    if (tryWriteFmq && mResultMetadataQueue->availableToWrite() > 0) {
+        for (CaptureResult& result : results) {
+            CameraMetadata& md = result.result;
+            if (!md.metadata.empty()) {
+                if (mResultMetadataQueue->write(reinterpret_cast<int8_t*>(md.metadata.data()),
+                                                md.metadata.size())) {
+                    result.fmqResultSize = md.metadata.size();
+                    md.metadata.resize(0);
+                } else {
+                    ALOGW("%s: couldn't utilize fmq, fall back to hwbinder", __FUNCTION__);
+                    result.fmqResultSize = 0;
+                }
+            } else {
+                result.fmqResultSize = 0;
+            }
+        }
+    }
+    auto status = mCallback->processCaptureResult(results);
+    if (!status.isOk()) {
+        ALOGE("%s: processCaptureResult ERROR : %d:%d", __FUNCTION__, status.getExceptionCode(),
+              status.getServiceSpecificError());
+    }
+
+    mProcessCaptureResultLock.unlock();
+}
+
+int ExternalCameraDeviceSession::waitForV4L2BufferReturnLocked(std::unique_lock<std::mutex>& lk) {
+    ATRACE_CALL();
+    auto timeout = std::chrono::seconds(kBufferWaitTimeoutSec);
+    mLock.unlock();
+    auto st = mV4L2BufferReturned.wait_for(lk, timeout);
+    // Here we intentionally acquire mV4l2BufferLock before mLock, which is the reverse of the
+    // normal lock acquisition order. This is fine because in most cases we are protected by
+    // mInterfaceLock. The only thread that could deadlock us is the OutputThread, so we must
+    // make sure the OutputThread never acquires mLock and then mV4l2BufferLock.
+    mLock.lock();
+    if (st == std::cv_status::timeout) {
+        ALOGE("%s: wait for V4L2 buffer return timeout!", __FUNCTION__);
+        return -1;
+    }
+    return 0;
+}
+
+bool ExternalCameraDeviceSession::supportOfflineLocked(int32_t streamId) {
+    const Stream& stream = mStreamMap[streamId];
+    if (stream.format == PixelFormat::BLOB &&
+        static_cast<int32_t>(stream.dataSpace) == static_cast<int32_t>(Dataspace::JFIF)) {
+        return true;
+    }
+    // TODO: support YUV output stream?
+    return false;
+}
+
+bool ExternalCameraDeviceSession::canDropRequest(const std::vector<int32_t>& offlineStreams,
+                                                 std::shared_ptr<HalRequest> halReq) {
+    for (const auto& buffer : halReq->buffers) {
+        for (auto offlineStreamId : offlineStreams) {
+            if (buffer.streamId == offlineStreamId) {
+                return false;
+            }
+        }
+    }
+    // Only drop a request completely if it has no offline output
+    return true;
+}
+
+void ExternalCameraDeviceSession::fillOfflineSessionInfo(
+        const std::vector<int32_t>& offlineStreams,
+        std::deque<std::shared_ptr<HalRequest>>& offlineReqs,
+        const std::map<int, CirculatingBuffers>& circulatingBuffers,
+        CameraOfflineSessionInfo* info) {
+    if (info == nullptr) {
+        ALOGE("%s: output info must not be null!", __FUNCTION__);
+        return;
+    }
+
+    info->offlineStreams.resize(offlineStreams.size());
+    info->offlineRequests.resize(offlineReqs.size());
+
+    // Fill in offline reqs and count outstanding buffers
+    for (size_t i = 0; i < offlineReqs.size(); i++) {
+        info->offlineRequests[i].frameNumber = offlineReqs[i]->frameNumber;
+        info->offlineRequests[i].pendingStreams.resize(offlineReqs[i]->buffers.size());
+        for (size_t bIdx = 0; bIdx < offlineReqs[i]->buffers.size(); bIdx++) {
+            int32_t streamId = offlineReqs[i]->buffers[bIdx].streamId;
+            info->offlineRequests[i].pendingStreams[bIdx] = streamId;
+        }
+    }
+
+    for (size_t i = 0; i < offlineStreams.size(); i++) {
+        int32_t streamId = offlineStreams[i];
+        info->offlineStreams[i].id = streamId;
+        // Outstanding buffer count is 0 because the HAL is doing buffer management and the
+        // offline session will request those buffers later
+        info->offlineStreams[i].numOutstandingBuffers = 0;
+        const CirculatingBuffers& bufIdMap = circulatingBuffers.at(streamId);
+        info->offlineStreams[i].circulatingBufferIds.resize(bufIdMap.size());
+        size_t bIdx = 0;
+        for (const auto& pair : bufIdMap) {
+            // Fill in bufferId
+            info->offlineStreams[i].circulatingBufferIds[bIdx++] = pair.first;
+        }
+    }
+}
+
+Status ExternalCameraDeviceSession::isStreamCombinationSupported(
+        const StreamConfiguration& config, const std::vector<SupportedV4L2Format>& supportedFormats,
+        const ExternalCameraConfig& devCfg) {
+    if (config.operationMode != StreamConfigurationMode::NORMAL_MODE) {
+        ALOGE("%s: unsupported operation mode: %d", __FUNCTION__, config.operationMode);
+        return Status::ILLEGAL_ARGUMENT;
+    }
+
+    if (config.streams.size() == 0) {
+        ALOGE("%s: cannot configure zero stream", __FUNCTION__);
+        return Status::ILLEGAL_ARGUMENT;
+    }
+
+    int numProcessedStream = 0;
+    int numStallStream = 0;
+    for (const auto& stream : config.streams) {
+        // Check if the format/width/height combo is supported
+        if (!isSupported(stream, supportedFormats, devCfg)) {
+            return Status::ILLEGAL_ARGUMENT;
+        }
+        if (stream.format == PixelFormat::BLOB) {
+            numStallStream++;
+        } else {
+            numProcessedStream++;
+        }
+    }
+
+    if (numProcessedStream > kMaxProcessedStream) {
+        ALOGE("%s: too many processed streams (expect <= %d, got %d)", __FUNCTION__,
+              kMaxProcessedStream, numProcessedStream);
+        return Status::ILLEGAL_ARGUMENT;
+    }
+
+    if (numStallStream > kMaxStallStream) {
+        ALOGE("%s: too many stall streams (expect <= %d, got %d)", __FUNCTION__, kMaxStallStream,
+              numStallStream);
+        return Status::ILLEGAL_ARGUMENT;
+    }
+
+    return Status::OK;
+}
+
+void ExternalCameraDeviceSession::updateBufferCaches(
+        const std::vector<BufferCache>& cachesToRemove) {
+    Mutex::Autolock _l(mCbsLock);
+    for (auto& cache : cachesToRemove) {
+        auto cbsIt = mCirculatingBuffers.find(cache.streamId);
+        if (cbsIt == mCirculatingBuffers.end()) {
+            // The stream could have been removed
+            continue;
+        }
+        CirculatingBuffers& cbs = cbsIt->second;
+        auto it = cbs.find(cache.bufferId);
+        if (it != cbs.end()) {
+            sHandleImporter.freeBuffer(it->second);
+            cbs.erase(it);
+        } else {
+            ALOGE("%s: stream %d buffer %" PRIu64 " is not cached", __FUNCTION__, cache.streamId,
+                  cache.bufferId);
+        }
+    }
+}
+
+Status ExternalCameraDeviceSession::processCaptureRequestError(
+        const std::shared_ptr<HalRequest>& req, std::vector<NotifyMsg>* outMsgs,
+        std::vector<CaptureResult>* outResults) {
+    ATRACE_CALL();
+    // Return V4L2 buffer to V4L2 buffer queue
+    std::shared_ptr<V4L2Frame> v4l2Frame = std::static_pointer_cast<V4L2Frame>(req->frameIn);
+    enqueueV4l2Frame(v4l2Frame);
+
+    if (outMsgs == nullptr) {
+        notifyShutter(req->frameNumber, req->shutterTs);
+        notifyError(/*frameNum*/ req->frameNumber, /*stream*/ -1, ErrorCode::ERROR_REQUEST);
+    } else {
+        NotifyMsg shutter;
+        shutter.set<NotifyMsg::Tag::shutter>(
+                ShutterMsg{.frameNumber = req->frameNumber, .timestamp = req->shutterTs});
+
+        NotifyMsg error;
+        error.set<NotifyMsg::Tag::error>(ErrorMsg{.frameNumber = req->frameNumber,
+                                                  .errorStreamId = -1,
+                                                  .errorCode = ErrorCode::ERROR_REQUEST});
+        outMsgs->push_back(shutter);
+        outMsgs->push_back(error);
+    }
+
+    // Fill output buffers
+    CaptureResult result;
+    result.frameNumber = req->frameNumber;
+    result.partialResult = 1;
+    result.inputBuffer.streamId = -1;
+    result.outputBuffers.resize(req->buffers.size());
+    for (size_t i = 0; i < req->buffers.size(); i++) {
+        result.outputBuffers[i].streamId = req->buffers[i].streamId;
+        result.outputBuffers[i].bufferId = req->buffers[i].bufferId;
+        result.outputBuffers[i].status = BufferStatus::ERROR;
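+        // A buffer that was never filled must hand its un-waited acquire fence back to the
+        // framework as the release fence so the fd is not leaked.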
+        if (req->buffers[i].acquireFence >= 0) {
+            native_handle_t* handle = native_handle_create(/*numFds*/ 1, /*numInts*/ 0);
+            handle->data[0] = req->buffers[i].acquireFence;
+            result.outputBuffers[i].releaseFence = ::android::makeToAidl(handle);
+        }
+    }
+
+    // update inflight records
+    {
+        std::lock_guard<std::mutex> lk(mInflightFramesLock);
+        mInflightFrames.erase(req->frameNumber);
+    }
+
+    if (outResults == nullptr) {
+        // Callback into framework
+        std::vector<CaptureResult> results(1);
+        results[0] = std::move(result);
+        invokeProcessCaptureResultCallback(results, /* tryWriteFmq */ true);
+        freeReleaseFences(results);
+    } else {
+        outResults->push_back(std::move(result));
+    }
+    return Status::OK;
+}
+
+Status ExternalCameraDeviceSession::processCaptureResult(std::shared_ptr<HalRequest>& req) {
+    ATRACE_CALL();
+    // Return V4L2 buffer to V4L2 buffer queue
+    std::shared_ptr<V4L2Frame> v4l2Frame = std::static_pointer_cast<V4L2Frame>(req->frameIn);
+    enqueueV4l2Frame(v4l2Frame);
+
+    // NotifyShutter
+    notifyShutter(req->frameNumber, req->shutterTs);
+
+    // Fill output buffers
+    std::vector<CaptureResult> results(1);
+    CaptureResult& result = results[0];
+    result.frameNumber = req->frameNumber;
+    result.partialResult = 1;
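+    // The external camera HAL does not use partial results; all metadata is delivered in a
+    // single final result (partial result count 1).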
+    result.inputBuffer.streamId = -1;
+    result.outputBuffers.resize(req->buffers.size());
+    for (size_t i = 0; i < req->buffers.size(); i++) {
+        result.outputBuffers[i].streamId = req->buffers[i].streamId;
+        result.outputBuffers[i].bufferId = req->buffers[i].bufferId;
+        if (req->buffers[i].fenceTimeout) {
+            result.outputBuffers[i].status = BufferStatus::ERROR;
+            if (req->buffers[i].acquireFence >= 0) {
+                native_handle_t* handle = native_handle_create(/*numFds*/ 1, /*numInts*/ 0);
+                handle->data[0] = req->buffers[i].acquireFence;
+                result.outputBuffers[i].releaseFence = ::android::makeToAidl(handle);
+            }
+            notifyError(req->frameNumber, req->buffers[i].streamId, ErrorCode::ERROR_BUFFER);
+        } else {
+            result.outputBuffers[i].status = BufferStatus::OK;
+            // TODO: refactor
+            if (req->buffers[i].acquireFence >= 0) {
+                native_handle_t* handle = native_handle_create(/*numFds*/ 1, /*numInts*/ 0);
+                handle->data[0] = req->buffers[i].acquireFence;
+                result.outputBuffers[i].releaseFence = ::android::makeToAidl(handle);
+            }
+        }
+    }
+
+    // Fill capture result metadata
+    fillCaptureResult(req->setting, req->shutterTs);
+    const camera_metadata_t* rawResult = req->setting.getAndLock();
+    convertToAidl(rawResult, &result.result);
+    req->setting.unlock(rawResult);
+
+    // update inflight records
+    {
+        std::lock_guard<std::mutex> lk(mInflightFramesLock);
+        mInflightFrames.erase(req->frameNumber);
+    }
+
+    // Callback into framework
+    invokeProcessCaptureResultCallback(results, /* tryWriteFmq */ true);
+    freeReleaseFences(results);
+    return Status::OK;
+}
+
+ssize_t ExternalCameraDeviceSession::getJpegBufferSize(int32_t width, int32_t height) const {
+    // Constant from camera3.h
+    const ssize_t kMinJpegBufferSize = 256 * 1024 + sizeof(CameraBlob);
+    // Get max jpeg size (area-wise).
+    if (mMaxJpegResolution.width == 0) {
+        ALOGE("%s: No supported JPEG stream", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    // Get max jpeg buffer size
+    ssize_t maxJpegBufferSize = 0;
+    camera_metadata_ro_entry jpegBufMaxSize = mCameraCharacteristics.find(ANDROID_JPEG_MAX_SIZE);
+    if (jpegBufMaxSize.count == 0) {
+        ALOGE("%s: Can't find maximum JPEG size in static metadata!", __FUNCTION__);
+        return BAD_VALUE;
+    }
+    maxJpegBufferSize = jpegBufMaxSize.data.i32[0];
+
+    if (maxJpegBufferSize <= kMinJpegBufferSize) {
+        ALOGE("%s: ANDROID_JPEG_MAX_SIZE (%zd) <= kMinJpegBufferSize (%zd)", __FUNCTION__,
+              maxJpegBufferSize, kMinJpegBufferSize);
+        return BAD_VALUE;
+    }
+
+    // Calculate final jpeg buffer size for the given resolution.
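+    // The size is linearly interpolated by pixel area between kMinJpegBufferSize and
+    // ANDROID_JPEG_MAX_SIZE: e.g. a resolution with half the pixels of the largest JPEG stream
+    // gets kMinJpegBufferSize + 0.5 * (maxJpegBufferSize - kMinJpegBufferSize).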
+    float scaleFactor =
+            ((float)(width * height)) / (mMaxJpegResolution.width * mMaxJpegResolution.height);
+    ssize_t jpegBufferSize =
+            scaleFactor * (maxJpegBufferSize - kMinJpegBufferSize) + kMinJpegBufferSize;
+    if (jpegBufferSize > maxJpegBufferSize) {
+        jpegBufferSize = maxJpegBufferSize;
+    }
+
+    return jpegBufferSize;
+}
+
+binder_status_t ExternalCameraDeviceSession::dump(int fd, const char** /*args*/,
+                                                  uint32_t /*numArgs*/) {
+    bool intfLocked = tryLock(mInterfaceLock);
+    if (!intfLocked) {
+        dprintf(fd, "!! ExternalCameraDeviceSession interface may be deadlocked !!\n");
+    }
+
+    if (isClosed()) {
+        dprintf(fd, "External camera %s is closed\n", mCameraId.c_str());
+        return STATUS_OK;
+    }
+
+    bool streaming = false;
+    size_t v4L2BufferCount = 0;
+    SupportedV4L2Format streamingFmt;
+    {
+        bool sessionLocked = tryLock(mLock);
+        if (!sessionLocked) {
+            dprintf(fd, "!! ExternalCameraDeviceSession mLock may be deadlocked !!\n");
+        }
+        streaming = mV4l2Streaming;
+        streamingFmt = mV4l2StreamingFmt;
+        v4L2BufferCount = mV4L2BufferCount;
+
+        if (sessionLocked) {
+            mLock.unlock();
+        }
+    }
+
+    std::unordered_set<uint32_t> inflightFrames;
+    {
+        bool iffLocked = tryLock(mInflightFramesLock);
+        if (!iffLocked) {
+            dprintf(fd,
+                    "!! ExternalCameraDeviceSession mInflightFramesLock may be deadlocked !!\n");
+        }
+        inflightFrames = mInflightFrames;
+        if (iffLocked) {
+            mInflightFramesLock.unlock();
+        }
+    }
+
+    dprintf(fd, "External camera %s V4L2 FD %d, cropping type %s, %s\n", mCameraId.c_str(),
+            mV4l2Fd.get(), (mCroppingType == VERTICAL) ? "vertical" : "horizontal",
+            streaming ? "streaming" : "not streaming");
+
+    if (streaming) {
+        // TODO: dump fps later
+        dprintf(fd, "Current V4L2 format %c%c%c%c %dx%d @ %ffps\n", streamingFmt.fourcc & 0xFF,
+                (streamingFmt.fourcc >> 8) & 0xFF, (streamingFmt.fourcc >> 16) & 0xFF,
+                (streamingFmt.fourcc >> 24) & 0xFF, streamingFmt.width, streamingFmt.height,
+                mV4l2StreamingFps);
+
+        size_t numDequeuedV4l2Buffers = 0;
+        {
+            std::lock_guard<std::mutex> lk(mV4l2BufferLock);
+            numDequeuedV4l2Buffers = mNumDequeuedV4l2Buffers;
+        }
+        dprintf(fd, "V4L2 buffer queue size %zu, dequeued %zu\n", v4L2BufferCount,
+                numDequeuedV4l2Buffers);
+    }
+
+    dprintf(fd, "In-flight frames (not sorted):");
+    for (const auto& frameNumber : inflightFrames) {
+        dprintf(fd, "%d, ", frameNumber);
+    }
+    dprintf(fd, "\n");
+    mOutputThread->dump(fd);
+    dprintf(fd, "\n");
+
+    if (intfLocked) {
+        mInterfaceLock.unlock();
+    }
+
+    return STATUS_OK;
+}
+
+// Start ExternalCameraDeviceSession::BufferRequestThread functions
+ExternalCameraDeviceSession::BufferRequestThread::BufferRequestThread(
+        std::weak_ptr<OutputThreadInterface> parent,
+        std::shared_ptr<ICameraDeviceCallback> callbacks)
+    : mParent(parent), mCallbacks(callbacks) {}
+
+int ExternalCameraDeviceSession::BufferRequestThread::requestBufferStart(
+        const std::vector<HalStreamBuffer>& bufReqs) {
+    if (bufReqs.empty()) {
+        ALOGE("%s: bufReqs is empty!", __FUNCTION__);
+        return -1;
+    }
+
+    {
+        std::lock_guard<std::mutex> lk(mLock);
+        if (mRequestingBuffer) {
+            ALOGE("%s: BufferRequestThread does not support more than one concurrent request!",
+                  __FUNCTION__);
+            return -1;
+        }
+
+        mBufferReqs = bufReqs;
+        mRequestingBuffer = true;
+    }
+    mRequestCond.notify_one();
+    return 0;
+}
+
+int ExternalCameraDeviceSession::BufferRequestThread::waitForBufferRequestDone(
+        std::vector<HalStreamBuffer>* outBufReqs) {
+    std::unique_lock<std::mutex> lk(mLock);
+    if (!mRequestingBuffer) {
+        ALOGE("%s: no pending buffer request!", __FUNCTION__);
+        return -1;
+    }
+
+    if (mPendingReturnBufferReqs.empty()) {
+        std::chrono::milliseconds timeout = std::chrono::milliseconds(kReqProcTimeoutMs);
+        auto st = mRequestDoneCond.wait_for(lk, timeout);
+        if (st == std::cv_status::timeout) {
+            ALOGE("%s: wait for buffer request finish timeout!", __FUNCTION__);
+            return -1;
+        }
+    }
+    mRequestingBuffer = false;
+    *outBufReqs = std::move(mPendingReturnBufferReqs);
+    mPendingReturnBufferReqs.clear();
+    return 0;
+}
+
+void ExternalCameraDeviceSession::BufferRequestThread::waitForNextRequest() {
+    ATRACE_CALL();
+    std::unique_lock<std::mutex> lk(mLock);
+    int waitTimes = 0;
+    while (mBufferReqs.empty()) {
+        if (exitPending()) {
+            return;
+        }
+        auto timeout = std::chrono::milliseconds(kReqWaitTimeoutMs);
+        auto st = mRequestCond.wait_for(lk, timeout);
+        if (st == std::cv_status::timeout) {
+            waitTimes++;
+            if (waitTimes == kReqWaitTimesWarn) {
+                // BufferRequestThread waits indefinitely for a new buffer request, but
+                // periodically logs that it is still waiting.
+                ALOGV("%s: still waiting for new buffer request", __FUNCTION__);
+                waitTimes = 0;
+            }
+        }
+    }
+
+    // Fill in BufferRequest
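+    // A capture request carries at most one output buffer per stream, so exactly one buffer is
+    // requested for every stream referenced by the pending request.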
+    mHalBufferReqs.resize(mBufferReqs.size());
+    for (size_t i = 0; i < mHalBufferReqs.size(); i++) {
+        mHalBufferReqs[i].streamId = mBufferReqs[i].streamId;
+        mHalBufferReqs[i].numBuffersRequested = 1;
+    }
+}
+
+bool ExternalCameraDeviceSession::BufferRequestThread::threadLoop() {
+    waitForNextRequest();
+    if (exitPending()) {
+        return false;
+    }
+
+    ATRACE_BEGIN("AIDL requestStreamBuffers");
+    BufferRequestStatus status;
+    std::vector<StreamBufferRet> bufRets;
+    ScopedAStatus ret = mCallbacks->requestStreamBuffers(mHalBufferReqs, &bufRets, &status);
+    ATRACE_END();
+    if (!ret.isOk()) {
+        ALOGE("%s: Transaction error: %d:%d", __FUNCTION__, ret.getExceptionCode(),
+              ret.getServiceSpecificError());
+        return false;
+    }
+
+    std::unique_lock<std::mutex> lk(mLock);
+    if (status == BufferRequestStatus::OK || status == BufferRequestStatus::FAILED_PARTIAL) {
+        if (bufRets.size() != mHalBufferReqs.size()) {
+            ALOGE("%s: expect %zu buffer requests returned, only got %zu", __FUNCTION__,
+                  mHalBufferReqs.size(), bufRets.size());
+            return false;
+        }
+
+        auto parent = mParent.lock();
+        if (parent == nullptr) {
+            ALOGE("%s: session has been disconnected!", __FUNCTION__);
+            return false;
+        }
+
+        std::vector<int> importedFences;
+        importedFences.resize(bufRets.size());
+        for (size_t i = 0; i < bufRets.size(); i++) {
+            int streamId = bufRets[i].streamId;
+            switch (bufRets[i].val.getTag()) {
+                case StreamBuffersVal::Tag::error:
+                    continue;
+                case StreamBuffersVal::Tag::buffers: {
+                    const std::vector<StreamBuffer>& hBufs =
+                            bufRets[i].val.get<StreamBuffersVal::Tag::buffers>();
+                    if (hBufs.size() != 1) {
+                        ALOGE("%s: expect 1 buffer returned, got %zu!", __FUNCTION__, hBufs.size());
+                        return false;
+                    }
+                    const StreamBuffer& hBuf = hBufs[0];
+
+                    mBufferReqs[i].bufferId = hBuf.bufferId;
+                    // TODO: create a batch import API so we don't need to lock/unlock mCbsLock
+                    // repeatedly?
+                    lk.unlock();
+                    Status s =
+                            parent->importBuffer(streamId, hBuf.bufferId, makeFromAidl(hBuf.buffer),
+                                                 /*out*/ &mBufferReqs[i].bufPtr);
+                    lk.lock();
+
+                    if (s != Status::OK) {
+                        ALOGE("%s: stream %d import buffer failed!", __FUNCTION__, streamId);
+                        cleanupInflightFences(importedFences, i);
+                        return false;
+                    }
+                    if (!sHandleImporter.importFence(makeFromAidl(hBuf.acquireFence),
+                                                     mBufferReqs[i].acquireFence)) {
+                        ALOGE("%s: stream %d import fence failed!", __FUNCTION__, streamId);
+                        cleanupInflightFences(importedFences, i);
+                        return false;
+                    }
+                    importedFences[i] = mBufferReqs[i].acquireFence;
+                } break;
+                default:
+                    ALOGE("%s: Unknown StreamBuffersVal!", __FUNCTION__);
+                    return false;
+            }
+        }
+    } else {
+        ALOGE("%s: requestStreamBuffers call failed!", __FUNCTION__);
+    }
+
+    mPendingReturnBufferReqs = std::move(mBufferReqs);
+    mBufferReqs.clear();
+
+    lk.unlock();
+    mRequestDoneCond.notify_one();
+    return true;
+}
+
+// End ExternalCameraDeviceSession::BufferRequestThread functions
+
+// Start ExternalCameraDeviceSession::OutputThread functions
+
+ExternalCameraDeviceSession::OutputThread::OutputThread(
+        std::weak_ptr<OutputThreadInterface> parent, CroppingType ct,
+        const common::V1_0::helper::CameraMetadata& chars,
+        std::shared_ptr<BufferRequestThread> bufReqThread)
+    : mParent(parent),
+      mCroppingType(ct),
+      mCameraCharacteristics(chars),
+      mBufferRequestThread(bufReqThread) {}
+
+ExternalCameraDeviceSession::OutputThread::~OutputThread() {}
+
+Status ExternalCameraDeviceSession::OutputThread::allocateIntermediateBuffers(
+        const Size& v4lSize, const Size& thumbSize, const std::vector<Stream>& streams,
+        uint32_t blobBufferSize) {
+    std::lock_guard<std::mutex> lk(mBufferLock);
+    if (!mScaledYu12Frames.empty()) {
+        ALOGE("%s: intermediate buffer pool has %zu inflight buffers! (expect 0)", __FUNCTION__,
+              mScaledYu12Frames.size());
+        return Status::INTERNAL_ERROR;
+    }
+
+    // Allocating intermediate YU12 frame
+    if (mYu12Frame == nullptr || mYu12Frame->mWidth != v4lSize.width ||
+        mYu12Frame->mHeight != v4lSize.height) {
+        mYu12Frame.reset();
+        mYu12Frame = std::make_shared<AllocatedFrame>(v4lSize.width, v4lSize.height);
+        int ret = mYu12Frame->allocate(&mYu12FrameLayout);
+        if (ret != 0) {
+            ALOGE("%s: allocating YU12 frame failed!", __FUNCTION__);
+            return Status::INTERNAL_ERROR;
+        }
+    }
+
+    // Allocating intermediate YU12 thumbnail frame
+    if (mYu12ThumbFrame == nullptr || mYu12ThumbFrame->mWidth != thumbSize.width ||
+        mYu12ThumbFrame->mHeight != thumbSize.height) {
+        mYu12ThumbFrame.reset();
+        mYu12ThumbFrame = std::make_shared<AllocatedFrame>(thumbSize.width, thumbSize.height);
+        int ret = mYu12ThumbFrame->allocate(&mYu12ThumbFrameLayout);
+        if (ret != 0) {
+            ALOGE("%s: allocating YU12 thumb frame failed!", __FUNCTION__);
+            return Status::INTERNAL_ERROR;
+        }
+    }
+
+    // Allocating scaled buffers
+    for (const auto& stream : streams) {
+        Size sz = {stream.width, stream.height};
+        if (sz == v4lSize) {
+            continue;  // Don't need an intermediate buffer same size as v4lBuffer
+        }
+        if (mIntermediateBuffers.count(sz) == 0) {
+            // Create new intermediate buffer
+            std::shared_ptr<AllocatedFrame> buf =
+                    std::make_shared<AllocatedFrame>(stream.width, stream.height);
+            int ret = buf->allocate();
+            if (ret != 0) {
+                ALOGE("%s: allocating intermediate YU12 frame %dx%d failed!", __FUNCTION__,
+                      stream.width, stream.height);
+                return Status::INTERNAL_ERROR;
+            }
+            mIntermediateBuffers[sz] = buf;
+        }
+    }
+
+    // Remove unconfigured buffers
+    auto it = mIntermediateBuffers.begin();
+    while (it != mIntermediateBuffers.end()) {
+        bool configured = false;
+        auto sz = it->first;
+        for (const auto& stream : streams) {
+            if (stream.width == sz.width && stream.height == sz.height) {
+                configured = true;
+                break;
+            }
+        }
+        if (configured) {
+            it++;
+        } else {
+            it = mIntermediateBuffers.erase(it);
+        }
+    }
+
+    // Allocate mute test pattern frame
+    mMuteTestPatternFrame.resize(mYu12Frame->mWidth * mYu12Frame->mHeight * 3);
+
+    mBlobBufferSize = blobBufferSize;
+    return Status::OK;
+}
+
+Status ExternalCameraDeviceSession::OutputThread::submitRequest(
+        const std::shared_ptr<HalRequest>& req) {
+    std::unique_lock<std::mutex> lk(mRequestListLock);
+    mRequestList.push_back(req);
+    lk.unlock();
+    mRequestCond.notify_one();
+    return Status::OK;
+}
+
+void ExternalCameraDeviceSession::OutputThread::flush() {
+    ATRACE_CALL();
+    auto parent = mParent.lock();
+    if (parent == nullptr) {
+        ALOGE("%s: session has been disconnected!", __FUNCTION__);
+        return;
+    }
+
+    std::unique_lock<std::mutex> lk(mRequestListLock);
+    std::list<std::shared_ptr<HalRequest>> reqs = std::move(mRequestList);
+    mRequestList.clear();
+    if (mProcessingRequest) {
+        auto timeout = std::chrono::seconds(kFlushWaitTimeoutSec);
+        auto st = mRequestDoneCond.wait_for(lk, timeout);
+        if (st == std::cv_status::timeout) {
+            ALOGE("%s: wait for inflight request finish timeout!", __FUNCTION__);
+        }
+    }
+
+    ALOGV("%s: flushing inflight requests", __FUNCTION__);
+    lk.unlock();
+    for (const auto& req : reqs) {
+        parent->processCaptureRequestError(req);
+    }
+}
+
+void ExternalCameraDeviceSession::OutputThread::dump(int fd) {
+    std::lock_guard<std::mutex> lk(mRequestListLock);
+    if (mProcessingRequest) {
+        dprintf(fd, "OutputThread processing frame %d\n", mProcessingFrameNumber);
+    } else {
+        dprintf(fd, "OutputThread not processing any frames\n");
+    }
+    dprintf(fd, "OutputThread request list contains frame: ");
+    for (const auto& req : mRequestList) {
+        dprintf(fd, "%d, ", req->frameNumber);
+    }
+    dprintf(fd, "\n");
+}
+
+void ExternalCameraDeviceSession::OutputThread::setExifMakeModel(const std::string& make,
+                                                                 const std::string& model) {
+    mExifMake = make;
+    mExifModel = model;
+}
+
+std::list<std::shared_ptr<HalRequest>>
+ExternalCameraDeviceSession::OutputThread::switchToOffline() {
+    ATRACE_CALL();
+    auto parent = mParent.lock();
+    if (parent == nullptr) {
+        ALOGE("%s: session has been disconnected!", __FUNCTION__);
+        return {};
+    }
+
+    std::unique_lock<std::mutex> lk(mRequestListLock);
+    std::list<std::shared_ptr<HalRequest>> reqs = std::move(mRequestList);
+    mRequestList.clear();
+    if (mProcessingRequest) {
+        auto timeout = std::chrono::seconds(kFlushWaitTimeoutSec);
+        auto st = mRequestDoneCond.wait_for(lk, timeout);
+        if (st == std::cv_status::timeout) {
+            ALOGE("%s: wait for inflight request finish timeout!", __FUNCTION__);
+        }
+    }
+    lk.unlock();
+    clearIntermediateBuffers();
+    ALOGV("%s: returning %zu request for offline processing", __FUNCTION__, reqs.size());
+    return reqs;
+}
+
+int ExternalCameraDeviceSession::OutputThread::requestBufferStart(
+        const std::vector<HalStreamBuffer>& bufs) {
+    if (mBufferRequestThread == nullptr) {
+        return 0;
+    }
+    return mBufferRequestThread->requestBufferStart(bufs);
+}
+
+int ExternalCameraDeviceSession::OutputThread::waitForBufferRequestDone(
+        std::vector<HalStreamBuffer>* outBufs) {
+    if (mBufferRequestThread == nullptr) {
+        return 0;
+    }
+    return mBufferRequestThread->waitForBufferRequestDone(outBufs);
+}
+
+void ExternalCameraDeviceSession::OutputThread::waitForNextRequest(
+        std::shared_ptr<HalRequest>* out) {
+    ATRACE_CALL();
+    if (out == nullptr) {
+        ALOGE("%s: out is null", __FUNCTION__);
+        return;
+    }
+
+    std::unique_lock<std::mutex> lk(mRequestListLock);
+    int waitTimes = 0;
+    while (mRequestList.empty()) {
+        if (exitPending()) {
+            return;
+        }
+        auto timeout = std::chrono::milliseconds(kReqWaitTimeoutMs);
+        auto st = mRequestCond.wait_for(lk, timeout);
+        if (st == std::cv_status::timeout) {
+            waitTimes++;
+            if (waitTimes == kReqWaitTimesMax) {
+                // no new request, return
+                return;
+            }
+        }
+    }
+    *out = mRequestList.front();
+    mRequestList.pop_front();
+    mProcessingRequest = true;
+    mProcessingFrameNumber = (*out)->frameNumber;
+}
+
+void ExternalCameraDeviceSession::OutputThread::signalRequestDone() {
+    std::unique_lock<std::mutex> lk(mRequestListLock);
+    mProcessingRequest = false;
+    mProcessingFrameNumber = 0;
+    lk.unlock();
+    mRequestDoneCond.notify_one();
+}
+
+int ExternalCameraDeviceSession::OutputThread::cropAndScaleLocked(
+        std::shared_ptr<AllocatedFrame>& in, const Size& outSz, YCbCrLayout* out) {
+    Size inSz = {in->mWidth, in->mHeight};
+
+    int ret;
+    if (inSz == outSz) {
+        ret = in->getLayout(out);
+        if (ret != 0) {
+            ALOGE("%s: failed to get input image layout", __FUNCTION__);
+            return ret;
+        }
+        return ret;
+    }
+
+    // Cropping to output aspect ratio
+    IMapper::Rect inputCrop;
+    ret = getCropRect(mCroppingType, inSz, outSz, &inputCrop);
+    if (ret != 0) {
+        ALOGE("%s: failed to compute crop rect for output size %dx%d", __FUNCTION__, outSz.width,
+              outSz.height);
+        return ret;
+    }
+
+    YCbCrLayout croppedLayout;
+    ret = in->getCroppedLayout(inputCrop, &croppedLayout);
+    if (ret != 0) {
+        ALOGE("%s: failed to crop input image %dx%d to output size %dx%d", __FUNCTION__, inSz.width,
+              inSz.height, outSz.width, outSz.height);
+        return ret;
+    }
+
+    if ((mCroppingType == VERTICAL && inSz.width == outSz.width) ||
+        (mCroppingType == HORIZONTAL && inSz.height == outSz.height)) {
+        // No scale is needed
+        *out = croppedLayout;
+        return 0;
+    }
+
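+    // Scaled intermediate frames are memoized per output size in mScaledYu12Frames for the
+    // duration of one request (the map is cleared after all output buffers are filled), so
+    // multiple streams with the same resolution reuse a single scaling pass.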
+    auto it = mScaledYu12Frames.find(outSz);
+    std::shared_ptr<AllocatedFrame> scaledYu12Buf;
+    if (it != mScaledYu12Frames.end()) {
+        scaledYu12Buf = it->second;
+    } else {
+        it = mIntermediateBuffers.find(outSz);
+        if (it == mIntermediateBuffers.end()) {
+            ALOGE("%s: failed to find intermediate buffer size %dx%d", __FUNCTION__, outSz.width,
+                  outSz.height);
+            return -1;
+        }
+        scaledYu12Buf = it->second;
+    }
+    // Scale
+    YCbCrLayout outLayout;
+    ret = scaledYu12Buf->getLayout(&outLayout);
+    if (ret != 0) {
+        ALOGE("%s: failed to get output buffer layout", __FUNCTION__);
+        return ret;
+    }
+
+    ret = libyuv::I420Scale(
+            static_cast<uint8_t*>(croppedLayout.y), croppedLayout.yStride,
+            static_cast<uint8_t*>(croppedLayout.cb), croppedLayout.cStride,
+            static_cast<uint8_t*>(croppedLayout.cr), croppedLayout.cStride, inputCrop.width,
+            inputCrop.height, static_cast<uint8_t*>(outLayout.y), outLayout.yStride,
+            static_cast<uint8_t*>(outLayout.cb), outLayout.cStride,
+            static_cast<uint8_t*>(outLayout.cr), outLayout.cStride, outSz.width, outSz.height,
+            // TODO: b/72261744 see if we can use better filter without losing too much perf
+            libyuv::FilterMode::kFilterNone);
+
+    if (ret != 0) {
+        ALOGE("%s: failed to scale buffer from %dx%d to %dx%d. Ret %d", __FUNCTION__,
+              inputCrop.width, inputCrop.height, outSz.width, outSz.height, ret);
+        return ret;
+    }
+
+    *out = outLayout;
+    mScaledYu12Frames.insert({outSz, scaledYu12Buf});
+    return 0;
+}
+
+int ExternalCameraDeviceSession::OutputThread::cropAndScaleThumbLocked(
+        std::shared_ptr<AllocatedFrame>& in, const Size& outSz, YCbCrLayout* out) {
+    Size inSz{in->mWidth, in->mHeight};
+
+    if ((outSz.width * outSz.height) > (mYu12ThumbFrame->mWidth * mYu12ThumbFrame->mHeight)) {
+        ALOGE("%s: Requested thumbnail size too big (%d,%d) > (%d,%d)", __FUNCTION__, outSz.width,
+              outSz.height, mYu12ThumbFrame->mWidth, mYu12ThumbFrame->mHeight);
+        return -1;
+    }
+
+    int ret;
+
+    /* This will crop-and-zoom the input YUV frame to the thumbnail size
+     * Based on the following logic:
+     *  1) Square pixels come in, square pixels come out, therefore single
+     *  scale factor is computed to either make input bigger or smaller
+     *  depending on if we are upscaling or downscaling
+     *  2) That single scale factor would either make height too tall or width
+     *  too wide so we need to crop the input either horizontally or vertically
+     *  but not both
+     */
+
+    /* Convert the input and output dimensions into floats for ease of math */
+    float fWin = static_cast<float>(inSz.width);
+    float fHin = static_cast<float>(inSz.height);
+    float fWout = static_cast<float>(outSz.width);
+    float fHout = static_cast<float>(outSz.height);
+
+    /* Compute the one scale factor from (1) above, it will be the smaller of
+     * the two possibilities. */
+    float scaleFactor = std::min(fHin / fHout, fWin / fWout);
+
+    /* Since we are crop-and-zooming (as opposed to letter/pillar boxing) we can
+     * simply multiply the output by our scaleFactor to get the cropped input
+     * size. Note that at least one of {fWcrop, fHcrop} is going to wind up
+     * being {fWin, fHin} respectively because fHout or fWout cancels out the
+     * scaleFactor calculation above.
+     *
+     * Specifically:
+     *  if ( fHin / fHout ) < ( fWin / fWout ) we crop the sides off
+     * input, in which case
+     *    scaleFactor = fHin / fHout
+     *    fWcrop = fHin / fHout * fWout
+     *    fHcrop = fHin
+     *
+     * Note that fWcrop <= fWin ( because ( fHin / fHout ) * fWout < fWin, which
+     * is just the inequality above with both sides multiplied by fWout
+     *
+     * on the other hand if ( fWin / fWout ) < ( fHin / fHout) we crop the top
+     * and the bottom off of input, and
+     *    scaleFactor = fWin / fWout
+     *    fWcrop = fWin
+     *    fHCrop = fWin / fWout * fHout
+     */
+    float fWcrop = scaleFactor * fWout;
+    float fHcrop = scaleFactor * fHout;
+
+    /* Convert to integer and truncate to an even number */
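+    /* 2 * (int)(x / 2) rounds down to the nearest even value (e.g. 639.7 -> 638); even
+     * dimensions keep the chroma planes of the 4:2:0 intermediate frame properly aligned. */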
+    Size cropSz = {.width = 2 * static_cast<int32_t>(fWcrop / 2.0f),
+                   .height = 2 * static_cast<int32_t>(fHcrop / 2.0f)};
+
+    /* Convert to a centered rectangle with even top/left */
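+    /* 2 * ((in - crop) / 4) is half of the crop margin rounded down to an even offset, which
+     * centers the crop rectangle while keeping the top/left coordinates even. */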
+    IMapper::Rect inputCrop{.left = 2 * static_cast<int32_t>((inSz.width - cropSz.width) / 4),
+                            .top = 2 * static_cast<int32_t>((inSz.height - cropSz.height) / 4),
+                            .width = static_cast<int32_t>(cropSz.width),
+                            .height = static_cast<int32_t>(cropSz.height)};
+
+    if ((inputCrop.top < 0) || (inputCrop.top >= static_cast<int32_t>(inSz.height)) ||
+        (inputCrop.left < 0) || (inputCrop.left >= static_cast<int32_t>(inSz.width)) ||
+        (inputCrop.width <= 0) ||
+        (inputCrop.width + inputCrop.left > static_cast<int32_t>(inSz.width)) ||
+        (inputCrop.height <= 0) ||
+        (inputCrop.height + inputCrop.top > static_cast<int32_t>(inSz.height))) {
+        ALOGE("%s: came up with really wrong crop rectangle", __FUNCTION__);
+        ALOGE("%s: input layout %dx%d to for output size %dx%d", __FUNCTION__, inSz.width,
+              inSz.height, outSz.width, outSz.height);
+        ALOGE("%s: computed input crop +%d,+%d %dx%d", __FUNCTION__, inputCrop.left, inputCrop.top,
+              inputCrop.width, inputCrop.height);
+        return -1;
+    }
+
+    YCbCrLayout inputLayout;
+    ret = in->getCroppedLayout(inputCrop, &inputLayout);
+    if (ret != 0) {
+        ALOGE("%s: failed to crop input layout %dx%d to for output size %dx%d", __FUNCTION__,
+              inSz.width, inSz.height, outSz.width, outSz.height);
+        ALOGE("%s: computed input crop +%d,+%d %dx%d", __FUNCTION__, inputCrop.left, inputCrop.top,
+              inputCrop.width, inputCrop.height);
+        return ret;
+    }
+    ALOGV("%s: crop input layout %dx%d to for output size %dx%d", __FUNCTION__, inSz.width,
+          inSz.height, outSz.width, outSz.height);
+    ALOGV("%s: computed input crop +%d,+%d %dx%d", __FUNCTION__, inputCrop.left, inputCrop.top,
+          inputCrop.width, inputCrop.height);
+
+    // Scale
+    YCbCrLayout outFullLayout;
+
+    ret = mYu12ThumbFrame->getLayout(&outFullLayout);
+    if (ret != 0) {
+        ALOGE("%s: failed to get output buffer layout", __FUNCTION__);
+        return ret;
+    }
+
+    ret = libyuv::I420Scale(static_cast<uint8_t*>(inputLayout.y), inputLayout.yStride,
+                            static_cast<uint8_t*>(inputLayout.cb), inputLayout.cStride,
+                            static_cast<uint8_t*>(inputLayout.cr), inputLayout.cStride,
+                            inputCrop.width, inputCrop.height,
+                            static_cast<uint8_t*>(outFullLayout.y), outFullLayout.yStride,
+                            static_cast<uint8_t*>(outFullLayout.cb), outFullLayout.cStride,
+                            static_cast<uint8_t*>(outFullLayout.cr), outFullLayout.cStride,
+                            outSz.width, outSz.height, libyuv::FilterMode::kFilterNone);
+
+    if (ret != 0) {
+        ALOGE("%s: failed to scale buffer from %dx%d to %dx%d. Ret %d", __FUNCTION__,
+              inputCrop.width, inputCrop.height, outSz.width, outSz.height, ret);
+        return ret;
+    }
+
+    *out = outFullLayout;
+    return 0;
+}
+
+int ExternalCameraDeviceSession::OutputThread::createJpegLocked(
+        HalStreamBuffer& halBuf, const common::V1_0::helper::CameraMetadata& setting) {
+    ATRACE_CALL();
+    int ret;
+    auto lfail = [&](auto... args) {
+        ALOGE(args...);
+
+        return 1;
+    };
+    auto parent = mParent.lock();
+    if (parent == nullptr) {
+        ALOGE("%s: session has been disconnected!", __FUNCTION__);
+        return 1;
+    }
+
+    ALOGV("%s: HAL buffer sid: %d bid: %" PRIu64 " w: %u h: %u", __FUNCTION__, halBuf.streamId,
+          static_cast<uint64_t>(halBuf.bufferId), halBuf.width, halBuf.height);
+    ALOGV("%s: HAL buffer fmt: %x usage: %" PRIx64 " ptr: %p", __FUNCTION__, halBuf.format,
+          static_cast<uint64_t>(halBuf.usage), halBuf.bufPtr);
+    ALOGV("%s: YV12 buffer %d x %d", __FUNCTION__, mYu12Frame->mWidth, mYu12Frame->mHeight);
+
+    int jpegQuality, thumbQuality;
+    Size thumbSize;
+    bool outputThumbnail = true;
+
+    if (setting.exists(ANDROID_JPEG_QUALITY)) {
+        camera_metadata_ro_entry entry = setting.find(ANDROID_JPEG_QUALITY);
+        jpegQuality = entry.data.u8[0];
+    } else {
+        return lfail("%s: ANDROID_JPEG_QUALITY not set", __FUNCTION__);
+    }
+
+    if (setting.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
+        camera_metadata_ro_entry entry = setting.find(ANDROID_JPEG_THUMBNAIL_QUALITY);
+        thumbQuality = entry.data.u8[0];
+    } else {
+        return lfail("%s: ANDROID_JPEG_THUMBNAIL_QUALITY not set", __FUNCTION__);
+    }
+
+    if (setting.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
+        camera_metadata_ro_entry entry = setting.find(ANDROID_JPEG_THUMBNAIL_SIZE);
+        thumbSize = Size{.width = entry.data.i32[0], .height = entry.data.i32[1]};
+        if (thumbSize.width == 0 && thumbSize.height == 0) {
+            outputThumbnail = false;
+        }
+    } else {
+        return lfail("%s: ANDROID_JPEG_THUMBNAIL_SIZE not set", __FUNCTION__);
+    }
+
+    /* Cropped and scaled YU12 buffer for main and thumbnail */
+    YCbCrLayout yu12Main;
+    Size jpegSize{halBuf.width, halBuf.height};
+
+    /* Compute temporary buffer sizes accounting for the following:
+     * thumbnail can't exceed APP1 size of 64K
+     * main image needs to hold APP1, headers, and at most a poorly
+     * compressed image */
+    const ssize_t maxThumbCodeSize = 64 * 1024;
+    const ssize_t maxJpegCodeSize =
+            mBlobBufferSize == 0 ? parent->getJpegBufferSize(jpegSize.width, jpegSize.height)
+                                 : mBlobBufferSize;
+
+    /* Check that getJpegBufferSize did not return an error */
+    if (maxJpegCodeSize < 0) {
+        return lfail("%s: getJpegBufferSize returned %zd", __FUNCTION__, maxJpegCodeSize);
+    }
+
+    /* Hold actual thumbnail and main image code sizes */
+    size_t thumbCodeSize = 0, jpegCodeSize = 0;
+    /* Temporary thumbnail code buffer */
+    std::vector<uint8_t> thumbCode(outputThumbnail ? maxThumbCodeSize : 0);
+
+    YCbCrLayout yu12Thumb;
+    if (outputThumbnail) {
+        ret = cropAndScaleThumbLocked(mYu12Frame, thumbSize, &yu12Thumb);
+
+        if (ret != 0) {
+            return lfail("%s: crop and scale thumbnail failed!", __FUNCTION__);
+        }
+    }
+
+    /* Scale and crop main jpeg */
+    ret = cropAndScaleLocked(mYu12Frame, jpegSize, &yu12Main);
+
+    if (ret != 0) {
+        return lfail("%s: crop and scale main failed!", __FUNCTION__);
+    }
+
+    /* Encode the thumbnail image */
+    if (outputThumbnail) {
+        ret = encodeJpegYU12(thumbSize, yu12Thumb, thumbQuality, 0, 0, &thumbCode[0],
+                             maxThumbCodeSize, thumbCodeSize);
+
+        if (ret != 0) {
+            return lfail("%s: thumbnail encodeJpegYU12 failed with %d", __FUNCTION__, ret);
+        }
+    }
+
+    /* Combine camera characteristics with request settings to form EXIF
+     * metadata */
+    common::V1_0::helper::CameraMetadata meta(mCameraCharacteristics);
+    meta.append(setting);
+
+    /* Generate EXIF object */
+    std::unique_ptr<ExifUtils> utils(ExifUtils::create());
+    /* Make sure it's initialized */
+    utils->initialize();
+
+    utils->setFromMetadata(meta, jpegSize.width, jpegSize.height);
+    utils->setMake(mExifMake);
+    utils->setModel(mExifModel);
+
+    ret = utils->generateApp1(outputThumbnail ? &thumbCode[0] : nullptr, thumbCodeSize);
+
+    if (!ret) {
+        return lfail("%s: generating APP1 failed", __FUNCTION__);
+    }
+
+    /* Get internal buffer */
+    size_t exifDataSize = utils->getApp1Length();
+    const uint8_t* exifData = utils->getApp1Buffer();
+
+    /* Lock the HAL jpeg code buffer */
+    void* bufPtr = sHandleImporter.lock(*(halBuf.bufPtr), static_cast<uint64_t>(halBuf.usage),
+                                        maxJpegCodeSize);
+
+    if (!bufPtr) {
+        return lfail("%s: could not lock %zu bytes", __FUNCTION__, maxJpegCodeSize);
+    }
+
+    /* Encode the main jpeg image */
+    ret = encodeJpegYU12(jpegSize, yu12Main, jpegQuality, exifData, exifDataSize, bufPtr,
+                         maxJpegCodeSize, jpegCodeSize);
+
+    /* TODO: Not sure this belongs here, maybe better to pass jpegCodeSize out
+     * and do this when returning buffer to parent */
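+    /* The CameraBlob transport header is written at the very end of the fixed-size BLOB gralloc
+     * buffer; the framework reads it from there to learn the actual encoded JPEG size. */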
+    CameraBlob blob{CameraBlobId::JPEG, static_cast<int32_t>(jpegCodeSize)};
+    void* blobDst = reinterpret_cast<void*>(reinterpret_cast<uintptr_t>(bufPtr) + maxJpegCodeSize -
+                                            sizeof(CameraBlob));
+    memcpy(blobDst, &blob, sizeof(CameraBlob));
+
+    /* Unlock the HAL jpeg code buffer */
+    int relFence = sHandleImporter.unlock(*(halBuf.bufPtr));
+    if (relFence >= 0) {
+        halBuf.acquireFence = relFence;
+    }
+
+    /* Check if our JPEG actually succeeded */
+    if (ret != 0) {
+        return lfail("%s: encodeJpegYU12 failed with %d", __FUNCTION__, ret);
+    }
+
+    ALOGV("%s: encoded JPEG (ret:%d) with Q:%d max size: %zu", __FUNCTION__, ret, jpegQuality,
+          maxJpegCodeSize);
+
+    return 0;
+}
+
+void ExternalCameraDeviceSession::OutputThread::clearIntermediateBuffers() {
+    std::lock_guard<std::mutex> lk(mBufferLock);
+    mYu12Frame.reset();
+    mYu12ThumbFrame.reset();
+    mIntermediateBuffers.clear();
+    mMuteTestPatternFrame.clear();
+    mBlobBufferSize = 0;
+}
+
+bool ExternalCameraDeviceSession::OutputThread::threadLoop() {
+    std::shared_ptr<HalRequest> req;
+    auto parent = mParent.lock();
+    if (parent == nullptr) {
+        ALOGE("%s: session has been disconnected!", __FUNCTION__);
+        return false;
+    }
+
+    // TODO: maybe we need to set up a sensor thread to dq/enq V4L2 frames regularly to prevent
+    //       the V4L2 buffer queue from filling up with stale buffers when the app doesn't
+    //       program a preview request
+    waitForNextRequest(&req);
+    if (req == nullptr) {
+        // No new request, wait again
+        return true;
+    }
+
+    auto onDeviceError = [&](auto... args) {
+        ALOGE(args...);
+        parent->notifyError(req->frameNumber, /*stream*/ -1, ErrorCode::ERROR_DEVICE);
+        signalRequestDone();
+        return false;
+    };
+
+    if (req->frameIn->mFourcc != V4L2_PIX_FMT_MJPEG && req->frameIn->mFourcc != V4L2_PIX_FMT_Z16) {
+        return onDeviceError("%s: do not support V4L2 format %c%c%c%c", __FUNCTION__,
+                             req->frameIn->mFourcc & 0xFF, (req->frameIn->mFourcc >> 8) & 0xFF,
+                             (req->frameIn->mFourcc >> 16) & 0xFF,
+                             (req->frameIn->mFourcc >> 24) & 0xFF);
+    }
+
+    int res = requestBufferStart(req->buffers);
+    if (res != 0) {
+        ALOGE("%s: send BufferRequest failed! res %d", __FUNCTION__, res);
+        return onDeviceError("%s: failed to send buffer request!", __FUNCTION__);
+    }
+
+    std::unique_lock<std::mutex> lk(mBufferLock);
+    // Convert input V4L2 frame to YU12 of the same size
+    // TODO: see if we can save some computation by converting to YV12 here
+    uint8_t* inData;
+    size_t inDataSize;
+    if (req->frameIn->getData(&inData, &inDataSize) != 0) {
+        lk.unlock();
+        return onDeviceError("%s: V4L2 buffer map failed", __FUNCTION__);
+    }
+
+    // Process camera mute state
+    auto testPatternMode = req->setting.find(ANDROID_SENSOR_TEST_PATTERN_MODE);
+    if (testPatternMode.count == 1) {
+        if (mCameraMuted != (testPatternMode.data.u8[0] != ANDROID_SENSOR_TEST_PATTERN_MODE_OFF)) {
+            mCameraMuted = !mCameraMuted;
+            // Get solid color for test pattern, if any was set
+            if (testPatternMode.data.u8[0] == ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR) {
+                auto entry = req->setting.find(ANDROID_SENSOR_TEST_PATTERN_DATA);
+                if (entry.count == 4) {
+                    // Update the mute frame if the pattern color has changed
+                    if (memcmp(entry.data.i32, mTestPatternData, sizeof(mTestPatternData)) != 0) {
+                        memcpy(mTestPatternData, entry.data.i32, sizeof(mTestPatternData));
+                        // Fill the mute frame with the solid color, using only the 8 MSBs of
+                        // each RGGB channel as RGB
+                        for (size_t i = 0; i < mMuteTestPatternFrame.size(); i += 3) {
+                            mMuteTestPatternFrame[i] = entry.data.i32[0] >> 24;
+                            mMuteTestPatternFrame[i + 1] = entry.data.i32[1] >> 24;
+                            mMuteTestPatternFrame[i + 2] = entry.data.i32[3] >> 24;
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    // TODO: in some special case maybe we can decode jpg directly to gralloc output?
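+    // When the camera is muted, the 3-bytes-per-pixel solid-color test pattern frame is
+    // converted into the intermediate I420 frame (libyuv::FOURCC_RAW, a 24-bit RGB layout);
+    // otherwise the MJPEG payload from the V4L2 buffer is decoded into the same frame.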
+    if (req->frameIn->mFourcc == V4L2_PIX_FMT_MJPEG) {
+        ATRACE_BEGIN("MJPGtoI420");
+        res = 0;
+        if (mCameraMuted) {
+            res = libyuv::ConvertToI420(
+                    mMuteTestPatternFrame.data(), mMuteTestPatternFrame.size(),
+                    static_cast<uint8_t*>(mYu12FrameLayout.y), mYu12FrameLayout.yStride,
+                    static_cast<uint8_t*>(mYu12FrameLayout.cb), mYu12FrameLayout.cStride,
+                    static_cast<uint8_t*>(mYu12FrameLayout.cr), mYu12FrameLayout.cStride, 0, 0,
+                    mYu12Frame->mWidth, mYu12Frame->mHeight, mYu12Frame->mWidth,
+                    mYu12Frame->mHeight, libyuv::kRotate0, libyuv::FOURCC_RAW);
+        } else {
+            res = libyuv::MJPGToI420(
+                    inData, inDataSize, static_cast<uint8_t*>(mYu12FrameLayout.y),
+                    mYu12FrameLayout.yStride, static_cast<uint8_t*>(mYu12FrameLayout.cb),
+                    mYu12FrameLayout.cStride, static_cast<uint8_t*>(mYu12FrameLayout.cr),
+                    mYu12FrameLayout.cStride, mYu12Frame->mWidth, mYu12Frame->mHeight,
+                    mYu12Frame->mWidth, mYu12Frame->mHeight);
+        }
+        ATRACE_END();
+
+        if (res != 0) {
+            // For some webcams, the first few V4L2 frames might be malformed...
+            ALOGE("%s: Convert V4L2 frame to YU12 failed! res %d", __FUNCTION__, res);
+            lk.unlock();
+            Status st = parent->processCaptureRequestError(req);
+            if (st != Status::OK) {
+                return onDeviceError("%s: failed to process capture request error!", __FUNCTION__);
+            }
+            signalRequestDone();
+            return true;
+        }
+    }
+
+    ATRACE_BEGIN("Wait for BufferRequest done");
+    res = waitForBufferRequestDone(&req->buffers);
+    ATRACE_END();
+
+    if (res != 0) {
+        ALOGE("%s: wait for BufferRequest done failed! res %d", __FUNCTION__, res);
+        lk.unlock();
+        return onDeviceError("%s: failed to process buffer request error!", __FUNCTION__);
+    }
+
+    ALOGV("%s processing new request", __FUNCTION__);
+    const int kSyncWaitTimeoutMs = 500;
+    for (auto& halBuf : req->buffers) {
+        if (*(halBuf.bufPtr) == nullptr) {
+            ALOGW("%s: buffer for stream %d missing", __FUNCTION__, halBuf.streamId);
+            halBuf.fenceTimeout = true;
+        } else if (halBuf.acquireFence >= 0) {
+            int ret = sync_wait(halBuf.acquireFence, kSyncWaitTimeoutMs);
+            if (ret) {
+                halBuf.fenceTimeout = true;
+            } else {
+                ::close(halBuf.acquireFence);
+                halBuf.acquireFence = -1;
+            }
+        }
+
+        if (halBuf.fenceTimeout) {
+            continue;
+        }
+
+        // Gralloc lockYCbCr the buffer
+        switch (halBuf.format) {
+            case PixelFormat::BLOB: {
+                int ret = createJpegLocked(halBuf, req->setting);
+
+                if (ret != 0) {
+                    lk.unlock();
+                    return onDeviceError("%s: createJpegLocked failed with %d", __FUNCTION__, ret);
+                }
+            } break;
+            case PixelFormat::Y16: {
+                void* outLayout = sHandleImporter.lock(
+                        *(halBuf.bufPtr), static_cast<uint64_t>(halBuf.usage), inDataSize);
+
+                std::memcpy(outLayout, inData, inDataSize);
+
+                int relFence = sHandleImporter.unlock(*(halBuf.bufPtr));
+                if (relFence >= 0) {
+                    halBuf.acquireFence = relFence;
+                }
+            } break;
+            case PixelFormat::YCBCR_420_888:
+            case PixelFormat::YV12: {
+                IMapper::Rect outRect{0, 0, static_cast<int32_t>(halBuf.width),
+                                      static_cast<int32_t>(halBuf.height)};
+                YCbCrLayout outLayout = sHandleImporter.lockYCbCr(
+                        *(halBuf.bufPtr), static_cast<uint64_t>(halBuf.usage), outRect);
+                ALOGV("%s: outLayout y %p cb %p cr %p y_str %d c_str %d c_step %d", __FUNCTION__,
+                      outLayout.y, outLayout.cb, outLayout.cr, outLayout.yStride, outLayout.cStride,
+                      outLayout.chromaStep);
+
+                // Convert to output buffer size/format
+                uint32_t outputFourcc = getFourCcFromLayout(outLayout);
+                ALOGV("%s: converting to format %c%c%c%c", __FUNCTION__, outputFourcc & 0xFF,
+                      (outputFourcc >> 8) & 0xFF, (outputFourcc >> 16) & 0xFF,
+                      (outputFourcc >> 24) & 0xFF);
+
+                YCbCrLayout cropAndScaled;
+                ATRACE_BEGIN("cropAndScaleLocked");
+                int ret = cropAndScaleLocked(mYu12Frame, Size{halBuf.width, halBuf.height},
+                                             &cropAndScaled);
+                ATRACE_END();
+                if (ret != 0) {
+                    lk.unlock();
+                    return onDeviceError("%s: crop and scale failed!", __FUNCTION__);
+                }
+
+                Size sz{halBuf.width, halBuf.height};
+                ATRACE_BEGIN("formatConvert");
+                ret = formatConvert(cropAndScaled, outLayout, sz, outputFourcc);
+                ATRACE_END();
+                if (ret != 0) {
+                    lk.unlock();
+                    return onDeviceError("%s: format conversion failed!", __FUNCTION__);
+                }
+                int relFence = sHandleImporter.unlock(*(halBuf.bufPtr));
+                if (relFence >= 0) {
+                    halBuf.acquireFence = relFence;
+                }
+            } break;
+            default:
+                lk.unlock();
+                return onDeviceError("%s: unknown output format %x", __FUNCTION__, halBuf.format);
+        }
+    }  // for each buffer
+    mScaledYu12Frames.clear();
+
+    // Don't hold the lock while calling back to parent
+    lk.unlock();
+    Status st = parent->processCaptureResult(req);
+    if (st != Status::OK) {
+        return onDeviceError("%s: failed to process capture result!", __FUNCTION__);
+    }
+    signalRequestDone();
+    return true;
+}
+
+// End ExternalCameraDeviceSession::OutputThread functions
+
+}  // namespace implementation
+}  // namespace device
+}  // namespace camera
+}  // namespace hardware
+}  // namespace android
diff --git a/camera/device/default/ExternalCameraDeviceSession.h b/camera/device/default/ExternalCameraDeviceSession.h
new file mode 100644
index 0000000..5d42092
--- /dev/null
+++ b/camera/device/default/ExternalCameraDeviceSession.h
@@ -0,0 +1,399 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef HARDWARE_INTERFACES_CAMERA_DEVICE_DEFAULT_EXTERNALCAMERADEVICESESSION_H_
+#define HARDWARE_INTERFACES_CAMERA_DEVICE_DEFAULT_EXTERNALCAMERADEVICESESSION_H_
+
+#include <ExternalCameraUtils.h>
+#include <SimpleThread.h>
+#include <aidl/android/hardware/camera/common/Status.h>
+#include <aidl/android/hardware/camera/device/BnCameraDeviceSession.h>
+#include <aidl/android/hardware/camera/device/BufferRequest.h>
+#include <aidl/android/hardware/camera/device/Stream.h>
+#include <android-base/unique_fd.h>
+#include <fmq/AidlMessageQueue.h>
+#include <utils/Thread.h>
+#include <deque>
+#include <list>
+
+namespace android {
+namespace hardware {
+namespace camera {
+namespace device {
+namespace implementation {
+
+using ::aidl::android::hardware::camera::common::Status;
+using ::aidl::android::hardware::camera::device::BnCameraDeviceSession;
+using ::aidl::android::hardware::camera::device::BufferCache;
+using ::aidl::android::hardware::camera::device::BufferRequest;
+using ::aidl::android::hardware::camera::device::CameraMetadata;
+using ::aidl::android::hardware::camera::device::CameraOfflineSessionInfo;
+using ::aidl::android::hardware::camera::device::CaptureRequest;
+using ::aidl::android::hardware::camera::device::HalStream;
+using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
+using ::aidl::android::hardware::camera::device::ICameraOfflineSession;
+using ::aidl::android::hardware::camera::device::RequestTemplate;
+using ::aidl::android::hardware::camera::device::Stream;
+using ::aidl::android::hardware::camera::device::StreamConfiguration;
+using ::aidl::android::hardware::common::fmq::MQDescriptor;
+using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
+using ::android::AidlMessageQueue;
+using ::android::base::unique_fd;
+using ::android::hardware::camera::common::helper::SimpleThread;
+using ::android::hardware::camera::external::common::ExternalCameraConfig;
+using ::android::hardware::camera::external::common::SizeHasher;
+using ::ndk::ScopedAStatus;
+
+class ExternalCameraDeviceSession : public BnCameraDeviceSession, public OutputThreadInterface {
+  public:
+    ExternalCameraDeviceSession(const std::shared_ptr<ICameraDeviceCallback>&,
+                                const ExternalCameraConfig& cfg,
+                                const std::vector<SupportedV4L2Format>& sortedFormats,
+                                const CroppingType& croppingType,
+                                const common::V1_0::helper::CameraMetadata& chars,
+                                const std::string& cameraId, unique_fd v4l2Fd);
+    ~ExternalCameraDeviceSession() override;
+
+    // Caller must use this method to check if CameraDeviceSession ctor failed
+    bool isInitFailed();
+    bool isClosed();
+
+    ScopedAStatus close() override;
+
+    ScopedAStatus configureStreams(const StreamConfiguration& in_requestedConfiguration,
+                                   std::vector<HalStream>* _aidl_return) override;
+    ScopedAStatus constructDefaultRequestSettings(RequestTemplate in_type,
+                                                  CameraMetadata* _aidl_return) override;
+    ScopedAStatus flush() override;
+    ScopedAStatus getCaptureRequestMetadataQueue(
+            MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) override;
+    ScopedAStatus getCaptureResultMetadataQueue(
+            MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) override;
+    ScopedAStatus isReconfigurationRequired(const CameraMetadata& in_oldSessionParams,
+                                            const CameraMetadata& in_newSessionParams,
+                                            bool* _aidl_return) override;
+    ScopedAStatus processCaptureRequest(const std::vector<CaptureRequest>& in_requests,
+                                        const std::vector<BufferCache>& in_cachesToRemove,
+                                        int32_t* _aidl_return) override;
+    ScopedAStatus signalStreamFlush(const std::vector<int32_t>& in_streamIds,
+                                    int32_t in_streamConfigCounter) override;
+    ScopedAStatus switchToOffline(const std::vector<int32_t>& in_streamsToKeep,
+                                  CameraOfflineSessionInfo* out_offlineSessionInfo,
+                                  std::shared_ptr<ICameraOfflineSession>* _aidl_return) override;
+    ScopedAStatus repeatingRequestEnd(int32_t in_frameNumber,
+                                      const std::vector<int32_t>& in_streamIds) override;
+
+    Status importBuffer(int32_t streamId, uint64_t bufId, buffer_handle_t buf,
+                        buffer_handle_t** outBufPtr) override;
+
+    void notifyError(int32_t frameNumber, int32_t streamId, ErrorCode ec) override;
+
+    Status processCaptureRequestError(const std::shared_ptr<HalRequest>& ptr,
+                                      std::vector<NotifyMsg>* msgs,
+                                      std::vector<CaptureResult>* results) override;
+
+    Status processCaptureResult(std::shared_ptr<HalRequest>& ptr) override;
+    ssize_t getJpegBufferSize(int32_t width, int32_t height) const override;
+
+    // Called by CameraDevice to dump active device states
+    binder_status_t dump(int fd, const char** args, uint32_t numArgs) override;
+
+    static Status isStreamCombinationSupported(
+            const StreamConfiguration& config,
+            const std::vector<SupportedV4L2Format>& supportedFormats,
+            const ExternalCameraConfig& devCfg);
+
+    static const int kMaxProcessedStream = 2;
+    static const int kMaxStallStream = 1;
+    static const uint32_t kMaxBytesPerPixel = 2;
+
+    class BufferRequestThread : public SimpleThread {
+      public:
+        BufferRequestThread(std::weak_ptr<OutputThreadInterface> parent,
+                            std::shared_ptr<ICameraDeviceCallback> callbacks);
+
+        int requestBufferStart(const std::vector<HalStreamBuffer>&);
+        int waitForBufferRequestDone(
+                /*out*/ std::vector<HalStreamBuffer>*);
+
+        bool threadLoop() override;
+
+      private:
+        void waitForNextRequest();
+
+        const std::weak_ptr<OutputThreadInterface> mParent;
+        const std::shared_ptr<ICameraDeviceCallback> mCallbacks;
+
+        std::mutex mLock;
+        bool mRequestingBuffer = false;
+
+        std::vector<HalStreamBuffer> mBufferReqs;
+        std::vector<HalStreamBuffer> mPendingReturnBufferReqs;
+        // mHalBufferReqs is not under mLock protection during the AIDL transaction
+        std::vector<BufferRequest> mHalBufferReqs;
+
+        // Requesting buffers takes much less time in steady state, but can take much longer
+        // when requesting the 1st buffer from a stream.
+        // TODO: consider a separate timeout for new vs. steady state?
+        // TODO: or make sure framework is warming up the pipeline during configure new stream?
+        static const int kReqProcTimeoutMs = 66;
+
+        static const int kReqWaitTimeoutMs = 33;
+        static const int kReqWaitTimesWarn = 90;   // 33ms * 90 ~= 3 sec
+        std::condition_variable mRequestCond;      // signaled when a new buffer request arrives
+        std::condition_variable mRequestDoneCond;  // signaled when a request is done
+    };
+
+    class OutputThread : public SimpleThread {
+      public:
+        OutputThread(std::weak_ptr<OutputThreadInterface> parent, CroppingType,
+                     const common::V1_0::helper::CameraMetadata&,
+                     std::shared_ptr<BufferRequestThread> bufReqThread);
+        ~OutputThread();
+
+        Status allocateIntermediateBuffers(const Size& v4lSize, const Size& thumbSize,
+                                           const std::vector<Stream>& streams,
+                                           uint32_t blobBufferSize);
+        Status submitRequest(const std::shared_ptr<HalRequest>&);
+        void flush();
+        void dump(int fd);
+        bool threadLoop() override;
+
+        void setExifMakeModel(const std::string& make, const std::string& model);
+
+        // The remaining request list is returned for offline processing
+        std::list<std::shared_ptr<HalRequest>> switchToOffline();
+
+      protected:
+        static const int kFlushWaitTimeoutSec = 3;  // 3 sec
+        static const int kReqWaitTimeoutMs = 33;    // 33ms
+        static const int kReqWaitTimesMax = 90;     // 33ms * 90 ~= 3 sec
+
+        // Methods to request output buffer in parallel
+        int requestBufferStart(const std::vector<HalStreamBuffer>&);
+        int waitForBufferRequestDone(
+                /*out*/ std::vector<HalStreamBuffer>*);
+
+        void waitForNextRequest(std::shared_ptr<HalRequest>* out);
+        void signalRequestDone();
+
+        int cropAndScaleLocked(std::shared_ptr<AllocatedFrame>& in, const Size& outSize,
+                               YCbCrLayout* out);
+
+        int cropAndScaleThumbLocked(std::shared_ptr<AllocatedFrame>& in, const Size& outSize,
+                                    YCbCrLayout* out);
+
+        int createJpegLocked(HalStreamBuffer& halBuf,
+                             const common::V1_0::helper::CameraMetadata& settings);
+
+        void clearIntermediateBuffers();
+
+        const std::weak_ptr<OutputThreadInterface> mParent;
+        const CroppingType mCroppingType;
+        const common::V1_0::helper::CameraMetadata mCameraCharacteristics;
+
+        mutable std::mutex mRequestListLock;       // Protect access to mRequestList,
+                                                   // mProcessingRequest and mProcessingFrameNumber
+        std::condition_variable mRequestCond;      // signaled when a new request is submitted
+        std::condition_variable mRequestDoneCond;  // signaled when a request is done processing
+        std::list<std::shared_ptr<HalRequest>> mRequestList;
+        bool mProcessingRequest = false;
+        uint32_t mProcessingFrameNumber = 0;
+
+        // V4L2 frameIn
+        // (MJPG decode)-> mYu12Frame
+        // (Scale)-> mScaledYu12Frames
+        // (Format convert) -> output gralloc frames
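+        // For example, a 1080p MJPEG V4L2 frame is decoded once into mYu12Frame, scaled
+        // into one mScaledYu12Frames entry per requested output size, and then converted
+        // into each output stream's gralloc pixel format.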
+        mutable std::mutex mBufferLock;  // Protect access to intermediate buffers
+        std::shared_ptr<AllocatedFrame> mYu12Frame;
+        std::shared_ptr<AllocatedFrame> mYu12ThumbFrame;
+        std::unordered_map<Size, std::shared_ptr<AllocatedFrame>, SizeHasher> mIntermediateBuffers;
+        std::unordered_map<Size, std::shared_ptr<AllocatedFrame>, SizeHasher> mScaledYu12Frames;
+        YCbCrLayout mYu12FrameLayout;
+        YCbCrLayout mYu12ThumbFrameLayout;
+        std::vector<uint8_t> mMuteTestPatternFrame;
+        uint32_t mTestPatternData[4] = {0, 0, 0, 0};
+        bool mCameraMuted = false;
+        uint32_t mBlobBufferSize = 0;  // 0 -> HAL derive buffer size, else: use given size
+
+        std::string mExifMake;
+        std::string mExifModel;
+
+        const std::shared_ptr<BufferRequestThread> mBufferRequestThread;
+    };
+
+  private:
+    bool initialize();
+    // To init/close different version of output thread
+    void initOutputThread();
+    void closeOutputThread();
+    void closeOutputThreadImpl();
+
+    void close(bool callerIsDtor);
+    Status initStatus() const;
+    status_t initDefaultRequests();
+
+    status_t fillCaptureResult(common::V1_0::helper::CameraMetadata& md, nsecs_t timestamp);
+    int configureV4l2StreamLocked(const SupportedV4L2Format& fmt, double fps = 0.0);
+    int v4l2StreamOffLocked();
+
+    int setV4l2FpsLocked(double fps);
+
+    std::unique_ptr<V4L2Frame> dequeueV4l2FrameLocked(
+            /*out*/ nsecs_t* shutterTs);  // Called with mLock held
+
+    void enqueueV4l2Frame(const std::shared_ptr<V4L2Frame>&);
+
+    // Check if input Stream is one of supported stream setting on this device
+    static bool isSupported(const Stream& stream,
+                            const std::vector<SupportedV4L2Format>& supportedFormats,
+                            const ExternalCameraConfig& cfg);
+
+    // Validate and import request's output buffers and acquire fence
+    Status importRequestLocked(const CaptureRequest& request,
+                               std::vector<buffer_handle_t*>& allBufPtrs,
+                               std::vector<int>& allFences);
+
+    Status importRequestLockedImpl(const CaptureRequest& request,
+                                   std::vector<buffer_handle_t*>& allBufPtrs,
+                                   std::vector<int>& allFences);
+
+    Status importBufferLocked(int32_t streamId, uint64_t bufId, buffer_handle_t buf,
+                              /*out*/ buffer_handle_t** outBufPtr);
+    static void cleanupInflightFences(std::vector<int>& allFences, size_t numFences);
+    void cleanupBuffersLocked(int id);
+
+    void updateBufferCaches(const std::vector<BufferCache>& cachesToRemove);
+
+    Status processOneCaptureRequest(const CaptureRequest& request);
+    void notifyShutter(int32_t frameNumber, nsecs_t shutterTs);
+
+    void invokeProcessCaptureResultCallback(std::vector<CaptureResult>& results, bool tryWriteFmq);
+    Size getMaxJpegResolution() const;
+
+    Size getMaxThumbResolution() const;
+
+    int waitForV4L2BufferReturnLocked(std::unique_lock<std::mutex>& lk);
+
+    // Main body of switchToOffline. This method does not invoke any callbacks
+    // but instead returns the necessary callbacks in output arguments so callers
+    // can callback later without holding any locks
+    Status switchToOffline(const std::vector<int32_t>& offlineStreams,
+                           /*out*/ std::vector<NotifyMsg>* msgs,
+                           /*out*/ std::vector<CaptureResult>* results,
+                           /*out*/ CameraOfflineSessionInfo* info,
+                           /*out*/ std::shared_ptr<ICameraOfflineSession>* session);
+
+    bool supportOfflineLocked(int32_t streamId);
+
+    // Whether a request can be completely dropped when switching to offline
+    bool canDropRequest(const std::vector<int32_t>& offlineStreams,
+                        std::shared_ptr<HalRequest> halReq);
+
+    void fillOfflineSessionInfo(const std::vector<int32_t>& offlineStreams,
+                                std::deque<std::shared_ptr<HalRequest>>& offlineReqs,
+                                const std::map<int, CirculatingBuffers>& circulatingBuffers,
+                                /*out*/ CameraOfflineSessionInfo* info);
+
+    // Protect (most of) the AIDL interface methods from concurrent entry
+    mutable Mutex mInterfaceLock;
+
+    mutable Mutex mLock;  // Protect all private members except otherwise noted
+    const std::shared_ptr<ICameraDeviceCallback> mCallback;
+    const ExternalCameraConfig& mCfg;
+    const common::V1_0::helper::CameraMetadata mCameraCharacteristics;
+    const std::vector<SupportedV4L2Format> mSupportedFormats;
+    const CroppingType mCroppingType;
+    const std::string mCameraId;
+
+    // Not protected by mLock, this is almost a const.
+    // Setup in constructor, reset in close() after OutputThread is joined
+    unique_fd mV4l2Fd;
+
+    // The device is considered closed when any of the following happens:
+    //    - closed by user
+    //    - init failed
+    //    - camera disconnected
+    bool mClosed = false;
+    bool mInitialized = false;
+    bool mInitFail = false;
+    bool mFirstRequest = false;
+    common::V1_0::helper::CameraMetadata mLatestReqSetting;
+
+    bool mV4l2Streaming = false;
+    SupportedV4L2Format mV4l2StreamingFmt;
+    double mV4l2StreamingFps = 0.0;
+    size_t mV4L2BufferCount = 0;
+
+    static const int kBufferWaitTimeoutSec = 3;  // TODO: handle long exposure (or not allowing)
+    std::mutex mV4l2BufferLock;                  // protect the buffer count and condition below
+    std::condition_variable mV4L2BufferReturned;
+    size_t mNumDequeuedV4l2Buffers = 0;
+    uint32_t mMaxV4L2BufferSize = 0;
+
+    // Not protected by mLock (but might be used when mLock is locked)
+    std::shared_ptr<OutputThread> mOutputThread;
+
+    // Stream ID -> Stream cache
+    std::unordered_map<int, Stream> mStreamMap;
+
+    std::mutex mInflightFramesLock;  // protect mInflightFrames
+    std::unordered_set<uint32_t> mInflightFrames;
+
+    // Stream ID -> circulating buffers map
+    std::map<int, CirculatingBuffers> mCirculatingBuffers;
+    // Protect mCirculatingBuffers, must not lock mLock after acquiring this lock
+    mutable Mutex mCbsLock;
+
+    std::mutex mAfTriggerLock;  // protect mAfTrigger
+    bool mAfTrigger = false;
+
+    uint32_t mBlobBufferSize = 0;
+
+    static HandleImporter sHandleImporter;
+
+    bool mSupportBufMgr;
+    std::shared_ptr<BufferRequestThread> mBufferRequestThread;
+
+    /* Beginning of members not changed after initialize() */
+    using RequestMetadataQueue = AidlMessageQueue<int8_t, SynchronizedReadWrite>;
+    std::unique_ptr<RequestMetadataQueue> mRequestMetadataQueue;
+    using ResultMetadataQueue = AidlMessageQueue<int8_t, SynchronizedReadWrite>;
+    std::shared_ptr<ResultMetadataQueue> mResultMetadataQueue;
+
+    // Protect against invokeProcessCaptureResultCallback()
+    Mutex mProcessCaptureResultLock;
+
+    // tracks last seen stream config counter
+    int32_t mLastStreamConfigCounter = -1;
+
+    std::unordered_map<RequestTemplate, CameraMetadata> mDefaultRequests;
+
+    const Size mMaxThumbResolution;
+    const Size mMaxJpegResolution;
+
+    std::string mExifMake;
+    std::string mExifModel;
+    /* End of members not changed after initialize() */
+};
+
+}  // namespace implementation
+}  // namespace device
+}  // namespace camera
+}  // namespace hardware
+}  // namespace android
+
+#endif  // HARDWARE_INTERFACES_CAMERA_DEVICE_DEFAULT_EXTERNALCAMERADEVICESESSION_H_
diff --git a/camera/device/default/ExternalCameraOfflineSession.cpp b/camera/device/default/ExternalCameraOfflineSession.cpp
new file mode 100644
index 0000000..4c7f732
--- /dev/null
+++ b/camera/device/default/ExternalCameraOfflineSession.cpp
@@ -0,0 +1,547 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "ExtCamOfflnSsn"
+#include <android/log.h>
+
+#include "ExternalCameraOfflineSession.h"
+
+#include <aidl/android/hardware/camera/device/BufferStatus.h>
+#include <aidl/android/hardware/camera/device/ErrorMsg.h>
+#include <aidl/android/hardware/camera/device/ShutterMsg.h>
+#include <aidl/android/hardware/camera/device/StreamBuffer.h>
+#include <aidlcommonsupport/NativeHandle.h>
+#include <convert.h>
+#include <linux/videodev2.h>
+#include <sync/sync.h>
+#include <utils/Trace.h>
+
+#define HAVE_JPEG  // required for libyuv.h to export MJPEG decode APIs
+#include <libyuv.h>
+
+namespace {
+
+// Size of request/result metadata fast message queue. Change to 0 to always use hwbinder buffer.
+constexpr size_t kMetadataMsgQueueSize = 1 << 18 /* 256kB */;
+
+}  // anonymous namespace
+
+namespace android {
+namespace hardware {
+namespace camera {
+namespace device {
+namespace implementation {
+
+using ::aidl::android::hardware::camera::device::BufferStatus;
+using ::aidl::android::hardware::camera::device::ErrorMsg;
+using ::aidl::android::hardware::camera::device::ShutterMsg;
+using ::aidl::android::hardware::camera::device::StreamBuffer;
+
+// Static instance
+HandleImporter ExternalCameraOfflineSession::sHandleImporter;
+
+ExternalCameraOfflineSession::ExternalCameraOfflineSession(
+        const CroppingType& croppingType, const common::V1_0::helper::CameraMetadata& chars,
+        const std::string& cameraId, const std::string& exifMake, const std::string& exifModel,
+        uint32_t blobBufferSize, bool afTrigger, const std::vector<Stream>& offlineStreams,
+        std::deque<std::shared_ptr<HalRequest>>& offlineReqs,
+        const std::map<int, CirculatingBuffers>& circulatingBuffers)
+    : mCroppingType(croppingType),
+      mChars(chars),
+      mCameraId(cameraId),
+      mExifMake(exifMake),
+      mExifModel(exifModel),
+      mBlobBufferSize(blobBufferSize),
+      mAfTrigger(afTrigger),
+      mOfflineStreams(offlineStreams),
+      mOfflineReqs(offlineReqs),
+      mCirculatingBuffers(circulatingBuffers) {}
+
+ExternalCameraOfflineSession::~ExternalCameraOfflineSession() {
+    close();
+}
+
+bool ExternalCameraOfflineSession::initialize() {
+    mResultMetadataQueue =
+            std::make_shared<ResultMetadataQueue>(kMetadataMsgQueueSize, false /* non blocking */);
+    if (!mResultMetadataQueue->isValid()) {
+        ALOGE("%s: invalid result fmq", __FUNCTION__);
+        return true;
+    }
+    return false;
+}
+
+Status ExternalCameraOfflineSession::importBuffer(int32_t streamId, uint64_t bufId,
+                                                  buffer_handle_t buf,
+                                                  buffer_handle_t** outBufPtr) {
+    Mutex::Autolock _l(mCbsLock);
+    return importBufferImpl(mCirculatingBuffers, sHandleImporter, streamId, bufId, buf, outBufPtr);
+}
+
+Status ExternalCameraOfflineSession::processCaptureResult(std::shared_ptr<HalRequest>& req) {
+    ATRACE_CALL();
+    // Fill output buffers
+    std::vector<CaptureResult> results;
+    results.resize(1);
+    CaptureResult& result = results[0];
+    result.frameNumber = req->frameNumber;
+    result.partialResult = 1;
+    result.inputBuffer.streamId = -1;
+    result.outputBuffers.resize(req->buffers.size());
+    for (size_t i = 0; i < req->buffers.size(); i++) {
+        StreamBuffer& outputBuffer = result.outputBuffers[i];
+        outputBuffer.streamId = req->buffers[i].streamId;
+        outputBuffer.bufferId = req->buffers[i].bufferId;
+        if (req->buffers[i].fenceTimeout) {
+            outputBuffer.status = BufferStatus::ERROR;
+            if (req->buffers[i].acquireFence >= 0) {
+                native_handle_t* handle = native_handle_create(/*numFds*/ 1, /*numInts*/ 0);
+                handle->data[0] = req->buffers[i].acquireFence;
+                result.outputBuffers[i].releaseFence = android::makeToAidl(handle);
+            }
+            notifyError(req->frameNumber, req->buffers[i].streamId, ErrorCode::ERROR_BUFFER);
+        } else {
+            result.outputBuffers[i].status = BufferStatus::OK;
+            // TODO: refactor
+            if (req->buffers[i].acquireFence >= 0) {
+                native_handle_t* handle = native_handle_create(/*numFds*/ 1, /*numInts*/ 0);
+                handle->data[0] = req->buffers[i].acquireFence;
+                outputBuffer.releaseFence = android::makeToAidl(handle);
+            }
+        }
+    }
+
+    // Fill capture result metadata
+    fillCaptureResult(req->setting, req->shutterTs);
+    const camera_metadata_t* rawResult = req->setting.getAndLock();
+    convertToAidl(rawResult, &result.result);
+    req->setting.unlock(rawResult);
+
+    // Callback into framework
+    invokeProcessCaptureResultCallback(results, /* tryWriteFmq */ true);
+    freeReleaseFences(results);
+    return Status::OK;
+}
+
+#define UPDATE(md, tag, data, size)               \
+    do {                                          \
+        if ((md).update((tag), (data), (size))) { \
+            ALOGE("Update " #tag " failed!");     \
+            return BAD_VALUE;                     \
+        }                                         \
+    } while (0)
+
+status_t ExternalCameraOfflineSession::fillCaptureResult(common::V1_0::helper::CameraMetadata& md,
+                                                         nsecs_t timestamp) {
+    bool afTrigger = false;
+    {
+        std::lock_guard<std::mutex> lk(mAfTriggerLock);
+        afTrigger = mAfTrigger;
+        if (md.exists(ANDROID_CONTROL_AF_TRIGGER)) {
+            camera_metadata_entry entry = md.find(ANDROID_CONTROL_AF_TRIGGER);
+            if (entry.data.u8[0] == ANDROID_CONTROL_AF_TRIGGER_START) {
+                mAfTrigger = afTrigger = true;
+            } else if (entry.data.u8[0] == ANDROID_CONTROL_AF_TRIGGER_CANCEL) {
+                mAfTrigger = afTrigger = false;
+            }
+        }
+    }
+
+    // For USB cameras, the camera itself handles everything and we don't have control
+    // over AF. We simply fake the AF metadata based on the request received here.
+    uint8_t afState;
+    if (afTrigger) {
+        afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
+    } else {
+        afState = ANDROID_CONTROL_AF_STATE_INACTIVE;
+    }
+    UPDATE(md, ANDROID_CONTROL_AF_STATE, &afState, 1);
+
+    camera_metadata_ro_entry activeArraySize = mChars.find(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+
+    return fillCaptureResultCommon(md, timestamp, activeArraySize);
+}
+void ExternalCameraOfflineSession::invokeProcessCaptureResultCallback(
+        std::vector<CaptureResult>& results, bool tryWriteFmq) {
+    if (mProcessCaptureResultLock.tryLock() != OK) {
+        const nsecs_t NS_TO_SECOND = 1E9;
+        ALOGV("%s: previous call is not finished! waiting 1s...", __FUNCTION__);
+        if (mProcessCaptureResultLock.timedLock(/* 1s */ NS_TO_SECOND) != OK) {
+            ALOGE("%s: cannot acquire lock in 1s, cannot proceed", __FUNCTION__);
+            return;
+        }
+    }
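+    // Try to push the result metadata through the fast message queue first: when the write
+    // succeeds, fmqResultSize carries the byte count and the in-parcel metadata is cleared
+    // so the framework reads it from the FMQ; otherwise fmqResultSize stays 0 and the
+    // metadata is returned in the result itself.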
+    if (tryWriteFmq && mResultMetadataQueue->availableToWrite() > 0) {
+        for (CaptureResult& result : results) {
+            if (!result.result.metadata.empty()) {
+                if (mResultMetadataQueue->write(
+                            reinterpret_cast<int8_t*>(result.result.metadata.data()),
+                            result.result.metadata.size())) {
+                    result.fmqResultSize = result.result.metadata.size();
+                    result.result.metadata.clear();
+                } else {
+                    ALOGW("%s: couldn't utilize fmq, fall back to hwbinder", __FUNCTION__);
+                    result.fmqResultSize = 0;
+                }
+            } else {
+                result.fmqResultSize = 0;
+            }
+        }
+    }
+    auto status = mCallback->processCaptureResult(results);
+    if (!status.isOk()) {
+        ALOGE("%s: processCaptureResult ERROR : %d:%d", __FUNCTION__, status.getExceptionCode(),
+              status.getServiceSpecificError());
+    }
+
+    mProcessCaptureResultLock.unlock();
+}
+
+Status ExternalCameraOfflineSession::processCaptureRequestError(
+        const std::shared_ptr<HalRequest>& req, std::vector<NotifyMsg>* outMsgs,
+        std::vector<CaptureResult>* outResults) {
+    ATRACE_CALL();
+
+    if (outMsgs == nullptr) {
+        notifyError(/*frameNum*/ req->frameNumber, /*stream*/ -1, ErrorCode::ERROR_REQUEST);
+    } else {
+        NotifyMsg shutter;
+        shutter.set<NotifyMsg::Tag::shutter>(ShutterMsg{
+                .frameNumber = req->frameNumber,
+                .timestamp = req->shutterTs,
+        });
+
+        NotifyMsg error;
+        error.set<NotifyMsg::Tag::error>(ErrorMsg{.frameNumber = req->frameNumber,
+                                                  .errorStreamId = -1,
+                                                  .errorCode = ErrorCode::ERROR_REQUEST});
+        outMsgs->push_back(shutter);
+        outMsgs->push_back(error);
+    }
+
+    // Fill output buffers
+    CaptureResult result;
+    result.frameNumber = req->frameNumber;
+    result.partialResult = 1;
+    result.inputBuffer.streamId = -1;
+    result.outputBuffers.resize(req->buffers.size());
+    for (size_t i = 0; i < req->buffers.size(); i++) {
+        StreamBuffer& outputBuffer = result.outputBuffers[i];
+        outputBuffer.streamId = req->buffers[i].streamId;
+        outputBuffer.bufferId = req->buffers[i].bufferId;
+        outputBuffer.status = BufferStatus::ERROR;
+        if (req->buffers[i].acquireFence >= 0) {
+            native_handle_t* handle = native_handle_create(/*numFds*/ 1, /*numInts*/ 0);
+            handle->data[0] = req->buffers[i].acquireFence;
+            outputBuffer.releaseFence = makeToAidl(handle);
+        }
+    }
+
+    if (outResults == nullptr) {
+        // Callback into framework
+        std::vector<CaptureResult> results(1);
+        results[0] = std::move(result);
+        invokeProcessCaptureResultCallback(results, /* tryWriteFmq */ true);
+        freeReleaseFences(results);
+    } else {
+        outResults->push_back(std::move(result));
+    }
+    return Status::OK;
+}
+
+ssize_t ExternalCameraOfflineSession::getJpegBufferSize(int32_t, int32_t) const {
+    // Empty implementation here as the jpeg buffer size is passed in by ctor
+    return 0;
+}
+
+void ExternalCameraOfflineSession::notifyError(int32_t frameNumber, int32_t streamId,
+                                               ErrorCode ec) {
+    NotifyMsg msg;
+    msg.set<NotifyMsg::Tag::error>(
+            ErrorMsg{.frameNumber = frameNumber, .errorStreamId = streamId, .errorCode = ec});
+    mCallback->notify({msg});
+}
+
+ScopedAStatus ExternalCameraOfflineSession::setCallback(
+        const std::shared_ptr<ICameraDeviceCallback>& in_cb) {
+    Mutex::Autolock _il(mInterfaceLock);
+    if (mCallback != nullptr && in_cb != nullptr) {
+        ALOGE("%s: callback must not be set twice!", __FUNCTION__);
+        return fromStatus(Status::OK);
+    }
+    mCallback = in_cb;
+
+    initOutputThread();
+
+    if (mOutputThread == nullptr) {
+        ALOGE("%s: init OutputThread failed!", __FUNCTION__);
+    }
+    return fromStatus(Status::OK);
+}
+void ExternalCameraOfflineSession::initOutputThread() {
+    if (mOutputThread != nullptr) {
+        ALOGE("%s: OutputThread already exists!", __FUNCTION__);
+        return;
+    }
+
+    // Grab a shared_ptr to 'this' from ndk::SharedRefBase::ref()
+    std::shared_ptr<ExternalCameraOfflineSession> thiz = ref<ExternalCameraOfflineSession>();
+
+    mBufferRequestThread = std::make_shared<ExternalCameraDeviceSession::BufferRequestThread>(
+            /*parent=*/thiz, mCallback);
+    mBufferRequestThread->run();
+
+    mOutputThread = std::make_shared<OutputThread>(/*parent=*/thiz, mCroppingType, mChars,
+                                                   mBufferRequestThread, mOfflineReqs);
+
+    mOutputThread->setExifMakeModel(mExifMake, mExifModel);
+
+    Size inputSize = {mOfflineReqs[0]->frameIn->mWidth, mOfflineReqs[0]->frameIn->mHeight};
+    Size maxThumbSize = getMaxThumbnailResolution(mChars);
+    mOutputThread->allocateIntermediateBuffers(inputSize, maxThumbSize, mOfflineStreams,
+                                               mBlobBufferSize);
+
+    mOutputThread->run();
+}
+
+ScopedAStatus ExternalCameraOfflineSession::getCaptureResultMetadataQueue(
+        MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) {
+    Mutex::Autolock _il(mInterfaceLock);
+    *_aidl_return = mResultMetadataQueue->dupeDesc();
+    return fromStatus(Status::OK);
+}
+
+ScopedAStatus ExternalCameraOfflineSession::close() {
+    Mutex::Autolock _il(mInterfaceLock);
+    {
+        Mutex::Autolock _l(mLock);
+        if (mClosed) {
+            ALOGW("%s: offline session already closed!", __FUNCTION__);
+            return fromStatus(Status::OK);
+        }
+    }
+    if (mBufferRequestThread != nullptr) {
+        mBufferRequestThread->requestExitAndWait();
+        mBufferRequestThread.reset();
+    }
+    if (mOutputThread) {
+        mOutputThread->flush();
+        mOutputThread->requestExitAndWait();
+        mOutputThread.reset();
+    }
+
+    Mutex::Autolock _l(mLock);
+    // free all buffers
+    {
+        Mutex::Autolock _cbl(mCbsLock);
+        for (auto& stream : mOfflineStreams) {
+            cleanupBuffersLocked(stream.id);
+        }
+    }
+    mCallback.reset();
+    mClosed = true;
+    return fromStatus(Status::OK);
+}
+void ExternalCameraOfflineSession::cleanupBuffersLocked(int32_t id) {
+    for (auto& pair : mCirculatingBuffers.at(id)) {
+        sHandleImporter.freeBuffer(pair.second);
+    }
+    mCirculatingBuffers[id].clear();
+    mCirculatingBuffers.erase(id);
+}
+
+bool ExternalCameraOfflineSession::OutputThread::threadLoop() {
+    auto parent = mParent.lock();
+    if (parent == nullptr) {
+        ALOGE("%s: session has been disconnected!", __FUNCTION__);
+        return false;
+    }
+
+    if (mOfflineReqs.empty()) {
+        ALOGI("%s: all offline requests are processed. Stopping.", __FUNCTION__);
+        return false;
+    }
+
+    std::shared_ptr<HalRequest> req = mOfflineReqs.front();
+    mOfflineReqs.pop_front();
+
+    auto onDeviceError = [&](auto... args) {
+        ALOGE(args...);
+        parent->notifyError(req->frameNumber, /*stream*/ -1, ErrorCode::ERROR_DEVICE);
+        signalRequestDone();
+        return false;
+    };
+
+    if (req->frameIn->mFourcc != V4L2_PIX_FMT_MJPEG && req->frameIn->mFourcc != V4L2_PIX_FMT_Z16) {
+        return onDeviceError("%s: do not support V4L2 format %c%c%c%c", __FUNCTION__,
+                             req->frameIn->mFourcc & 0xFF, (req->frameIn->mFourcc >> 8) & 0xFF,
+                             (req->frameIn->mFourcc >> 16) & 0xFF,
+                             (req->frameIn->mFourcc >> 24) & 0xFF);
+    }
+
+    int res = requestBufferStart(req->buffers);
+    if (res != 0) {
+        ALOGE("%s: send BufferRequest failed! res %d", __FUNCTION__, res);
+        return onDeviceError("%s: failed to send buffer request!", __FUNCTION__);
+    }
+
+    std::unique_lock<std::mutex> lk(mBufferLock);
+    // Convert input V4L2 frame to YU12 of the same size
+    // TODO: see if we can save some computation by converting to YV12 here
+    uint8_t* inData;
+    size_t inDataSize;
+    if (req->frameIn->getData(&inData, &inDataSize) != 0) {
+        lk.unlock();
+        return onDeviceError("%s: V4L2 buffer map failed", __FUNCTION__);
+    }
+
+    // TODO: in some special case maybe we can decode jpg directly to gralloc output?
+    if (req->frameIn->mFourcc == V4L2_PIX_FMT_MJPEG) {
+        ATRACE_BEGIN("MJPGtoI420");
+        int convRes = libyuv::MJPGToI420(
+                inData, inDataSize, static_cast<uint8_t*>(mYu12FrameLayout.y),
+                mYu12FrameLayout.yStride, static_cast<uint8_t*>(mYu12FrameLayout.cb),
+                mYu12FrameLayout.cStride, static_cast<uint8_t*>(mYu12FrameLayout.cr),
+                mYu12FrameLayout.cStride, mYu12Frame->mWidth, mYu12Frame->mHeight,
+                mYu12Frame->mWidth, mYu12Frame->mHeight);
+        ATRACE_END();
+
+        if (convRes != 0) {
+            // For some webcams, the first few V4L2 frames might be malformed...
+            ALOGE("%s: Convert V4L2 frame to YU12 failed! res %d", __FUNCTION__, convRes);
+            lk.unlock();
+            Status st = parent->processCaptureRequestError(req);
+            if (st != Status::OK) {
+                return onDeviceError("%s: failed to process capture request error!", __FUNCTION__);
+            }
+            signalRequestDone();
+            return true;
+        }
+    }
+
+    ATRACE_BEGIN("Wait for BufferRequest done");
+    res = waitForBufferRequestDone(&req->buffers);
+    ATRACE_END();
+
+    if (res != 0) {
+        ALOGE("%s: wait for BufferRequest done failed! res %d", __FUNCTION__, res);
+        lk.unlock();
+        return onDeviceError("%s: failed to process buffer request!", __FUNCTION__);
+    }
+
+    ALOGV("%s processing new request", __FUNCTION__);
+    const int kSyncWaitTimeoutMs = 500;
+    for (auto& halBuf : req->buffers) {
+        if (*(halBuf.bufPtr) == nullptr) {
+            ALOGW("%s: buffer for stream %d missing", __FUNCTION__, halBuf.streamId);
+            halBuf.fenceTimeout = true;
+        } else if (halBuf.acquireFence >= 0) {
+            int ret = sync_wait(halBuf.acquireFence, kSyncWaitTimeoutMs);
+            if (ret) {
+                halBuf.fenceTimeout = true;
+            } else {
+                ::close(halBuf.acquireFence);
+                halBuf.acquireFence = -1;
+            }
+        }
+
+        if (halBuf.fenceTimeout) {
+            continue;
+        }
+
+        // Gralloc lockYCbCr the buffer
+        switch (halBuf.format) {
+            case PixelFormat::BLOB: {
+                int ret = createJpegLocked(halBuf, req->setting);
+
+                if (ret != 0) {
+                    lk.unlock();
+                    return onDeviceError("%s: createJpegLocked failed with %d", __FUNCTION__, ret);
+                }
+            } break;
+            case PixelFormat::Y16: {
+                void* outLayout = sHandleImporter.lock(
+                        *(halBuf.bufPtr), static_cast<uint64_t>(halBuf.usage), inDataSize);
+
+                std::memcpy(outLayout, inData, inDataSize);
+
+                int relFence = sHandleImporter.unlock(*(halBuf.bufPtr));
+                if (relFence >= 0) {
+                    halBuf.acquireFence = relFence;
+                }
+            } break;
+            case PixelFormat::YCBCR_420_888:
+            case PixelFormat::YV12: {
+                IMapper::Rect outRect{0, 0, static_cast<int32_t>(halBuf.width),
+                                      static_cast<int32_t>(halBuf.height)};
+                YCbCrLayout outLayout = sHandleImporter.lockYCbCr(
+                        *(halBuf.bufPtr), static_cast<uint64_t>(halBuf.usage), outRect);
+                ALOGV("%s: outLayout y %p cb %p cr %p y_str %d c_str %d c_step %d", __FUNCTION__,
+                      outLayout.y, outLayout.cb, outLayout.cr, outLayout.yStride, outLayout.cStride,
+                      outLayout.chromaStep);
+
+                // Convert to output buffer size/format
+                uint32_t outputFourcc = getFourCcFromLayout(outLayout);
+                ALOGV("%s: converting to format %c%c%c%c", __FUNCTION__, outputFourcc & 0xFF,
+                      (outputFourcc >> 8) & 0xFF, (outputFourcc >> 16) & 0xFF,
+                      (outputFourcc >> 24) & 0xFF);
+
+                YCbCrLayout cropAndScaled;
+                ATRACE_BEGIN("cropAndScaleLocked");
+                int ret = cropAndScaleLocked(mYu12Frame, Size{halBuf.width, halBuf.height},
+                                             &cropAndScaled);
+                ATRACE_END();
+                if (ret != 0) {
+                    lk.unlock();
+                    return onDeviceError("%s: crop and scale failed!", __FUNCTION__);
+                }
+
+                Size sz{halBuf.width, halBuf.height};
+                ATRACE_BEGIN("formatConvert");
+                ret = formatConvert(cropAndScaled, outLayout, sz, outputFourcc);
+                ATRACE_END();
+                if (ret != 0) {
+                    lk.unlock();
+                    return onDeviceError("%s: format conversion failed!", __FUNCTION__);
+                }
+                int relFence = sHandleImporter.unlock(*(halBuf.bufPtr));
+                if (relFence >= 0) {
+                    halBuf.acquireFence = relFence;
+                }
+            } break;
+            default:
+                lk.unlock();
+                return onDeviceError("%s: unknown output format %x", __FUNCTION__, halBuf.format);
+        }
+    }  // for each buffer
+    mScaledYu12Frames.clear();
+
+    // Don't hold the lock while calling back to parent
+    lk.unlock();
+    Status st = parent->processCaptureResult(req);
+    if (st != Status::OK) {
+        return onDeviceError("%s: failed to process capture result!", __FUNCTION__);
+    }
+    signalRequestDone();
+    return true;
+}
+
+}  // namespace implementation
+}  // namespace device
+}  // namespace camera
+}  // namespace hardware
+}  // namespace android
\ No newline at end of file
diff --git a/camera/device/default/ExternalCameraOfflineSession.h b/camera/device/default/ExternalCameraOfflineSession.h
new file mode 100644
index 0000000..5795c95
--- /dev/null
+++ b/camera/device/default/ExternalCameraOfflineSession.h
@@ -0,0 +1,143 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef HARDWARE_INTERFACES_CAMERA_DEVICE_DEFAULT_EXTERNALCAMERAOFFLINESESSION_H_
+#define HARDWARE_INTERFACES_CAMERA_DEVICE_DEFAULT_EXTERNALCAMERAOFFLINESESSION_H_
+
+#include <ExternalCameraDeviceSession.h>
+#include <ExternalCameraUtils.h>
+#include <aidl/android/hardware/camera/common/Status.h>
+#include <aidl/android/hardware/camera/device/BnCameraOfflineSession.h>
+#include <aidl/android/hardware/camera/device/Stream.h>
+#include <fmq/AidlMessageQueue.h>
+#include <utils/RefBase.h>
+#include <deque>
+
+namespace android {
+namespace hardware {
+namespace camera {
+namespace device {
+namespace implementation {
+
+using ::aidl::android::hardware::camera::common::Status;
+using ::aidl::android::hardware::camera::device::BnCameraOfflineSession;
+using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
+using ::aidl::android::hardware::camera::device::Stream;
+using ::aidl::android::hardware::common::fmq::MQDescriptor;
+using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
+
+class ExternalCameraOfflineSession : public BnCameraOfflineSession,
+                                     public virtual RefBase,
+                                     public virtual OutputThreadInterface {
+  public:
+    ExternalCameraOfflineSession(const CroppingType& croppingType,
+                                 const common::V1_0::helper::CameraMetadata& chars,
+                                 const std::string& cameraId, const std::string& exifMake,
+                                 const std::string& exifModel, uint32_t blobBufferSize,
+                                 bool afTrigger, const std::vector<Stream>& offlineStreams,
+                                 std::deque<std::shared_ptr<HalRequest>>& offlineReqs,
+                                 const std::map<int, CirculatingBuffers>& circulatingBuffers);
+
+    ~ExternalCameraOfflineSession() override;
+
+    bool initialize();
+
+    // Methods from OutputThreadInterface
+    Status importBuffer(int32_t streamId, uint64_t bufId, buffer_handle_t buf,
+                        /*out*/ buffer_handle_t** outBufPtr) override;
+
+    Status processCaptureResult(std::shared_ptr<HalRequest>&) override;
+
+    Status processCaptureRequestError(const std::shared_ptr<HalRequest>&,
+                                      /*out*/ std::vector<NotifyMsg>* msgs,
+                                      /*out*/ std::vector<CaptureResult>* results) override;
+
+    ssize_t getJpegBufferSize(int32_t width, int32_t height) const override;
+
+    void notifyError(int32_t frameNumber, int32_t streamId, ErrorCode ec) override;
+    // End of OutputThreadInterface methods
+
+    ScopedAStatus setCallback(const std::shared_ptr<ICameraDeviceCallback>& in_cb) override;
+    ScopedAStatus getCaptureResultMetadataQueue(
+            MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) override;
+    ScopedAStatus close() override;
+
+  private:
+    class OutputThread : public ExternalCameraDeviceSession::OutputThread {
+      public:
+        OutputThread(std::weak_ptr<OutputThreadInterface> parent, CroppingType ct,
+                     const common::V1_0::helper::CameraMetadata& chars,
+                     std::shared_ptr<ExternalCameraDeviceSession::BufferRequestThread> bufReqThread,
+                     std::deque<std::shared_ptr<HalRequest>>& offlineReqs)
+            : ExternalCameraDeviceSession::OutputThread(std::move(parent), ct, chars,
+                                                        std::move(bufReqThread)),
+              mOfflineReqs(offlineReqs) {}
+
+        bool threadLoop() override;
+
+      protected:
+        std::deque<std::shared_ptr<HalRequest>> mOfflineReqs;
+    };  // OutputThread
+
+    status_t fillCaptureResult(common::V1_0::helper::CameraMetadata& md, nsecs_t timestamp);
+    void invokeProcessCaptureResultCallback(std::vector<CaptureResult>& results, bool tryWriteFmq);
+    void initOutputThread();
+    void cleanupBuffersLocked(int32_t id);
+
+    // Protect (most of) the AIDL interface methods from concurrent entry
+    mutable Mutex mInterfaceLock;
+
+    mutable Mutex mLock;  // Protect all data members except otherwise noted
+
+    bool mClosed = false;
+    const CroppingType mCroppingType;
+    const common::V1_0::helper::CameraMetadata mChars;
+    const std::string mCameraId;
+    const std::string mExifMake;
+    const std::string mExifModel;
+    const uint32_t mBlobBufferSize;
+
+    std::mutex mAfTriggerLock;  // protect mAfTrigger
+    bool mAfTrigger;
+
+    const std::vector<Stream> mOfflineStreams;
+    std::deque<std::shared_ptr<HalRequest>> mOfflineReqs;
+
+    // Protect mCirculatingBuffers, must not lock mLock after acquiring this lock
+    mutable Mutex mCbsLock;
+    std::map<int, CirculatingBuffers> mCirculatingBuffers;
+
+    static HandleImporter sHandleImporter;
+
+    using ResultMetadataQueue = AidlMessageQueue<int8_t, SynchronizedReadWrite>;
+    std::shared_ptr<ResultMetadataQueue> mResultMetadataQueue;
+
+    // Protect against invokeProcessCaptureResultCallback()
+    Mutex mProcessCaptureResultLock;
+
+    std::shared_ptr<ICameraDeviceCallback> mCallback;
+
+    std::shared_ptr<ExternalCameraDeviceSession::BufferRequestThread> mBufferRequestThread;
+    std::shared_ptr<OutputThread> mOutputThread;
+};
+
+}  // namespace implementation
+}  // namespace device
+}  // namespace camera
+}  // namespace hardware
+}  // namespace android
+
+#endif  // HARDWARE_INTERFACES_CAMERA_DEVICE_DEFAULT_EXTERNALCAMERAOFFLINESESSION_H_
diff --git a/camera/device/default/ExternalCameraUtils.cpp b/camera/device/default/ExternalCameraUtils.cpp
new file mode 100644
index 0000000..cfb95f2
--- /dev/null
+++ b/camera/device/default/ExternalCameraUtils.cpp
@@ -0,0 +1,860 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "ExtCamUtils"
+// #define LOG_NDEBUG 0
+
+#include "ExternalCameraUtils.h"
+
+#include <aidlcommonsupport/NativeHandle.h>
+#include <jpeglib.h>
+#include <linux/videodev2.h>
+#include <log/log.h>
+#include <algorithm>
+#include <cinttypes>
+#include <cmath>
+
+#define HAVE_JPEG  // required for libyuv.h to export MJPEG decode APIs
+#include <libyuv.h>
+
+namespace android {
+namespace hardware {
+namespace camera {
+
+namespace external {
+namespace common {
+
+namespace {
+const int kDefaultCameraIdOffset = 100;
+const int kDefaultJpegBufSize = 5 << 20;  // 5MB
+const int kDefaultNumVideoBuffer = 4;
+const int kDefaultNumStillBuffer = 2;
+const int kDefaultOrientation = 0;  // suitable for natural landscape displays like tablet/TV
+                                    // For phone devices 270 is better
+}  // anonymous namespace
+
+const char* ExternalCameraConfig::kDefaultCfgPath = "/vendor/etc/external_camera_config.xml";
+
+ExternalCameraConfig ExternalCameraConfig::loadFromCfg(const char* cfgPath) {
+    using namespace tinyxml2;
+    ExternalCameraConfig ret;
+
+    XMLDocument configXml;
+    XMLError err = configXml.LoadFile(cfgPath);
+    if (err != XML_SUCCESS) {
+        ALOGE("%s: Unable to load external camera config file '%s'. Error: %s", __FUNCTION__,
+              cfgPath, XMLDocument::ErrorIDToName(err));
+        return ret;
+    } else {
+        ALOGI("%s: load external camera config succeeded!", __FUNCTION__);
+    }
+
+    XMLElement* extCam = configXml.FirstChildElement("ExternalCamera");
+    if (extCam == nullptr) {
+        ALOGI("%s: no external camera config specified", __FUNCTION__);
+        return ret;
+    }
+
+    XMLElement* providerCfg = extCam->FirstChildElement("Provider");
+    if (providerCfg == nullptr) {
+        ALOGI("%s: no external camera provider config specified", __FUNCTION__);
+        return ret;
+    }
+
+    XMLElement* cameraIdOffset = providerCfg->FirstChildElement("CameraIdOffset");
+    if (cameraIdOffset != nullptr) {
+        ret.cameraIdOffset = std::atoi(cameraIdOffset->GetText());
+    }
+
+    XMLElement* ignore = providerCfg->FirstChildElement("ignore");
+    if (ignore == nullptr) {
+        ALOGI("%s: no internal ignored device specified", __FUNCTION__);
+        return ret;
+    }
+
+    XMLElement* id = ignore->FirstChildElement("id");
+    while (id != nullptr) {
+        const char* text = id->GetText();
+        if (text != nullptr) {
+            ret.mInternalDevices.insert(text);
+            ALOGI("%s: device %s will be ignored by external camera provider", __FUNCTION__, text);
+        }
+        id = id->NextSiblingElement("id");
+    }
+
+    XMLElement* deviceCfg = extCam->FirstChildElement("Device");
+    if (deviceCfg == nullptr) {
+        ALOGI("%s: no external camera device config specified", __FUNCTION__);
+        return ret;
+    }
+
+    XMLElement* jpegBufSz = deviceCfg->FirstChildElement("MaxJpegBufferSize");
+    if (jpegBufSz == nullptr) {
+        ALOGI("%s: no max jpeg buffer size specified", __FUNCTION__);
+    } else {
+        ret.maxJpegBufSize = jpegBufSz->UnsignedAttribute("bytes", /*Default*/ kDefaultJpegBufSize);
+    }
+
+    XMLElement* numVideoBuf = deviceCfg->FirstChildElement("NumVideoBuffers");
+    if (numVideoBuf == nullptr) {
+        ALOGI("%s: no num video buffers specified", __FUNCTION__);
+    } else {
+        ret.numVideoBuffers =
+                numVideoBuf->UnsignedAttribute("count", /*Default*/ kDefaultNumVideoBuffer);
+    }
+
+    XMLElement* numStillBuf = deviceCfg->FirstChildElement("NumStillBuffers");
+    if (numStillBuf == nullptr) {
+        ALOGI("%s: no num still buffers specified", __FUNCTION__);
+    } else {
+        ret.numStillBuffers =
+                numStillBuf->UnsignedAttribute("count", /*Default*/ kDefaultNumStillBuffer);
+    }
+
+    XMLElement* fpsList = deviceCfg->FirstChildElement("FpsList");
+    if (fpsList == nullptr) {
+        ALOGI("%s: no fps list specified", __FUNCTION__);
+    } else {
+        if (!updateFpsList(fpsList, ret.fpsLimits)) {
+            return ret;
+        }
+    }
+
+    XMLElement* depth = deviceCfg->FirstChildElement("Depth16Supported");
+    if (depth == nullptr) {
+        ret.depthEnabled = false;
+        ALOGI("%s: depth output is not enabled", __FUNCTION__);
+    } else {
+        ret.depthEnabled = depth->BoolAttribute("enabled", false);
+    }
+
+    if (ret.depthEnabled) {
+        XMLElement* depthFpsList = deviceCfg->FirstChildElement("DepthFpsList");
+        if (depthFpsList == nullptr) {
+            ALOGW("%s: no depth fps list specified", __FUNCTION__);
+        } else {
+            if (!updateFpsList(depthFpsList, ret.depthFpsLimits)) {
+                return ret;
+            }
+        }
+    }
+
+    XMLElement* minStreamSize = deviceCfg->FirstChildElement("MinimumStreamSize");
+    if (minStreamSize == nullptr) {
+        ALOGI("%s: no minimum stream size specified", __FUNCTION__);
+    } else {
+        ret.minStreamSize = {
+                static_cast<int32_t>(minStreamSize->UnsignedAttribute("width", /*Default*/ 0)),
+                static_cast<int32_t>(minStreamSize->UnsignedAttribute("height", /*Default*/ 0))};
+    }
+
+    XMLElement* orientation = deviceCfg->FirstChildElement("Orientation");
+    if (orientation == nullptr) {
+        ALOGI("%s: no sensor orientation specified", __FUNCTION__);
+    } else {
+        ret.orientation = orientation->IntAttribute("degree", /*Default*/ kDefaultOrientation);
+    }
+
+    ALOGI("%s: external camera cfg loaded: maxJpgBufSize %d,"
+          " num video buffers %d, num still buffers %d, orientation %d",
+          __FUNCTION__, ret.maxJpegBufSize, ret.numVideoBuffers, ret.numStillBuffers,
+          ret.orientation);
+    for (const auto& limit : ret.fpsLimits) {
+        ALOGI("%s: fpsLimitList: %dx%d@%f", __FUNCTION__, limit.size.width, limit.size.height,
+              limit.fpsUpperBound);
+    }
+    for (const auto& limit : ret.depthFpsLimits) {
+        ALOGI("%s: depthFpsLimitList: %dx%d@%f", __FUNCTION__, limit.size.width, limit.size.height,
+              limit.fpsUpperBound);
+    }
+    ALOGI("%s: minStreamSize: %dx%d", __FUNCTION__, ret.minStreamSize.width,
+          ret.minStreamSize.height);
+    return ret;
+}
+
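+// Parses an <FpsList> element into FpsLimitation entries. Returns false (leaving
+// fpsLimits unmodified) if any <Limit> row is malformed.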
+bool ExternalCameraConfig::updateFpsList(tinyxml2::XMLElement* fpsList,
+                                         std::vector<FpsLimitation>& fpsLimits) {
+    using namespace tinyxml2;
+    std::vector<FpsLimitation> limits;
+    // Track the previously parsed row so each <Limit> entry is validated against its
+    // predecessor: sizes must be strictly increasing and fps bounds strictly decreasing.
+    FpsLimitation prevLimit{{0, 0}, 1000.0};
+    XMLElement* row = fpsList->FirstChildElement("Limit");
+    while (row != nullptr) {
+        FpsLimitation limit = {
+                {/* width */ static_cast<int32_t>(row->UnsignedAttribute("width", /*Default*/ 0)),
+                 /* height */ static_cast<int32_t>(
+                         row->UnsignedAttribute("height", /*Default*/ 0))},
+                /* fpsUpperBound */ row->DoubleAttribute("fpsBound", /*Default*/ 1000.0)};
+        if (limit.size.width <= prevLimit.size.width ||
+            limit.size.height <= prevLimit.size.height ||
+            limit.fpsUpperBound >= prevLimit.fpsUpperBound) {
+            ALOGE("%s: FPS limit list must have increasing size and decreasing fps!"
+                  " Prev %dx%d@%f, Current %dx%d@%f",
+                  __FUNCTION__, prevLimit.size.width, prevLimit.size.height,
+                  prevLimit.fpsUpperBound, limit.size.width, limit.size.height,
+                  limit.fpsUpperBound);
+            return false;
+        }
+        limits.push_back(limit);
+        prevLimit = limit;
+        row = row->NextSiblingElement("Limit");
+    }
+    fpsLimits = limits;
+    return true;
+}
+
+ExternalCameraConfig::ExternalCameraConfig()
+    : cameraIdOffset(kDefaultCameraIdOffset),
+      maxJpegBufSize(kDefaultJpegBufSize),
+      numVideoBuffers(kDefaultNumVideoBuffer),
+      numStillBuffers(kDefaultNumStillBuffer),
+      depthEnabled(false),
+      orientation(kDefaultOrientation) {
+    fpsLimits.push_back({/* size */ {/* width */ 640, /* height */ 480}, /* fpsUpperBound */ 30.0});
+    fpsLimits.push_back({/* size */ {/* width */ 1280, /* height */ 720}, /* fpsUpperBound */ 7.5});
+    fpsLimits.push_back(
+            {/* size */ {/* width */ 1920, /* height */ 1080}, /* fpsUpperBound */ 5.0});
+    minStreamSize = {0, 0};
+}
+
+}  // namespace common
+}  // namespace external
+
+namespace device {
+namespace implementation {
+
+double SupportedV4L2Format::FrameRate::getFramesPerSecond() const {
+    return static_cast<double>(durationDenominator) / durationNumerator;
+}
+
+Frame::Frame(uint32_t width, uint32_t height, uint32_t fourcc)
+    : mWidth(width), mHeight(height), mFourcc(fourcc) {}
+Frame::~Frame() {}
+
+V4L2Frame::V4L2Frame(uint32_t w, uint32_t h, uint32_t fourcc, int bufIdx, int fd, uint32_t dataSize,
+                     uint64_t offset)
+    : Frame(w, h, fourcc), mBufferIndex(bufIdx), mFd(fd), mDataSize(dataSize), mOffset(offset) {}
+
+V4L2Frame::~V4L2Frame() {
+    unmap();
+}
+
+int V4L2Frame::getData(uint8_t** outData, size_t* dataSize) {
+    return map(outData, dataSize);
+}
+
+int V4L2Frame::map(uint8_t** data, size_t* dataSize) {
+    if (data == nullptr || dataSize == nullptr) {
+        ALOGI("%s: V4L2 buffer map bad argument: data %p, dataSize %p", __FUNCTION__, data,
+              dataSize);
+        return -EINVAL;
+    }
+
+    std::lock_guard<std::mutex> lk(mLock);
+    if (!mMapped) {
+        void* addr = mmap(nullptr, mDataSize, PROT_READ, MAP_SHARED, mFd, mOffset);
+        if (addr == MAP_FAILED) {
+            ALOGE("%s: V4L2 buffer map failed: %s", __FUNCTION__, strerror(errno));
+            return -EINVAL;
+        }
+        mData = static_cast<uint8_t*>(addr);
+        mMapped = true;
+    }
+    *data = mData;
+    *dataSize = mDataSize;
+    ALOGV("%s: V4L map FD %d, data %p size %zu", __FUNCTION__, mFd, mData, mDataSize);
+    return 0;
+}
+
+int V4L2Frame::unmap() {
+    std::lock_guard<std::mutex> lk(mLock);
+    if (mMapped) {
+        ALOGV("%s: V4L unmap data %p size %zu", __FUNCTION__, mData, mDataSize);
+        if (munmap(mData, mDataSize) != 0) {
+            ALOGE("%s: V4L2 buffer unmap failed: %s", __FUNCTION__, strerror(errno));
+            return -EINVAL;
+        }
+        mMapped = false;
+    }
+    return 0;
+}
+
+AllocatedFrame::AllocatedFrame(uint32_t w, uint32_t h) : Frame(w, h, V4L2_PIX_FMT_YUV420) {}
+AllocatedFrame::~AllocatedFrame() {}
+
+int AllocatedFrame::getData(uint8_t** outData, size_t* dataSize) {
+    YCbCrLayout layout;
+    int ret = allocate(&layout);
+    if (ret != 0) {
+        return ret;
+    }
+    *outData = mData.data();
+    *dataSize = mBufferSize;
+    return 0;
+}
+
+int AllocatedFrame::allocate(YCbCrLayout* out) {
+    std::lock_guard<std::mutex> lk(mLock);
+    if ((mWidth % 2) || (mHeight % 2)) {
+        ALOGE("%s: bad dimension %dx%d (not multiple of 2)", __FUNCTION__, mWidth, mHeight);
+        return -EINVAL;
+    }
+
+    // This frame might be sent to jpeglib to be encoded. Since AllocatedFrame only contains YUV420,
+    // jpeglib expects height and width of Y component to be an integral multiple of 2*DCTSIZE,
+    // and heights and widths of Cb and Cr components to be an integral multiple of DCTSIZE. If the
+    // image size does not meet this requirement, libjpeg expects its input to be padded to meet the
+    // constraints. This padding is removed from the final encoded image so the content in the
+    // padding doesn't matter. What matters is that the memory is accessible to jpeglib at the time
+    // of encoding.
+    // For example, if the image size is 1500x844 and DCTSIZE is 8, jpeglib expects a YUV 420
+    // frame with components of following sizes:
+    //   Y:      1504x848 because 1504 and 848 are the next smallest multiples of 2*8
+    //   Cb/Cr:  752x424 which are the next smallest multiples of 8
+
+    // jpeglib takes an array of row pointers which makes vertical padding trivial when setting up
+    // the pointers. Padding horizontally is a bit more complicated. AllocatedFrame holds the data
+    // in a flattened buffer, which means memory accesses past a row will flow into the next logical
+    // row. For any row of a component, we can consider the first few bytes of the next row as
+    // padding for the current one. This is true for the Y and Cb components and all but the last
+    // row of the Cr component. Reading past the last row of the Cr component would lead to
+    // undefined behavior as libjpeg attempts to read memory past the allocated buffer. To prevent
+    // this, the buffer allocated here is padded such that libjpeg never accesses unallocated
+    // memory when reading the last row. Effectively, we only need to ensure that the last row of
+    // the Cr component has a width that is an integral multiple of DCTSIZE.
+
+    size_t dataSize = mWidth * mHeight * 3 / 2;  // YUV420
+
+    size_t cbWidth = mWidth / 2;
+    size_t requiredCbWidth = DCTSIZE * ((cbWidth + DCTSIZE - 1) / DCTSIZE);
+    size_t padding = requiredCbWidth - cbWidth;
+    size_t finalSize = dataSize + padding;
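+    // E.g. for the 1500x844 example above: cbWidth = 750, requiredCbWidth = 752, so two
+    // extra bytes are appended to the buffer.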
+
+    if (mData.size() != finalSize) {
+        mData.resize(finalSize);
+        mBufferSize = dataSize;
+    }
+
+    if (out != nullptr) {
+        out->y = mData.data();
+        out->yStride = mWidth;
+        uint8_t* cbStart = mData.data() + mWidth * mHeight;
+        uint8_t* crStart = cbStart + mWidth * mHeight / 4;
+        out->cb = cbStart;
+        out->cr = crStart;
+        out->cStride = mWidth / 2;
+        out->chromaStep = 1;
+    }
+    return 0;
+}
+
+int AllocatedFrame::getLayout(YCbCrLayout* out) {
+    IMapper::Rect noCrop = {0, 0, static_cast<int32_t>(mWidth), static_cast<int32_t>(mHeight)};
+    return getCroppedLayout(noCrop, out);
+}
+
+int AllocatedFrame::getCroppedLayout(const IMapper::Rect& rect, YCbCrLayout* out) {
+    if (out == nullptr) {
+        ALOGE("%s: null out", __FUNCTION__);
+        return -1;
+    }
+
+    std::lock_guard<std::mutex> lk(mLock);
+    if ((rect.left + rect.width) > static_cast<int>(mWidth) ||
+        (rect.top + rect.height) > static_cast<int>(mHeight) || (rect.left % 2) || (rect.top % 2) ||
+        (rect.width % 2) || (rect.height % 2)) {
+        ALOGE("%s: bad rect left %d top %d w %d h %d", __FUNCTION__, rect.left, rect.top,
+              rect.width, rect.height);
+        return -1;
+    }
+
+    out->y = mData.data() + mWidth * rect.top + rect.left;
+    out->yStride = mWidth;
+    uint8_t* cbStart = mData.data() + mWidth * mHeight;
+    uint8_t* crStart = cbStart + mWidth * mHeight / 4;
+    out->cb = cbStart + mWidth * rect.top / 4 + rect.left / 2;
+    out->cr = crStart + mWidth * rect.top / 4 + rect.left / 2;
+    out->cStride = mWidth / 2;
+    out->chromaStep = 1;
+    return 0;
+}
+
+bool isAspectRatioClose(float ar1, float ar2) {
+    constexpr float kAspectRatioMatchThres = 0.025f;  // This threshold is good enough to
+                                                      // distinguish 4:3/16:9/20:9 1.33/1.78/2
+    return std::abs(ar1 - ar2) < kAspectRatioMatchThres;
+}
+
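+// Looks up bufId in the per-stream circulating buffer cache, importing (and caching) the
+// handle on first use. On success *outBufPtr points at the cached imported handle.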
+aidl::android::hardware::camera::common::Status importBufferImpl(
+        /*inout*/ std::map<int, CirculatingBuffers>& circulatingBuffers,
+        /*inout*/ HandleImporter& handleImporter, int32_t streamId, uint64_t bufId,
+        buffer_handle_t buf,
+        /*out*/ buffer_handle_t** outBufPtr) {
+    using ::aidl::android::hardware::camera::common::Status;
+    if (buf == nullptr && bufId == BUFFER_ID_NO_BUFFER) {
+        ALOGE("%s: bufferId %" PRIu64 " has null buffer handle!", __FUNCTION__, bufId);
+        return Status::ILLEGAL_ARGUMENT;
+    }
+
+    CirculatingBuffers& cbs = circulatingBuffers[streamId];
+    if (cbs.count(bufId) == 0) {
+        if (buf == nullptr) {
+            ALOGE("%s: bufferId %" PRIu64 " has null buffer handle!", __FUNCTION__, bufId);
+            return Status::ILLEGAL_ARGUMENT;
+        }
+        // Register a newly seen buffer
+        buffer_handle_t importedBuf = buf;
+        handleImporter.importBuffer(importedBuf);
+        if (importedBuf == nullptr) {
+            ALOGE("%s: output buffer for stream %d is invalid!", __FUNCTION__, streamId);
+            return Status::INTERNAL_ERROR;
+        } else {
+            cbs[bufId] = importedBuf;
+        }
+    }
+    *outBufPtr = &cbs[bufId];
+    return Status::OK;
+}
+
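+// Infers the V4L2 fourcc of a YCbCr layout from its chroma pointers and chroma step:
+// interleaved chroma maps to NV12/NV21, planar layouts to YU12/YV12, anything else to
+// FLEX_YUV_GENERIC.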
+uint32_t getFourCcFromLayout(const YCbCrLayout& layout) {
+    intptr_t cb = reinterpret_cast<intptr_t>(layout.cb);
+    intptr_t cr = reinterpret_cast<intptr_t>(layout.cr);
+    if (std::abs(cb - cr) == 1 && layout.chromaStep == 2) {
+        // Interleaved format
+        if (layout.cb > layout.cr) {
+            return V4L2_PIX_FMT_NV21;
+        } else {
+            return V4L2_PIX_FMT_NV12;
+        }
+    } else if (layout.chromaStep == 1) {
+        // Planar format
+        if (layout.cb > layout.cr) {
+            return V4L2_PIX_FMT_YVU420;  // YV12
+        } else {
+            return V4L2_PIX_FMT_YUV420;  // YU12
+        }
+    } else {
+        return FLEX_YUV_GENERIC;
+    }
+}
+
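+// Computes a centered crop rectangle of inSize that matches the aspect ratio of outSize.
+// VERTICAL crops the height, HORIZONTAL crops the width; returns non-zero if the output
+// aspect ratio cannot be reached by cropping alone.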
+int getCropRect(CroppingType ct, const Size& inSize, const Size& outSize, IMapper::Rect* out) {
+    if (out == nullptr) {
+        ALOGE("%s: out is null", __FUNCTION__);
+        return -1;
+    }
+
+    uint32_t inW = inSize.width;
+    uint32_t inH = inSize.height;
+    uint32_t outW = outSize.width;
+    uint32_t outH = outSize.height;
+
+    // Handle special case where aspect ratio is close to input but scaled
+    // dimension is slightly larger than input
+    float arIn = ASPECT_RATIO(inSize);
+    float arOut = ASPECT_RATIO(outSize);
+    if (isAspectRatioClose(arIn, arOut)) {
+        out->left = 0;
+        out->top = 0;
+        out->width = static_cast<int32_t>(inW);
+        out->height = static_cast<int32_t>(inH);
+        return 0;
+    }
+
+    if (ct == VERTICAL) {
+        uint64_t scaledOutH = static_cast<uint64_t>(outH) * inW / outW;
+        if (scaledOutH > inH) {
+            ALOGE("%s: Output size %dx%d cannot be vertically cropped from input size %dx%d",
+                  __FUNCTION__, outW, outH, inW, inH);
+            return -1;
+        }
+        scaledOutH = scaledOutH & ~0x1;  // make it multiple of 2
+
+        out->left = 0;
+        out->top = static_cast<int32_t>((inH - scaledOutH) / 2) & ~0x1;
+        out->width = static_cast<int32_t>(inW);
+        out->height = static_cast<int32_t>(scaledOutH);
+        ALOGV("%s: crop %dx%d to %dx%d: top %d, scaledH %d", __FUNCTION__, inW, inH, outW, outH,
+              out->top, static_cast<int32_t>(scaledOutH));
+    } else {
+        uint64_t scaledOutW = static_cast<uint64_t>(outW) * inH / outH;
+        if (scaledOutW > inW) {
+            ALOGE("%s: Output size %dx%d cannot be horizontally cropped from input size %dx%d",
+                  __FUNCTION__, outW, outH, inW, inH);
+            return -1;
+        }
+        scaledOutW = scaledOutW & ~0x1;  // make it multiple of 2
+
+        out->left = static_cast<int32_t>((inW - scaledOutW) / 2) & ~0x1;
+        out->top = 0;
+        out->width = static_cast<int32_t>(scaledOutW);
+        out->height = static_cast<int32_t>(inH);
+        ALOGV("%s: crop %dx%d to %dx%d: top %d, scaledW %d", __FUNCTION__, inW, inH, outW, outH,
+              out->top, static_cast<int32_t>(scaledOutW));
+    }
+
+    return 0;
+}
+
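+// Converts (or copies) an I420 input layout into the output layout in the requested V4L2
+// format using libyuv. Only NV12/NV21/YU12/YV12 outputs are supported; flexible YUV
+// layouts are rejected.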
+int formatConvert(const YCbCrLayout& in, const YCbCrLayout& out, Size sz, uint32_t format) {
+    int ret = 0;
+    switch (format) {
+        case V4L2_PIX_FMT_NV21:
+            ret = libyuv::I420ToNV21(
+                    static_cast<uint8_t*>(in.y), static_cast<int32_t>(in.yStride),
+                    static_cast<uint8_t*>(in.cb), static_cast<int32_t>(in.cStride),
+                    static_cast<uint8_t*>(in.cr), static_cast<int32_t>(in.cStride),
+                    static_cast<uint8_t*>(out.y), static_cast<int32_t>(out.yStride),
+                    static_cast<uint8_t*>(out.cr), static_cast<int32_t>(out.cStride),
+                    static_cast<int32_t>(sz.width), static_cast<int32_t>(sz.height));
+            if (ret != 0) {
+                ALOGE("%s: convert to NV21 buffer failed! ret %d", __FUNCTION__, ret);
+                return ret;
+            }
+            break;
+        case V4L2_PIX_FMT_NV12:
+            ret = libyuv::I420ToNV12(
+                    static_cast<uint8_t*>(in.y), static_cast<int32_t>(in.yStride),
+                    static_cast<uint8_t*>(in.cb), static_cast<int32_t>(in.cStride),
+                    static_cast<uint8_t*>(in.cr), static_cast<int32_t>(in.cStride),
+                    static_cast<uint8_t*>(out.y), static_cast<int32_t>(out.yStride),
+                    static_cast<uint8_t*>(out.cb), static_cast<int32_t>(out.cStride),
+                    static_cast<int32_t>(sz.width), static_cast<int32_t>(sz.height));
+            if (ret != 0) {
+                ALOGE("%s: convert to NV12 buffer failed! ret %d", __FUNCTION__, ret);
+                return ret;
+            }
+            break;
+        case V4L2_PIX_FMT_YVU420:  // YV12
+        case V4L2_PIX_FMT_YUV420:  // YU12
+            // TODO: maybe we can speed this up by avoiding this copy somehow?
+            ret = libyuv::I420Copy(static_cast<uint8_t*>(in.y), static_cast<int32_t>(in.yStride),
+                                   static_cast<uint8_t*>(in.cb), static_cast<int32_t>(in.cStride),
+                                   static_cast<uint8_t*>(in.cr), static_cast<int32_t>(in.cStride),
+                                   static_cast<uint8_t*>(out.y), static_cast<int32_t>(out.yStride),
+                                   static_cast<uint8_t*>(out.cb), static_cast<int32_t>(out.cStride),
+                                   static_cast<uint8_t*>(out.cr), static_cast<int32_t>(out.cStride),
+                                   static_cast<int32_t>(sz.width), static_cast<int32_t>(sz.height));
+            if (ret != 0) {
+                ALOGE("%s: copy to YV12 or YU12 buffer failed! ret %d", __FUNCTION__, ret);
+                return ret;
+            }
+            break;
+        case FLEX_YUV_GENERIC:
+            // TODO: b/72261744 write to arbitrary flexible YUV layout. Slow.
+            ALOGE("%s: unsupported flexible yuv layout"
+                  " y %p cb %p cr %p y_str %d c_str %d c_step %d",
+                  __FUNCTION__, out.y, out.cb, out.cr, out.yStride, out.cStride, out.chromaStep);
+            return -1;
+        default:
+            ALOGE("%s: unknown YUV format 0x%x!", __FUNCTION__, format);
+            return -1;
+    }
+    return 0;
+}
+
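+// Encodes a YU12 frame into the caller-provided output buffer using libjpeg in raw-data
+// mode. app1Buffer/app1Size optionally carry an APP1 segment (e.g. EXIF metadata) to embed,
+// and actualCodeSize is set to the number of bytes written on success.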
+int encodeJpegYU12(const Size& inSz, const YCbCrLayout& inLayout, int jpegQuality,
+                   const void* app1Buffer, size_t app1Size, void* out, size_t maxOutSize,
+                   size_t& actualCodeSize) {
+    /* libjpeg is a C library so we use C-style "inheritance" by
+     * putting libjpeg's jpeg_destination_mgr first in our custom
+     * struct. This allows us to cast jpeg_destination_mgr* to
+     * CustomJpegDestMgr* when we get it passed to us in a callback */
+    struct CustomJpegDestMgr {
+        struct jpeg_destination_mgr mgr;
+        JOCTET* mBuffer;
+        size_t mBufferSize;
+        size_t mEncodedSize;
+        bool mSuccess;
+    } dmgr;
+
+    jpeg_compress_struct cinfo = {};
+    jpeg_error_mgr jerr;
+
+    /* Initialize error handling with standard callbacks, but
+     * then override output_message (to print to ALOG) and
+     * error_exit to set a flag and print a message instead
+     * of killing the whole process */
+    cinfo.err = jpeg_std_error(&jerr);
+
+    cinfo.err->output_message = [](j_common_ptr cinfo) {
+        char buffer[JMSG_LENGTH_MAX];
+
+        /* Create the message */
+        (*cinfo->err->format_message)(cinfo, buffer);
+        ALOGE("libjpeg error: %s", buffer);
+    };
+    cinfo.err->error_exit = [](j_common_ptr cinfo) {
+        (*cinfo->err->output_message)(cinfo);
+        if (cinfo->client_data) {
+            auto& dmgr = *reinterpret_cast<CustomJpegDestMgr*>(cinfo->client_data);
+            dmgr.mSuccess = false;
+        }
+    };
+
+    /* Now that we initialized some callbacks, let's create our compressor */
+    jpeg_create_compress(&cinfo);
+
+    /* Initialize our destination manager */
+    dmgr.mBuffer = static_cast<JOCTET*>(out);
+    dmgr.mBufferSize = maxOutSize;
+    dmgr.mEncodedSize = 0;
+    dmgr.mSuccess = true;
+    cinfo.client_data = static_cast<void*>(&dmgr);
+
+    /* These lambdas become C-style function pointers and as per C++11 spec
+     * may not capture anything */
+    dmgr.mgr.init_destination = [](j_compress_ptr cinfo) {
+        auto& dmgr = reinterpret_cast<CustomJpegDestMgr&>(*cinfo->dest);
+        dmgr.mgr.next_output_byte = dmgr.mBuffer;
+        dmgr.mgr.free_in_buffer = dmgr.mBufferSize;
+        ALOGV("%s:%d jpeg start: %p [%zu]", __FUNCTION__, __LINE__, dmgr.mBuffer, dmgr.mBufferSize);
+    };
+
+    dmgr.mgr.empty_output_buffer = [](j_compress_ptr cinfo __unused) {
+        ALOGV("%s:%d Out of buffer", __FUNCTION__, __LINE__);
+        return 0;
+    };
+
+    dmgr.mgr.term_destination = [](j_compress_ptr cinfo) {
+        auto& dmgr = reinterpret_cast<CustomJpegDestMgr&>(*cinfo->dest);
+        dmgr.mEncodedSize = dmgr.mBufferSize - dmgr.mgr.free_in_buffer;
+        ALOGV("%s:%d Done with jpeg: %zu", __FUNCTION__, __LINE__, dmgr.mEncodedSize);
+    };
+    cinfo.dest = reinterpret_cast<struct jpeg_destination_mgr*>(&dmgr);
+
+    /* We are going to be using JPEG in raw data mode, so we are passing
+     * straight subsampled planar YCbCr and it will not touch our pixel
+     * data or do any scaling or anything */
+    cinfo.image_width = inSz.width;
+    cinfo.image_height = inSz.height;
+    cinfo.input_components = 3;
+    cinfo.in_color_space = JCS_YCbCr;
+
+    /* Initialize defaults and then override what we want */
+    jpeg_set_defaults(&cinfo);
+
+    jpeg_set_quality(&cinfo, jpegQuality, 1);
+    jpeg_set_colorspace(&cinfo, JCS_YCbCr);
+    cinfo.raw_data_in = 1;
+    cinfo.dct_method = JDCT_IFAST;
+
+    /* Configure sampling factors. The sampling factor is JPEG subsampling 420
+     * because the source format is YUV420. Note that libjpeg sampling factors
+     * are... a little weird. Sampling of Y=2,U=1,V=1 means there is 1 U and
+     * 1 V value for each 2x2 block of Y values */
+    cinfo.comp_info[0].h_samp_factor = 2;
+    cinfo.comp_info[0].v_samp_factor = 2;
+    cinfo.comp_info[1].h_samp_factor = 1;
+    cinfo.comp_info[1].v_samp_factor = 1;
+    cinfo.comp_info[2].h_samp_factor = 1;
+    cinfo.comp_info[2].v_samp_factor = 1;
+
+    /* Start the compressor */
+    jpeg_start_compress(&cinfo, TRUE);
+
+    /* Let's not hardcode YUV420 in 6 places... 5 was enough */
+    int maxVSampFactor = cinfo.max_v_samp_factor;
+    int cVSubSampling = cinfo.comp_info[0].v_samp_factor / cinfo.comp_info[1].v_samp_factor;
+
+    /* Compute our macroblock height, so we can pad our input to be vertically
+     * macroblock aligned. No need for horizontal alignment since AllocatedFrame already
+     * pads horizontally */
+
+    size_t mcuV = DCTSIZE * maxVSampFactor;
+    size_t paddedHeight = mcuV * ((inSz.height + mcuV - 1) / mcuV);
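+    /* For YUV420 input maxVSampFactor is 2, so mcuV is 16; e.g. an 844-line image is padded
+     * to 848 rows of Y (and 424 rows of Cb/Cr) */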
+
+    /* libjpeg uses arrays of row pointers, which makes it really easy to pad
+     * data vertically (unfortunately doesn't help horizontally) */
+    std::vector<JSAMPROW> yLines(paddedHeight);
+    std::vector<JSAMPROW> cbLines(paddedHeight / cVSubSampling);
+    std::vector<JSAMPROW> crLines(paddedHeight / cVSubSampling);
+
+    uint8_t* py = static_cast<uint8_t*>(inLayout.y);
+    uint8_t* pcb = static_cast<uint8_t*>(inLayout.cb);
+    uint8_t* pcr = static_cast<uint8_t*>(inLayout.cr);
+
+    for (int32_t i = 0; i < paddedHeight; i++) {
+        /* Once we are in the padding territory we still point to the last line
+         * effectively replicating it several times ~ CLAMP_TO_EDGE */
+        int li = std::min(i, inSz.height - 1);
+        yLines[i] = static_cast<JSAMPROW>(py + li * inLayout.yStride);
+        if (i < paddedHeight / cVSubSampling) {
+            li = std::min(i, (inSz.height - 1) / cVSubSampling);
+            cbLines[i] = static_cast<JSAMPROW>(pcb + li * inLayout.cStride);
+            crLines[i] = static_cast<JSAMPROW>(pcr + li * inLayout.cStride);
+        }
+    }
+
+    /* If APP1 data was passed in, use it */
+    if (app1Buffer && app1Size) {
+        jpeg_write_marker(&cinfo, JPEG_APP0 + 1, static_cast<const JOCTET*>(app1Buffer), app1Size);
+    }
+
+    /* While we still have padded height left to go, keep giving it one
+     * macroblock at a time. */
+    while (cinfo.next_scanline < cinfo.image_height) {
+        const uint32_t batchSize = DCTSIZE * maxVSampFactor;
+        const uint32_t nl = cinfo.next_scanline;
+        JSAMPARRAY planes[3]{&yLines[nl], &cbLines[nl / cVSubSampling],
+                             &crLines[nl / cVSubSampling]};
+
+        uint32_t done = jpeg_write_raw_data(&cinfo, planes, batchSize);
+
+        if (done != batchSize) {
+            ALOGE("%s: compressed %u lines, expected %u (total %u/%u)", __FUNCTION__, done,
+                  batchSize, cinfo.next_scanline, cinfo.image_height);
+            return -1;
+        }
+    }
+
+    /* This will flush everything */
+    jpeg_finish_compress(&cinfo);
+
+    /* Grab the actual code size and set it */
+    actualCodeSize = dmgr.mEncodedSize;
+
+    return 0;
+}
+
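+// Returns the largest (by pixel count) entry in ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
+// or {0, 0} if the static metadata does not list a non-zero thumbnail size.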
+Size getMaxThumbnailResolution(const common::V1_0::helper::CameraMetadata& chars) {
+    Size thumbSize{0, 0};
+    camera_metadata_ro_entry entry = chars.find(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES);
+    for (uint32_t i = 0; i < entry.count; i += 2) {
+        Size sz{.width = entry.data.i32[i], .height = entry.data.i32[i + 1]};
+        if (sz.width * sz.height > thumbSize.width * thumbSize.height) {
+            thumbSize = sz;
+        }
+    }
+
+    if (thumbSize.width * thumbSize.height == 0) {
+        ALOGW("%s: non-zero thumbnail size not available", __FUNCTION__);
+    }
+
+    return thumbSize;
+}
+
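+// Closes and deletes the native handles backing the release fences of the input and
+// output buffers in each capture result.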
+void freeReleaseFences(std::vector<CaptureResult>& results) {
+    for (auto& result : results) {
+        native_handle_t* inputReleaseFence =
+                ::android::makeFromAidl(result.inputBuffer.releaseFence);
+        if (inputReleaseFence != nullptr) {
+            native_handle_close(inputReleaseFence);
+            native_handle_delete(inputReleaseFence);
+        }
+        for (auto& buf : result.outputBuffers) {
+            native_handle_t* outReleaseFence = ::android::makeFromAidl(buf.releaseFence);
+            if (outReleaseFence != nullptr) {
+                native_handle_close(outReleaseFence);
+                native_handle_delete(outReleaseFence);
+            }
+        }
+    }
+}
+
+#define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0]))
+#define UPDATE(md, tag, data, size)               \
+    do {                                          \
+        if ((md).update((tag), (data), (size))) { \
+            ALOGE("Update " #tag " failed!");     \
+            return BAD_VALUE;                     \
+        }                                         \
+    } while (0)
+
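+// Fills the result metadata tags that have fixed values for a USB camera (3A states and
+// locks, flash state, pipeline depth, crop region, sensor timestamp, and statistics modes).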
+status_t fillCaptureResultCommon(CameraMetadata& md, nsecs_t timestamp,
+                                 camera_metadata_ro_entry& activeArraySize) {
+    if (activeArraySize.count < 4) {
+        ALOGE("%s: cannot find active array size!", __FUNCTION__);
+        return -EINVAL;
+    }
+    // android.control
+    // For USB cameras, we don't know the AE state. Set the state to converged to
+    // indicate the frame should be good to use. Then apps don't have to wait for
+    // the AE state.
+    const uint8_t aeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
+    UPDATE(md, ANDROID_CONTROL_AE_STATE, &aeState, 1);
+
+    const uint8_t ae_lock = ANDROID_CONTROL_AE_LOCK_OFF;
+    UPDATE(md, ANDROID_CONTROL_AE_LOCK, &ae_lock, 1);
+
+    // Set AWB state to converged to indicate the frame should be good to use.
+    const uint8_t awbState = ANDROID_CONTROL_AWB_STATE_CONVERGED;
+    UPDATE(md, ANDROID_CONTROL_AWB_STATE, &awbState, 1);
+
+    const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
+    UPDATE(md, ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
+
+    const uint8_t flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
+    UPDATE(md, ANDROID_FLASH_STATE, &flashState, 1);
+
+    // This means pipeline latency of X frame intervals. The maximum number is 4.
+    const uint8_t requestPipelineMaxDepth = 4;
+    UPDATE(md, ANDROID_REQUEST_PIPELINE_DEPTH, &requestPipelineMaxDepth, 1);
+
+    // android.scaler
+    const int32_t crop_region[] = {
+            activeArraySize.data.i32[0],
+            activeArraySize.data.i32[1],
+            activeArraySize.data.i32[2],
+            activeArraySize.data.i32[3],
+    };
+    UPDATE(md, ANDROID_SCALER_CROP_REGION, crop_region, ARRAY_SIZE(crop_region));
+
+    // android.sensor
+    UPDATE(md, ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
+
+    // android.statistics
+    const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
+    UPDATE(md, ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMapMode, 1);
+
+    const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
+    UPDATE(md, ANDROID_STATISTICS_SCENE_FLICKER, &sceneFlicker, 1);
+
+    return OK;
+}
+
+#undef ARRAY_SIZE
+#undef UPDATE
+
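+// Maps the source V4L2 frame and copies its contents into a heap buffer owned by this
+// object.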
+AllocatedV4L2Frame::AllocatedV4L2Frame(std::shared_ptr<V4L2Frame> frameIn)
+    : Frame(frameIn->mWidth, frameIn->mHeight, frameIn->mFourcc) {
+    uint8_t* dataIn;
+    size_t dataSize;
+    if (frameIn->getData(&dataIn, &dataSize) != 0) {
+        ALOGE("%s: map input V4L2 frame failed!", __FUNCTION__);
+        return;
+    }
+
+    mData.resize(dataSize);
+    std::memcpy(mData.data(), dataIn, dataSize);
+}
+
+AllocatedV4L2Frame::~AllocatedV4L2Frame() {}
+
+int AllocatedV4L2Frame::getData(uint8_t** outData, size_t* dataSize) {
+    if (outData == nullptr || dataSize == nullptr) {
+        ALOGE("%s: outData(%p)/dataSize(%p) must not be null", __FUNCTION__, outData, dataSize);
+        return -1;
+    }
+
+    *outData = mData.data();
+    *dataSize = mData.size();
+    return 0;
+}
+
+}  // namespace implementation
+}  // namespace device
+}  // namespace camera
+}  // namespace hardware
+}  // namespace android
\ No newline at end of file
diff --git a/camera/device/default/ExternalCameraUtils.h b/camera/device/default/ExternalCameraUtils.h
new file mode 100644
index 0000000..b37933c
--- /dev/null
+++ b/camera/device/default/ExternalCameraUtils.h
@@ -0,0 +1,300 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef HARDWARE_INTERFACES_CAMERA_DEVICE_DEFAULT_EXTERNALCAMERAUTILS_H_
+#define HARDWARE_INTERFACES_CAMERA_DEVICE_DEFAULT_EXTERNALCAMERAUTILS_H_
+
+#include <CameraMetadata.h>
+#include <HandleImporter.h>
+#include <aidl/android/hardware/camera/common/Status.h>
+#include <aidl/android/hardware/camera/device/CaptureResult.h>
+#include <aidl/android/hardware/camera/device/ErrorCode.h>
+#include <aidl/android/hardware/camera/device/NotifyMsg.h>
+#include <aidl/android/hardware/graphics/common/BufferUsage.h>
+#include <aidl/android/hardware/graphics/common/PixelFormat.h>
+#include <tinyxml2.h>
+#include <unordered_map>
+#include <unordered_set>
+
+using ::aidl::android::hardware::camera::common::Status;
+using ::aidl::android::hardware::camera::device::CaptureResult;
+using ::aidl::android::hardware::camera::device::ErrorCode;
+using ::aidl::android::hardware::camera::device::NotifyMsg;
+using ::aidl::android::hardware::graphics::common::BufferUsage;
+using ::aidl::android::hardware::graphics::common::PixelFormat;
+using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
+using ::android::hardware::camera::common::V1_0::helper::HandleImporter;
+
+namespace android {
+namespace hardware {
+namespace camera {
+
+namespace external {
+namespace common {
+
+struct Size {
+    int32_t width;
+    int32_t height;
+
+    bool operator==(const Size& other) const {
+        return (width == other.width && height == other.height);
+    }
+};
+
+struct SizeHasher {
+    size_t operator()(const Size& sz) const {
+        size_t result = 1;
+        result = 31 * result + sz.width;
+        result = 31 * result + sz.height;
+        return result;
+    }
+};
+
+struct ExternalCameraConfig {
+    static const char* kDefaultCfgPath;
+    static ExternalCameraConfig loadFromCfg(const char* cfgPath = kDefaultCfgPath);
+
+    // CameraId base offset for numerical representation
+    uint32_t cameraIdOffset;
+
+    // List of internal V4L2 video nodes that the external camera HAL must ignore.
+    std::unordered_set<std::string> mInternalDevices;
+
+    // Maximal size of a JPEG buffer, in bytes
+    int32_t maxJpegBufSize;
+
+    // Maximum size that can sustain 30fps streaming
+    Size maxVideoSize;
+
+    // Size of the V4L2 buffer queue when the streaming size is <= maxVideoSize
+    uint32_t numVideoBuffers;
+
+    // Size of the V4L2 buffer queue when the streaming size is > maxVideoSize
+    uint32_t numStillBuffers;
+
+    // Indication that the device connected supports depth output
+    bool depthEnabled;
+
+    struct FpsLimitation {
+        Size size;
+        double fpsUpperBound;
+    };
+    std::vector<FpsLimitation> fpsLimits;
+    std::vector<FpsLimitation> depthFpsLimits;
+
+    // Minimum output stream size
+    Size minStreamSize;
+
+    // The value of android.sensor.orientation
+    int32_t orientation;
+
+  private:
+    ExternalCameraConfig();
+    static bool updateFpsList(tinyxml2::XMLElement* fpsList, std::vector<FpsLimitation>& fpsLimits);
+};
+
+}  // namespace common
+}  // namespace external
+
+namespace device {
+namespace implementation {
+
+struct SupportedV4L2Format {
+    int32_t width;
+    int32_t height;
+    uint32_t fourcc;
+    // All supported frame rates for this w/h/fourcc combination
+    struct FrameRate {
+        // Duration (in seconds) of a single frame.
+        // Numerator and denominator of the frame duration are stored separately.
+        // For ex. a frame lasting 1/30 of a second will be stored as {1, 30}
+        uint32_t durationNumerator;         // frame duration numerator.   Ex: 1
+        uint32_t durationDenominator;       // frame duration denominator. Ex: 30
+        double getFramesPerSecond() const;  // FPS as double.        Ex: 30.0
+    };
+    std::vector<FrameRate> frameRates;
+};
+
+// A base class with basic information about a frame
+struct Frame : public std::enable_shared_from_this<Frame> {
+  public:
+    Frame(uint32_t width, uint32_t height, uint32_t fourcc);
+    virtual ~Frame();
+    const int32_t mWidth;
+    const int32_t mHeight;
+    const uint32_t mFourcc;
+
+    // getData might involve map/allocation
+    virtual int getData(uint8_t** outData, size_t* dataSize) = 0;
+};
+
+// A class that provides access to a dequeued V4L2 frame buffer (mostly in MJPG format).
+// It also carries the information needed to enqueue the buffer back to the V4L2 buffer queue.
+class V4L2Frame : public Frame {
+  public:
+    V4L2Frame(uint32_t w, uint32_t h, uint32_t fourcc, int bufIdx, int fd, uint32_t dataSize,
+              uint64_t offset);
+    virtual ~V4L2Frame();
+
+    virtual int getData(uint8_t** outData, size_t* dataSize) override;
+
+    const int mBufferIndex;  // for later enqueue
+    int map(uint8_t** data, size_t* dataSize);
+    int unmap();
+
+  private:
+    std::mutex mLock;
+    const int mFd;  // used for mmap but doesn't claim ownership
+    const size_t mDataSize;
+    const uint64_t mOffset;  // used for mmap
+    uint8_t* mData = nullptr;
+    bool mMapped = false;
+};
+
+// A RAII class representing a CPU-allocated YUV frame used as an intermediate buffer
+// when generating output images.
+class AllocatedFrame : public Frame {
+  public:
+    AllocatedFrame(uint32_t w, uint32_t h);  // only support V4L2_PIX_FMT_YUV420 for now
+    ~AllocatedFrame() override;
+
+    virtual int getData(uint8_t** outData, size_t* dataSize) override;
+
+    int allocate(YCbCrLayout* out = nullptr);
+    int getLayout(YCbCrLayout* out);
+    int getCroppedLayout(const IMapper::Rect&, YCbCrLayout* out);  // return non-zero for bad input
+  private:
+    std::mutex mLock;
+    std::vector<uint8_t> mData;
+    size_t mBufferSize;  // size of mData before padding. Actual size of mData might be slightly
+                         // bigger to horizontally pad the frame for jpeglib.
+};
+
+enum CroppingType { HORIZONTAL = 0, VERTICAL = 1 };
+
+// Aspect ratio is defined as width/height here and ExternalCameraDevice
+// will guarantee all supported sizes have width >= height (so aspect ratio >= 1.0)
+#define ASPECT_RATIO(sz) (static_cast<float>((sz).width) / (sz).height)
+const float kMaxAspectRatio = std::numeric_limits<float>::max();
+const float kMinAspectRatio = 1.f;
+
+bool isAspectRatioClose(float ar1, float ar2);
+
+struct HalStreamBuffer {
+    int32_t streamId;
+    int64_t bufferId;
+    int32_t width;
+    int32_t height;
+    ::aidl::android::hardware::graphics::common::PixelFormat format;
+    ::aidl::android::hardware::graphics::common::BufferUsage usage;
+    buffer_handle_t* bufPtr;
+    int acquireFence;
+    bool fenceTimeout;
+};
+
+struct HalRequest {
+    int32_t frameNumber;
+    common::V1_0::helper::CameraMetadata setting;
+    std::shared_ptr<Frame> frameIn;
+    nsecs_t shutterTs;
+    std::vector<HalStreamBuffer> buffers;
+};
+
+static const uint64_t BUFFER_ID_NO_BUFFER = 0;
+
+// Buffers currently circulating between the HAL and the camera service.
+// key: bufferId sent via the AIDL interface
+// value: imported buffer_handle_t
+// A buffer is imported during processCaptureRequest (or requestStreamBuffers
+// when the HAL buffer manager is enabled) and is freed when the stream is
+// deleted or the camera device session is closed.
+typedef std::unordered_map<uint64_t, buffer_handle_t> CirculatingBuffers;
+
+aidl::android::hardware::camera::common::Status importBufferImpl(
+        /*inout*/ std::map<int, CirculatingBuffers>& circulatingBuffers,
+        /*inout*/ HandleImporter& handleImporter, int32_t streamId, uint64_t bufId,
+        buffer_handle_t buf,
+        /*out*/ buffer_handle_t** outBufPtr);
+
+static const uint32_t FLEX_YUV_GENERIC =
+        static_cast<uint32_t>('F') | static_cast<uint32_t>('L') << 8 |
+        static_cast<uint32_t>('E') << 16 | static_cast<uint32_t>('X') << 24;
+
+// returns FLEX_YUV_GENERIC for formats other than YV12/YU12/NV12/NV21
+uint32_t getFourCcFromLayout(const YCbCrLayout&);
+
+using ::android::hardware::camera::external::common::Size;
+int getCropRect(CroppingType ct, const Size& inSize, const Size& outSize, IMapper::Rect* out);
+
+int formatConvert(const YCbCrLayout& in, const YCbCrLayout& out, Size sz, uint32_t format);
+
+int encodeJpegYU12(const Size& inSz, const YCbCrLayout& inLayout, int jpegQuality,
+                   const void* app1Buffer, size_t app1Size, void* out, size_t maxOutSize,
+                   size_t& actualCodeSize);
+
+Size getMaxThumbnailResolution(const common::V1_0::helper::CameraMetadata&);
+
+void freeReleaseFences(std::vector<CaptureResult>&);
+
+status_t fillCaptureResultCommon(common::V1_0::helper::CameraMetadata& md, nsecs_t timestamp,
+                                 camera_metadata_ro_entry& activeArraySize);
+
+// Interface for OutputThread calling back to parent
+struct OutputThreadInterface {
+    virtual ~OutputThreadInterface() {}
+    virtual aidl::android::hardware::camera::common::Status importBuffer(
+            int32_t streamId, uint64_t bufId, buffer_handle_t buf,
+            /*out*/ buffer_handle_t** outBufPtr) = 0;
+
+    virtual void notifyError(int32_t frameNumber, int32_t streamId, ErrorCode ec) = 0;
+
+    // Callbacks are fired within the method if msgs/results are nullptr.
+    // Otherwise the callback messages/results are returned and the caller is
+    // responsible for firing them later.
+    virtual aidl::android::hardware::camera::common::Status processCaptureRequestError(
+            const std::shared_ptr<HalRequest>&,
+            /*out*/ std::vector<NotifyMsg>* msgs,
+            /*out*/ std::vector<CaptureResult>* results) = 0;
+
+    virtual aidl::android::hardware::camera::common::Status processCaptureRequestError(
+            const std::shared_ptr<HalRequest>& reqs) final {
+        return processCaptureRequestError(reqs, nullptr, nullptr);
+    }
+
+    virtual aidl::android::hardware::camera::common::Status processCaptureResult(
+            std::shared_ptr<HalRequest>&) = 0;
+
+    virtual ssize_t getJpegBufferSize(int32_t width, int32_t height) const = 0;
+};
+
+// A CPU copy of a mapped V4L2Frame. Will map the input V4L2 frame.
+class AllocatedV4L2Frame : public Frame {
+  public:
+    AllocatedV4L2Frame(std::shared_ptr<V4L2Frame> frameIn);
+    ~AllocatedV4L2Frame() override;
+    virtual int getData(uint8_t** outData, size_t* dataSize) override;
+
+  private:
+    std::vector<uint8_t> mData;
+};
+
+}  // namespace implementation
+}  // namespace device
+}  // namespace camera
+}  // namespace hardware
+}  // namespace android
+
+#endif  // HARDWARE_INTERFACES_CAMERA_DEVICE_DEFAULT_EXTERNALCAMERAUTILS_H_
diff --git a/camera/device/default/OWNERS b/camera/device/default/OWNERS
new file mode 100644
index 0000000..f48a95c
--- /dev/null
+++ b/camera/device/default/OWNERS
@@ -0,0 +1 @@
+include platform/frameworks/av:/camera/OWNERS
diff --git a/camera/device/default/convert.cpp b/camera/device/default/convert.cpp
new file mode 100644
index 0000000..8134dd5
--- /dev/null
+++ b/camera/device/default/convert.cpp
@@ -0,0 +1,71 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "android.hardware.camera.device@3.4-convert-impl"
+#include <log/log.h>
+
+#include "convert.h"
+
+#include <aidl/android/hardware/graphics/common/BufferUsage.h>
+#include <aidl/android/hardware/graphics/common/PixelFormat.h>
+#include <hardware/camera_common.h>
+
+namespace android {
+namespace hardware {
+namespace camera {
+namespace device {
+namespace implementation {
+
+using ::aidl::android::hardware::camera::device::ErrorCode;
+using ::aidl::android::hardware::camera::device::ErrorMsg;
+using ::aidl::android::hardware::camera::device::ShutterMsg;
+using ::aidl::android::hardware::graphics::common::BufferUsage;
+using ::aidl::android::hardware::graphics::common::PixelFormat;
+
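+// Copies the raw camera_metadata_t bytes into the AIDL CameraMetadata payload. A null
+// source leaves dest untouched.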
+void convertToAidl(const camera_metadata_t* src, CameraMetadata* dest) {
+    if (src == nullptr) {
+        return;
+    }
+
+    size_t size = get_camera_metadata_size(src);
+    auto* src_start = (uint8_t*)src;
+    uint8_t* src_end = src_start + size;
+    dest->metadata.assign(src_start, src_end);
+}
+
+bool convertFromAidl(const CameraMetadata& src, const camera_metadata_t** dst) {
+    const std::vector<uint8_t>& metadata = src.metadata;
+    if (metadata.empty()) {
+        // Special case for null metadata
+        *dst = nullptr;
+        return true;
+    }
+
+    const uint8_t* data = metadata.data();
+    // Check that the size of CameraMetadata matches the underlying camera_metadata_t
+    if (get_camera_metadata_size((camera_metadata_t*)data) != metadata.size()) {
+        ALOGE("%s: input CameraMetadata is corrupt!", __FUNCTION__);
+        return false;
+    }
+    *dst = (camera_metadata_t*)data;
+    return true;
+}
+
+}  // namespace implementation
+}  // namespace device
+}  // namespace camera
+}  // namespace hardware
+}  // namespace android
diff --git a/camera/device/default/convert.h b/camera/device/default/convert.h
new file mode 100644
index 0000000..5a508fc
--- /dev/null
+++ b/camera/device/default/convert.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef HARDWARE_INTERFACES_CAMERA_DEVICE_DEFAULT_CONVERT_H_
+#define HARDWARE_INTERFACES_CAMERA_DEVICE_DEFAULT_CONVERT_H_
+
+#include <aidl/android/hardware/camera/common/Status.h>
+#include <aidl/android/hardware/camera/device/BufferStatus.h>
+#include <aidl/android/hardware/camera/device/CameraMetadata.h>
+#include <aidl/android/hardware/camera/device/HalStream.h>
+#include <aidl/android/hardware/camera/device/NotifyMsg.h>
+#include <aidl/android/hardware/camera/device/Stream.h>
+#include <hardware/camera3.h>
+#include <system/camera_metadata.h>
+
+namespace android {
+namespace hardware {
+namespace camera {
+namespace device {
+namespace implementation {
+
+using ::aidl::android::hardware::camera::common::Status;
+using ::aidl::android::hardware::camera::device::BufferStatus;
+using ::aidl::android::hardware::camera::device::CameraMetadata;
+using ::aidl::android::hardware::camera::device::HalStream;
+using ::aidl::android::hardware::camera::device::NotifyMsg;
+using ::aidl::android::hardware::camera::device::Stream;
+
+void convertToAidl(const camera_metadata_t* src, CameraMetadata* dest);
+
+bool convertFromAidl(const CameraMetadata& src, const camera_metadata_t** dst);
+
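+// Maps a camera common Status to ndk::ScopedAStatus: OK becomes an ok status, anything
+// else becomes a service-specific error carrying the Status value.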
+inline ndk::ScopedAStatus fromStatus(Status status) {
+    return status == Status::OK
+                   ? ndk::ScopedAStatus::ok()
+                   : ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
+}
+
+}  // namespace implementation
+}  // namespace device
+}  // namespace camera
+}  // namespace hardware
+}  // namespace android
+
+#endif  // HARDWARE_INTERFACES_CAMERA_DEVICE_DEFAULT_CONVERT_H_