Merge Android U (ab/10368041)

Bug: 291102124
Merged-In: Ied8e295ae059db07463ba06d3e6d747659b2757f
Change-Id: Ib79234b765308e957b682871b2178b66769f5660
diff --git a/camera/Android.bp b/camera/Android.bp
index 3e28e4f..b3f70f4 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -47,7 +47,7 @@
     name: "camera_headers",
     export_include_dirs: ["include"],
 }
-cc_library_shared {
+cc_library {
     name: "libcamera_client",
 
     aidl: {
@@ -142,14 +142,15 @@
 filegroup {
     name: "libcamera_client_aidl",
     srcs: [
+        "aidl/android/hardware/CameraExtensionSessionStats.aidl",
         "aidl/android/hardware/ICameraService.aidl",
         "aidl/android/hardware/ICameraServiceListener.aidl",
         "aidl/android/hardware/ICameraServiceProxy.aidl",
         "aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl",
         "aidl/android/hardware/camera2/ICameraDeviceUser.aidl",
-        "aidl/android/hardware/camera2/ICameraOfflineSession.aidl",
         "aidl/android/hardware/camera2/ICameraInjectionCallback.aidl",
         "aidl/android/hardware/camera2/ICameraInjectionSession.aidl",
+        "aidl/android/hardware/camera2/ICameraOfflineSession.aidl",
     ],
     path: "aidl",
 }
diff --git a/camera/CameraSessionStats.cpp b/camera/CameraSessionStats.cpp
index fb7bf29..36bf24c 100644
--- a/camera/CameraSessionStats.cpp
+++ b/camera/CameraSessionStats.cpp
@@ -131,6 +131,12 @@
         return err;
     }
 
+    int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
+    if ((err = parcel->readInt32(&colorSpace)) != OK) {
+        ALOGE("%s: Failed to read color space from parcel", __FUNCTION__);
+        return err;
+    }
+
     mWidth = width;
     mHeight = height;
     mFormat = format;
@@ -147,6 +153,7 @@
     mHistogramCounts = std::move(histogramCounts);
     mDynamicRangeProfile = dynamicRangeProfile;
     mStreamUseCase = streamUseCase;
+    mColorSpace = colorSpace;
 
     return OK;
 }
@@ -239,6 +246,11 @@
         return err;
     }
 
+    if ((err = parcel->writeInt32(mColorSpace)) != OK) {
+        ALOGE("%s: Failed to write color space", __FUNCTION__);
+        return err;
+    }
+
     return OK;
 }
 
@@ -260,17 +272,20 @@
         mApiLevel(0),
         mIsNdk(false),
         mLatencyMs(-1),
+        mLogId(0),
         mMaxPreviewFps(0),
         mSessionType(0),
         mInternalReconfigure(0),
         mRequestCount(0),
         mResultErrorCount(0),
         mDeviceError(false),
-        mVideoStabilizationMode(-1) {}
+        mVideoStabilizationMode(-1),
+        mSessionIndex(0),
+        mCameraExtensionSessionStats() {}
 
 CameraSessionStats::CameraSessionStats(const std::string& cameraId,
         int facing, int newCameraState, const std::string& clientName,
-        int apiLevel, bool isNdk, int32_t latencyMs) :
+        int apiLevel, bool isNdk, int32_t latencyMs, int64_t logId) :
                 mCameraId(cameraId),
                 mFacing(facing),
                 mNewCameraState(newCameraState),
@@ -278,13 +293,16 @@
                 mApiLevel(apiLevel),
                 mIsNdk(isNdk),
                 mLatencyMs(latencyMs),
+                mLogId(logId),
                 mMaxPreviewFps(0),
                 mSessionType(0),
                 mInternalReconfigure(0),
                 mRequestCount(0),
                 mResultErrorCount(0),
                 mDeviceError(0),
-                mVideoStabilizationMode(-1) {}
+                mVideoStabilizationMode(-1),
+                mSessionIndex(0),
+                mCameraExtensionSessionStats() {}
 
 status_t CameraSessionStats::readFromParcel(const android::Parcel* parcel) {
     if (parcel == NULL) {
@@ -336,6 +354,12 @@
         return err;
     }
 
+    int64_t logId;
+    if ((err = parcel->readInt64(&logId)) != OK) {
+        ALOGE("%s: Failed to read log ID from parcel", __FUNCTION__);
+        return err;
+    }
+
     float maxPreviewFps;
     if ((err = parcel->readFloat(&maxPreviewFps)) != OK) {
         ALOGE("%s: Failed to read maxPreviewFps from parcel", __FUNCTION__);
@@ -390,6 +414,18 @@
         return err;
     }
 
+    int32_t sessionIdx;
+    if ((err = parcel->readInt32(&sessionIdx)) != OK) {
+        ALOGE("%s: Failed to read session index from parcel", __FUNCTION__);
+        return err;
+    }
+
+    CameraExtensionSessionStats extStats{};
+    if ((err = extStats.readFromParcel(parcel)) != OK) {
+        ALOGE("%s: Failed to read extension session stats from parcel", __FUNCTION__);
+        return err;
+    }
+
     mCameraId = toStdString(id);
     mFacing = facing;
     mNewCameraState = newCameraState;
@@ -397,6 +433,7 @@
     mApiLevel = apiLevel;
     mIsNdk = isNdk;
     mLatencyMs = latencyMs;
+    mLogId = logId;
     mMaxPreviewFps = maxPreviewFps;
     mSessionType = sessionType;
     mInternalReconfigure = internalReconfigure;
@@ -406,6 +443,8 @@
     mStreamStats = std::move(streamStats);
     mUserTag = toStdString(userTag);
     mVideoStabilizationMode = videoStabilizationMode;
+    mSessionIndex = sessionIdx;
+    mCameraExtensionSessionStats = extStats;
 
     return OK;
 }
@@ -453,6 +492,11 @@
         return err;
     }
 
+    if ((err = parcel->writeInt64(mLogId)) != OK) {
+        ALOGE("%s: Failed to write log ID!", __FUNCTION__);
+        return err;
+    }
+
     if ((err = parcel->writeFloat(mMaxPreviewFps)) != OK) {
         ALOGE("%s: Failed to write maxPreviewFps!", __FUNCTION__);
         return err;
@@ -497,6 +541,17 @@
         ALOGE("%s: Failed to write video stabilization mode!", __FUNCTION__);
         return err;
     }
+
+    if ((err = parcel->writeInt32(mSessionIndex)) != OK) {
+        ALOGE("%s: Failed to write session index!", __FUNCTION__);
+        return err;
+    }
+
+    if ((err = mCameraExtensionSessionStats.writeToParcel(parcel)) != OK) {
+        ALOGE("%s: Failed to write extension sessions stats!", __FUNCTION__);
+        return err;
+    }
+
     return OK;
 }
 
diff --git a/camera/VendorTagDescriptor.cpp b/camera/VendorTagDescriptor.cpp
index 6cb9197..ecf8a91 100644
--- a/camera/VendorTagDescriptor.cpp
+++ b/camera/VendorTagDescriptor.cpp
@@ -598,7 +598,6 @@
 status_t VendorTagDescriptor::setAsGlobalVendorTagDescriptor(const sp<VendorTagDescriptor>& desc) {
     status_t res = OK;
     Mutex::Autolock al(sLock);
-    sGlobalVendorTagDescriptor = desc;
 
     vendor_tag_ops_t* opsPtr = NULL;
     if (desc != NULL) {
@@ -613,6 +612,9 @@
         ALOGE("%s: Could not set vendor tag descriptor, received error %s (%d)."
                 , __FUNCTION__, strerror(-res), res);
     }
+
+    sGlobalVendorTagDescriptor = desc;
+
     return res;
 }
 
@@ -631,7 +633,6 @@
         const sp<VendorTagDescriptorCache>& cache) {
     status_t res = OK;
     Mutex::Autolock al(sLock);
-    sGlobalVendorTagDescriptorCache = cache;
 
     struct vendor_tag_cache_ops* opsPtr = NULL;
     if (cache != NULL) {
@@ -646,6 +647,9 @@
         ALOGE("%s: Could not set vendor tag cache, received error %s (%d)."
                 , __FUNCTION__, strerror(-res), res);
     }
+
+    sGlobalVendorTagDescriptorCache = cache;
+
     return res;
 }
 
diff --git a/camera/aidl/android/hardware/CameraExtensionSessionStats.aidl b/camera/aidl/android/hardware/CameraExtensionSessionStats.aidl
new file mode 100644
index 0000000..1c81831
--- /dev/null
+++ b/camera/aidl/android/hardware/CameraExtensionSessionStats.aidl
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware;
+
+/**
+ * Metrics specific to Extension Sessions (see CameraExtensionSession) for logging.
+ *
+ * Each Extension Session is mapped to one camera session internally, and its stats will be sent
+ * to CameraServiceProxy with IDLE/CLOSE calls.
+ * @hide
+ */
+parcelable CameraExtensionSessionStats {
+    /**
+     * Value should match {@code CameraExtensionCharacteristics#EXTENSION_*}
+     */
+    @Backing(type="int")
+    enum Type {
+        EXTENSION_NONE = -1,
+        EXTENSION_AUTOMATIC = 0,
+        EXTENSION_FACE_RETOUCH = 1,
+        EXTENSION_BOKEH = 2,
+        EXTENSION_HDR = 3,
+        EXTENSION_NIGHT = 4
+    }
+
+    /**
+     * Key to uniquely identify the session this stat is associated with. The first call to
+     * 'ICameraService.reportExtensionSessionStats' should set this to an empty string.
+     * 'ICameraService.reportExtensionSessionStats' will return the key which should be used with
+     * the next calls.
+     */
+    String key;
+
+    /**
+     * Camera ID for which the stats are being reported.
+     */
+    String cameraId;
+
+    /**
+     * Package name of the client using the camera
+     */
+    String clientName;
+
+
+    /**
+     * Type of extension session requested by the app. Note that EXTENSION_AUTOMATIC is reported
+     * as such.
+     */
+    Type type = Type.EXTENSION_NONE;
+
+    /**
+     * true if advanced extensions are being used, false otherwise
+     */
+    boolean isAdvanced = false;
+}
\ No newline at end of file
diff --git a/camera/aidl/android/hardware/ICameraService.aidl b/camera/aidl/android/hardware/ICameraService.aidl
index 1d6f9b9..ed37b2d 100644
--- a/camera/aidl/android/hardware/ICameraService.aidl
+++ b/camera/aidl/android/hardware/ICameraService.aidl
@@ -30,6 +30,7 @@
 import android.hardware.ICameraServiceListener;
 import android.hardware.CameraInfo;
 import android.hardware.CameraStatus;
+import android.hardware.CameraExtensionSessionStats;
 
 /**
  * Binder interface for the native camera service running in mediaserver.
@@ -214,6 +215,26 @@
      */
     oneway void notifyDeviceStateChange(long newState);
 
+    /**
+     * Report Extension specific metrics to camera service for logging. This should only be called
+     * by CameraExtensionSession to log extension metrics. All calls after the first must set
+     * CameraExtensionSessionStats.key to the value returned by this function.
+     *
+     * Each subsequent call fully overwrites the existing CameraExtensionSessionStats for the
+     * current session, so the caller is responsible for keeping the stats complete.
+     *
+     * Due to the cameraservice and cameraservice_proxy architecture, there is no guarantee that
+     * {@code stats} will be logged immediately (or at all). CameraService will log whatever
+     * extension stats it has at the time the camera session closes, which may be before the app
+     * process receives a session/device closed callback; so CameraExtensionSession
+     * should send metrics to the cameraservice periodically, and cameraservice must handle calls
+     * to this function from sessions that have not been logged yet and from sessions that have
+     * already been closed.
+     *
+     * @return the key that must be used to report updates to previously reported stats.
+     */
+    @utf8InCpp String reportExtensionSessionStats(in CameraExtensionSessionStats stats);
+
     // Bitfield constants for notifyDeviceStateChange
     // All bits >= 32 are for custom vendor states
     // Written as ints since AIDL does not support long constants.
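For reference, the call pattern described above — an empty key on the first report, then the returned key on every later report — can be sketched from a native client as follows. This is only an illustration: it assumes the usual AIDL cpp-backend mapping for the generated ICameraService proxy and CameraExtensionSessionStats parcelable (String16 fields, std::string out-parameter for the @utf8InCpp return), and the helper name reportOnce is hypothetical.

    // Minimal sketch, not part of this change: drive reportExtensionSessionStats
    // so that each call carries the key returned by the previous one.
    #include <android/hardware/CameraExtensionSessionStats.h>
    #include <android/hardware/ICameraService.h>
    #include <utils/String16.h>
    #include <utils/StrongPointer.h>
    #include <string>

    using android::sp;
    using android::binder::Status;
    using android::hardware::CameraExtensionSessionStats;
    using android::hardware::ICameraService;

    // Returns the key to use for the next report; keeps the old key on failure.
    static std::string reportOnce(const sp<ICameraService>& cs,
                                  CameraExtensionSessionStats stats,
                                  const std::string& previousKey) {
        stats.key = android::String16(previousKey.c_str());  // "" on the very first call
        std::string nextKey;
        Status status = cs->reportExtensionSessionStats(stats, &nextKey);
        return status.isOk() ? nextKey : previousKey;
    }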
diff --git a/camera/aidl/android/hardware/ICameraServiceProxy.aidl b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
index ea40b3f..dcd69b0 100644
--- a/camera/aidl/android/hardware/ICameraServiceProxy.aidl
+++ b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
@@ -17,6 +17,7 @@
 package android.hardware;
 
 import android.hardware.CameraSessionStats;
+import android.hardware.CameraExtensionSessionStats;
 
 /**
  * Binder interface for the camera service proxy running in system_server.
@@ -46,6 +47,13 @@
     int getRotateAndCropOverride(@utf8InCpp String packageName, int lensFacing, int userId);
 
     /**
+     * Returns the necessary autoframing override for the top activity which
+     * will be one of ({@link android.hardware.camera2.CameraMetadata#AUTOFRAMING_FALSE},
+     * {@link android.hardware.camera2.CameraMetadata#AUTOFRAMING_TRUE}).
+     */
+    int getAutoframingOverride(@utf8InCpp String packageName);
+
+    /**
      * Checks if the camera has been disabled via device policy.
      */
     boolean isCameraDisabled(int userId);
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
index 895543f..73b153c 100644
--- a/camera/camera2/OutputConfiguration.cpp
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -27,8 +27,8 @@
 #include <system/camera_metadata.h>
 #include <utils/String8.h>
 
-namespace android {
 
+namespace android {
 
 const int OutputConfiguration::INVALID_ROTATION = -1;
 const int OutputConfiguration::INVALID_SET_ID = -1;
@@ -82,6 +82,10 @@
     return mDynamicRangeProfile;
 }
 
+int32_t OutputConfiguration::getColorSpace() const {
+    return mColorSpace;
+}
+
 int64_t OutputConfiguration::getStreamUseCase() const {
     return mStreamUseCase;
 }
@@ -94,6 +98,10 @@
     return mMirrorMode;
 }
 
+bool OutputConfiguration::useReadoutTimestamp() const {
+    return mUseReadoutTimestamp;
+}
+
 OutputConfiguration::OutputConfiguration() :
         mRotation(INVALID_ROTATION),
         mSurfaceSetID(INVALID_SET_ID),
@@ -104,9 +112,11 @@
         mIsShared(false),
         mIsMultiResolution(false),
         mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+        mColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
         mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
         mTimestampBase(TIMESTAMP_BASE_DEFAULT),
-        mMirrorMode(MIRROR_MODE_AUTO) {
+        mMirrorMode(MIRROR_MODE_AUTO),
+        mUseReadoutTimestamp(false) {
 }
 
 OutputConfiguration::OutputConfiguration(const android::Parcel& parcel) :
@@ -194,6 +204,11 @@
         ALOGE("%s: Failed to read surface dynamic range profile flag from parcel", __FUNCTION__);
         return err;
     }
+    int32_t colorSpace;
+    if ((err = parcel->readInt32(&colorSpace)) != OK) {
+        ALOGE("%s: Failed to read surface color space flag from parcel", __FUNCTION__);
+        return err;
+    }
 
     int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
     if ((err = parcel->readInt64(&streamUseCase)) != OK) {
@@ -213,6 +228,12 @@
         return err;
     }
 
+    int useReadoutTimestamp = 0;
+    if ((err = parcel->readInt32(&useReadoutTimestamp)) != OK) {
+        ALOGE("%s: Failed to read useReadoutTimestamp flag from parcel", __FUNCTION__);
+        return err;
+    }
+
     mRotation = rotation;
     mSurfaceSetID = setID;
     mSurfaceType = surfaceType;
@@ -224,6 +245,7 @@
     mStreamUseCase = streamUseCase;
     mTimestampBase = timestampBase;
     mMirrorMode = mirrorMode;
+    mUseReadoutTimestamp = useReadoutTimestamp != 0;
     for (auto& surface : surfaceShims) {
         ALOGV("%s: OutputConfiguration: %p, name %s", __FUNCTION__,
                 surface.graphicBufferProducer.get(),
@@ -233,13 +255,14 @@
 
     mSensorPixelModesUsed = std::move(sensorPixelModesUsed);
     mDynamicRangeProfile = dynamicProfile;
+    mColorSpace = colorSpace;
 
     ALOGV("%s: OutputConfiguration: rotation = %d, setId = %d, surfaceType = %d,"
           " physicalCameraId = %s, isMultiResolution = %d, streamUseCase = %" PRId64
-          ", timestampBase = %d, mirrorMode = %d",
+          ", timestampBase = %d, mirrorMode = %d, useReadoutTimestamp = %d",
           __FUNCTION__, mRotation, mSurfaceSetID, mSurfaceType,
           mPhysicalCameraId.c_str(), mIsMultiResolution, mStreamUseCase, timestampBase,
-          mMirrorMode);
+          mMirrorMode, mUseReadoutTimestamp);
 
     return err;
 }
@@ -255,9 +278,11 @@
     mPhysicalCameraId = physicalId;
     mIsMultiResolution = false;
     mDynamicRangeProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
+    mColorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
     mStreamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
     mTimestampBase = TIMESTAMP_BASE_DEFAULT;
     mMirrorMode = MIRROR_MODE_AUTO;
+    mUseReadoutTimestamp = false;
 }
 
 OutputConfiguration::OutputConfiguration(
@@ -268,9 +293,10 @@
     mWidth(width), mHeight(height), mIsDeferred(false), mIsShared(isShared),
     mPhysicalCameraId(physicalCameraId), mIsMultiResolution(false),
     mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+    mColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
     mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
     mTimestampBase(TIMESTAMP_BASE_DEFAULT),
-    mMirrorMode(MIRROR_MODE_AUTO) { }
+    mMirrorMode(MIRROR_MODE_AUTO), mUseReadoutTimestamp(false) { }
 
 status_t OutputConfiguration::writeToParcel(android::Parcel* parcel) const {
 
@@ -321,6 +347,9 @@
     err = parcel->writeInt64(mDynamicRangeProfile);
     if (err != OK) return err;
 
+    err = parcel->writeInt32(mColorSpace);
+    if (err != OK) return err;
+
     err = parcel->writeInt64(mStreamUseCase);
     if (err != OK) return err;
 
@@ -330,6 +359,9 @@
     err = parcel->writeInt32(mMirrorMode);
     if (err != OK) return err;
 
+    err = parcel->writeInt32(mUseReadoutTimestamp ? 1 : 0);
+    if (err != OK) return err;
+
     return OK;
 }
 
diff --git a/camera/cameraserver/Android.bp b/camera/cameraserver/Android.bp
index 094a3c1..8472562 100644
--- a/camera/cameraserver/Android.bp
+++ b/camera/cameraserver/Android.bp
@@ -37,13 +37,14 @@
         "libui",
         "libgui",
         "libbinder",
+        "libbinder_ndk",
         "libhidlbase",
         "android.hardware.camera.common@1.0",
         "android.hardware.camera.provider@2.4",
         "android.hardware.camera.provider@2.5",
         "android.hardware.camera.provider@2.6",
         "android.hardware.camera.provider@2.7",
-        "android.hardware.camera.provider-V1-ndk",
+        "android.hardware.camera.provider-V2-ndk",
         "android.hardware.camera.device@1.0",
         "android.hardware.camera.device@3.2",
         "android.hardware.camera.device@3.4",
@@ -59,6 +60,6 @@
     init_rc: ["cameraserver.rc"],
 
     vintf_fragments: [
-        "manifest_android.frameworks.cameraservice.service@2.2.xml",
+        "manifest_android.frameworks.cameraservice.service.xml",
     ],
 }
diff --git a/camera/cameraserver/cameraserver.rc b/camera/cameraserver/cameraserver.rc
index 8f51458..e307653 100644
--- a/camera/cameraserver/cameraserver.rc
+++ b/camera/cameraserver/cameraserver.rc
@@ -5,3 +5,5 @@
     ioprio rt 4
     task_profiles CameraServiceCapacity MaxPerformance
     rlimit rtprio 10 10
+    onrestart class_restart cameraWatchdog
+    interface aidl android.frameworks.cameraservice.service.ICameraService/default
diff --git a/camera/cameraserver/main_cameraserver.cpp b/camera/cameraserver/main_cameraserver.cpp
index cef8ef5..c494732 100644
--- a/camera/cameraserver/main_cameraserver.cpp
+++ b/camera/cameraserver/main_cameraserver.cpp
@@ -18,6 +18,7 @@
 //#define LOG_NDEBUG 0
 
 #include "CameraService.h"
+#include <android/binder_process.h>
 #include <hidl/HidlTransportSupport.h>
 
 using namespace android;
@@ -26,15 +27,21 @@
 {
     signal(SIGPIPE, SIG_IGN);
 
-    // Set 5 threads for HIDL calls. Now cameraserver will serve HIDL calls in
-    // addition to consuming them from the Camera HAL as well.
+    // Set 5 threads for HIDL calls. Now cameraserver will serve HIDL calls.
     hardware::configureRpcThreadpool(5, /*willjoin*/ false);
 
+    // Set 5 threads for VNDK AIDL calls. Now cameraserver will serve
+    // VNDK AIDL calls in addition to consuming them from the Camera HAL as well.
+    ABinderProcess_setThreadPoolMaxThreadCount(5);
+
     sp<ProcessState> proc(ProcessState::self());
     sp<IServiceManager> sm = defaultServiceManager();
     ALOGI("ServiceManager: %p", sm.get());
     CameraService::instantiate();
     ALOGI("ServiceManager: %p done instantiate", sm.get());
     ProcessState::self()->startThreadPool();
+    ABinderProcess_startThreadPool();
+
     IPCThreadState::self()->joinThreadPool();
+    ABinderProcess_joinThreadPool();
 }
diff --git a/camera/cameraserver/manifest_android.frameworks.cameraservice.service.xml b/camera/cameraserver/manifest_android.frameworks.cameraservice.service.xml
new file mode 100644
index 0000000..f7e455f
--- /dev/null
+++ b/camera/cameraserver/manifest_android.frameworks.cameraservice.service.xml
@@ -0,0 +1,20 @@
+<manifest version="1.0" type="framework">
+    <hal format="hidl" max-level="7">
+        <name>android.frameworks.cameraservice.service</name>
+        <transport>hwbinder</transport>
+        <version>2.2</version>
+        <interface>
+            <name>ICameraService</name>
+            <instance>default</instance>
+        </interface>
+    </hal>
+
+    <hal format="aidl">
+        <name>android.frameworks.cameraservice.service</name>
+        <version>1</version>
+        <interface>
+            <name>ICameraService</name>
+            <instance>default</instance>
+        </interface>
+    </hal>
+</manifest>
diff --git a/camera/cameraserver/manifest_android.frameworks.cameraservice.service@2.2.xml b/camera/cameraserver/manifest_android.frameworks.cameraservice.service@2.2.xml
deleted file mode 100644
index eeafc91..0000000
--- a/camera/cameraserver/manifest_android.frameworks.cameraservice.service@2.2.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-<manifest version="1.0" type="framework">
-    <hal>
-        <name>android.frameworks.cameraservice.service</name>
-        <transport>hwbinder</transport>
-        <version>2.2</version>
-        <interface>
-            <name>ICameraService</name>
-            <instance>default</instance>
-        </interface>
-    </hal>
-</manifest>
diff --git a/camera/include/camera/CameraSessionStats.h b/camera/include/camera/CameraSessionStats.h
index 9e61cf0..70ca0b3 100644
--- a/camera/include/camera/CameraSessionStats.h
+++ b/camera/include/camera/CameraSessionStats.h
@@ -22,6 +22,7 @@
 #include <binder/Parcelable.h>
 
 #include <camera/CameraMetadata.h>
+#include <android/hardware/CameraExtensionSessionStats.h>
 
 namespace android {
 namespace hardware {
@@ -69,22 +70,26 @@
     int64_t mDynamicRangeProfile;
     // Stream use case
     int64_t mStreamUseCase;
+    // Color space
+    int32_t mColorSpace;
 
     CameraStreamStats() :
             mWidth(0), mHeight(0), mFormat(0), mMaxPreviewFps(0), mDataSpace(0), mUsage(0),
             mRequestCount(0), mErrorCount(0), mStartLatencyMs(0),
             mMaxHalBuffers(0), mMaxAppBuffers(0), mHistogramType(HISTOGRAM_TYPE_UNKNOWN),
             mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
-            mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {}
+            mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
+            mColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED) {}
     CameraStreamStats(int width, int height, int format, float maxPreviewFps, int dataSpace,
             int64_t usage, int maxHalBuffers, int maxAppBuffers, int dynamicRangeProfile,
-            int streamUseCase)
+            int streamUseCase, int32_t colorSpace)
             : mWidth(width), mHeight(height), mFormat(format), mMaxPreviewFps(maxPreviewFps),
               mDataSpace(dataSpace), mUsage(usage), mRequestCount(0), mErrorCount(0),
               mStartLatencyMs(0), mMaxHalBuffers(maxHalBuffers), mMaxAppBuffers(maxAppBuffers),
               mHistogramType(HISTOGRAM_TYPE_UNKNOWN),
               mDynamicRangeProfile(dynamicRangeProfile),
-              mStreamUseCase(streamUseCase) {}
+              mStreamUseCase(streamUseCase),
+              mColorSpace(colorSpace) {}
 
     virtual status_t readFromParcel(const android::Parcel* parcel) override;
     virtual status_t writeToParcel(android::Parcel* parcel) const override;
@@ -126,6 +131,22 @@
     bool mIsNdk;
     // latency in ms for camera open, close, or session creation.
     int mLatencyMs;
+
+    /*
+     * A randomly generated identifier used to map the open/active/idle/close stats to each other
+     * after being logged. Every 'open' event will have a newly generated id which will be logged
+     * with the active/idle/closed events that correspond to that particular 'open' event.
+     *
+     * This ID is not meant to be globally unique forever. Probabilistically, this ID can be
+     * safely considered unique across all logs from one android build for 48 to 72 hours from
+     * its generation. Chances of identifier collisions are significant past a week or two.
+     *
+     * NOTE: There are no guarantees that the identifiers will be unique. The probability of
+     * collision within a short timeframe is low, but any system consuming these identifiers at
+     * scale should handle identifier collisions, potentially even from the same device.
+     */
+    int64_t mLogId;
+
     float mMaxPreviewFps;
 
     // Session info and statistics
@@ -140,11 +161,15 @@
     std::vector<CameraStreamStats> mStreamStats;
     std::string mUserTag;
     int mVideoStabilizationMode;
+    int mSessionIndex;
+
+    CameraExtensionSessionStats mCameraExtensionSessionStats;
 
     // Constructors
     CameraSessionStats();
     CameraSessionStats(const std::string& cameraId, int facing, int newCameraState,
-            const std::string& clientName, int apiLevel, bool isNdk, int32_t latencyMs);
+                       const std::string& clientName, int apiLevel, bool isNdk, int32_t latencyMs,
+                       int64_t logId);
 
     virtual status_t readFromParcel(const android::Parcel* parcel) override;
     virtual status_t writeToParcel(android::Parcel* parcel) const override;
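The mLogId contract above (randomly generated, only probabilistically unique, with 0 used by the default constructor as "no ID") can be met with a simple 64-bit random draw. The sketch below is illustrative only and is not necessarily how cameraserver generates the value.

    // Illustrative generator for a CameraSessionStats log ID; the production
    // implementation may differ. Avoids 0 so the default-constructed value can
    // still mean "not set".
    #include <cstdint>
    #include <limits>
    #include <random>

    static int64_t generateLogId() {
        std::random_device rd;  // non-deterministic seed source
        std::mt19937_64 gen((static_cast<uint64_t>(rd()) << 32) ^ rd());
        std::uniform_int_distribution<int64_t> dist(1, std::numeric_limits<int64_t>::max());
        return dist(gen);
    }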
diff --git a/camera/include/camera/camera2/OutputConfiguration.h b/camera/include/camera/camera2/OutputConfiguration.h
index a9b5f72..3f74b4a 100644
--- a/camera/include/camera/camera2/OutputConfiguration.h
+++ b/camera/include/camera/camera2/OutputConfiguration.h
@@ -46,8 +46,7 @@
         TIMESTAMP_BASE_MONOTONIC = 2,
         TIMESTAMP_BASE_REALTIME = 3,
         TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED = 4,
-        TIMESTAMP_BASE_READOUT_SENSOR = 5,
-        TIMESTAMP_BASE_MAX = TIMESTAMP_BASE_READOUT_SENSOR,
+        TIMESTAMP_BASE_MAX = TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED,
     };
     enum MirrorModeType {
         MIRROR_MODE_AUTO = 0,
@@ -63,6 +62,7 @@
     int                        getWidth() const;
     int                        getHeight() const;
     int64_t                    getDynamicRangeProfile() const;
+    int32_t                    getColorSpace() const;
     bool                       isDeferred() const;
     bool                       isShared() const;
     std::string                getPhysicalCameraId() const;
@@ -70,6 +70,7 @@
     int64_t                    getStreamUseCase() const;
     int                        getTimestampBase() const;
     int                        getMirrorMode() const;
+    bool                       useReadoutTimestamp() const;
 
     // set of sensor pixel mode resolutions allowed {MAX_RESOLUTION, DEFAULT_MODE};
     const std::vector<int32_t>&            getSensorPixelModesUsed() const;
@@ -113,9 +114,11 @@
                 mIsMultiResolution == other.mIsMultiResolution &&
                 sensorPixelModesUsedEqual(other) &&
                 mDynamicRangeProfile == other.mDynamicRangeProfile &&
+                mColorSpace == other.mColorSpace &&
                 mStreamUseCase == other.mStreamUseCase &&
                 mTimestampBase == other.mTimestampBase &&
-                mMirrorMode == other.mMirrorMode);
+                mMirrorMode == other.mMirrorMode &&
+                mUseReadoutTimestamp == other.mUseReadoutTimestamp);
     }
     bool operator != (const OutputConfiguration& other) const {
         return !(*this == other);
@@ -155,6 +158,9 @@
         if (mDynamicRangeProfile != other.mDynamicRangeProfile) {
             return mDynamicRangeProfile < other.mDynamicRangeProfile;
         }
+        if (mColorSpace != other.mColorSpace) {
+            return mColorSpace < other.mColorSpace;
+        }
         if (mStreamUseCase != other.mStreamUseCase) {
             return mStreamUseCase < other.mStreamUseCase;
         }
@@ -164,6 +170,9 @@
         if (mMirrorMode != other.mMirrorMode) {
             return mMirrorMode < other.mMirrorMode;
         }
+        if (mUseReadoutTimestamp != other.mUseReadoutTimestamp) {
+            return mUseReadoutTimestamp < other.mUseReadoutTimestamp;
+        }
         return gbpsLessThan(other);
     }
 
@@ -189,9 +198,11 @@
     bool                       mIsMultiResolution;
     std::vector<int32_t>       mSensorPixelModesUsed;
     int64_t                    mDynamicRangeProfile;
+    int32_t                    mColorSpace;
     int64_t                    mStreamUseCase;
     int                        mTimestampBase;
     int                        mMirrorMode;
+    bool                       mUseReadoutTimestamp;
 };
 } // namespace params
 } // namespace camera2
diff --git a/camera/ndk/Android.bp b/camera/ndk/Android.bp
index e7d4680..24a11e3 100644
--- a/camera/ndk/Android.bp
+++ b/camera/ndk/Android.bp
@@ -141,6 +141,7 @@
     ],
 
     shared_libs: [
+        "libbinder_ndk",
         "libfmq",
         "libhidlbase",
         "libhardware",
@@ -151,15 +152,13 @@
         "libcutils",
         "libcamera_metadata",
         "libmediandk",
-        "android.frameworks.cameraservice.device@2.0",
-        "android.frameworks.cameraservice.device@2.1",
-        "android.frameworks.cameraservice.common@2.0",
-        "android.frameworks.cameraservice.service@2.0",
-        "android.frameworks.cameraservice.service@2.1",
-        "android.frameworks.cameraservice.service@2.2",
+        "android.frameworks.cameraservice.common-V1-ndk",
+        "android.frameworks.cameraservice.device-V1-ndk",
+        "android.frameworks.cameraservice.service-V1-ndk",
     ],
     static_libs: [
         "android.hardware.camera.common@1.0-helper",
+        "libaidlcommonsupport",
         "libarect",
     ],
     // TODO: jchowdhary@, use header_libs instead b/131165718
diff --git a/camera/ndk/NdkCameraCaptureSession.cpp b/camera/ndk/NdkCameraCaptureSession.cpp
index 9c98778..4387cc6 100644
--- a/camera/ndk/NdkCameraCaptureSession.cpp
+++ b/camera/ndk/NdkCameraCaptureSession.cpp
@@ -29,6 +29,7 @@
 #include "impl/ACameraCaptureSession.h"
 
 #include "impl/ACameraCaptureSession.inc"
+
 #include "NdkCameraCaptureSession.inc"
 
 using namespace android;
@@ -190,3 +191,38 @@
     }
     return session->updateOutputConfiguration(output);
 }
+
+EXPORT
+camera_status_t ACameraCaptureSession_setWindowPreparedCallback(
+        ACameraCaptureSession* session, void *context,
+        ACameraCaptureSession_prepareCallback cb) {
+    ATRACE_CALL();
+    if (session == nullptr || cb == nullptr) {
+        ALOGE("%s: Error: session %p / callback %p is null", __FUNCTION__, session, cb);
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+
+    if (session->isClosed()) {
+        ALOGE("%s: session %p is already closed", __FUNCTION__, session);
+        return ACAMERA_ERROR_SESSION_CLOSED;
+    }
+    session->setWindowPreparedCallback(context, cb);
+    return ACAMERA_OK;
+}
+
+EXPORT
+camera_status_t ACameraCaptureSession_prepareWindow(
+        ACameraCaptureSession* session,
+        ACameraWindowType *window) {
+    ATRACE_CALL();
+    if (session == nullptr || window == nullptr) {
+        ALOGE("%s: Error: session %p / window %p is null", __FUNCTION__, session, window);
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+
+    if (session->isClosed()) {
+        ALOGE("%s: session %p is already closed", __FUNCTION__, session);
+        return ACAMERA_ERROR_SESSION_CLOSED;
+    }
+    return session->prepare(window);
+}
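The two entry points above make up the NDK's window pre-allocation flow: register a prepared callback once, then request pre-allocation per output window and only target that window in capture requests after the callback fires. A minimal usage sketch follows (session and window setup omitted; the callback body and helper name are illustrative).

    // Sketch of the new prepare flow using the signatures added in this file.
    #include <camera/NdkCameraCaptureSession.h>

    static void onWindowPrepared(void* context, ACameraWindowType* window,
                                 ACameraCaptureSession* session) {
        // Pre-allocation for 'window' finished (possibly partially, on error);
        // the window may now be used as a capture request target.
        (void)context; (void)window; (void)session;
    }

    static camera_status_t prepareOutput(ACameraCaptureSession* session,
                                         ACameraWindowType* window, void* appContext) {
        camera_status_t ret = ACameraCaptureSession_setWindowPreparedCallback(
                session, appContext, onWindowPrepared);
        if (ret != ACAMERA_OK) {
            return ret;
        }
        return ACameraCaptureSession_prepareWindow(session, window);
    }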
diff --git a/camera/ndk/NdkCameraDevice.cpp b/camera/ndk/NdkCameraDevice.cpp
index 691996b..8211671 100644
--- a/camera/ndk/NdkCameraDevice.cpp
+++ b/camera/ndk/NdkCameraDevice.cpp
@@ -22,18 +22,11 @@
 #include <utils/Trace.h>
 
 #include <camera/NdkCameraDevice.h>
+
 #include "impl/ACameraCaptureSession.h"
 
 using namespace android::acam;
 
-bool areWindowTypesEqual(ACameraWindowType *a, ACameraWindowType *b) {
-#ifdef __ANDROID_VNDK__
-    return utils::isWindowNativeHandleEqual(a, b);
-#else
-    return a == b;
-#endif
-}
-
 EXPORT
 camera_status_t ACameraDevice_close(ACameraDevice* device) {
     ATRACE_CALL();
@@ -183,14 +176,15 @@
                 __FUNCTION__);
         return ACAMERA_ERROR_INVALID_OPERATION;
     }
-    if (areWindowTypesEqual(out->mWindow, window)) {
+    if (out->isWindowEqual(window)) {
         ALOGE("%s: Error trying to add the same window associated with the output configuration",
                 __FUNCTION__);
         return ACAMERA_ERROR_INVALID_PARAMETER;
     }
 
-    auto insert = out->mSharedWindows.insert(window);
-    camera_status_t ret = (insert.second) ? ACAMERA_OK : ACAMERA_ERROR_INVALID_PARAMETER;
+
+    bool insert = out->addSharedWindow(window);
+    camera_status_t ret = (insert) ? ACAMERA_OK : ACAMERA_ERROR_INVALID_PARAMETER;
     return ret;
 }
 
@@ -208,13 +202,13 @@
                 __FUNCTION__);
         return ACAMERA_ERROR_INVALID_OPERATION;
     }
-    if (areWindowTypesEqual(out->mWindow, window)) {
+    if (out->isWindowEqual(window)) {
         ALOGE("%s: Error trying to remove the same window associated with the output configuration",
                 __FUNCTION__);
         return ACAMERA_ERROR_INVALID_PARAMETER;
     }
 
-    auto remove = out->mSharedWindows.erase(window);
+    auto remove = out->removeSharedWindow(window);
     camera_status_t ret = (remove) ? ACAMERA_OK : ACAMERA_ERROR_INVALID_PARAMETER;
     return ret;
 }
diff --git a/camera/ndk/NdkCameraManager.cpp b/camera/ndk/NdkCameraManager.cpp
index 3d231a8..2de4a50 100644
--- a/camera/ndk/NdkCameraManager.cpp
+++ b/camera/ndk/NdkCameraManager.cpp
@@ -81,7 +81,7 @@
                callback->onCameraAvailable, callback->onCameraUnavailable);
         return ACAMERA_ERROR_INVALID_PARAMETER;
     }
-    CameraManagerGlobal::getInstance().registerAvailabilityCallback(callback);
+    CameraManagerGlobal::getInstance()->registerAvailabilityCallback(callback);
     return ACAMERA_OK;
 }
 
@@ -100,7 +100,7 @@
                callback->onCameraAvailable, callback->onCameraUnavailable);
         return ACAMERA_ERROR_INVALID_PARAMETER;
     }
-    CameraManagerGlobal::getInstance().unregisterAvailabilityCallback(callback);
+    CameraManagerGlobal::getInstance()->unregisterAvailabilityCallback(callback);
     return ACAMERA_OK;
 }
 
@@ -131,7 +131,7 @@
             return ACAMERA_ERROR_INVALID_PARAMETER;
         }
     }
-    CameraManagerGlobal::getInstance().registerExtendedAvailabilityCallback(callback);
+    CameraManagerGlobal::getInstance()->registerExtendedAvailabilityCallback(callback);
     return ACAMERA_OK;
 }
 
@@ -154,7 +154,7 @@
                callback->onCameraAccessPrioritiesChanged);
         return ACAMERA_ERROR_INVALID_PARAMETER;
     }
-    CameraManagerGlobal::getInstance().unregisterExtendedAvailabilityCallback(callback);
+    CameraManagerGlobal::getInstance()->unregisterExtendedAvailabilityCallback(callback);
     return ACAMERA_OK;
 }
 
diff --git a/camera/ndk/impl/ACameraCaptureSession.cpp b/camera/ndk/impl/ACameraCaptureSession.cpp
index 68db233..73439c7 100644
--- a/camera/ndk/impl/ACameraCaptureSession.cpp
+++ b/camera/ndk/impl/ACameraCaptureSession.cpp
@@ -23,7 +23,11 @@
 
 ACameraCaptureSession::~ACameraCaptureSession() {
     ALOGV("~ACameraCaptureSession: %p notify device end of life", this);
+#ifdef __ANDROID_VNDK__
+    std::shared_ptr<acam::CameraDevice> dev = getDevicePtr();
+#else
     sp<acam::CameraDevice> dev = getDeviceSp();
+#endif
     if (dev != nullptr && !dev->isClosed()) {
         dev->lockDeviceForSessionOps();
         {
@@ -50,7 +54,11 @@
         mClosedByApp = true;
     }
 
+#ifdef __ANDROID_VNDK__
+    std::shared_ptr<acam::CameraDevice> dev = getDevicePtr();
+#else
     sp<acam::CameraDevice> dev = getDeviceSp();
+#endif
     if (dev != nullptr) {
         dev->lockDeviceForSessionOps();
     }
@@ -75,7 +83,11 @@
 
 camera_status_t
 ACameraCaptureSession::stopRepeating() {
+#ifdef __ANDROID_VNDK__
+    std::shared_ptr<acam::CameraDevice> dev = getDevicePtr();
+#else
     sp<acam::CameraDevice> dev = getDeviceSp();
+#endif
     if (dev == nullptr) {
         ALOGE("Error: Device associated with session %p has been closed!", this);
         return ACAMERA_ERROR_SESSION_CLOSED;
@@ -93,7 +105,11 @@
 
 camera_status_t
 ACameraCaptureSession::abortCaptures() {
+#ifdef __ANDROID_VNDK__
+    std::shared_ptr<acam::CameraDevice> dev = getDevicePtr();
+#else
     sp<acam::CameraDevice> dev = getDeviceSp();
+#endif
     if (dev == nullptr) {
         ALOGE("Error: Device associated with session %p has been closed!", this);
         return ACAMERA_ERROR_SESSION_CLOSED;
@@ -110,7 +126,11 @@
 }
 
 camera_status_t ACameraCaptureSession::updateOutputConfiguration(ACaptureSessionOutput *output) {
+#ifdef __ANDROID_VNDK__
+    std::shared_ptr<acam::CameraDevice> dev = getDevicePtr();
+#else
     sp<acam::CameraDevice> dev = getDeviceSp();
+#endif
     if (dev == nullptr) {
         ALOGE("Error: Device associated with session %p has been closed!", this);
         return ACAMERA_ERROR_SESSION_CLOSED;
@@ -126,10 +146,35 @@
     return ret;
 }
 
+camera_status_t ACameraCaptureSession::prepare(ACameraWindowType* window) {
+#ifdef __ANDROID_VNDK__
+    std::shared_ptr<acam::CameraDevice> dev = getDevicePtr();
+#else
+    sp<acam::CameraDevice> dev = getDeviceSp();
+#endif
+    if (dev == nullptr) {
+        ALOGE("Error: Device associated with session %p has been closed!", this);
+        return ACAMERA_ERROR_SESSION_CLOSED;
+    }
+
+    camera_status_t ret;
+    dev->lockDeviceForSessionOps();
+    {
+        Mutex::Autolock _l(mSessionLock);
+        ret = dev->prepareLocked(window);
+    }
+    dev->unlockDevice();
+    return ret;
+}
+
 ACameraDevice*
 ACameraCaptureSession::getDevice() {
     Mutex::Autolock _l(mSessionLock);
+#ifdef __ANDROID_VNDK__
+    std::shared_ptr<acam::CameraDevice> dev = getDevicePtr();
+#else
     sp<acam::CameraDevice> dev = getDeviceSp();
+#endif
     if (dev == nullptr) {
         ALOGE("Error: Device associated with session %p has been closed!", this);
         return nullptr;
@@ -143,6 +188,17 @@
     mIsClosed = true;
 }
 
+#ifdef __ANDROID_VNDK__
+std::shared_ptr<acam::CameraDevice>
+ACameraCaptureSession::getDevicePtr() {
+    std::shared_ptr<acam::CameraDevice> device = mDevice.lock();
+    if (device == nullptr || device->isClosed()) {
+        ALOGW("Device is closed but session %d is not notified", mId);
+        return nullptr;
+    }
+    return device;
+}
+#else
 sp<acam::CameraDevice>
 ACameraCaptureSession::getDeviceSp() {
     sp<acam::CameraDevice> device = mDevice.promote();
@@ -152,5 +208,4 @@
     }
     return device;
 }
-
-
+#endif
diff --git a/camera/ndk/impl/ACameraCaptureSession.h b/camera/ndk/impl/ACameraCaptureSession.h
index 08a9226..88135ba 100644
--- a/camera/ndk/impl/ACameraCaptureSession.h
+++ b/camera/ndk/impl/ACameraCaptureSession.h
@@ -47,6 +47,21 @@
         return mWindow > other.mWindow;
     }
 
+    inline bool isWindowEqual(ACameraWindowType* window) const {
+        return mWindow == window;
+    }
+
+    // returns true if the window was successfully added, false otherwise.
+    inline bool addSharedWindow(ACameraWindowType* window) {
+        auto ret = mSharedWindows.insert(window);
+        return ret.second;
+    }
+
+    // returns the number of elements removed.
+    inline size_t removeSharedWindow(ACameraWindowType* window) {
+        return mSharedWindows.erase(window);
+    }
+
     ACameraWindowType* mWindow;
     std::set<ACameraWindowType *> mSharedWindows;
     bool           mIsShared;
@@ -60,11 +75,31 @@
 };
 
 /**
+ * Capture session prepare callbacks, set via {@link ACameraCaptureSession_setWindowPreparedCallback}
+ */
+typedef struct ACameraCaptureSession_prepareCallbacks {
+    /// optional application context. This will be passed in the context
+    /// parameter of the {@link onWindowPrepared} callback.
+    void*                               context;
+
+    ACameraCaptureSession_prepareCallback onWindowPrepared;
+} ACameraCaptureSession_prepareCallbacks;
+
+/**
  * ACameraCaptureSession opaque struct definition
  * Leave outside of android namespace because it's NDK struct
  */
 struct ACameraCaptureSession : public RefBase {
   public:
+#ifdef __ANDROID_VNDK__
+    ACameraCaptureSession(
+            int id,
+            const ACaptureSessionOutputContainer* outputs,
+            const ACameraCaptureSession_stateCallbacks* cb,
+            std::weak_ptr<android::acam::CameraDevice> device) :
+            mId(id), mOutput(*outputs), mUserSessionCallback(*cb),
+            mDevice(std::move(device)) {}
+#else
     ACameraCaptureSession(
             int id,
             const ACaptureSessionOutputContainer* outputs,
@@ -72,6 +107,7 @@
             android::acam::CameraDevice* device) :
             mId(id), mOutput(*outputs), mUserSessionCallback(*cb),
             mDevice(device) {}
+#endif
 
     // This can be called in app calling close() or after some app callback is finished
     // Make sure the caller does not hold device or session lock!
@@ -105,6 +141,14 @@
 
     camera_status_t updateOutputConfiguration(ACaptureSessionOutput *output);
 
+    void setWindowPreparedCallback(void *context,
+            ACameraCaptureSession_prepareCallback cb) {
+        Mutex::Autolock _l(mSessionLock);
+        mPreparedCb.context = context;
+        mPreparedCb.onWindowPrepared = cb;
+    }
+    camera_status_t prepare(ACameraWindowType *window);
+
     ACameraDevice* getDevice();
 
   private:
@@ -114,14 +158,24 @@
     // or a new session is replacing this session.
     void closeByDevice();
 
+#ifdef __ANDROID_VNDK__
+    std::shared_ptr<android::acam::CameraDevice> getDevicePtr();
+#else
     sp<android::acam::CameraDevice> getDeviceSp();
+#endif
 
     const int mId;
     const ACaptureSessionOutputContainer mOutput;
     const ACameraCaptureSession_stateCallbacks mUserSessionCallback;
+#ifdef __ANDROID_VNDK__
+    const std::weak_ptr<android::acam::CameraDevice> mDevice;
+#else
     const wp<android::acam::CameraDevice> mDevice;
+#endif
+
     bool  mIsClosed = false;
     bool  mClosedByApp = false;
+    ACameraCaptureSession_prepareCallbacks mPreparedCb;
     Mutex mSessionLock;
 };
 
diff --git a/camera/ndk/impl/ACameraCaptureSession.inc b/camera/ndk/impl/ACameraCaptureSession.inc
index 86bf8a5..da535f8 100644
--- a/camera/ndk/impl/ACameraCaptureSession.inc
+++ b/camera/ndk/impl/ACameraCaptureSession.inc
@@ -15,9 +15,8 @@
  */
 
 #include "ACameraCaptureSession.h"
-
 #ifdef __ANDROID_VNDK__
-#include "ndk_vendor/impl/ACameraDeviceVendor.inc"
+#include <ndk_vendor/impl/ACameraDeviceVendor.inc>
 #else
 #include "ACameraDevice.inc"
 #endif
@@ -30,7 +29,11 @@
         /*optional*/T* cbs,
         int numRequests, ACaptureRequest** requests,
         /*optional*/int* captureSequenceId) {
+#ifdef __ANDROID_VNDK__
+    std::shared_ptr<acam::CameraDevice> dev = getDevicePtr();
+#else
     sp<acam::CameraDevice> dev = getDeviceSp();
+#endif
     if (dev == nullptr) {
         ALOGE("Error: Device associated with session %p has been closed!", this);
         return ACAMERA_ERROR_SESSION_CLOSED;
@@ -52,7 +55,11 @@
         /*optional*/T* cbs,
         int numRequests, ACaptureRequest** requests,
         /*optional*/int* captureSequenceId) {
+#ifdef __ANDROID_VNDK__
+    std::shared_ptr<acam::CameraDevice> dev = getDevicePtr();
+#else
     sp<acam::CameraDevice> dev = getDeviceSp();
+#endif
     if (dev == nullptr) {
         ALOGE("Error: Device associated with session %p has been closed!", this);
         return ACAMERA_ERROR_SESSION_CLOSED;
diff --git a/camera/ndk/impl/ACameraDevice.cpp b/camera/ndk/impl/ACameraDevice.cpp
index b3de17d..024ed20 100644
--- a/camera/ndk/impl/ACameraDevice.cpp
+++ b/camera/ndk/impl/ACameraDevice.cpp
@@ -341,6 +341,58 @@
     return ACAMERA_OK;
 }
 
+camera_status_t CameraDevice::prepareLocked(ACameraWindowType *window) {
+    camera_status_t ret = checkCameraClosedOrErrorLocked();
+    if (ret != ACAMERA_OK) {
+        return ret;
+    }
+
+    if (window == nullptr) {
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+
+    int32_t streamId = -1;
+    for (auto& kvPair : mConfiguredOutputs) {
+        if (window == kvPair.second.first) {
+            streamId = kvPair.first;
+            break;
+        }
+    }
+    if (streamId < 0) {
+        ALOGE("Error: Invalid output configuration");
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+    auto remoteRet = mRemote->prepare(streamId);
+    if (!remoteRet.isOk()) {
+        // TODO:(b/259735869) Do this check for all other binder calls in the
+        // ndk as well.
+        if (remoteRet.exceptionCode() != EX_SERVICE_SPECIFIC) {
+            ALOGE("Camera device %s failed to prepare output window %p: %s", getId(), window,
+                    remoteRet.toString8().string());
+            return ACAMERA_ERROR_UNKNOWN;
+
+        }
+        switch (remoteRet.serviceSpecificErrorCode()) {
+            case hardware::ICameraService::ERROR_INVALID_OPERATION:
+                ALOGE("Camera device %s invalid operation: %s", getId(),
+                        remoteRet.toString8().string());
+                return ACAMERA_ERROR_INVALID_OPERATION;
+                break;
+            case hardware::ICameraService::ERROR_ILLEGAL_ARGUMENT:
+                ALOGE("Camera device %s invalid input argument: %s", getId(),
+                        remoteRet.toString8().string());
+                return ACAMERA_ERROR_INVALID_PARAMETER;
+                break;
+            default:
+                ALOGE("Camera device %s failed to prepare output window %p: %s", getId(), window,
+                        remoteRet.toString8().string());
+                return ACAMERA_ERROR_UNKNOWN;
+        }
+    }
+
+    return ACAMERA_OK;
+}
+
 camera_status_t
 CameraDevice::allocateCaptureRequest(
         const ACaptureRequest* request, /*out*/sp<CaptureRequest>& outReq) {
@@ -917,6 +969,7 @@
         case kWhatCaptureSeqEnd:
         case kWhatCaptureSeqAbort:
         case kWhatCaptureBufferLost:
+        case kWhatPreparedCb:
             ALOGV("%s: Received msg %d", __FUNCTION__, msg->what());
             break;
         case kWhatCleanUpSessions:
@@ -990,6 +1043,7 @@
         case kWhatCaptureSeqEnd:
         case kWhatCaptureSeqAbort:
         case kWhatCaptureBufferLost:
+        case kWhatPreparedCb:
         {
             sp<RefBase> obj;
             found = msg->findObject(kSessionSpKey, &obj);
@@ -1032,6 +1086,26 @@
                     (*onState)(context, session.get());
                     break;
                 }
+                case kWhatPreparedCb:
+                {
+                    ACameraCaptureSession_prepareCallback onWindowPrepared;
+                    found = msg->findPointer(kCallbackFpKey, (void**) &onWindowPrepared);
+                    if (!found) {
+                        ALOGE("%s: Cannot find window prepared callback!", __FUNCTION__);
+                        return;
+                    }
+                    if (onWindowPrepared == nullptr) {
+                        return;
+                    }
+                    ACameraWindowType* anw;
+                    found = msg->findPointer(kAnwKey, (void**) &anw);
+                    if (!found) {
+                        ALOGE("%s: Cannot find ANativeWindow: %d!", __FUNCTION__, __LINE__);
+                        return;
+                    }
+                    (*onWindowPrepared)(context, anw, session.get());
+                    break;
+                }
                 case kWhatCaptureStart:
                 {
                     ACameraCaptureSession_captureCallback_start onStart;
@@ -1410,7 +1484,6 @@
     while (it != mSequenceLastFrameNumberMap.end()) {
         int sequenceId = it->first;
         int64_t lastFrameNumber = it->second.lastFrameNumber;
-        bool hasCallback = true;
 
         if (mRemote == nullptr) {
             ALOGW("Camera %s closed while checking sequence complete", getId());
@@ -1423,7 +1496,6 @@
             // This should not happen because we always register callback (with nullptr inside)
             if (mSequenceCallbackMap.count(sequenceId) == 0) {
                 ALOGW("No callback found for sequenceId %d", sequenceId);
-                hasCallback = false;
             }
 
             if (lastFrameNumber <= completedFrameNumber) {
@@ -1729,8 +1801,36 @@
 }
 
 binder::Status
-CameraDevice::ServiceCallback::onPrepared(int) {
-    // Prepare not yet implemented in NDK
+CameraDevice::ServiceCallback::onPrepared(int streamId) {
+    ALOGV("%s: callback for stream id %d", __FUNCTION__, streamId);
+    binder::Status ret = binder::Status::ok();
+    sp<CameraDevice> dev = mDevice.promote();
+    if (dev == nullptr) {
+        return ret; // device has been closed
+    }
+    Mutex::Autolock _l(dev->mDeviceLock);
+    if (dev->isClosed() || dev->mRemote == nullptr) {
+        return ret;
+    }
+    auto it = dev->mConfiguredOutputs.find(streamId);
+    if (it == dev->mConfiguredOutputs.end()) {
+        ALOGE("%s: stream id %d does not exist", __FUNCTION__ , streamId);
+        return ret;
+    }
+    sp<ACameraCaptureSession> session = dev->mCurrentSession.promote();
+    if (session == nullptr) {
+        ALOGE("%s: Session is dead already", __FUNCTION__ );
+        return ret;
+    }
+    // We've found the window corresponding to the surface id.
+    ACameraWindowType *window = it->second.first;
+    sp<AMessage> msg = new AMessage(kWhatPreparedCb, dev->mHandler);
+    msg->setPointer(kContextKey, session->mPreparedCb.context);
+    msg->setPointer(kAnwKey, window);
+    msg->setObject(kSessionSpKey, session);
+    msg->setPointer(kCallbackFpKey, (void *)session->mPreparedCb.onWindowPrepared);
+    dev->postSessionMsgAndCleanup(msg);
+
     return binder::Status::ok();
 }
 
diff --git a/camera/ndk/impl/ACameraDevice.h b/camera/ndk/impl/ACameraDevice.h
index fef8ec2..4658d18 100644
--- a/camera/ndk/impl/ACameraDevice.h
+++ b/camera/ndk/impl/ACameraDevice.h
@@ -151,6 +151,8 @@
 
     camera_status_t updateOutputConfigurationLocked(ACaptureSessionOutput *output);
 
+    camera_status_t prepareLocked(ACameraWindowType *window);
+
     camera_status_t allocateCaptureRequest(
             const ACaptureRequest* request, sp<CaptureRequest>& outReq);
 
@@ -222,7 +224,8 @@
         kWhatLogicalCaptureFail, // onLogicalCameraCaptureFailed
         kWhatCaptureSeqEnd,    // onCaptureSequenceCompleted
         kWhatCaptureSeqAbort,  // onCaptureSequenceAborted
-        kWhatCaptureBufferLost,// onCaptureBufferLost
+        kWhatCaptureBufferLost, // onCaptureBufferLost
+        kWhatPreparedCb, // onWindowPrepared
         // Internal cleanup
         kWhatCleanUpSessions   // Cleanup cached sp<ACameraCaptureSession>
     };
diff --git a/camera/ndk/impl/ACameraManager.cpp b/camera/ndk/impl/ACameraManager.cpp
index 3ecf845..5d3b65b 100644
--- a/camera/ndk/impl/ACameraManager.cpp
+++ b/camera/ndk/impl/ACameraManager.cpp
@@ -39,17 +39,16 @@
 const char* CameraManagerGlobal::kContextKey    = "CallbackContext";
 const nsecs_t CameraManagerGlobal::kCallbackDrainTimeout = 5000000; // 5 ms
 Mutex                CameraManagerGlobal::sLock;
-CameraManagerGlobal* CameraManagerGlobal::sInstance = nullptr;
+wp<CameraManagerGlobal> CameraManagerGlobal::sInstance = nullptr;
 
-CameraManagerGlobal&
-CameraManagerGlobal::getInstance() {
+sp<CameraManagerGlobal> CameraManagerGlobal::getInstance() {
     Mutex::Autolock _l(sLock);
-    CameraManagerGlobal* instance = sInstance;
+    sp<CameraManagerGlobal> instance = sInstance.promote();
     if (instance == nullptr) {
         instance = new CameraManagerGlobal();
         sInstance = instance;
     }
-    return *instance;
+    return instance;
 }
 
 CameraManagerGlobal::~CameraManagerGlobal() {
@@ -638,7 +637,7 @@
     Mutex::Autolock _l(mLock);
 
     std::vector<std::string> idList;
-    CameraManagerGlobal::getInstance().getCameraIdList(&idList);
+    CameraManagerGlobal::getInstance()->getCameraIdList(&idList);
 
     int numCameras = idList.size();
     ACameraIdList *out = new ACameraIdList;
@@ -688,7 +687,7 @@
         const char* cameraIdStr, sp<ACameraMetadata>* characteristics) {
     Mutex::Autolock _l(mLock);
 
-    sp<hardware::ICameraService> cs = CameraManagerGlobal::getInstance().getCameraService();
+    sp<hardware::ICameraService> cs = CameraManagerGlobal::getInstance()->getCameraService();
     if (cs == nullptr) {
         ALOGE("%s: Cannot reach camera service!", __FUNCTION__);
         return ACAMERA_ERROR_CAMERA_DISCONNECTED;
@@ -734,7 +733,7 @@
 
     ACameraDevice* device = new ACameraDevice(cameraId, callback, chars);
 
-    sp<hardware::ICameraService> cs = CameraManagerGlobal::getInstance().getCameraService();
+    sp<hardware::ICameraService> cs = CameraManagerGlobal::getInstance()->getCameraService();
     if (cs == nullptr) {
         ALOGE("%s: Cannot reach camera service!", __FUNCTION__);
         delete device;
diff --git a/camera/ndk/impl/ACameraManager.h b/camera/ndk/impl/ACameraManager.h
index 0960e6c..c135d0f 100644
--- a/camera/ndk/impl/ACameraManager.h
+++ b/camera/ndk/impl/ACameraManager.h
@@ -46,7 +46,7 @@
  */
 class CameraManagerGlobal final : public RefBase {
   public:
-    static CameraManagerGlobal& getInstance();
+    static sp<CameraManagerGlobal> getInstance();
     sp<hardware::ICameraService> getCameraService();
 
     void registerAvailabilityCallback(
@@ -257,7 +257,7 @@
 
     // For the singleton instance
     static Mutex sLock;
-    static CameraManagerGlobal* sInstance;
+    static wp<CameraManagerGlobal> sInstance;
     CameraManagerGlobal() {};
     ~CameraManagerGlobal();
 };
@@ -271,7 +271,7 @@
  */
 struct ACameraManager {
     ACameraManager() :
-            mGlobalManager(&(android::acam::CameraManagerGlobal::getInstance())) {}
+            mGlobalManager(android::acam::CameraManagerGlobal::getInstance()) {}
     ~ACameraManager();
     camera_status_t getCameraIdList(ACameraIdList** cameraIdList);
     static void     deleteCameraIdList(ACameraIdList* cameraIdList);
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index 7935909..365ac5c 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -537,6 +537,8 @@
         case ACAMERA_CONTROL_ENABLE_ZSL:
         case ACAMERA_CONTROL_EXTENDED_SCENE_MODE:
         case ACAMERA_CONTROL_ZOOM_RATIO:
+        case ACAMERA_CONTROL_SETTINGS_OVERRIDE:
+        case ACAMERA_CONTROL_AUTOFRAMING:
         case ACAMERA_EDGE_MODE:
         case ACAMERA_FLASH_MODE:
         case ACAMERA_HOT_PIXEL_MODE:
@@ -585,6 +587,7 @@
     ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
     ANDROID_CONTROL_AE_PRECAPTURE_ID,
     ANDROID_CONTROL_AF_TRIGGER_ID,
+    ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER,
     ANDROID_DEMOSAIC_MODE,
     ANDROID_EDGE_STRENGTH,
     ANDROID_FLASH_FIRING_POWER,
diff --git a/camera/ndk/include/camera/NdkCameraCaptureSession.h b/camera/ndk/include/camera/NdkCameraCaptureSession.h
index b0fd00c..099c5c5 100644
--- a/camera/ndk/include/camera/NdkCameraCaptureSession.h
+++ b/camera/ndk/include/camera/NdkCameraCaptureSession.h
@@ -99,6 +99,34 @@
     ACameraCaptureSession_stateCallback onActive;
 } ACameraCaptureSession_stateCallbacks;
 
+/**
+ * The definition of camera capture session onWindowPrepared callback.
+ *
+ * <p>This callback is called when the buffer pre-allocation for an output window Surface is
+ * complete. </p>
+ *
+ * <p>Buffer pre-allocation for an output window is started by
+ * {@link ACameraCaptureSession_prepare}
+ * call. While allocation is underway, the output must not be used in a capture request.
+ * Once this callback is called, the output provided can be used as a target for a
+ * capture request. In case of an error during pre-allocation (such as running out of
+ * suitable memory), this callback is still invoked after the error is encountered, though some
+ * buffers may not have been successfully pre-allocated.</p>
+ *
+ * Introduced in API 34.
+ *
+ * @param context The optional app-provided context pointer that was included in
+ *        the {@link ACameraCaptureSession_setWindowPreparedCallback} method
+ *        call.
+ * @param window The window that {@link ACameraCaptureSession_prepare} was called on.
+ * @param session The camera capture session on which {@link ACameraCaptureSession_prepare} was
+ *                called.
+ */
+typedef void (*ACameraCaptureSession_prepareCallback)(
+        void *context,
+        ACameraWindowType *window,
+        ACameraCaptureSession *session);
+
 /// Enum for describing error reason in {@link ACameraCaptureFailure}
 enum {
     /**
@@ -165,7 +193,7 @@
  *                capture request sent by application, so the address is different to what
  *                application sent but the content will match. This request will be freed by
  *                framework immediately after this callback returns.
- * @param timestamp The timestamp when the capture is started. This timestmap will match
+ * @param timestamp The timestamp when the capture is started. This timestamp will match
  *                  {@link ACAMERA_SENSOR_TIMESTAMP} of the {@link ACameraMetadata} in
  *                  {@link ACameraCaptureSession_captureCallbacks#onCaptureCompleted} callback.
  */
@@ -200,7 +228,7 @@
  *                capture request sent by application, so the address is different to what
  *                application sent but the content will match. This request will be freed by
  *                framework immediately after this callback returns.
- * @param failure The {@link ACameraCaptureFailure} desribes the capture failure. The memory is
+ * @param failure The {@link ACameraCaptureFailure} describes the capture failure. The memory is
  *                managed by camera framework. Do not access this pointer after this callback
  *                returns.
  */
@@ -412,7 +440,7 @@
  * and any repeating requests are stopped (as if {@link ACameraCaptureSession_stopRepeating} was
  * called). However, any in-progress capture requests submitted to the session will be completed as
  * normal; once all captures have completed and the session has been torn down,
- * {@link ACameraCaptureSession_stateCallbacks#onClosed} callback will be called and the seesion
+ * {@link ACameraCaptureSession_stateCallbacks#onClosed} callback will be called and the session
  * will be removed from memory.</p>
  *
  * <p>Closing a session is idempotent; closing more than once has no effect.</p>
@@ -499,7 +527,7 @@
  *
  * <p>Repeating burst requests are a simple way for an application to
  * maintain a preview or other continuous stream of frames where each
- * request is different in a predicatable way, without having to continually
+ * request is different in a predictable way, without having to continually
  * submit requests through {@link ACameraCaptureSession_capture}.</p>
  *
  * <p>To stop the repeating capture, call {@link ACameraCaptureSession_stopRepeating}. Any
@@ -710,7 +738,7 @@
  *                capture request sent by application, so the address is different to what
  *                application sent but the content will match. This request will be freed by
  *                framework immediately after this callback returns.
- * @param failure The {@link ALogicalCameraCaptureFailure} desribes the capture failure. The memory
+ * @param failure The {@link ALogicalCameraCaptureFailure} describes the capture failure. The memory
  *                is managed by camera framework. Do not access this pointer after this callback
  *                returns.
  */
@@ -989,6 +1017,92 @@
         int numRequests, ACaptureRequest** requests,
         /*optional*/int* captureSequenceId) __INTRODUCED_IN(33);
 
+/**
+ * Set the callback that is called when the output window for which the client has requested
+ * pre-allocation of buffers through the {@link ACameraCaptureSession_prepareWindow} call has
+ * completed the pre-allocation of buffers.
+ * @param session the ACameraCaptureSession on which ACameraCaptureSession_prepareWindow was called.
+ * @param context optional application provided context. This will be passed into the context
+ *        parameter of the {@link onWindowPrepared} callback.
+ * @param callback the callback to be called when the output window's buffer pre-allocation is
+ *        complete.
+ * @return <ul><li> {@link ACAMERA_OK} if the method succeeds</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if session or callback is
+ *              NULL, or if the session has not been configured with the window</li>
+ *         <li>{@link ACAMERA_ERROR_SESSION_CLOSED} if the capture session has been closed</li>
+ *         <li>{@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if the camera device is closed</li>
+ *         <li>{@link ACAMERA_ERROR_CAMERA_DEVICE} if the camera device encounters fatal error</li>
+ *         <li>{@link ACAMERA_ERROR_CAMERA_SERVICE} the camera service encounters fatal error</li>
+ *         <li>{@link ACAMERA_ERROR_UNKNOWN} if the method fails for some other reasons</li></ul>
+ */
+camera_status_t ACameraCaptureSession_setWindowPreparedCallback(
+    ACameraCaptureSession* session,
+    void *context,
+    ACameraCaptureSession_prepareCallback callback) __INTRODUCED_IN(34);
+
+/**
+ *
+ * <p>Pre-allocate all buffers for an output window.</p>
+ *
+ * <p>Normally, the image buffers for a given output window are allocated on-demand,
+ * to minimize startup latency and memory overhead.</p>
+ *
+ * <p>However, in some cases, it may be desirable for the buffers to be allocated before
+ * any requests targeting the window are actually submitted to the device. Large buffers
+ * may take some time to allocate, which can result in delays in submitting requests until
+ * sufficient buffers are allocated to reach steady-state behavior. Such delays can cause
+ * bursts to take longer than desired, or cause skips or stutters in preview output.</p>
+ *
+ * <p>The ACameraCaptureSession_prepare() call can be used to perform this pre-allocation.
+ * It may only be called for a given output window before that window is used as a target for a
+ * request. The number of buffers allocated is the sum of the count needed by the consumer providing
+ * the output window, and the maximum number needed by the camera device to fill its pipeline.
+ * Since this may be a larger number than what is actually required for steady-state operation,
+ * using this call may result in higher memory consumption than the normal on-demand behavior
+ * results in. This method will also delay the time to first output to a given Surface, in exchange
+ * for smoother frame rate once the allocation is complete.</p>
+ *
+ * <p>For example, an application that creates an
+ * {@link AImageReader} with a maxImages argument of 10,
+ * but only uses 3 simultaneous {@link AImage}s at once, would normally only cause those 3 images
+ * to be allocated (plus what is needed by the camera device for smooth operation).  But using
+ * ACameraCaptureSession_prepare() on the {@link AImageReader}'s window will result in all 10
+ * {@link AImage}s being allocated. So applications using this method should take care to request
+ * only the number of buffers actually necessary for their application.</p>
+ *
+ * <p>If the same output window is used in consecutive sessions (without closing the first
+ * session explicitly), then its already-allocated buffers are carried over, and if it was
+ * used as a target of a capture request in the first session, prepare cannot be called on it
+ * in the second session. If it is, {@link ACAMERA_ERROR_INVALID_PARAMETER} will
+ * be returned by the method.</p>
+ *
+ * <p>Once allocation is complete, {@link ACameraCaptureSession_prepareCallback#onWindowPrepared}
+ * will be invoked with the output provided to this method. Between the prepare call and the
+ * {@link ACameraCaptureSession_prepareCallback#onWindowPrepared} call,
+ * the output provided to prepare must not be used as a target of a capture request submitted
+ * to this session.</p>
+ *
+ * <p>{@link android.hardware.camera2.CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY LEGACY}
+ * devices cannot pre-allocate output buffers; for those devices,
+ * {@link ACameraCaptureSession_prepareCallback#onWindowPrepared} will be immediately called,
+ * and no pre-allocation is done.</p>
+ *
+ * @param session the {@link ACameraCaptureSession} that needs to prepare output buffers.
+ * @param window the {@link ACameraWindowType} for which the output buffers need to be prepared.
+ *
+ * @return <ul><li>
+ *             {@link ACAMERA_OK} if the method succeeds</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if session or window is
+ *              NULL, or if the session has not been configured with the window</li>
+ *         <li>{@link ACAMERA_ERROR_SESSION_CLOSED} if the capture session has been closed</li>
+ *         <li>{@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if the camera device is closed</li>
+ *         <li>{@link ACAMERA_ERROR_CAMERA_DEVICE} if the camera device encounters fatal error</li>
+ *         <li>{@link ACAMERA_ERROR_CAMERA_SERVICE} if the camera service encounters fatal error</li>
+ *         <li>{@link ACAMERA_ERROR_UNKNOWN} if the method fails for some other reasons</li></ul>
+ */
+camera_status_t ACameraCaptureSession_prepareWindow(
+    ACameraCaptureSession* session,
+    ACameraWindowType *window) __INTRODUCED_IN(34);
 __END_DECLS
 
 #endif /* _NDK_CAMERA_CAPTURE_SESSION_H */
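
As a hedged illustration of the two declarations added above, a minimal C sketch of the prepare flow: register the callback, then kick off pre-allocation for one output window. The session and window objects are assumed to already exist from normal session setup; only the two new API 34 calls come from this header.

    #include <camera/NdkCameraCaptureSession.h>
    #include <android/log.h>

    // Invoked once buffer pre-allocation for |window| has finished (or failed).
    static void onWindowPrepared(void* context, ACameraWindowType* window,
                                 ACameraCaptureSession* session) {
        (void)context; (void)window; (void)session;
        __android_log_print(ANDROID_LOG_INFO, "CamPrepare",
                            "window prepared; it may now be targeted by capture requests");
    }

    // Hypothetical helper: |session| is an already-configured capture session and
    // |window| one of its configured outputs that has not yet been used in a request.
    static camera_status_t prepareOutput(ACameraCaptureSession* session,
                                         ACameraWindowType* window) {
        camera_status_t status = ACameraCaptureSession_setWindowPreparedCallback(
                session, /*context=*/NULL, onWindowPrepared);
        if (status != ACAMERA_OK) return status;
        // Start pre-allocation; do not target |window| until the callback fires.
        return ACameraCaptureSession_prepareWindow(session, window);
    }
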
diff --git a/camera/ndk/include/camera/NdkCameraDevice.h b/camera/ndk/include/camera/NdkCameraDevice.h
index 239cb31..de10eb3 100644
--- a/camera/ndk/include/camera/NdkCameraDevice.h
+++ b/camera/ndk/include/camera/NdkCameraDevice.h
@@ -113,7 +113,7 @@
  * @param context The optional context in {@link ACameraDevice_StateCallbacks} will be
  *                passed to this callback.
  * @param device The {@link ACameraDevice} that is being disconnected.
- * @param error The error code describes the cause of this error callback. See the folowing
+ * @param error The error code that describes the cause of this error callback. See the following
  *              links for more detail.
  *
  * @see ERROR_CAMERA_IN_USE
@@ -447,8 +447,8 @@
  *   returned by {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS}
  *   before creating a Surface from the SurfaceTexture with <a href=
  *   "http://developer.android.com/reference/android/view/Surface.html#Surface(android.graphics.SurfaceTexture)">
- *   Surface\#Surface(SurfaceTextrue)</a>. If the size is not set by the application, it will be set to be the
- *   smallest supported size less than 1080p, by the camera device.</li>
+ *   Surface\#Surface(SurfaceTexture)</a>. If the size is not set by the application, it will be
+ *   set by the camera device to the smallest supported size less than 1080p.</li>
  *
  * <li>For recording with <a href=
  *     "http://developer.android.com/reference/android/media/MediaCodec.html">
@@ -587,7 +587,7 @@
  * <tr><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th> </tr>
  * <tr> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`PRIV`</td><td id="rb">`MAXIMUM`</td> <td colspan="2" id="rb"></td> <td>Maximum-resolution GPU processing with preview.</td> </tr>
  * <tr> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`MAXIMUM`</td> <td colspan="2" id="rb"></td> <td>Maximum-resolution in-app processing with preview.</td> </tr>
- * <tr> <td>`YUV `</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`MAXIMUM`</td> <td colspan="2" id="rb"></td> <td>Maximum-resolution two-input in-app processsing.</td> </tr>
+ * <tr> <td>`YUV `</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`MAXIMUM`</td> <td colspan="2" id="rb"></td> <td>Maximum-resolution two-input in-app processing.</td> </tr>
  * <tr> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`JPEG`</td><td id="rb">`MAXIMUM`</td> <td>Video recording with maximum-size video snapshot</td> </tr>
  * <tr> <td>`YUV `</td><td id="rb">`640x480`</td> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`MAXIMUM`</td> <td>Standard video recording plus maximum-resolution in-app processing.</td> </tr>
  * <tr> <td>`YUV `</td><td id="rb">`640x480`</td> <td>`YUV `</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`MAXIMUM`</td> <td>Preview plus two-input maximum-resolution in-app processing.</td> </tr>
@@ -629,7 +629,7 @@
  * <tr><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th> </tr>
  * <tr> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`PRIV`</td><td id="rb">`MAXIMUM`</td> <td>Maximum-resolution GPU processing with preview.</td> </tr>
  * <tr> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`MAXIMUM`</td> <td>Maximum-resolution in-app processing with preview.</td> </tr>
- * <tr> <td>`YUV `</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`MAXIMUM`</td> <td>Maximum-resolution two-input in-app processsing.</td> </tr>
+ * <tr> <td>`YUV `</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`MAXIMUM`</td> <td>Maximum-resolution two-input in-app processing.</td> </tr>
  * </table><br>
  * </p>
  *
diff --git a/camera/ndk/include/camera/NdkCameraError.h b/camera/ndk/include/camera/NdkCameraError.h
index 26db7f2..88063d6 100644
--- a/camera/ndk/include/camera/NdkCameraError.h
+++ b/camera/ndk/include/camera/NdkCameraError.h
@@ -97,7 +97,7 @@
     ACAMERA_ERROR_CAMERA_SERVICE        = ACAMERA_ERROR_BASE - 6,
 
     /**
-     * The {@link ACameraCaptureSession} has been closed and cannnot perform any operation other
+     * The {@link ACameraCaptureSession} has been closed and cannot perform any operation other
      * than {@link ACameraCaptureSession_close}.
      */
     ACAMERA_ERROR_SESSION_CLOSED        = ACAMERA_ERROR_BASE - 7,
diff --git a/camera/ndk/include/camera/NdkCameraManager.h b/camera/ndk/include/camera/NdkCameraManager.h
index 729182e..b4f3bf1 100644
--- a/camera/ndk/include/camera/NdkCameraManager.h
+++ b/camera/ndk/include/camera/NdkCameraManager.h
@@ -209,7 +209,8 @@
  * Query the capabilities of a camera device. These capabilities are
  * immutable for a given camera.
  *
- * <p>See {@link ACameraMetadata} document and {@link NdkCameraMetadataTags.h} for more details.</p>
+ * <p>See {@link ACameraMetadata} document and <a href="https://cs.android.com/android/platform/superproject/+/master:frameworks/av/camera/ndk/include/camera/NdkCameraMetadataTags.h">NdkCameraMetadataTags.h</a>
+ * for more details.</p>
  *
  * <p>The caller must call {@link ACameraMetadata_free} to free the memory of the output
  * characteristics.</p>
@@ -217,7 +218,7 @@
  * @param manager the {@link ACameraManager} of interest.
  * @param cameraId the ID string of the camera device of interest.
  * @param characteristics the output {@link ACameraMetadata} will be filled here if the method call
- *        succeeeds.
+ *        succeeds.
  *
  * @return <ul>
  *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
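
To make the ownership rule above concrete, a small sketch (not part of the change) that queries and then frees the characteristics of the first enumerated camera; the ID comes from ACameraManager_getCameraIdList.

    #include <camera/NdkCameraManager.h>
    #include <camera/NdkCameraMetadata.h>

    // Query the static characteristics of the first camera, then release them.
    static camera_status_t dumpFirstCameraCharacteristics(void) {
        ACameraManager* mgr = ACameraManager_create();
        ACameraIdList* ids = NULL;
        camera_status_t status = ACameraManager_getCameraIdList(mgr, &ids);
        if (status == ACAMERA_OK && ids->numCameras > 0) {
            ACameraMetadata* chars = NULL;
            status = ACameraManager_getCameraCharacteristics(mgr, ids->cameraIds[0], &chars);
            if (status == ACAMERA_OK) {
                // ... inspect entries with ACameraMetadata_getConstEntry() ...
                ACameraMetadata_free(chars);  // the caller owns the returned metadata
            }
        }
        if (ids != NULL) ACameraManager_deleteCameraIdList(ids);
        ACameraManager_delete(mgr);
        return status;
    }
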
diff --git a/camera/ndk/include/camera/NdkCameraMetadata.h b/camera/ndk/include/camera/NdkCameraMetadata.h
index b331d50..cf29736 100644
--- a/camera/ndk/include/camera/NdkCameraMetadata.h
+++ b/camera/ndk/include/camera/NdkCameraMetadata.h
@@ -96,9 +96,12 @@
     /**
      * The tag identifying the entry.
      *
-     * <p> It is one of the values defined in {@link NdkCameraMetadataTags.h}, and defines how the
+     * <p> It is one of the values defined in
+     * <a href="https://cs.android.com/android/platform/superproject/+/master:frameworks/av/camera/ndk/include/camera/NdkCameraMetadataTags.h">NdkCameraMetadataTags.h</a>,
+     * and defines how the
      * entry should be interpreted and which parts of the API provide it.
-     * See {@link NdkCameraMetadataTags.h} for more details. </p>
+     * See <a href="https://cs.android.com/android/platform/superproject/+/master:frameworks/av/camera/ndk/include/camera/NdkCameraMetadataTags.h">NdkCameraMetadataTags.h</a>
+     * for more details. </p>
      */
     uint32_t tag;
 
@@ -141,9 +144,11 @@
     /**
      * The tag identifying the entry.
      *
-     * <p> It is one of the values defined in {@link NdkCameraMetadataTags.h}, and defines how the
+     * <p> It is one of the values defined in <a href="https://cs.android.com/android/platform/superproject/+/master:frameworks/av/camera/ndk/include/camera/NdkCameraMetadataTags.h">NdkCameraMetadataTags.h</a>,
+     * and defines how the
      * entry should be interpreted and which parts of the API provide it.
-     * See {@link NdkCameraMetadataTags.h} for more details. </p>
+     * See <a href="https://cs.android.com/android/platform/superproject/+/master:frameworks/av/camera/ndk/include/camera/NdkCameraMetadataTags.h">NdkCameraMetadataTags.h</a>
+     * for more details. </p>
      */
     uint32_t tag;
 
@@ -185,7 +190,7 @@
  * @param metadata the {@link ACameraMetadata} of interest.
  * @param tag the tag value of the camera metadata entry to be get.
  * @param entry the output {@link ACameraMetadata_const_entry} will be filled here if the method
- *        call succeeeds.
+ *        call succeeds.
  *
  * @return <ul>
  *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
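
A short sketch of the entry layout documented above: tag identifies the key, count the number of elements, and the data union holds the typed payload. ACAMERA_SENSOR_ORIENTATION is just an arbitrary int32 tag chosen for illustration.

    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCameraMetadataTags.h>

    // Return the sensor orientation in degrees, or -1 if the tag is not present.
    static int getSensorOrientation(const ACameraMetadata* chars) {
        ACameraMetadata_const_entry entry;
        camera_status_t status =
                ACameraMetadata_getConstEntry(chars, ACAMERA_SENSOR_ORIENTATION, &entry);
        if (status != ACAMERA_OK || entry.count < 1) {
            return -1;  // key missing on this device
        }
        return entry.data.i32[0];  // the int32 payload lives behind entry.data.i32
    }
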
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 9174adf..bd679e5 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -74,6 +74,8 @@
     ACAMERA_HEIC_INFO,
     ACAMERA_AUTOMOTIVE,
     ACAMERA_AUTOMOTIVE_LENS,
+    ACAMERA_EXTENSION,
+    ACAMERA_JPEGR,
     ACAMERA_SECTION_COUNT,
 
     ACAMERA_VENDOR = 0x8000
@@ -119,6 +121,8 @@
     ACAMERA_HEIC_INFO_START        = ACAMERA_HEIC_INFO         << 16,
     ACAMERA_AUTOMOTIVE_START       = ACAMERA_AUTOMOTIVE        << 16,
     ACAMERA_AUTOMOTIVE_LENS_START  = ACAMERA_AUTOMOTIVE_LENS   << 16,
+    ACAMERA_EXTENSION_START        = ACAMERA_EXTENSION         << 16,
+    ACAMERA_JPEGR_START            = ACAMERA_JPEGR             << 16,
     ACAMERA_VENDOR_START           = ACAMERA_VENDOR            << 16
 } acamera_metadata_section_start_t;
 
@@ -541,7 +545,9 @@
      * mode.</p>
      * <p>For camera devices with the
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
-     * capability,
+     * capability or devices where
+     * <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+     * lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a>,
      * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
      * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION must be used as the
      * coordinate system for requests where ACAMERA_SENSOR_PIXEL_MODE is set to
@@ -750,7 +756,10 @@
      * mode.</p>
      * <p>For camera devices with the
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
-     * capability, ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
+     * capability or devices where
+     * <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+     * lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a>,
+     * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
      * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION must be used as the
      * coordinate system for requests where ACAMERA_SENSOR_PIXEL_MODE is set to
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
@@ -953,7 +962,10 @@
      * mode.</p>
      * <p>For camera devices with the
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
-     * capability, ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
+     * capability or devices where
+     * <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+     * lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a>,
+     * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
      * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION must be used as the
      * coordinate system for requests where ACAMERA_SENSOR_PIXEL_MODE is set to
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
@@ -2044,6 +2056,175 @@
      */
     ACAMERA_CONTROL_ZOOM_RATIO =                                // float
             ACAMERA_CONTROL_START + 47,
+    /**
+     * <p>The desired CaptureRequest settings override with which certain keys are
+     * applied earlier so that they can take effect sooner.</p>
+     *
+     * <p>Type: int32 (acamera_metadata_enum_android_control_settings_override_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+     *   <li>ACaptureRequest</li>
+     * </ul></p>
+     *
+     * <p>There are some CaptureRequest keys which can be applied earlier than others
+     * when controls within a CaptureRequest aren't required to take effect at the same time.
+     * One such example is zoom. Zoom can be applied at a later stage of the camera pipeline.
+     * As soon as the camera device receives the CaptureRequest, it can apply the requested
+     * zoom value onto an earlier request that's already in the pipeline, thus improving zoom
+     * latency.</p>
+     * <p>This key's value in the capture result reflects whether the controls for this capture
+     * are overridden "by" a newer request. This means that if a capture request turns on
+     * settings override, the capture result of an earlier request will contain the key value
+     * of ZOOM. On the other hand, if a capture request has settings override turned on,
+     * but all newer requests have it turned off, the key's value in the capture result will
+     * be OFF because this capture isn't overridden by a newer capture. In the two examples
+     * below, the capture results columns illustrate the settingsOverride values in different
+     * scenarios.</p>
+     * <p>Assuming the zoom settings override can take effect one frame sooner, the example below
+     * illustrates the speed-up at the start of a capture session:</p>
+     * <pre><code>Camera session created
+     * Request 1 (zoom=1.0x, override=ZOOM) -&gt;
+     * Request 2 (zoom=1.2x, override=ZOOM) -&gt;
+     * Request 3 (zoom=1.4x, override=ZOOM) -&gt;  Result 1 (zoom=1.2x, override=ZOOM)
+     * Request 4 (zoom=1.6x, override=ZOOM) -&gt;  Result 2 (zoom=1.4x, override=ZOOM)
+     * Request 5 (zoom=1.8x, override=ZOOM) -&gt;  Result 3 (zoom=1.6x, override=ZOOM)
+     *                                      -&gt;  Result 4 (zoom=1.8x, override=ZOOM)
+     *                                      -&gt;  Result 5 (zoom=1.8x, override=OFF)
+     * </code></pre>
+     * <p>The application can turn on settings override and use zoom as normal. The example
+     * shows that the later zoom values (1.2x, 1.4x, 1.6x, and 1.8x) overwrite the zoom
+     * values (1.0x, 1.2x, 1.4x, and 1.6x) of earlier requests (#1, #2, #3, and #4).</p>
+     * <p>The application must make sure the settings override doesn't interfere with user
+     * journeys requiring simultaneous application of all controls in CaptureRequest on the
+     * requested output targets. For example, if the application takes a still capture using
+     * CameraCaptureSession#capture, and the repeating request immediately sets a different
+     * zoom value using override, the inflight still capture could have its zoom value
+     * overwritten unexpectedly.</p>
+     * <p>So the application is strongly recommended to turn off settingsOverride when taking
+     * still/burst captures, and turn it back on when there is only repeating viewfinder
+     * request and no inflight still/burst captures.</p>
+     * <p>Below is an example demonstrating the transitions in and out of the
+     * settings override:</p>
+     * <pre><code>Request 1 (zoom=1.0x, override=OFF)
+     * Request 2 (zoom=1.2x, override=OFF)
+     * Request 3 (zoom=1.4x, override=ZOOM)  -&gt; Result 1 (zoom=1.0x, override=OFF)
+     * Request 4 (zoom=1.6x, override=ZOOM)  -&gt; Result 2 (zoom=1.4x, override=ZOOM)
+     * Request 5 (zoom=1.8x, override=OFF)   -&gt; Result 3 (zoom=1.6x, override=ZOOM)
+     *                                       -&gt; Result 4 (zoom=1.6x, override=OFF)
+     *                                       -&gt; Result 5 (zoom=1.8x, override=OFF)
+     * </code></pre>
+     * <p>This example shows that:</p>
+     * <ul>
+     * <li>The application "ramps in" settings override by setting the control to ZOOM.
+     * In the example, request #3 enables zoom settings override. Because the camera device
+     * can speed up applying zoom by 1 frame, the outputs of request #2 have 1.4x zoom, the
+     * value specified in request #3.</li>
+     * <li>The application "ramps out" of settings override by setting the control to OFF. In
+     * the example, request #5 changes the override to OFF. Because request #4's zoom
+     * takes effect in result #3, result #4's zoom remains the same until the new value takes
+     * effect in result #5.</li>
+     * </ul>
+     */
+    ACAMERA_CONTROL_SETTINGS_OVERRIDE =                         // int32 (acamera_metadata_enum_android_control_settings_override_t)
+            ACAMERA_CONTROL_START + 49,
+    /**
+     * <p>List of available settings overrides supported by the camera device that can
+     * be used to speed up certain controls.</p>
+     *
+     * <p>Type: int32[n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>When not all controls within a CaptureRequest are required to take effect
+     * at the same time on the outputs, the camera device may apply certain request keys sooner
+     * to improve latency. This list contains such supported settings overrides. Each settings
+     * override corresponds to a set of CaptureRequest keys that can be sped up when applying.</p>
+     * <p>A supported settings override can be passed in via
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CaptureRequest.html#CONTROL_SETTINGS_OVERRIDE">CaptureRequest#CONTROL_SETTINGS_OVERRIDE</a>, and the
+     * CaptureRequest keys corresponding to the override are applied as soon as possible, not
+     * bound by per-frame synchronization. See ACAMERA_CONTROL_SETTINGS_OVERRIDE for the
+     * CaptureRequest keys for each override.</p>
+     * <p>OFF is always included in this list.</p>
+     *
+     * @see ACAMERA_CONTROL_SETTINGS_OVERRIDE
+     */
+    ACAMERA_CONTROL_AVAILABLE_SETTINGS_OVERRIDES =              // int32[n]
+            ACAMERA_CONTROL_START + 50,
+    /**
+     * <p>Automatic crop, pan and zoom to keep objects in the center of the frame.</p>
+     *
+     * <p>Type: byte (acamera_metadata_enum_android_control_autoframing_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+     *   <li>ACaptureRequest</li>
+     * </ul></p>
+     *
+     * <p>Auto-framing is a special mode provided by the camera device to dynamically crop, zoom
+     * or pan the camera feed to try to ensure that the people in a scene occupy a reasonable
+     * portion of the viewport. It is primarily designed to support video calling in
+     * situations where the user isn't directly in front of the device, especially for
+     * wide-angle cameras.
+     * ACAMERA_SCALER_CROP_REGION and ACAMERA_CONTROL_ZOOM_RATIO in CaptureResult will be used
+     * to denote the coordinates of the auto-framed region.
+     * Zoom and video stabilization controls are disabled when auto-framing is enabled. The 3A
+     * regions must map the screen coordinates into the scaler crop returned from the capture
+     * result instead of using the active array sensor.</p>
+     *
+     * @see ACAMERA_CONTROL_ZOOM_RATIO
+     * @see ACAMERA_SCALER_CROP_REGION
+     */
+    ACAMERA_CONTROL_AUTOFRAMING =                               // byte (acamera_metadata_enum_android_control_autoframing_t)
+            ACAMERA_CONTROL_START + 52,
+    /**
+     * <p>Whether the camera device supports ACAMERA_CONTROL_AUTOFRAMING.</p>
+     *
+     * @see ACAMERA_CONTROL_AUTOFRAMING
+     *
+     * <p>Type: byte (acamera_metadata_enum_android_control_autoframing_available_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Will be <code>false</code> if auto-framing is not available.</p>
+     */
+    ACAMERA_CONTROL_AUTOFRAMING_AVAILABLE =                     // byte (acamera_metadata_enum_android_control_autoframing_available_t)
+            ACAMERA_CONTROL_START + 53,
+    /**
+     * <p>Current state of auto-framing.</p>
+     *
+     * <p>Type: byte (acamera_metadata_enum_android_control_autoframing_state_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+     * </ul></p>
+     *
+     * <p>When the camera doesn't have auto-framing available (i.e.
+     * <code>ACAMERA_CONTROL_AUTOFRAMING_AVAILABLE</code> == false) or it is not enabled (i.e.
+     * <code>ACAMERA_CONTROL_AUTOFRAMING</code> == OFF), the state will always be INACTIVE.
+     * Other states indicate the current auto-framing state:</p>
+     * <ul>
+     * <li>When <code>ACAMERA_CONTROL_AUTOFRAMING</code> is set to ON, auto-framing will take
+     * place. While the frame is aligning itself to center the object (doing things like
+     * zooming in, zooming out, or panning), the state will be FRAMING.</li>
+     * <li>When field of view is not being adjusted anymore and has reached a stable state, the
+     * state will be CONVERGED.</li>
+     * </ul>
+     *
+     * @see ACAMERA_CONTROL_AUTOFRAMING
+     * @see ACAMERA_CONTROL_AUTOFRAMING_AVAILABLE
+     */
+    ACAMERA_CONTROL_AUTOFRAMING_STATE =                         // byte (acamera_metadata_enum_android_control_autoframing_state_t)
+            ACAMERA_CONTROL_START + 54,
     ACAMERA_CONTROL_END,
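
To make the ramp-in/ramp-out discussion above concrete, a hedged sketch of a viewfinder request that enables the zoom settings override and, when the device advertises it, auto-framing; the request and characteristics objects are assumed to come from normal session setup. Per the guidance above, the override would be switched back to OFF before issuing still or burst captures.

    #include <stdint.h>
    #include <camera/NdkCaptureRequest.h>
    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCameraMetadataTags.h>

    // Opt a repeating viewfinder request into faster zoom application and auto-framing.
    static void tuneViewfinderRequest(ACaptureRequest* request,
                                      const ACameraMetadata* chars) {
        // Zoom-related keys in this request may now be applied ahead of the other keys.
        int32_t override_mode = ACAMERA_CONTROL_SETTINGS_OVERRIDE_ZOOM;
        ACaptureRequest_setEntry_i32(request, ACAMERA_CONTROL_SETTINGS_OVERRIDE, 1, &override_mode);

        // Only enable auto-framing when the device reports it as available.
        ACameraMetadata_const_entry entry;
        if (ACameraMetadata_getConstEntry(chars, ACAMERA_CONTROL_AUTOFRAMING_AVAILABLE,
                                          &entry) == ACAMERA_OK &&
            entry.count > 0 &&
            entry.data.u8[0] == ACAMERA_CONTROL_AUTOFRAMING_AVAILABLE_TRUE) {
            uint8_t autoframing = ACAMERA_CONTROL_AUTOFRAMING_ON;
            ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AUTOFRAMING, 1, &autoframing);
        }
    }
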
 
     /**
@@ -3520,6 +3701,26 @@
      */
     ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP =      // int64[n*3] (acamera_metadata_enum_android_request_available_dynamic_range_profiles_map_t)
             ACAMERA_REQUEST_START + 19,
+    /**
+     * <p>A list of all possible color space profiles supported by a camera device.</p>
+     *
+     * <p>Type: int64[n*3] (acamera_metadata_enum_android_request_available_color_space_profiles_map_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>A color space profile is a combination of a color space, an image format, and a dynamic range
+     * profile. If a camera does not support the
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT">CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT</a>
+     * capability, the dynamic range profile will always be
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/params/DynamicRangeProfiles.html#STANDARD">DynamicRangeProfiles#STANDARD</a>. Camera clients can
+     * use <a href="https://developer.android.com/reference/android/hardware/camera2/params/SessionConfiguration.html#setColorSpace">SessionConfiguration#setColorSpace</a> to select
+     * a color space.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP =        // int64[n*3] (acamera_metadata_enum_android_request_available_color_space_profiles_map_t)
+            ACAMERA_REQUEST_START + 21,
     ACAMERA_REQUEST_END,
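
A hedged sketch of walking this new list; the (color space, image format, dynamic range profile) ordering of each int64 triple is an assumption drawn from the description above rather than something this header spells out.

    #include <inttypes.h>
    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCameraMetadataTags.h>
    #include <android/log.h>

    // Log every color space profile triple the device advertises, if the key is present.
    static void logColorSpaceProfiles(const ACameraMetadata* chars) {
        ACameraMetadata_const_entry entry;
        if (ACameraMetadata_getConstEntry(chars,
                ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP, &entry) != ACAMERA_OK) {
            return;  // key absent: no color space profiles reported
        }
        for (uint32_t i = 0; i + 2 < entry.count; i += 3) {
            // Assumed ordering: color space, image format, dynamic range profile.
            __android_log_print(ANDROID_LOG_INFO, "ColorSpaces",
                    "colorSpace=%" PRId64 " format=%" PRId64 " dynamicRange=%" PRId64,
                    entry.data.i64[i], entry.data.i64[i + 1], entry.data.i64[i + 2]);
        }
    }
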
 
     /**
@@ -3547,9 +3748,9 @@
      * <p>Output streams use this rectangle to produce their output, cropping to a smaller region
      * if necessary to maintain the stream's aspect ratio, then scaling the sensor input to
      * match the output's configured resolution.</p>
-     * <p>The crop region is applied after the RAW to other color space (e.g. YUV)
-     * conversion. Since raw streams (e.g. RAW16) don't have the conversion stage, they are not
-     * croppable. The crop region will be ignored by raw streams.</p>
+     * <p>The crop region is usually applied after the RAW to other color space (e.g. YUV)
+     * conversion. As a result RAW streams are not croppable unless supported by the
+     * camera device. See ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES#CROPPED_RAW for details.</p>
      * <p>For non-raw streams, any additional per-stream cropping will be done to maximize the
      * final pixel area of the stream.</p>
      * <p>For example, if the crop region is set to a 4:3 aspect ratio, then 4:3 streams will use
@@ -3630,7 +3831,9 @@
      * ACAMERA_CONTROL_ZOOM_RATIO for details.</p>
      * <p>For camera devices with the
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
-     * capability, ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
+     * capability or devices where <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+     * lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a>,
+     * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
      * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION must be used as the
      * coordinate system for requests where ACAMERA_SENSOR_PIXEL_MODE is set to
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
@@ -3640,6 +3843,7 @@
      * @see ACAMERA_CONTROL_ZOOM_RATIO
      * @see ACAMERA_DISTORTION_CORRECTION_MODE
      * @see ACAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM
+     * @see ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES
      * @see ACAMERA_SCALER_CROPPING_TYPE
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
@@ -4283,8 +4487,8 @@
      * <p>The guaranteed stream combinations related to stream use case for a camera device with
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE">CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE</a>
      * capability is documented in the camera device
-     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">guideline</a>. The
-     * application is strongly recommended to use one of the guaranteed stream combinations.
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#stream-use-case-capability-additional-guaranteed-configurations">guideline</a>. The application is strongly recommended to use one of the guaranteed stream
+     * combinations.
      * If the application creates a session with a stream combination not in the guaranteed
      * list, or with mixed DEFAULT and non-DEFAULT use cases within the same session,
      * the camera device may ignore some stream use cases due to hardware constraints
@@ -4295,6 +4499,59 @@
      */
     ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES =                 // int64[n] (acamera_metadata_enum_android_scaler_available_stream_use_cases_t)
             ACAMERA_SCALER_START + 25,
+    /**
+     * <p>The region of the sensor that corresponds to the RAW read out for this
+     * capture when the stream use case of a RAW stream is set to CROPPED_RAW.</p>
+     *
+     * <p>Type: int32[4]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+     * </ul></p>
+     *
+     * <p>The coordinate system follows that of ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.</p>
+     * <p>This CaptureResult key will be set when the corresponding CaptureRequest has a RAW target
+     * with stream use case set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW">CameraMetadata#SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW</a>,
+     * otherwise it will be {@code null}.
+     * The value of this key specifies the region of the sensor used for the RAW capture and can
+     * be used to calculate the corresponding field of view of RAW streams.
+     * This field of view will always be &gt;= field of view for (processed) non-RAW streams for the
+     * capture. Note: The region specified may not necessarily be centered.</p>
+     * <p>For example: Assume a camera device has a pre-correction active array size of
+     * {@code {0, 0, 1500, 2000}}. If the RAW_CROP_REGION is {@code {500, 375, 1500, 1125}}, that
+     * corresponds to a centered crop of 1/4th of the full field of view RAW stream.</p>
+     * <p>The metadata keys which describe properties of RAW frames:</p>
+     * <ul>
+     * <li>ACAMERA_STATISTICS_HOT_PIXEL_MAP</li>
+     * <li>android.statistics.lensShadingCorrectionMap</li>
+     * <li>ACAMERA_LENS_DISTORTION</li>
+     * <li>ACAMERA_LENS_POSE_TRANSLATION</li>
+     * <li>ACAMERA_LENS_POSE_ROTATION</li>
+     * <li>ACAMERA_LENS_INTRINSIC_CALIBRATION</li>
+     * </ul>
+     * <p>should be interpreted in the effective field-of-view coordinate system after the RAW crop.
+     * In this coordinate system,
+     * {preCorrectionActiveArraySize.left, preCorrectionActiveArraySize.top} corresponds to the
+     * top left corner of the cropped RAW frame and
+     * {preCorrectionActiveArraySize.right, preCorrectionActiveArraySize.bottom} corresponds to
+     * the bottom right corner. Client applications must use the values of the keys
+     * in the CaptureResult metadata if present.</p>
+     * <p>Crop regions (android.scaler.CropRegion), AE/AWB/AF regions and face coordinates still
+     * use the ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE coordinate system as usual.</p>
+     *
+     * @see ACAMERA_LENS_DISTORTION
+     * @see ACAMERA_LENS_INTRINSIC_CALIBRATION
+     * @see ACAMERA_LENS_POSE_ROTATION
+     * @see ACAMERA_LENS_POSE_TRANSLATION
+     * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_STATISTICS_HOT_PIXEL_MAP
+     */
+    ACAMERA_SCALER_RAW_CROP_REGION =                            // int32[4]
+            ACAMERA_SCALER_START + 26,
     ACAMERA_SCALER_END,
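
A sketch of picking the new RAW crop region out of a capture result inside an ACameraCaptureSession_captureCallback_result callback; the key is simply absent when the request had no CROPPED_RAW target, and the (left, top, width, height) reading of the four values is an assumption to be checked against the text above.

    #include <camera/NdkCameraCaptureSession.h>
    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCameraMetadataTags.h>
    #include <android/log.h>

    // onCaptureCompleted-style callback that logs the RAW crop region when present.
    static void onCaptureCompleted(void* context, ACameraCaptureSession* session,
                                   ACaptureRequest* request, const ACameraMetadata* result) {
        (void)context; (void)session; (void)request;
        ACameraMetadata_const_entry entry;
        if (ACameraMetadata_getConstEntry(result, ACAMERA_SCALER_RAW_CROP_REGION,
                                          &entry) != ACAMERA_OK || entry.count < 4) {
            return;  // no CROPPED_RAW stream was targeted by this request
        }
        __android_log_print(ANDROID_LOG_INFO, "RawCrop",
                            "raw crop region: %d, %d, %d, %d",
                            entry.data.i32[0], entry.data.i32[1],
                            entry.data.i32[2], entry.data.i32[3]);
    }
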
 
     /**
@@ -5117,13 +5374,10 @@
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_DEFAULT">CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT</a> mode.
      * When operating in
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_DEFAULT">CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT</a> mode, sensors
-     * with <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
-     * capability would typically perform pixel binning in order to improve low light
+     * would typically perform pixel binning in order to improve low light
      * performance, noise reduction etc. However, in
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>
-     * mode (supported only
-     * by <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
-     * sensors), sensors typically operate in unbinned mode allowing for a larger image size.
+     * mode, sensors typically operate in unbinned mode allowing for a larger image size.
      * The stream configurations supported in
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>
      * mode are also different from those of
@@ -5137,7 +5391,36 @@
      * <code>android.scaler.streamConfigurationMap</code>
      * must not be mixed in the same CaptureRequest. In other words, these outputs are
      * exclusive to each other.
-     * This key does not need to be set for reprocess requests.</p>
+     * This key does not need to be set for reprocess requests.
+     * This key will be present on devices supporting the
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability. It may also be present on devices which do not support the aforementioned
+     * capability. In that case:</p>
+     * <ul>
+     * <li>
+     * <p>The mandatory stream combinations listed in
+     *   <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics/mandatoryMaximumResolutionStreamCombinations.html">mandatoryMaximumResolutionStreamCombinations</a>
+     *   would not apply.</p>
+     * </li>
+     * <li>
+     * <p>The bayer pattern of {@code RAW} streams when
+     *   <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>
+     *   is selected will be the one listed in <a href="https://developer.android.com/reference/android/sensor/info/binningFactor.html">binningFactor</a>.</p>
+     * </li>
+     * <li>
+     * <p>The following keys will always be present:</p>
+     * <ul>
+     * <li>android.scaler.streamConfigurationMapMaximumResolution</li>
+     * <li>ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION</li>
+     * <li>ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION</li>
+     * <li>ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION</li>
+     * </ul>
+     * </li>
+     * </ul>
+     *
+     * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
+     * @see ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION
+     * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
      */
     ACAMERA_SENSOR_PIXEL_MODE =                                 // byte (acamera_metadata_enum_android_sensor_pixel_mode_t)
             ACAMERA_SENSOR_START + 32,
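
A minimal sketch of opting a single request into the unbinned readout described above, assuming the request targets a stream from the maximum-resolution stream configuration map and that the existing ACAMERA_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION enum value is used.

    #include <stdint.h>
    #include <camera/NdkCaptureRequest.h>
    #include <camera/NdkCameraMetadataTags.h>

    // Ask the sensor to run in maximum-resolution (unbinned) mode for this request.
    static camera_status_t useMaximumResolution(ACaptureRequest* request) {
        uint8_t pixel_mode = ACAMERA_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION;
        return ACaptureRequest_setEntry_u8(request, ACAMERA_SENSOR_PIXEL_MODE, 1, &pixel_mode);
    }
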
@@ -5482,7 +5765,8 @@
      * counterparts.
      * This key will only be present for devices which advertise the
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
-     * capability.</p>
+     * capability or devices where <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+     * lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a>.</p>
      * <p>The data representation is <code>int[4]</code>, which maps to <code>(left, top, width, height)</code>.</p>
      *
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
@@ -5514,7 +5798,8 @@
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.
      * This key will only be present for devices which advertise the
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
-     * capability.</p>
+     * capability or devices where <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+     * lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a>.</p>
      *
      * @see ACAMERA_SENSOR_INFO_PHYSICAL_SIZE
      * @see ACAMERA_SENSOR_PIXEL_MODE
@@ -5542,7 +5827,8 @@
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.
      * This key will only be present for devices which advertise the
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
-     * capability.</p>
+     * capability or devices where <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+     * lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a>.</p>
      * <p>The data representation is <code>int[4]</code>, which maps to <code>(left, top, width, height)</code>.</p>
      *
      * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
@@ -5567,12 +5853,27 @@
      * to improve various aspects of imaging such as noise reduction, low light
      * performance etc. These groups can be of various sizes such as 2X2 (quad bayer),
      * 3X3 (nona-bayer). This key specifies the length and width of the pixels grouped under
-     * the same color filter.</p>
-     * <p>This key will not be present if REMOSAIC_REPROCESSING is not supported, since RAW images
-     * will have a regular bayer pattern.</p>
-     * <p>This key will not be present for sensors which don't have the
-     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
-     * capability.</p>
+     * the same color filter.
+     * In case the device has the
+     * <a href="https://developer.android.com/reference/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability:</p>
+     * <ul>
+     * <li>This key will not be present if REMOSAIC_REPROCESSING is not supported, since RAW
+     *   images will have a regular bayer pattern.</li>
+     * </ul>
+     * <p>In case the device does not have the
+     * <a href="https://developer.android.com/reference/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability:</p>
+     * <ul>
+     * <li>This key will be present if
+     *   <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+     *   lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a>, since RAW
+     *   images may not necessarily have a regular bayer pattern when
+     *   <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a> is set to
+     *   <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</li>
+     * </ul>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
      */
     ACAMERA_SENSOR_INFO_BINNING_FACTOR =                        // int32[2]
             ACAMERA_SENSOR_INFO_START + 14,
@@ -7322,6 +7623,145 @@
             ACAMERA_AUTOMOTIVE_LENS_START,
     ACAMERA_AUTOMOTIVE_LENS_END,
 
+    /**
+     * <p>The available Jpeg/R stream
+     * configurations that this camera device supports
+     * (i.e. format, width, height, output/input stream).</p>
+     *
+     * <p>Type: int32[n*4] (acamera_metadata_enum_android_jpegr_available_jpeg_r_stream_configurations_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>The configurations are listed as <code>(format, width, height, input?)</code> tuples.</p>
+     * <p>If the camera device supports Jpeg/R, it will support the same stream combinations with
+     * Jpeg/R as it does with P010. The stream combinations with Jpeg/R (or P010) supported
+     * by the device are determined by the device's hardware level and capabilities.</p>
+     * <p>All the static, control, and dynamic metadata tags related to JPEG apply to Jpeg/R formats.
+     * Configuring JPEG and Jpeg/R streams at the same time is not supported.</p>
+     * <p>All the configuration tuples <code>(format, width, height, input?)</code> will contain
+     * AIMAGE_FORMAT_JPEGR format as OUTPUT only.</p>
+     */
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS =      // int32[n*4] (acamera_metadata_enum_android_jpegr_available_jpeg_r_stream_configurations_t)
+            ACAMERA_JPEGR_START,
+    /**
+     * <p>This lists the minimum frame duration for each
+     * format/size combination for Jpeg/R output formats.</p>
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>This should correspond to the frame duration when only that
+     * stream is active, with all processing (typically in android.*.mode)
+     * set to either OFF or FAST.</p>
+     * <p>When multiple streams are used in a request, the minimum frame
+     * duration will be max(individual stream min durations).</p>
+     * <p>See ACAMERA_SENSOR_FRAME_DURATION and
+     * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS for more details about
+     * calculating the max frame rate.</p>
+     *
+     * @see ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS
+     * @see ACAMERA_SENSOR_FRAME_DURATION
+     */
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS =        // int64[4*n]
+            ACAMERA_JPEGR_START + 1,
+    /**
+     * <p>This lists the maximum stall duration for each
+     * output format/size combination for Jpeg/R streams.</p>
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>A stall duration is how much extra time would get added
+     * to the normal minimum frame duration for a repeating request
+     * that has streams with non-zero stall.</p>
+     * <p>This functions similarly to
+     * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS for Jpeg/R
+     * streams.</p>
+     * <p>All Jpeg/R output stream formats may have a nonzero stall
+     * duration.</p>
+     *
+     * @see ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS
+     */
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS =            // int64[4*n]
+            ACAMERA_JPEGR_START + 2,
+    /**
+     * <p>The available Jpeg/R stream
+     * configurations that this camera device supports
+     * (i.e. format, width, height, output/input stream).</p>
+     *
+     * <p>Type: int32[n*4] (acamera_metadata_enum_android_jpegr_available_jpeg_r_stream_configurations_maximum_resolution_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Refer to ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS for details.</p>
+     * <p>All the configuration tuples <code>(format, width, height, input?)</code> will contain
+     * AIMAGE_FORMAT_JPEG_R format as OUTPUT only.</p>
+     *
+     * @see ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS
+     */
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int32[n*4] (acamera_metadata_enum_android_jpegr_available_jpeg_r_stream_configurations_maximum_resolution_t)
+            ACAMERA_JPEGR_START + 3,
+    /**
+     * <p>This lists the minimum frame duration for each
+     * format/size combination for Jpeg/R output formats for CaptureRequests where
+     * ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Refer to ACAMERA_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS for details.</p>
+     *
+     * @see ACAMERA_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS
+     */
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int64[4*n]
+            ACAMERA_JPEGR_START + 4,
+    /**
+     * <p>This lists the maximum stall duration for each
+     * output format/size combination for Jpeg/R streams for CaptureRequests where
+     * ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Refer to ACAMERA_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS for details.</p>
+     *
+     * @see ACAMERA_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS
+     */
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int64[4*n]
+            ACAMERA_JPEGR_START + 5,
+    ACAMERA_JPEGR_END,
+
 } acamera_metadata_tag_t;
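For reference, a minimal sketch of how a client might consume the new Jpeg/R tags (illustrative only, not part of this patch; it assumes the base ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS entry uses the (format, width, height, input?) int32 tuple layout documented above):

// Illustrative sketch only -- not part of this change. Assumes the
// (format, width, height, input?) int32 tuple layout described above.
#include <camera/NdkCameraManager.h>
#include <camera/NdkCameraMetadata.h>
#include <media/NdkImage.h>
#include <stdio.h>

static void listJpegROutputSizes(ACameraManager* mgr, const char* cameraId) {
    ACameraMetadata* chars = nullptr;
    if (ACameraManager_getCameraCharacteristics(mgr, cameraId, &chars) != ACAMERA_OK) {
        return;
    }
    ACameraMetadata_const_entry entry;
    if (ACameraMetadata_getConstEntry(chars,
            ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS, &entry) == ACAMERA_OK) {
        for (uint32_t i = 0; i + 3 < entry.count; i += 4) {
            // Each tuple is (format, width, height, input?); Jpeg/R is OUTPUT only.
            if (entry.data.i32[i] == AIMAGE_FORMAT_JPEG_R &&
                entry.data.i32[i + 3] ==
                        ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_OUTPUT) {
                printf("Jpeg/R output: %dx%d\n", entry.data.i32[i + 1], entry.data.i32[i + 2]);
            }
        }
    }
    ACameraMetadata_free(chars);
}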
 
 /**
@@ -7538,7 +7978,7 @@
     /**
      * <p>An external flash has been turned on.</p>
      * <p>It informs the camera device that an external flash has been turned on, and that
-     * metering (and continuous focus if active) should be quickly recaculated to account
+     * metering (and continuous focus if active) should be quickly recalculated to account
      * for the external flash. Otherwise, this mode acts like ON.</p>
      * <p>When the external flash is turned off, AE mode should be changed to one of the
      * other available AE modes.</p>
@@ -8475,6 +8915,82 @@
 
 } acamera_metadata_enum_android_control_extended_scene_mode_t;
 
+// ACAMERA_CONTROL_SETTINGS_OVERRIDE
+typedef enum acamera_metadata_enum_acamera_control_settings_override {
+    /**
+     * <p>No keys are applied sooner than the other keys when applying CaptureRequest
+     * settings to the camera device. This is the default value.</p>
+     */
+    ACAMERA_CONTROL_SETTINGS_OVERRIDE_OFF                            = 0,
+
+    /**
+     * <p>Zoom related keys are applied sooner than the other keys in the CaptureRequest. The
+     * zoom related keys are:</p>
+     * <ul>
+     * <li>ACAMERA_CONTROL_ZOOM_RATIO</li>
+     * <li>ACAMERA_SCALER_CROP_REGION</li>
+     * <li>ACAMERA_CONTROL_AE_REGIONS</li>
+     * <li>ACAMERA_CONTROL_AWB_REGIONS</li>
+     * <li>ACAMERA_CONTROL_AF_REGIONS</li>
+     * </ul>
+     * <p>Even though ACAMERA_CONTROL_AE_REGIONS, ACAMERA_CONTROL_AWB_REGIONS,
+     * and ACAMERA_CONTROL_AF_REGIONS are not directly zoom related, applications
+     * typically scale these regions together with ACAMERA_SCALER_CROP_REGION to have a
+     * consistent mapping within the current field of view. In this aspect, they are
+     * related to ACAMERA_SCALER_CROP_REGION and ACAMERA_CONTROL_ZOOM_RATIO.</p>
+     *
+     * @see ACAMERA_CONTROL_AE_REGIONS
+     * @see ACAMERA_CONTROL_AF_REGIONS
+     * @see ACAMERA_CONTROL_AWB_REGIONS
+     * @see ACAMERA_CONTROL_ZOOM_RATIO
+     * @see ACAMERA_SCALER_CROP_REGION
+     */
+    ACAMERA_CONTROL_SETTINGS_OVERRIDE_ZOOM                           = 1,
+
+} acamera_metadata_enum_android_control_settings_override_t;
+
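A usage sketch for the new override control (illustrative only; assumes ACAMERA_CONTROL_SETTINGS_OVERRIDE takes a single int32 value drawn from the enum above):

// Illustrative sketch only. Assumes the tag carries one int32 enum value.
#include <camera/NdkCaptureRequest.h>
#include <camera/NdkCameraMetadataTags.h>

static camera_status_t requestZoomSettingsOverride(ACaptureRequest* request) {
    const int32_t mode = ACAMERA_CONTROL_SETTINGS_OVERRIDE_ZOOM;
    return ACaptureRequest_setEntry_i32(request, ACAMERA_CONTROL_SETTINGS_OVERRIDE,
                                        /*count*/ 1, &mode);
}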
+// ACAMERA_CONTROL_AUTOFRAMING
+typedef enum acamera_metadata_enum_acamera_control_autoframing {
+    /**
+     * <p>Disable autoframing.</p>
+     */
+    ACAMERA_CONTROL_AUTOFRAMING_OFF                                  = 0,
+
+    /**
+     * <p>Enable autoframing to keep people in the frame's field of view.</p>
+     */
+    ACAMERA_CONTROL_AUTOFRAMING_ON                                   = 1,
+
+} acamera_metadata_enum_android_control_autoframing_t;
+
+// ACAMERA_CONTROL_AUTOFRAMING_AVAILABLE
+typedef enum acamera_metadata_enum_acamera_control_autoframing_available {
+    ACAMERA_CONTROL_AUTOFRAMING_AVAILABLE_FALSE                      = 0,
+
+    ACAMERA_CONTROL_AUTOFRAMING_AVAILABLE_TRUE                       = 1,
+
+} acamera_metadata_enum_android_control_autoframing_available_t;
+
+// ACAMERA_CONTROL_AUTOFRAMING_STATE
+typedef enum acamera_metadata_enum_acamera_control_autoframing_state {
+    /**
+     * <p>Auto-framing is inactive.</p>
+     */
+    ACAMERA_CONTROL_AUTOFRAMING_STATE_INACTIVE                       = 0,
+
+    /**
+     * <p>Auto-framing is in progress - either zooming in, zooming out, or panning is taking place.</p>
+     */
+    ACAMERA_CONTROL_AUTOFRAMING_STATE_FRAMING                        = 1,
+
+    /**
+     * <p>Auto-framing has reached a stable state (frame/fov is not being adjusted). The state
+     * may transition back to FRAMING if the scene changes.</p>
+     */
+    ACAMERA_CONTROL_AUTOFRAMING_STATE_CONVERGED                      = 2,
+
+} acamera_metadata_enum_android_control_autoframing_state_t;
+
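A sketch of the intended flow for these autoframing controls (illustrative only; it assumes the autoframing tags carry single byte values, as with other ACAMERA_CONTROL_* enable/availability enums):

// Illustrative sketch only. Assumes byte-typed values for both tags.
#include <camera/NdkCameraMetadata.h>
#include <camera/NdkCaptureRequest.h>

static bool enableAutoframingIfAvailable(const ACameraMetadata* chars,
                                         ACaptureRequest* request) {
    ACameraMetadata_const_entry entry;
    if (ACameraMetadata_getConstEntry(chars, ACAMERA_CONTROL_AUTOFRAMING_AVAILABLE,
                                      &entry) != ACAMERA_OK ||
        entry.count < 1 ||
        entry.data.u8[0] != ACAMERA_CONTROL_AUTOFRAMING_AVAILABLE_TRUE) {
        return false;  // device does not advertise autoframing
    }
    const uint8_t mode = ACAMERA_CONTROL_AUTOFRAMING_ON;
    return ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AUTOFRAMING,
                                       /*count*/ 1, &mode) == ACAMERA_OK;
}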
 
 
 // ACAMERA_EDGE_MODE
@@ -9350,9 +9866,10 @@
      * </ul>
      * <p><a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#SCALER_AVAILABLE_STREAM_USE_CASES">CameraCharacteristics#SCALER_AVAILABLE_STREAM_USE_CASES</a>
      * lists all of the supported stream use cases.</p>
-     * <p>Refer to <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">CameraDevice#createCaptureSession</a> for the
-     * mandatory stream combinations involving stream use cases, which can also be queried
-     * via <a href="https://developer.android.com/reference/android/hardware/camera2/params/MandatoryStreamCombination.html">MandatoryStreamCombination</a>.</p>
+     * <p>Refer to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#stream-use-case-capability-additional-guaranteed-configurations">CameraDevice#stream-use-case-capability-additional-guaranteed-configurations</a>
+     * for the mandatory stream combinations involving stream use cases, which can also be
+     * queried via <a href="https://developer.android.com/reference/android/hardware/camera2/params/MandatoryStreamCombination.html">MandatoryStreamCombination</a>.</p>
      */
     ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE           = 19,
 
@@ -9449,6 +9966,31 @@
 
 } acamera_metadata_enum_android_request_available_dynamic_range_profiles_map_t;
 
+// ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP
+typedef enum acamera_metadata_enum_acamera_request_available_color_space_profiles_map {
+    /**
+     * <p>Default value, when not explicitly specified. The camera device will choose the color
+     * space to employ.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED   = -1,
+
+    /**
+     * <p>RGB color space sRGB standardized as IEC 61966-2.1:1999.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SRGB          = 0,
+
+    /**
+     * <p>RGB color space Display P3 based on SMPTE RP 431-2-2007 and IEC 61966-2.1:1999.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3    = 7,
+
+    /**
+     * <p>RGB color space BT.2100 standardized as Hybrid Log Gamma encoding.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_BT2020_HLG    = 16,
+
+} acamera_metadata_enum_android_request_available_color_space_profiles_map_t;
+
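A sketch of how the new color space profile map might be inspected (illustrative only; it assumes the entry is reported as int64 triples of (colorSpace, imageFormat, dynamicRangeProfiles), mirroring the SDK's REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP):

// Illustrative sketch only. The (colorSpace, imageFormat, dynamicRangeProfiles)
// triple layout is an assumption based on the SDK documentation.
#include <camera/NdkCameraMetadata.h>

static bool supportsDisplayP3(const ACameraMetadata* chars) {
    ACameraMetadata_const_entry entry;
    if (ACameraMetadata_getConstEntry(chars,
            ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP, &entry) != ACAMERA_OK) {
        return false;
    }
    for (uint32_t i = 0; i + 2 < entry.count; i += 3) {
        if (entry.data.i64[i] == ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3) {
            return true;
        }
    }
    return false;
}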
 
 // ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS
 typedef enum acamera_metadata_enum_acamera_scaler_available_stream_configurations {
@@ -9702,6 +10244,30 @@
      */
     ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL             = 0x5,
 
+    /**
+     * <p>Cropped RAW stream when the client chooses to crop the field of view.</p>
+     * <p>Certain types of image sensors can run in binned modes in order to improve the
+     * signal-to-noise ratio while capturing frames. However, at certain zoom levels and/or when
+     * other scene conditions are deemed fit, the camera sub-system may choose to un-bin and
+     * remosaic the sensor's output. This results in a RAW frame which is cropped in field
+     * of view and yet has the same number of pixels as full field of view RAW, thereby
+     * improving image detail.</p>
+     * <p>The resultant field of view of the RAW stream will be greater than or equal to
+     * that of croppable non-RAW streams. The effective crop region for this RAW stream will be
+     * reflected in the CaptureResult key ACAMERA_SCALER_RAW_CROP_REGION.</p>
+     * <p>If this stream use case is set on a non-RAW stream, i.e. not one of:</p>
+     * <ul>
+     * <li>{@link AIMAGE_FORMAT_RAW16 RAW_SENSOR}</li>
+     * <li>{@link AIMAGE_FORMAT_RAW10 RAW10}</li>
+     * <li>{@link AIMAGE_FORMAT_RAW12 RAW12}</li>
+     * </ul>
+     * <p>then the session configuration is not guaranteed to succeed.</p>
+     * <p>This stream use case may not be supported on some devices.</p>
+     *
+     * @see ACAMERA_SCALER_RAW_CROP_REGION
+     */
+    ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW            = 0x6,
+
 } acamera_metadata_enum_android_scaler_available_stream_use_cases_t;
 
 
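For the new CROPPED_RAW use case, a client would first confirm it is advertised; a minimal sketch follows (illustrative only; assumes the use-case list is an int64 array, as with the SDK equivalent):

// Illustrative sketch only. Assumes ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES
// is an int64 list of supported use cases.
#include <camera/NdkCameraMetadata.h>

static bool supportsCroppedRawUseCase(const ACameraMetadata* chars) {
    ACameraMetadata_const_entry entry;
    if (ACameraMetadata_getConstEntry(chars, ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES,
                                      &entry) != ACAMERA_OK) {
        return false;
    }
    for (uint32_t i = 0; i < entry.count; i++) {
        if (entry.data.i64[i] == ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW) {
            return true;
        }
    }
    return false;
}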
@@ -9865,16 +10431,12 @@
 // ACAMERA_SENSOR_PIXEL_MODE
 typedef enum acamera_metadata_enum_acamera_sensor_pixel_mode {
     /**
-     * <p>This is the default sensor pixel mode. This is the only sensor pixel mode
-     * supported unless a camera device advertises
-     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>.</p>
+     * <p>This is the default sensor pixel mode.</p>
      */
     ACAMERA_SENSOR_PIXEL_MODE_DEFAULT                                = 0,
 
     /**
-     * <p>This sensor pixel mode is offered by devices with capability
-     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>.
-     * In this mode, sensors typically do not bin pixels, as a result can offer larger
+     * <p>In this mode, sensors typically do not bin pixels and, as a result, can offer larger
      * image sizes.</p>
      */
     ACAMERA_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION                     = 1,
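A sketch of opting into maximum-resolution capture via this tag (illustrative only; assumes the tag takes a single byte-typed enum value):

// Illustrative sketch only. Assumes a single byte-typed enum value.
#include <camera/NdkCaptureRequest.h>
#include <camera/NdkCameraMetadataTags.h>

static camera_status_t requestMaximumResolution(ACaptureRequest* request) {
    const uint8_t pixelMode = ACAMERA_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION;
    return ACaptureRequest_setEntry_u8(request, ACAMERA_SENSOR_PIXEL_MODE,
                                       /*count*/ 1, &pixelMode);
}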
@@ -10167,7 +10729,8 @@
      * <p>This camera device does not have enough capabilities to qualify as a <code>FULL</code> device or
      * better.</p>
      * <p>Only the stream configurations listed in the <code>LEGACY</code> and <code>LIMITED</code> tables in the
-     * {@link ACameraDevice_createCaptureSession createCaptureSession} documentation are guaranteed to be supported.</p>
+     * {@link ACameraDevice_createCaptureSession }
+     * documentation are guaranteed to be supported.</p>
      * <p>All <code>LIMITED</code> devices support the <code>BACKWARDS_COMPATIBLE</code> capability, indicating basic
      * support for color image capture. The only exception is that the device may
      * alternatively support only the <code>DEPTH_OUTPUT</code> capability, if it can only output depth
@@ -10193,7 +10756,8 @@
     /**
      * <p>This camera device is capable of supporting advanced imaging applications.</p>
      * <p>The stream configurations listed in the <code>FULL</code>, <code>LEGACY</code> and <code>LIMITED</code> tables in the
-     * {@link ACameraDevice_createCaptureSession createCaptureSession} documentation are guaranteed to be supported.</p>
+     * {@link ACameraDevice_createCaptureSession }
+     * documentation are guaranteed to be supported.</p>
      * <p>A <code>FULL</code> device will support below capabilities:</p>
      * <ul>
      * <li><code>BURST_CAPTURE</code> capability (ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
@@ -10220,7 +10784,9 @@
 
     /**
      * <p>This camera device is running in backward compatibility mode.</p>
-     * <p>Only the stream configurations listed in the <code>LEGACY</code> table in the {@link ACameraDevice_createCaptureSession createCaptureSession} documentation are supported.</p>
+     * <p>Only the stream configurations listed in the <code>LEGACY</code> table in the
+     * {@link ACameraDevice_createCaptureSession }
+     * documentation are supported.</p>
      * <p>A <code>LEGACY</code> device does not support per-frame control, manual sensor control, manual
      * post-processing, arbitrary cropping regions, and has relaxed performance constraints.
      * No additional capabilities beyond <code>BACKWARD_COMPATIBLE</code> will ever be listed by a
@@ -10242,7 +10808,9 @@
      * <p>This camera device is capable of YUV reprocessing and RAW data capture, in addition to
      * FULL-level capabilities.</p>
      * <p>The stream configurations listed in the <code>LEVEL_3</code>, <code>RAW</code>, <code>FULL</code>, <code>LEGACY</code> and
-     * <code>LIMITED</code> tables in the {@link ACameraDevice_createCaptureSession createCaptureSession} documentation are guaranteed to be supported.</p>
+     * <code>LIMITED</code> tables in the
+     * {@link ACameraDevice_createCaptureSession }
+     * documentation are guaranteed to be supported.</p>
      * <p>The following additional capabilities are guaranteed to be supported:</p>
      * <ul>
      * <li><code>YUV_REPROCESSING</code> capability (ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
@@ -10627,6 +11195,26 @@
 
 
 
+// ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS
+typedef enum acamera_metadata_enum_acamera_jpegr_available_jpeg_r_stream_configurations {
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_OUTPUT      = 0,
+
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_INPUT       = 1,
+
+} acamera_metadata_enum_android_jpegr_available_jpeg_r_stream_configurations_t;
+
+// ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION
+typedef enum acamera_metadata_enum_acamera_jpegr_available_jpeg_r_stream_configurations_maximum_resolution {
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_OUTPUT
+                                                                      = 0,
+
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_INPUT
+                                                                      = 1,
+
+} acamera_metadata_enum_android_jpegr_available_jpeg_r_stream_configurations_maximum_resolution_t;
+
+
+
 __END_DECLS
 
 #endif /* _NDK_CAMERA_METADATA_TAGS_H */
diff --git a/camera/ndk/include/camera/NdkCaptureRequest.h b/camera/ndk/include/camera/NdkCaptureRequest.h
index d83c5b3..dc18544 100644
--- a/camera/ndk/include/camera/NdkCaptureRequest.h
+++ b/camera/ndk/include/camera/NdkCaptureRequest.h
@@ -148,7 +148,7 @@
  * @param request the {@link ACaptureRequest} of interest.
  * @param tag the tag value of the camera metadata entry to be get.
  * @param entry the output {@link ACameraMetadata_const_entry} will be filled here if the method
- *        call succeeeds.
+ *        call succeeds.
  *
  * @return <ul>
  *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
diff --git a/camera/ndk/libcamera2ndk.map.txt b/camera/ndk/libcamera2ndk.map.txt
index b3977ff..4c54658 100644
--- a/camera/ndk/libcamera2ndk.map.txt
+++ b/camera/ndk/libcamera2ndk.map.txt
@@ -12,6 +12,8 @@
     ACameraCaptureSession_logicalCamera_setRepeatingRequest; # introduced=29
     ACameraCaptureSession_logicalCamera_setRepeatingRequestV2; # introduced=33
     ACameraCaptureSession_stopRepeating;
+    ACameraCaptureSession_setWindowPreparedCallback; # introduced=34
+    ACameraCaptureSession_prepareWindow; # introduced=34
     ACameraCaptureSession_updateSharedOutput; # introduced=28
     ACameraDevice_close;
     ACameraDevice_createCaptureRequest;
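The two symbols added above expose output-surface pre-allocation to NDK clients at API 34. A hedged usage sketch follows; the exact prototypes are assumptions inferred from the callback invocation in ACameraDevice.cpp further down (context, window, session) and may differ from the released NdkCameraCaptureSession.h:

// Illustrative sketch only; the prototypes used here are assumptions, not
// copied from the NDK header.
#include <camera/NdkCameraCaptureSession.h>

static void onWindowPrepared(void* context, ACameraWindowType* window,
                             ACameraCaptureSession* session) {
    // Buffers for `window` have been pre-allocated; capture requests that
    // target it should no longer pay the allocation latency.
    (void) context; (void) window; (void) session;
}

static void prepareOutput(ACameraCaptureSession* session, ACameraWindowType* window) {
    ACameraCaptureSession_setWindowPreparedCallback(session, /*context*/ nullptr,
                                                    onWindowPrepared);
    ACameraCaptureSession_prepareWindow(session, window);
}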
diff --git a/camera/ndk/ndk_vendor/impl/ACameraCaptureSessionVendor.h b/camera/ndk/ndk_vendor/impl/ACameraCaptureSessionVendor.h
index 5a1af79..45098c3 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraCaptureSessionVendor.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraCaptureSessionVendor.h
@@ -14,9 +14,14 @@
  * limitations under the License.
  */
 
-#include <string>
 #include "utils.h"
 
+#include <android/binder_auto_utils.h>
+#include <string>
+#include <set>
+
+using ::android::acam::utils::native_handle_ptr_wrapper;
+
 struct ACaptureSessionOutput {
     explicit ACaptureSessionOutput(const native_handle_t* window, bool isShared = false,
             const char* physicalCameraId = "") :
@@ -38,8 +43,23 @@
         return mWindow > other.mWindow;
     }
 
-    android::acam::utils::native_handle_ptr_wrapper mWindow;
-    std::set<android::acam::utils::native_handle_ptr_wrapper> mSharedWindows;
+    inline bool isWindowEqual(ACameraWindowType* window) const {
+        return mWindow == native_handle_ptr_wrapper(window);
+    }
+
+    // returns true if the window was successfully added, false otherwise.
+    inline bool addSharedWindow(ACameraWindowType* window) {
+        auto ret = mSharedWindows.insert(window);
+        return ret.second;
+    }
+
+    // returns the number of elements removed.
+    inline size_t removeSharedWindow(ACameraWindowType* window) {
+        return mSharedWindows.erase(window);
+    }
+
+    native_handle_ptr_wrapper mWindow;
+    std::set<native_handle_ptr_wrapper> mSharedWindows;
     bool           mIsShared;
     int            mRotation = CAMERA3_STREAM_ROTATION_0;
     std::string mPhysicalCameraId;
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
index 0a57590..87102e4 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
@@ -17,27 +17,34 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "ACameraDeviceVendor"
 
-#include <vector>
-#include <inttypes.h>
-#include <android/frameworks/cameraservice/service/2.0/ICameraService.h>
-#include <android/frameworks/cameraservice/device/2.0/types.h>
-#include <CameraMetadata.h>
-
-#include "ndk_vendor/impl/ACameraDevice.h"
 #include "ACameraCaptureSession.h"
 #include "ACameraMetadata.h"
 #include "ACaptureRequest.h"
+#include "ndk_vendor/impl/ACameraDevice.h"
 #include "utils.h"
+#include <CameraMetadata.h>
+#include <aidl/android/frameworks/cameraservice/device/CameraMetadata.h>
+#include <aidl/android/frameworks/cameraservice/device/OutputConfiguration.h>
+#include <aidl/android/frameworks/cameraservice/device/SessionConfiguration.h>
+#include <aidlcommonsupport/NativeHandle.h>
+#include <inttypes.h>
+#include <map>
+#include <utility>
+#include <vector>
 
-#define CHECK_TRANSACTION_AND_RET(remoteRet, status, callName) \
-    if (!remoteRet.isOk()) { \
-        ALOGE("%s: Transaction error during %s call %s", __FUNCTION__, callName, \
-                  remoteRet.description().c_str()); \
-        return ACAMERA_ERROR_UNKNOWN; \
-    } \
-    if (status != Status::NO_ERROR) { \
-        ALOGE("%s: %s call failed", __FUNCTION__, callName); \
-        return utils::convertFromHidl(status); \
+#define CHECK_TRANSACTION_AND_RET(ret, callName)                                            \
+    if (!(ret).isOk()) {                                                                    \
+        if ((ret).getExceptionCode() != EX_SERVICE_SPECIFIC) {                              \
+            ALOGE("%s: Transaction error during %s call %d", __FUNCTION__, callName,        \
+                                (ret).getExceptionCode());                                  \
+            return ACAMERA_ERROR_UNKNOWN;                                                   \
+        } else {                                                                            \
+            Status errStatus = static_cast<Status>((ret).getServiceSpecificError());        \
+            std::string errorMsg =                                                          \
+                    aidl::android::frameworks::cameraservice::common::toString(errStatus);  \
+            ALOGE("%s: %s call failed: %s", __FUNCTION__, callName, errorMsg.c_str());      \
+            return utils::convertFromAidl(errStatus);                                       \
+        }                                                                                   \
     }
 
 using namespace android;
@@ -49,10 +56,10 @@
 namespace android {
 namespace acam {
 
-using HCameraMetadata = frameworks::cameraservice::device::V2_0::CameraMetadata;
-using OutputConfiguration = frameworks::cameraservice::device::V2_0::OutputConfiguration;
-using SessionConfiguration = frameworks::cameraservice::device::V2_0::SessionConfiguration;
-using hardware::Void;
+using AidlCameraMetadata = ::aidl::android::frameworks::cameraservice::device::CameraMetadata;
+using ::aidl::android::frameworks::cameraservice::device::OutputConfiguration;
+using ::aidl::android::frameworks::cameraservice::device::SessionConfiguration;
+using ::ndk::ScopedAStatus;
 
 // Static member definitions
 const char* CameraDevice::kContextKey        = "Context";
@@ -81,7 +88,6 @@
         mCameraId(id),
         mAppCallbacks(*cb),
         mChars(std::move(chars)),
-        mServiceCallback(new ServiceCallback(this)),
         mWrapper(wrapper),
         mInError(false),
         mError(ACAMERA_OK),
@@ -125,8 +131,11 @@
 
 CameraDevice::~CameraDevice() { }
 
-void
-CameraDevice::postSessionMsgAndCleanup(sp<AMessage>& msg) {
+void CameraDevice::init() {
+    mServiceCallback = ndk::SharedRefBase::make<ServiceCallback>(weak_from_this());
+}
+
+void CameraDevice::postSessionMsgAndCleanup(sp<AMessage>& msg) {
     msg->post();
     msg.clear();
     sp<AMessage> cleanupMsg = new AMessage(kWhatCleanUpSessions, mHandler);
@@ -134,8 +143,7 @@
 }
 
 // TODO: cached created request?
-camera_status_t
-CameraDevice::createCaptureRequest(
+camera_status_t CameraDevice::createCaptureRequest(
         ACameraDevice_request_template templateId,
         const ACameraIdList* physicalCameraIdList,
         ACaptureRequest** request) const {
@@ -147,20 +155,16 @@
     if (mRemote == nullptr) {
         return ACAMERA_ERROR_CAMERA_DISCONNECTED;
     }
-    CameraMetadata rawRequest;
-    Status status = Status::UNKNOWN_ERROR;
-    auto remoteRet = mRemote->createDefaultRequest(
-        utils::convertToHidl(templateId),
-        [&status, &rawRequest](auto s, const hidl_vec<uint8_t> &metadata) {
-            status = s;
-            if (status == Status::NO_ERROR && utils::convertFromHidlCloned(metadata, &rawRequest)) {
-            } else {
-                ALOGE("%s: Couldn't create default request", __FUNCTION__);
-            }
-        });
-    CHECK_TRANSACTION_AND_RET(remoteRet, status, "createDefaultRequest()")
+
+    AidlCameraMetadata aidlMetadata;
+    ScopedAStatus remoteRet = mRemote->createDefaultRequest(
+            utils::convertToAidl(templateId), &aidlMetadata);
+    CHECK_TRANSACTION_AND_RET(remoteRet, "createDefaultRequest()")
+
+    camera_metadata_t* rawRequest;
+    utils::cloneFromAidl(aidlMetadata, &rawRequest);
     ACaptureRequest* outReq = new ACaptureRequest();
-    outReq->settings = new ACameraMetadata(rawRequest.release(), ACameraMetadata::ACM_REQUEST);
+    outReq->settings = new ACameraMetadata(rawRequest, ACameraMetadata::ACM_REQUEST);
     if (physicalCameraIdList != nullptr) {
         for (auto i = 0; i < physicalCameraIdList->numCameras; i++) {
             outReq->physicalSettings.emplace(physicalCameraIdList->cameraIds[i],
@@ -172,9 +176,8 @@
     return ACAMERA_OK;
 }
 
-camera_status_t
-CameraDevice::createCaptureSession(
-        const ACaptureSessionOutputContainer*       outputs,
+camera_status_t CameraDevice::createCaptureSession(
+        const ACaptureSessionOutputContainer* outputs,
         const ACaptureRequest* sessionParameters,
         const ACameraCaptureSession_stateCallbacks* callbacks,
         /*out*/ACameraCaptureSession** session) {
@@ -199,7 +202,7 @@
     }
 
     ACameraCaptureSession* newSession = new ACameraCaptureSession(
-            mNextSessionId++, outputs, callbacks, this);
+            mNextSessionId++, outputs, callbacks, weak_from_this());
 
     // set new session as current session
     newSession->incStrong((void *) ACameraDevice_createCaptureSession);
@@ -225,41 +228,39 @@
     sessionConfig.outputStreams.resize(sessionOutputContainer->mOutputs.size());
     size_t index = 0;
     for (const auto& output : sessionOutputContainer->mOutputs) {
-        sessionConfig.outputStreams[index].rotation = utils::convertToHidl(output.mRotation);
-        sessionConfig.outputStreams[index].windowGroupId = -1;
-        sessionConfig.outputStreams[index].windowHandles.resize(output.mSharedWindows.size() + 1);
-        sessionConfig.outputStreams[index].windowHandles[0] = output.mWindow;
-        sessionConfig.outputStreams[index].physicalCameraId = output.mPhysicalCameraId;
+        OutputConfiguration& outputStream = sessionConfig.outputStreams[index];
+        outputStream.rotation = utils::convertToAidl(output.mRotation);
+        outputStream.windowGroupId = -1;
+        outputStream.windowHandles.resize(output.mSharedWindows.size() + 1);
+        outputStream.windowHandles[0] = std::move(dupToAidl(output.mWindow));
+        outputStream.physicalCameraId = output.mPhysicalCameraId;
         index++;
     }
 
     bool configSupported = false;
-    Status status = Status::UNKNOWN_ERROR;
-    auto remoteRet = mRemote->isSessionConfigurationSupported(sessionConfig,
-        [&status, &configSupported](auto s, auto supported) {
-            status = s;
-            configSupported = supported;
-        });
-
-    CHECK_TRANSACTION_AND_RET(remoteRet, status, "isSessionConfigurationSupported()");
+    ScopedAStatus remoteRet = mRemote->isSessionConfigurationSupported(
+            sessionConfig, &configSupported);
+    CHECK_TRANSACTION_AND_RET(remoteRet, "isSessionConfigurationSupported()")
     return configSupported ? ACAMERA_OK : ACAMERA_ERROR_STREAM_CONFIGURE_FAIL;
 }
 
 static void addMetadataToPhysicalCameraSettings(const CameraMetadata *metadata,
         const std::string &cameraId, PhysicalCameraSettings *physicalCameraSettings) {
-    CameraMetadata metadataCopy = *metadata;
-    camera_metadata_t *camera_metadata = metadataCopy.release();
-    HCameraMetadata hCameraMetadata;
-    utils::convertToHidl(camera_metadata, &hCameraMetadata, /*shouldOwn*/ true);
-    physicalCameraSettings->settings.metadata(std::move(hCameraMetadata));
+    const camera_metadata_t* cameraMetadata = metadata->getAndLock();
+    AidlCameraMetadata aidlCameraMetadata;
+    utils::convertToAidl(cameraMetadata, &aidlCameraMetadata);
+    metadata->unlock(cameraMetadata);
+    physicalCameraSettings->settings.set<CaptureMetadataInfo::metadata>(
+            std::move(aidlCameraMetadata));
     physicalCameraSettings->id = cameraId;
 }
 
 void CameraDevice::addRequestSettingsMetadata(ACaptureRequest *aCaptureRequest,
         sp<CaptureRequest> &req) {
     req->mPhysicalCameraSettings.resize(1 + aCaptureRequest->physicalSettings.size());
-    addMetadataToPhysicalCameraSettings(&(aCaptureRequest->settings->getInternalData()), getId(),
-                    &(req->mPhysicalCameraSettings[0]));
+    addMetadataToPhysicalCameraSettings(
+            &(aCaptureRequest->settings->getInternalData()),
+            getId(), &(req->mPhysicalCameraSettings[0]));
     size_t i = 1;
     for (auto &physicalSetting : aCaptureRequest->physicalSettings) {
         addMetadataToPhysicalCameraSettings(&(physicalSetting.second->getInternalData()),
@@ -285,7 +286,7 @@
 
     int32_t streamId = -1;
     for (auto& kvPair : mConfiguredOutputs) {
-        if (utils::isWindowNativeHandleEqual(kvPair.second.first, output->mWindow)) {
+        if (kvPair.second.first == output->mWindow) {
             streamId = kvPair.first;
             break;
         }
@@ -295,56 +296,86 @@
         return ACAMERA_ERROR_INVALID_PARAMETER;
     }
 
-    OutputConfigurationWrapper outConfigW;
-    OutputConfiguration &outConfig = outConfigW.mOutputConfiguration;
-    outConfig.rotation = utils::convertToHidl(output->mRotation);
+    OutputConfiguration outConfig;
+    outConfig.rotation = utils::convertToAidl(output->mRotation);
     outConfig.windowHandles.resize(output->mSharedWindows.size() + 1);
-    outConfig.windowHandles[0] = output->mWindow;
+    outConfig.windowHandles[0] = std::move(dupToAidl(output->mWindow));
     outConfig.physicalCameraId = output->mPhysicalCameraId;
     int i = 1;
     for (auto& anw : output->mSharedWindows) {
-        outConfig.windowHandles[i++] = anw;
+        outConfig.windowHandles[i++] = std::move(dupToAidl(anw));
     }
 
-    auto remoteRet = mRemote->updateOutputConfiguration(streamId, outConfig);
+    auto remoteRet = mRemote->updateOutputConfiguration(streamId,
+                                                        outConfig);
+
     if (!remoteRet.isOk()) {
-        ALOGE("%s: Transaction error in updating OutputConfiguration: %s", __FUNCTION__,
-              remoteRet.description().c_str());
-        return ACAMERA_ERROR_UNKNOWN;
+        if (remoteRet.getExceptionCode() == EX_SERVICE_SPECIFIC) {
+            Status st = static_cast<Status>(remoteRet.getServiceSpecificError());
+            switch (st) {
+                case Status::NO_ERROR:
+                    break;
+                case Status::INVALID_OPERATION:
+                    ALOGE("Camera device %s invalid operation", getId());
+                    return ACAMERA_ERROR_INVALID_OPERATION;
+                case Status::ALREADY_EXISTS:
+                    ALOGE("Camera device %s output surface already exists", getId());
+                    return ACAMERA_ERROR_INVALID_PARAMETER;
+                case Status::ILLEGAL_ARGUMENT:
+                    ALOGE("Camera device %s invalid input argument", getId());
+                    return ACAMERA_ERROR_INVALID_PARAMETER;
+                default:
+                    ALOGE("Camera device %s failed to add shared output", getId());
+                    return ACAMERA_ERROR_UNKNOWN;
+            }
+        } else {
+            ALOGE("%s: Transaction error in updating OutputConfiguration: %d", __FUNCTION__,
+                remoteRet.getExceptionCode());
+            return ACAMERA_ERROR_UNKNOWN;
+        }
     }
 
-    switch (remoteRet) {
-            case Status::NO_ERROR:
-                break;
-            case Status::INVALID_OPERATION:
-                ALOGE("Camera device %s invalid operation", getId());
-                return ACAMERA_ERROR_INVALID_OPERATION;
-            case Status::ALREADY_EXISTS:
-                ALOGE("Camera device %s output surface already exists", getId());
-                return ACAMERA_ERROR_INVALID_PARAMETER;
-            case Status::ILLEGAL_ARGUMENT:
-                ALOGE("Camera device %s invalid input argument", getId());
-                return ACAMERA_ERROR_INVALID_PARAMETER;
-            default:
-                ALOGE("Camera device %s failed to add shared output", getId());
-                return ACAMERA_ERROR_UNKNOWN;
-    }
-
-    mConfiguredOutputs[streamId] =
-            std::move(std::make_pair(std::move(output->mWindow), std::move(outConfigW)));
-
+    mConfiguredOutputs[streamId] = std::make_pair(output->mWindow,
+                                        std::move(outConfig));
     return ACAMERA_OK;
 }
 
-camera_status_t
-CameraDevice::allocateCaptureRequestLocked(
+camera_status_t CameraDevice::prepareLocked(ACameraWindowType *window) {
+    camera_status_t ret = checkCameraClosedOrErrorLocked();
+    if (ret != ACAMERA_OK) {
+        return ret;
+    }
+
+    if (window == nullptr) {
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+
+    int32_t streamId = -1;
+    for (auto& kvPair : mConfiguredOutputs) {
+        if (window == kvPair.second.first) {
+            streamId = kvPair.first;
+            break;
+        }
+    }
+    if (streamId < 0) {
+        ALOGE("Error: Invalid output configuration");
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+
+    auto remoteRet = mRemote->prepare(streamId);
+    CHECK_TRANSACTION_AND_RET(remoteRet, "prepare()")
+    return ACAMERA_OK;
+}
+
+camera_status_t CameraDevice::allocateCaptureRequestLocked(
         const ACaptureRequest* request, /*out*/sp<CaptureRequest> &outReq) {
     sp<CaptureRequest> req(new CaptureRequest());
     req->mCaptureRequest.physicalCameraSettings.resize(1 + request->physicalSettings.size());
 
     size_t index = 0;
     allocateOneCaptureRequestMetadata(
-            req->mCaptureRequest.physicalCameraSettings[index++], mCameraId, request->settings);
+            req->mCaptureRequest.physicalCameraSettings[index++],
+            mCameraId, request->settings);
 
     for (auto& physicalEntry : request->physicalSettings) {
         allocateOneCaptureRequestMetadata(
@@ -354,19 +385,20 @@
 
     std::vector<int32_t> requestStreamIdxList;
     std::vector<int32_t> requestSurfaceIdxList;
-    for (auto outputTarget : request->targets->mOutputs) {
-        const native_handle_t* anw = outputTarget.mWindow;
+
+    for (auto& outputTarget : request->targets->mOutputs) {
+        native_handle_ptr_wrapper anw = outputTarget.mWindow;
         bool found = false;
         req->mSurfaceList.push_back(anw);
         // lookup stream/surface ID
         for (const auto& kvPair : mConfiguredOutputs) {
             int streamId = kvPair.first;
-            const OutputConfigurationWrapper& outConfig = kvPair.second.second;
-            const auto& windowHandles = outConfig.mOutputConfiguration.windowHandles;
+            const OutputConfiguration& outConfig = kvPair.second.second;
+            const auto& windowHandles = outConfig.windowHandles;
             for (int surfaceId = 0; surfaceId < (int) windowHandles.size(); surfaceId++) {
-                // If two native handles are equivalent, so are their surfaces.
-                if (utils::isWindowNativeHandleEqual(windowHandles[surfaceId].getNativeHandle(),
-                                                      anw)) {
+                // If two window handles point to the same native window,
+                // they have the same surfaces.
+                if (utils::isWindowNativeHandleEqual(anw, windowHandles[surfaceId])) {
                     found = true;
                     requestStreamIdxList.push_back(streamId);
                     requestSurfaceIdxList.push_back(surfaceId);
@@ -378,7 +410,7 @@
             }
         }
         if (!found) {
-            ALOGE("Unconfigured output target %p in capture request!", anw);
+            ALOGE("Unconfigured output target %p in capture request!", anw.mWindow);
             return ACAMERA_ERROR_INVALID_PARAMETER;
         }
     }
@@ -395,54 +427,57 @@
         PhysicalCameraSettings& cameraSettings,
         const std::string& id, const sp<ACameraMetadata>& metadata) {
     cameraSettings.id = id;
-    // TODO: Do we really need to copy the metadata here ?
-    CameraMetadata metadataCopy = metadata->getInternalData();
-    camera_metadata_t *cameraMetadata = metadataCopy.release();
-    HCameraMetadata hCameraMetadata;
-    utils::convertToHidl(cameraMetadata, &hCameraMetadata, true);
-    if (metadata != nullptr) {
-        if (hCameraMetadata.data() != nullptr &&
-            mCaptureRequestMetadataQueue != nullptr &&
-            mCaptureRequestMetadataQueue->write(
-                reinterpret_cast<const uint8_t *>(hCameraMetadata.data()),
-                hCameraMetadata.size())) {
-            // The metadata field of the union would've been destructued, so no need
-            // to re-size it.
-            cameraSettings.settings.fmqMetadataSize(hCameraMetadata.size());
-        } else {
-            ALOGE("Fmq write capture result failed, falling back to hwbinder");
-            cameraSettings.settings.metadata(std::move(hCameraMetadata));
-        }
+
+    if (metadata == nullptr) {
+        return;
+    }
+
+    const camera_metadata_t* cameraMetadata = metadata->getInternalData().getAndLock();
+    AidlCameraMetadata aidlCameraMetadata;
+    utils::convertToAidl(cameraMetadata, &aidlCameraMetadata);
+    metadata->getInternalData().unlock(cameraMetadata);
+
+    if (aidlCameraMetadata.metadata.data() != nullptr &&
+        mCaptureRequestMetadataQueue != nullptr &&
+        mCaptureRequestMetadataQueue->write(
+                reinterpret_cast<const int8_t*>(aidlCameraMetadata.metadata.data()),
+                aidlCameraMetadata.metadata.size())) {
+        cameraSettings.settings.set<CaptureMetadataInfo::fmqMetadataSize>(
+                aidlCameraMetadata.metadata.size());
+    } else {
+        ALOGE("Fmq write capture result failed, falling back to hwbinder");
+        cameraSettings.settings.set<CaptureMetadataInfo::metadata>(std::move(aidlCameraMetadata));
     }
 }
 
 
-ACaptureRequest*
-CameraDevice::allocateACaptureRequest(sp<CaptureRequest>& req, const char* deviceId) {
+ACaptureRequest* CameraDevice::allocateACaptureRequest(sp<CaptureRequest>& req,
+                                                       const char* deviceId) {
     ACaptureRequest* pRequest = new ACaptureRequest();
     for (size_t i = 0; i < req->mPhysicalCameraSettings.size(); i++) {
         const std::string& id = req->mPhysicalCameraSettings[i].id;
-        CameraMetadata clone;
-        utils::convertFromHidlCloned(req->mPhysicalCameraSettings[i].settings.metadata(), &clone);
-        camera_metadata_t *clonep = clone.release();
+        camera_metadata_t* clone;
+        AidlCameraMetadata& aidlCameraMetadata = req->mPhysicalCameraSettings[i].settings
+                                                         .get<CaptureMetadataInfo::metadata>();
+        utils::cloneFromAidl(aidlCameraMetadata, &clone);
+
         if (id == deviceId) {
-            pRequest->settings = new ACameraMetadata(clonep, ACameraMetadata::ACM_REQUEST);
+            pRequest->settings = new ACameraMetadata(clone, ACameraMetadata::ACM_REQUEST);
         } else {
             pRequest->physicalSettings[req->mPhysicalCameraSettings[i].id] =
-                    new ACameraMetadata(clonep, ACameraMetadata::ACM_REQUEST);
+                    new ACameraMetadata(clone, ACameraMetadata::ACM_REQUEST);
         }
     }
     pRequest->targets = new ACameraOutputTargets();
     for (size_t i = 0; i < req->mSurfaceList.size(); i++) {
-        const native_handle_t* anw = req->mSurfaceList[i];
+        native_handle_ptr_wrapper anw = req->mSurfaceList[i];
         ACameraOutputTarget outputTarget(anw);
-        pRequest->targets->mOutputs.insert(outputTarget);
+        pRequest->targets->mOutputs.insert(std::move(outputTarget));
     }
     return pRequest;
 }
 
-void
-CameraDevice::freeACaptureRequest(ACaptureRequest* req) {
+void CameraDevice::freeACaptureRequest(ACaptureRequest* req) {
     if (req == nullptr) {
         return;
     }
@@ -459,7 +494,7 @@
     }
 
     if (mCurrentSession != session) {
-        // Session has been replaced by other seesion or device is closed
+        // Session has been replaced by other session or device is closed
         return;
     }
     mCurrentSession = nullptr;
@@ -471,8 +506,8 @@
         return;
     }
 
-    // No new session, unconfigure now
-    // Note: The unconfiguration of session won't be accounted for session
+    // No new session, un-configure now
+    // Note: The un-configuration of the session won't be accounted for in session
     // latency because a stream configuration with 0 streams won't ever become
     // active.
     nsecs_t startTimeNs = systemTime();
@@ -494,8 +529,8 @@
         ALOGD("%s: binder disconnect reached", __FUNCTION__);
         auto ret = mRemote->disconnect();
         if (!ret.isOk()) {
-            ALOGE("%s: Transaction error while disconnecting device %s", __FUNCTION__,
-                  ret.description().c_str());
+            ALOGE("%s: Transaction error while disconnecting device %d", __FUNCTION__,
+                  ret.getExceptionCode());
         }
     }
     mRemote = nullptr;
@@ -505,8 +540,7 @@
     }
 }
 
-camera_status_t
-CameraDevice::stopRepeatingLocked() {
+camera_status_t CameraDevice::stopRepeatingLocked() {
     camera_status_t ret = checkCameraClosedOrErrorLocked();
     if (ret != ACAMERA_OK) {
         ALOGE("Camera %s stop repeating failed! ret %d", getId(), ret);
@@ -517,20 +551,14 @@
         mRepeatingSequenceId = REQUEST_ID_NONE;
 
         int64_t lastFrameNumber;
-        Status status = Status::UNKNOWN_ERROR;
-        auto remoteRet = mRemote->cancelRepeatingRequest(
-                [&status, &lastFrameNumber](Status s, auto frameNumber) {
-                    status = s;
-                    lastFrameNumber = frameNumber;
-                });
-        CHECK_TRANSACTION_AND_RET(remoteRet, status, "cancelRepeatingRequest()");
+        ScopedAStatus remoteRet = mRemote->cancelRepeatingRequest(&lastFrameNumber);
+        CHECK_TRANSACTION_AND_RET(remoteRet, "cancelRepeatingRequest()");
         checkRepeatingSequenceCompleteLocked(repeatingSequenceId, lastFrameNumber);
     }
     return ACAMERA_OK;
 }
 
-camera_status_t
-CameraDevice::flushLocked(ACameraCaptureSession* session) {
+camera_status_t CameraDevice::flushLocked(ACameraCaptureSession* session) {
     camera_status_t ret = checkCameraClosedOrErrorLocked();
     if (ret != ACAMERA_OK) {
         ALOGE("Camera %s abort captures failed! ret %d", getId(), ret);
@@ -571,20 +599,15 @@
     }
 
     int64_t lastFrameNumber;
-    Status status = Status::UNKNOWN_ERROR;
-    auto remoteRet = mRemote->flush([&status, &lastFrameNumber](auto s, auto frameNumber) {
-                                        status = s;
-                                        lastFrameNumber = frameNumber;
-                                    });
-    CHECK_TRANSACTION_AND_RET(remoteRet, status, "flush()")
+    ScopedAStatus remoteRet = mRemote->flush(&lastFrameNumber);
+    CHECK_TRANSACTION_AND_RET(remoteRet, "flush()")
     if (mRepeatingSequenceId != REQUEST_ID_NONE) {
         checkRepeatingSequenceCompleteLocked(mRepeatingSequenceId, lastFrameNumber);
     }
     return ACAMERA_OK;
 }
 
-camera_status_t
-CameraDevice::waitUntilIdleLocked() {
+camera_status_t CameraDevice::waitUntilIdleLocked() {
     camera_status_t ret = checkCameraClosedOrErrorLocked();
     if (ret != ACAMERA_OK) {
         ALOGE("Wait until camera %s idle failed! ret %d", getId(), ret);
@@ -597,13 +620,13 @@
     }
 
     auto remoteRet = mRemote->waitUntilIdle();
-    CHECK_TRANSACTION_AND_RET(remoteRet, remoteRet, "waitUntilIdle()")
+    CHECK_TRANSACTION_AND_RET(remoteRet, "waitUntilIdle()")
     return ACAMERA_OK;
 }
 
-camera_status_t
-CameraDevice::configureStreamsLocked(const ACaptureSessionOutputContainer* outputs,
-        const ACaptureRequest* sessionParameters, nsecs_t startTimeNs) {
+camera_status_t CameraDevice::configureStreamsLocked(const ACaptureSessionOutputContainer* outputs,
+                                                     const ACaptureRequest* sessionParameters,
+                                                     nsecs_t startTimeNs) {
     ACaptureSessionOutputContainer emptyOutput;
     if (outputs == nullptr) {
         outputs = &emptyOutput;
@@ -614,31 +637,37 @@
         return ret;
     }
 
-    std::set<std::pair<native_handle_ptr_wrapper, OutputConfigurationWrapper>> outputSet;
-    for (auto outConfig : outputs->mOutputs) {
-        const native_handle_t* anw = outConfig.mWindow;
-        OutputConfigurationWrapper outConfigInsertW;
-        OutputConfiguration &outConfigInsert = outConfigInsertW.mOutputConfiguration;
-        outConfigInsert.rotation = utils::convertToHidl(outConfig.mRotation);
+    std::map<native_handle_ptr_wrapper, OutputConfiguration> handleToConfig;
+    for (const auto& outConfig : outputs->mOutputs) {
+        native_handle_ptr_wrapper anw = outConfig.mWindow;
+        OutputConfiguration outConfigInsert;
+        outConfigInsert.rotation = utils::convertToAidl(outConfig.mRotation);
         outConfigInsert.windowGroupId = -1;
         outConfigInsert.windowHandles.resize(outConfig.mSharedWindows.size() + 1);
-        outConfigInsert.windowHandles[0] = anw;
+        outConfigInsert.windowHandles[0] = std::move(dupToAidl(anw));
         outConfigInsert.physicalCameraId = outConfig.mPhysicalCameraId;
-        native_handle_ptr_wrapper wrap(anw);
-
-        outputSet.emplace(std::make_pair(std::move(anw), std::move(outConfigInsertW)));
+        handleToConfig.insert({anw, std::move(outConfigInsert)});
     }
-    std::set<std::pair<native_handle_ptr_wrapper, OutputConfigurationWrapper>> addSet = outputSet;
+
+    std::set<native_handle_ptr_wrapper> addSet;
+    for (auto& kvPair : handleToConfig) {
+        addSet.insert(kvPair.first);
+    }
+
     std::vector<int32_t> deleteList;
 
     // Determine which streams need to be created, which to be deleted
     for (auto& kvPair : mConfiguredOutputs) {
         int32_t streamId = kvPair.first;
         auto& outputPair = kvPair.second;
-        if (outputSet.count(outputPair)) {
-            deleteList.push_back(streamId); // Need to delete a no longer needed stream
+        auto& anw = outputPair.first;
+        auto& configuredOutput = outputPair.second;
+
+        auto itr = handleToConfig.find(anw);
+        if (itr != handleToConfig.end() && (itr->second) == configuredOutput) {
+            deleteList.push_back(streamId);
         } else {
-            addSet.erase(outputPair);        // No need to add already existing stream
+            addSet.erase(anw);
         }
     }
 
@@ -673,106 +702,96 @@
     mIdle = true;
 
     auto remoteRet = mRemote->beginConfigure();
-    CHECK_TRANSACTION_AND_RET(remoteRet, remoteRet, "beginConfigure()")
+    CHECK_TRANSACTION_AND_RET(remoteRet, "beginConfigure()")
 
     // delete to-be-deleted streams
     for (auto streamId : deleteList) {
         remoteRet = mRemote->deleteStream(streamId);
-        CHECK_TRANSACTION_AND_RET(remoteRet, remoteRet, "deleteStream()")
+        CHECK_TRANSACTION_AND_RET(remoteRet, "deleteStream()")
         mConfiguredOutputs.erase(streamId);
     }
 
     // add new streams
-    for (const auto &outputPair : addSet) {
-        int streamId;
-        Status status = Status::UNKNOWN_ERROR;
-        auto ret = mRemote->createStream(outputPair.second,
-                                         [&status, &streamId](Status s, auto stream_id) {
-                                             status = s;
-                                             streamId = stream_id;
-                                         });
-        CHECK_TRANSACTION_AND_RET(ret, status, "createStream()")
-        mConfiguredOutputs.insert(std::make_pair(streamId, outputPair));
+    for (const auto &anw : addSet) {
+        int32_t streamId;
+        auto itr = handleToConfig.find(anw);
+        remoteRet = mRemote->createStream(itr->second, &streamId);
+        CHECK_TRANSACTION_AND_RET(remoteRet, "createStream()")
+        mConfiguredOutputs.insert(std::make_pair(streamId,
+                                                 std::make_pair(anw,
+                                                                std::move(itr->second))));
+        handleToConfig.erase(itr);
     }
 
-    CameraMetadata params;
-    HCameraMetadata hidlParams;
+    AidlCameraMetadata aidlParams;
     if ((sessionParameters != nullptr) && (sessionParameters->settings != nullptr)) {
-        params.append(sessionParameters->settings->getInternalData());
-        const camera_metadata_t *params_metadata = params.getAndLock();
-        utils::convertToHidl(params_metadata, &hidlParams);
-        params.unlock(params_metadata);
+        const CameraMetadata &params = sessionParameters->settings->getInternalData();
+        const camera_metadata_t* paramsMetadata = params.getAndLock();
+        utils::convertToAidl(paramsMetadata, &aidlParams);
+        params.unlock(paramsMetadata);
     }
-    remoteRet = mRemote->endConfigure_2_1(StreamConfigurationMode::NORMAL_MODE,
-                                          hidlParams, startTimeNs);
-    CHECK_TRANSACTION_AND_RET(remoteRet, remoteRet, "endConfigure()")
+    remoteRet = mRemote->endConfigure(StreamConfigurationMode::NORMAL_MODE,
+                                      aidlParams, startTimeNs);
+    CHECK_TRANSACTION_AND_RET(remoteRet, "endConfigure()")
     return ACAMERA_OK;
 }
 
-void
-CameraDevice::setRemoteDevice(sp<ICameraDeviceUser> remote) {
+void CameraDevice::setRemoteDevice(std::shared_ptr<ICameraDeviceUser> remote) {
     Mutex::Autolock _l(mDeviceLock);
-    mRemote = remote;
+    mRemote = std::move(remote);
 }
 
-bool
-CameraDevice::setDeviceMetadataQueues() {
+bool CameraDevice::setDeviceMetadataQueues() {
         if (mRemote == nullptr) {
           ALOGE("mRemote must not be null while trying to fetch metadata queues");
           return false;
         }
         std::shared_ptr<RequestMetadataQueue> &reqQueue = mCaptureRequestMetadataQueue;
-        auto ret =
-            mRemote->getCaptureRequestMetadataQueue(
-                [&reqQueue](const auto &mqDescriptor) {
-                    reqQueue = std::make_shared<RequestMetadataQueue>(mqDescriptor);
-                    if (!reqQueue->isValid() || reqQueue->availableToWrite() <=0) {
-                        ALOGE("Empty fmq from cameraserver");
-                        reqQueue = nullptr;
-                    }
-                });
+        MQDescriptor<int8_t, SynchronizedReadWrite> reqMqDescriptor;
+        ScopedAStatus ret = mRemote->getCaptureRequestMetadataQueue(&reqMqDescriptor);
         if (!ret.isOk()) {
             ALOGE("Transaction error trying to get capture request metadata queue");
             return false;
         }
+        reqQueue = std::make_shared<RequestMetadataQueue>(reqMqDescriptor);
+        if (!reqQueue->isValid() || reqQueue->availableToWrite() <= 0) {
+            ALOGE("Empty fmq from cameraserver");
+            reqQueue = nullptr;
+        }
+
+        MQDescriptor<int8_t, SynchronizedReadWrite> resMqDescriptor;
         std::shared_ptr<ResultMetadataQueue> &resQueue = mCaptureResultMetadataQueue;
-        ret =
-                mRemote->getCaptureResultMetadataQueue(
-                        [&resQueue](const auto &mqDescriptor) {
-                            resQueue = std::make_shared<ResultMetadataQueue>(mqDescriptor);
-                            if (!resQueue->isValid() || resQueue->availableToWrite() <=0) {
-                                ALOGE("Empty fmq from cameraserver");
-                            }
-                        });
+        ret = mRemote->getCaptureResultMetadataQueue(&resMqDescriptor);
         if (!ret.isOk()) {
             ALOGE("Transaction error trying to get capture result metadata queue");
             return false;
         }
+        resQueue = std::make_shared<ResultMetadataQueue>(resMqDescriptor);
+        if (!resQueue->isValid() || resQueue->availableToWrite() <= 0) {
+            ALOGE("Empty fmq from cameraserver");
+        }
+
         return true;
 }
 
-camera_status_t
-CameraDevice::checkCameraClosedOrErrorLocked() const {
+camera_status_t CameraDevice::checkCameraClosedOrErrorLocked() const {
     if (mRemote == nullptr) {
         ALOGE("%s: camera device already closed", __FUNCTION__);
         return ACAMERA_ERROR_CAMERA_DISCONNECTED;
     }
-    if (mInError) {// triggered by onDeviceError
-        ALOGE("%s: camera device has encountered a serious error", __FUNCTION__);
+    if (mInError) { // triggered by onDeviceError
+        ALOGE("%s: camera device has encountered a serious error: %d", __FUNCTION__, mError);
         return mError;
     }
     return ACAMERA_OK;
 }
 
-void
-CameraDevice::setCameraDeviceErrorLocked(camera_status_t error) {
+void CameraDevice::setCameraDeviceErrorLocked(camera_status_t error) {
     mInError = true;
     mError = error;
-    return;
 }
 
-void
-CameraDevice::FrameNumberTracker::updateTracker(int64_t frameNumber, bool isError) {
+void CameraDevice::FrameNumberTracker::updateTracker(int64_t frameNumber, bool isError) {
     ALOGV("updateTracker frame %" PRId64 " isError %d", frameNumber, isError);
     if (isError) {
         mFutureErrorSet.insert(frameNumber);
@@ -791,8 +810,7 @@
     update();
 }
 
-void
-CameraDevice::FrameNumberTracker::update() {
+void CameraDevice::FrameNumberTracker::update() {
     for (auto it = mFutureErrorSet.begin(); it != mFutureErrorSet.end();) {
         int64_t errorFrameNumber = *it;
         if (errorFrameNumber == mCompletedFrameNumber + 1) {
@@ -811,10 +829,8 @@
     ALOGV("Update complete frame %" PRId64, mCompletedFrameNumber);
 }
 
-void
-CameraDevice::onCaptureErrorLocked(
-        ErrorCode errorCode,
-        const CaptureResultExtras& resultExtras) {
+void CameraDevice::onCaptureErrorLocked(ErrorCode errorCode,
+                                        const CaptureResultExtras& resultExtras) {
     int sequenceId = resultExtras.requestId;
     int64_t frameNumber = resultExtras.frameNumber;
     int32_t burstId = resultExtras.burstId;
@@ -826,7 +842,7 @@
         return;
     }
 
-    CallbackHolder cbh = (*it).second;
+    CallbackHolder cbh = it->second;
     sp<ACameraCaptureSession> session = cbh.mSession;
     if ((size_t) burstId >= cbh.mRequests.size()) {
         ALOGE("%s: Error: request index %d out of bound (size %zu)",
@@ -852,7 +868,7 @@
         // them and try to match the surfaces in the corresponding
         // CaptureRequest.
         const auto& errorWindowHandles =
-                outputPairIt->second.second.mOutputConfiguration.windowHandles;
+                outputPairIt->second.second.windowHandles;
         for (const auto& errorWindowHandle : errorWindowHandles) {
             for (const auto &requestStreamAndWindowId :
                         request->mCaptureRequest.streamAndWindowIds) {
@@ -869,11 +885,11 @@
                 }
 
                 const auto &requestWindowHandles =
-                        requestSurfacePairIt->second.second.mOutputConfiguration.windowHandles;
-                if (utils::isWindowNativeHandleEqual(
-                        requestWindowHandles[requestWindowId], errorWindowHandle)) {
-                    const native_handle_t* anw =
-                            requestWindowHandles[requestWindowId].getNativeHandle();
+                        requestSurfacePairIt->second.second.windowHandles;
+
+                if (requestWindowHandles[requestWindowId] == errorWindowHandle) {
+                    const native_handle_t* anw = makeFromAidl(
+                            requestWindowHandles[requestWindowId]);
                     ALOGV("Camera %s Lost output buffer for ANW %p frame %" PRId64,
                             getId(), anw, frameNumber);
 
@@ -898,14 +914,16 @@
         failure->sequenceId  = sequenceId;
         failure->wasImageCaptured = (errorCode == ErrorCode::CAMERA_RESULT);
 
-        sp<AMessage> msg = new AMessage(cbh.mIsLogicalCameraCallback ? kWhatLogicalCaptureFail :
-                kWhatCaptureFail, mHandler);
+        sp<AMessage> msg = new AMessage(cbh.mIsLogicalCameraCallback ? kWhatLogicalCaptureFail
+                                                                     : kWhatCaptureFail,
+                                        mHandler);
         msg->setPointer(kContextKey, cbh.mContext);
         msg->setObject(kSessionSpKey, session);
         if (cbh.mIsLogicalCameraCallback) {
-            if (resultExtras.errorPhysicalCameraId.size() > 0) {
-                msg->setString(kFailingPhysicalCameraId, resultExtras.errorPhysicalCameraId.c_str(),
-                        resultExtras.errorPhysicalCameraId.size());
+            if (!resultExtras.errorPhysicalCameraId.empty()) {
+                msg->setString(kFailingPhysicalCameraId,
+                               resultExtras.errorPhysicalCameraId.c_str(),
+                               resultExtras.errorPhysicalCameraId.size());
             }
             msg->setPointer(kCallbackFpKey, (void*) cbh.mOnLogicalCameraCaptureFailed);
         } else {
@@ -919,7 +937,6 @@
         mFrameNumberTracker.updateTracker(frameNumber, /*isError*/true);
         checkAndFireSequenceCompleteLocked();
     }
-    return;
 }
 
 CameraDevice::CallbackHandler::CallbackHandler(const char *id) : mId(id) { }
@@ -939,6 +956,7 @@
         case kWhatCaptureSeqEnd:
         case kWhatCaptureSeqAbort:
         case kWhatCaptureBufferLost:
+        case kWhatPreparedCb:
             ALOGV("%s: Received msg %d", __FUNCTION__, msg->what());
             break;
         case kWhatCleanUpSessions:
@@ -1012,6 +1030,7 @@
         case kWhatCaptureSeqEnd:
         case kWhatCaptureSeqAbort:
         case kWhatCaptureBufferLost:
+        case kWhatPreparedCb:
         {
             sp<RefBase> obj;
             found = msg->findObject(kSessionSpKey, &obj);
@@ -1020,7 +1039,7 @@
                 return;
             }
             sp<ACameraCaptureSession> session(static_cast<ACameraCaptureSession*>(obj.get()));
-            mCachedSessions.push(session);
+            mCachedSessions.push_back(session);
             sp<CaptureRequest> requestSp = nullptr;
             const char *id_cstr = mId.c_str();
             switch (msg->what()) {
@@ -1055,6 +1074,26 @@
                     (*onState)(context, session.get());
                     break;
                 }
+                case kWhatPreparedCb:
+                {
+                    ACameraCaptureSession_prepareCallback onWindowPrepared;
+                    found = msg->findPointer(kCallbackFpKey, (void**) &onWindowPrepared);
+                    if (!found) {
+                        ALOGE("%s: Cannot find state callback!", __FUNCTION__);
+                        return;
+                    }
+                    if (onWindowPrepared == nullptr) {
+                        return;
+                    }
+                    native_handle_t* anw;
+                    found = msg->findPointer(kAnwKey, (void**) &anw);
+                    if (!found) {
+                    ALOGE("%s: Cannot find the native window handle!", __FUNCTION__);
+                        return;
+                    }
+                    (*onWindowPrepared)(context, anw, session.get());
+                    break;
+                }
                 case kWhatCaptureStart:
                 {
                     ACameraCaptureSession_captureCallback_start onStart;
@@ -1167,7 +1206,8 @@
                         clone.update(ANDROID_SYNC_FRAME_NUMBER,
                                 &physicalResult->mFrameNumber, /*data_count*/1);
                         sp<ACameraMetadata> metadata =
-                                new ACameraMetadata(clone.release(), ACameraMetadata::ACM_RESULT);
+                                new ACameraMetadata(clone.release(),
+                                                    ACameraMetadata::ACM_RESULT);
                         physicalMetadataCopy.push_back(metadata);
                     }
                     std::vector<const char*> physicalCameraIdPtrs;
@@ -1302,7 +1342,7 @@
                         return;
                     }
 
-                    const native_handle_t* anw;
+                    native_handle_t* anw;
                     found = msg->findPointer(kAnwKey, (void**) &anw);
                     if (!found) {
                         ALOGE("%s: Cannot find native_handle_t!", __FUNCTION__);
@@ -1319,6 +1359,7 @@
                     ACaptureRequest* request = allocateACaptureRequest(requestSp, id_cstr);
                     (*onBufferLost)(context, session.get(), request, anw, frameNumber);
                     freeACaptureRequest(request);
+                    native_handle_delete(anw); // clean up anw as it was copied from AIDL
                     break;
                 }
             }
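
Note: the kWhatPreparedCb branch added above hands the prepared output window to an application-supplied ACameraCaptureSession_prepareCallback. A minimal sketch of what the receiving side might look like (only the typedef name comes from the NDK headers; the function name and body are illustrative):

    static void onWindowPrepared(void* context, ACameraWindowType* window,
                                 ACameraCaptureSession* session) {
        // Output buffers for `window` have been pre-allocated by the camera service,
        // so a capture that targets this window can now start without allocation jank.
        (void) context; (void) window; (void) session;
    }
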
@@ -1329,10 +1370,10 @@
 
 CameraDevice::CallbackHolder::CallbackHolder(
         sp<ACameraCaptureSession>          session,
-        const Vector<sp<CaptureRequest> >& requests,
+        std::vector<sp<CaptureRequest>>  requests,
         bool                               isRepeating,
         ACameraCaptureSession_captureCallbacks* cbs) :
-        mSession(session), mRequests(requests),
+        mSession(std::move(session)), mRequests(std::move(requests)),
         mIsRepeating(isRepeating),
         mIs2Callback(false),
         mIsLogicalCameraCallback(false) {
@@ -1346,10 +1387,10 @@
 
 CameraDevice::CallbackHolder::CallbackHolder(
         sp<ACameraCaptureSession>          session,
-        const Vector<sp<CaptureRequest> >& requests,
+        std::vector<sp<CaptureRequest>>  requests,
         bool                               isRepeating,
         ACameraCaptureSession_logicalCamera_captureCallbacks* lcbs) :
-        mSession(session), mRequests(requests),
+        mSession(std::move(session)), mRequests(std::move(requests)),
         mIsRepeating(isRepeating),
         mIs2Callback(false),
         mIsLogicalCameraCallback(true) {
@@ -1363,10 +1404,10 @@
 
 CameraDevice::CallbackHolder::CallbackHolder(
         sp<ACameraCaptureSession>          session,
-        const Vector<sp<CaptureRequest> >& requests,
+        std::vector<sp<CaptureRequest>>  requests,
         bool                               isRepeating,
         ACameraCaptureSession_captureCallbacksV2* cbs) :
-        mSession(session), mRequests(requests),
+        mSession(std::move(session)), mRequests(std::move(requests)),
         mIsRepeating(isRepeating),
         mIs2Callback(true),
         mIsLogicalCameraCallback(false) {
@@ -1380,10 +1421,10 @@
 
 CameraDevice::CallbackHolder::CallbackHolder(
         sp<ACameraCaptureSession>          session,
-        const Vector<sp<CaptureRequest> >& requests,
+        std::vector<sp<CaptureRequest>>  requests,
         bool                               isRepeating,
         ACameraCaptureSession_logicalCamera_captureCallbacksV2* lcbs) :
-        mSession(session), mRequests(requests),
+        mSession(std::move(session)), mRequests(std::move(requests)),
         mIsRepeating(isRepeating),
         mIs2Callback(true),
         mIsLogicalCameraCallback(true) {
@@ -1501,23 +1542,21 @@
 /**
   * Camera service callback implementation
   */
-android::hardware::Return<void>
-CameraDevice::ServiceCallback::onDeviceError(
-        ErrorCode errorCode,
-        const CaptureResultExtras& resultExtras) {
+ScopedAStatus CameraDevice::ServiceCallback::onDeviceError(
+        ErrorCode errorCode, const CaptureResultExtras& resultExtras) {
     ALOGD("Device error received, code %d, frame number %" PRId64 ", request ID %d, subseq ID %d"
             " physical camera ID %s", errorCode, resultExtras.frameNumber, resultExtras.requestId,
             resultExtras.burstId, resultExtras.errorPhysicalCameraId.c_str());
-    auto ret = Void();
-    sp<CameraDevice> dev = mDevice.promote();
+
+    std::shared_ptr<CameraDevice> dev = mDevice.lock();
     if (dev == nullptr) {
-        return ret; // device has been closed
+        return ScopedAStatus::ok(); // device has been closed
     }
 
     sp<ACameraCaptureSession> session = dev->mCurrentSession.promote();
     Mutex::Autolock _l(dev->mDeviceLock);
     if (dev->mRemote == nullptr) {
-        return ret; // device has been closed
+        return ScopedAStatus::ok(); // device has been closed
     }
     switch (errorCode) {
         case ErrorCode::CAMERA_DISCONNECTED:
@@ -1570,26 +1609,25 @@
             dev->onCaptureErrorLocked(errorCode, resultExtras);
             break;
     }
-    return ret;
+    return ScopedAStatus::ok();
 }
 
-android::hardware::Return<void>
-CameraDevice::ServiceCallback::onDeviceIdle() {
+ScopedAStatus CameraDevice::ServiceCallback::onDeviceIdle() {
     ALOGV("Camera is now idle");
-    auto ret = Void();
-    sp<CameraDevice> dev = mDevice.promote();
+
+    std::shared_ptr<CameraDevice> dev = mDevice.lock();
     if (dev == nullptr) {
-        return ret; // device has been closed
+        return ScopedAStatus::ok(); // device has been closed
     }
 
     Mutex::Autolock _l(dev->mDeviceLock);
     if (dev->isClosed() || dev->mRemote == nullptr) {
-        return ret;
+        return ScopedAStatus::ok();
     }
 
     if (dev->mIdle) {
         // Already in idle state. Possibly other thread did waitUntilIdle
-        return ret;
+        return ScopedAStatus::ok();
     }
 
     if (dev->mCurrentSession != nullptr) {
@@ -1597,13 +1635,14 @@
         if (dev->mBusySession != dev->mCurrentSession) {
             ALOGE("Current session != busy session");
             dev->setCameraDeviceErrorLocked(ACAMERA_ERROR_CAMERA_DEVICE);
-            return ret;
+            return ScopedAStatus::ok();
         }
 
         sp<AMessage> msg = new AMessage(kWhatSessionStateCb, dev->mHandler);
         msg->setPointer(kContextKey, dev->mBusySession->mUserSessionCallback.context);
         msg->setObject(kSessionSpKey, dev->mBusySession);
-        msg->setPointer(kCallbackFpKey, (void*) dev->mBusySession->mUserSessionCallback.onReady);
+        msg->setPointer(kCallbackFpKey,
+                        (void*) dev->mBusySession->mUserSessionCallback.onReady);
         // Make sure we clear the sp first so the session destructor can
         // only happen on handler thread (where we don't hold device/session lock)
         dev->mBusySession.clear();
@@ -1611,22 +1650,20 @@
     }
     dev->mIdle = true;
     dev->mFlushing = false;
-    return ret;
+    return ScopedAStatus::ok();
 }
 
-android::hardware::Return<void>
-CameraDevice::ServiceCallback::onCaptureStarted(
-        const CaptureResultExtras& resultExtras,
-        uint64_t timestamp) {
-    auto ret = Void();
 
-    sp<CameraDevice> dev = mDevice.promote();
+        const CaptureResultExtras& resultExtras, int64_t timestamp) {
+    std::shared_ptr<CameraDevice> dev = mDevice.lock();
     if (dev == nullptr) {
-        return ret; // device has been closed
+        return ScopedAStatus::ok(); // device has been closed
     }
     Mutex::Autolock _l(dev->mDeviceLock);
     if (dev->isClosed() || dev->mRemote == nullptr) {
-        return ret;
+        return ScopedAStatus::ok();
     }
 
     int32_t sequenceId = resultExtras.requestId;
@@ -1635,7 +1672,7 @@
 
     auto it = dev->mSequenceCallbackMap.find(sequenceId);
     if (it != dev->mSequenceCallbackMap.end()) {
-        CallbackHolder cbh = (*it).second;
+        CallbackHolder &cbh = it->second;
         ACameraCaptureSession_captureCallback_start onStart = cbh.mOnCaptureStarted;
         ACameraCaptureSession_captureCallback_startV2 onStart2 = cbh.mOnCaptureStarted2;
         bool v2Callback = cbh.mIs2Callback;
@@ -1646,6 +1683,7 @@
             dev->setCameraDeviceErrorLocked(ACAMERA_ERROR_CAMERA_SERVICE);
         }
         sp<CaptureRequest> request = cbh.mRequests[burstId];
+        ALOGV("%s: request = %p", __FUNCTION__, request.get());
         sp<AMessage> msg = nullptr;
         if (v2Callback) {
             msg = new AMessage(kWhatCaptureStart2, dev->mHandler);
@@ -1661,24 +1699,22 @@
         msg->setInt64(kFrameNumberKey, frameNumber);
         dev->postSessionMsgAndCleanup(msg);
     }
-    return ret;
+    return ScopedAStatus::ok();
 }
 
-android::hardware::Return<void>
-CameraDevice::ServiceCallback::onResultReceived(
-        const FmqSizeOrMetadata& resultMetadata,
+ScopedAStatus CameraDevice::ServiceCallback::onResultReceived(
+        const CaptureMetadataInfo& resultMetadata,
         const CaptureResultExtras& resultExtras,
-        const hidl_vec<PhysicalCaptureResultInfo>& physicalResultInfos) {
-    auto ret = Void();
+        const std::vector<PhysicalCaptureResultInfo>& physicalResultInfos) {
 
-    sp<CameraDevice> dev = mDevice.promote();
+    std::shared_ptr<CameraDevice> dev = mDevice.lock();
     if (dev == nullptr) {
-        return ret; // device has been closed
+        return ScopedAStatus::ok(); // device has been closed
     }
     int32_t sequenceId = resultExtras.requestId;
     int64_t frameNumber = resultExtras.frameNumber;
     int32_t burstId = resultExtras.burstId;
-    bool    isPartialResult = (resultExtras.partialResultCount < dev->mPartialResultCount);
+    bool isPartialResult = (resultExtras.partialResultCount < dev->mPartialResultCount);
 
     if (!isPartialResult) {
         ALOGV("SeqId %d frame %" PRId64 " result arrive.", sequenceId, frameNumber);
@@ -1686,7 +1722,7 @@
 
     Mutex::Autolock _l(dev->mDeviceLock);
     if (dev->mRemote == nullptr) {
-        return ret; // device has been disconnected
+        return ScopedAStatus::ok(); // device has been disconnected
     }
 
     if (dev->isClosed()) {
@@ -1694,7 +1730,7 @@
             dev->mFrameNumberTracker.updateTracker(frameNumber, /*isError*/false);
         }
         // early return to avoid callback sent to closed devices
-        return ret;
+        return ScopedAStatus::ok();
     }
 
     CameraMetadata metadataCopy;
@@ -1702,11 +1738,12 @@
             dev->mCaptureResultMetadataQueue.get(), &metadataCopy);
     if (status != ACAMERA_OK) {
         ALOGE("%s: result metadata couldn't be converted", __FUNCTION__);
-        return ret;
+        return ScopedAStatus::ok();
     }
 
-    metadataCopy.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE, dev->mShadingMapSize, /*data_count*/2);
-    metadataCopy.update(ANDROID_SYNC_FRAME_NUMBER, &frameNumber, /*data_count*/1);
+    metadataCopy.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE, dev->mShadingMapSize,
+                        /* data_count= */ 2);
+    metadataCopy.update(ANDROID_SYNC_FRAME_NUMBER, &frameNumber, /* data_count= */1);
 
     auto it = dev->mSequenceCallbackMap.find(sequenceId);
     if (it != dev->mSequenceCallbackMap.end()) {
@@ -1730,7 +1767,7 @@
                     &localPhysicalResult[i].physicalMetadata);
             if (status != ACAMERA_OK) {
                 ALOGE("%s: physical camera result metadata couldn't be converted", __FUNCTION__);
-                return ret;
+                return ScopedAStatus::ok();
             }
         }
         sp<ACameraPhysicalCaptureResultInfo> physicalResult(
@@ -1762,17 +1799,14 @@
         dev->checkAndFireSequenceCompleteLocked();
     }
 
-    return ret;
+    return ScopedAStatus::ok();
 }
 
-android::hardware::Return<void>
-CameraDevice::ServiceCallback::onRepeatingRequestError(
-        uint64_t lastFrameNumber, int32_t stoppedSequenceId) {
-    auto ret = Void();
-
-    sp<CameraDevice> dev = mDevice.promote();
+ScopedAStatus CameraDevice::ServiceCallback::onRepeatingRequestError(int64_t lastFrameNumber,
+                                                                     int32_t stoppedSequenceId) {
+    std::shared_ptr<CameraDevice> dev = mDevice.lock();
     if (dev == nullptr) {
-        return ret; // device has been closed
+        return ScopedAStatus::ok(); // device has been closed
     }
 
     Mutex::Autolock _l(dev->mDeviceLock);
@@ -1784,33 +1818,72 @@
 
     dev->checkRepeatingSequenceCompleteLocked(repeatingSequenceId, lastFrameNumber);
 
-    return ret;
+    return ScopedAStatus::ok();
+}
+
+ScopedAStatus CameraDevice::ServiceCallback::onPrepared(int32_t streamId) {
+    ALOGV("%s: callback for stream id %d", __FUNCTION__, streamId);
+    std::shared_ptr<CameraDevice> dev = mDevice.lock();
+    if (dev == nullptr) {
+        return ScopedAStatus::ok();
+    }
+    Mutex::Autolock _l(dev->mDeviceLock);
+    if (dev->isClosed() || dev->mRemote == nullptr) {
+        return ScopedAStatus::ok();
+    }
+    auto it = dev->mConfiguredOutputs.find(streamId);
+    if (it == dev->mConfiguredOutputs.end()) {
+        ALOGE("%s: stream id %d does not exist", __FUNCTION__, streamId);
+        return ScopedAStatus::ok();
+    }
+    sp<ACameraCaptureSession> session = dev->mCurrentSession.promote();
+    if (session == nullptr) {
+        ALOGE("%s: Session is dead already", __FUNCTION__);
+        return ScopedAStatus::ok();
+    }
+    // We've found the window corresponding to the surface id.
+    const native_handle_t *anw = it->second.first.mWindow;
+    sp<AMessage> msg = new AMessage(kWhatPreparedCb, dev->mHandler);
+    msg->setPointer(kContextKey, session->mPreparedCb.context);
+    msg->setPointer(kAnwKey, (void *)anw);
+    msg->setObject(kSessionSpKey, session);
+    msg->setPointer(kCallbackFpKey, (void *)session->mPreparedCb.onWindowPrepared);
+    dev->postSessionMsgAndCleanup(msg);
+    return ScopedAStatus::ok();
 }
 
 camera_status_t CameraDevice::ServiceCallback::readOneResultMetadata(
-        const FmqSizeOrMetadata& fmqSizeOrMetadata, ResultMetadataQueue* metadataQueue,
+        const CaptureMetadataInfo& captureMetadataInfo, ResultMetadataQueue* metadataQueue,
         CameraMetadata* metadata) {
     if (metadataQueue == nullptr || metadata == nullptr) {
         return ACAMERA_ERROR_INVALID_PARAMETER;
     }
     bool converted;
-    HCameraMetadata hCameraMetadata;
-    if (fmqSizeOrMetadata.getDiscriminator() ==
-            FmqSizeOrMetadata::hidl_discriminator::fmqMetadataSize) {
-        hCameraMetadata.resize(fmqSizeOrMetadata.fmqMetadataSize());
-        bool read = metadataQueue->read(
-                hCameraMetadata.data(), fmqSizeOrMetadata.fmqMetadataSize());
+    AidlCameraMetadata aidlCameraMetadata;
+    std::vector<uint8_t>& metadataVec = aidlCameraMetadata.metadata;
+    camera_metadata_t* clonedMetadata;
+    if (captureMetadataInfo.getTag() == CaptureMetadataInfo::fmqMetadataSize) {
+        int64_t size = captureMetadataInfo.get<CaptureMetadataInfo::fmqMetadataSize>();
+        metadataVec.resize(size);
+        bool read = metadataQueue->read(reinterpret_cast<int8_t*>(metadataVec.data()), size);
         if (!read) {
             ALOGE("%s capture request settings could't be read from fmq", __FUNCTION__);
             return ACAMERA_ERROR_UNKNOWN;
         }
         // TODO: Do we actually need to clone here ?
-        converted = utils::convertFromHidlCloned(hCameraMetadata, metadata);
+        converted = utils::cloneFromAidl(aidlCameraMetadata, &clonedMetadata);
     } else {
-        converted = utils::convertFromHidlCloned(fmqSizeOrMetadata.metadata(), metadata);
+        const AidlCameraMetadata &embeddedMetadata =
+                captureMetadataInfo.get<CaptureMetadataInfo::metadata>();
+        converted = utils::cloneFromAidl(embeddedMetadata, &clonedMetadata);
     }
 
-    return converted ? ACAMERA_OK : ACAMERA_ERROR_UNKNOWN;
+    if (converted) {
+        *metadata = CameraMetadata(clonedMetadata);
+        return ACAMERA_OK;
+    }
+
+    return ACAMERA_ERROR_UNKNOWN;
 }
 
 } // namespace acam
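
Note: all ServiceCallback overrides above now return ndk::ScopedAStatus instead of hardware::Return<void>. A minimal sketch of the convention, assuming a callback that can also report a failure (ScopedAStatus::ok() and fromServiceSpecificError() are real libbinder_ndk helpers; the callback name and the use of Status::ILLEGAL_ARGUMENT are illustrative):

    ndk::ScopedAStatus SomeCallback::onSomething(int32_t arg) {
        if (arg < 0) {
            // Service-specific errors travel as an int defined by the interface's Status enum.
            return ndk::ScopedAStatus::fromServiceSpecificError(
                    static_cast<int32_t>(Status::ILLEGAL_ARGUMENT));
        }
        // Common case: nothing to report back to the caller.
        return ndk::ScopedAStatus::ok();
    }
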
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.h b/camera/ndk/ndk_vendor/impl/ACameraDevice.h
index c306206..6e0c772 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.h
@@ -16,54 +16,63 @@
 #ifndef _ACAMERA_DEVICE_H
 #define _ACAMERA_DEVICE_H
 
-#include <memory>
-#include <map>
-#include <set>
-#include <atomic>
-#include <utility>
-#include <vector>
-#include <utils/StrongPointer.h>
-#include <utils/Mutex.h>
-#include <utils/List.h>
-#include <utils/Vector.h>
-#include <android/frameworks/cameraservice/device/2.1/ICameraDeviceUser.h>
-#include <android/frameworks/cameraservice/device/2.0/ICameraDeviceCallback.h>
-#include <android/frameworks/cameraservice/device/2.0/types.h>
-#include <fmq/MessageQueue.h>
-#include <media/stagefright/foundation/ALooper.h>
-#include <media/stagefright/foundation/AHandler.h>
-#include <media/stagefright/foundation/AMessage.h>
-
-#include <camera/NdkCameraManager.h>
-#include <camera/NdkCameraCaptureSession.h>
-
 #include "ACameraMetadata.h"
 #include "utils.h"
 
+#include <aidl/android/frameworks/cameraservice/common/Status.h>
+#include <aidl/android/frameworks/cameraservice/device/BnCameraDeviceCallback.h>
+#include <aidl/android/frameworks/cameraservice/device/CaptureResultExtras.h>
+#include <aidl/android/frameworks/cameraservice/device/ErrorCode.h>
+#include <aidl/android/frameworks/cameraservice/device/CaptureMetadataInfo.h>
+#include <aidl/android/frameworks/cameraservice/device/ICameraDeviceUser.h>
+#include <aidl/android/frameworks/cameraservice/device/PhysicalCameraSettings.h>
+#include <aidl/android/frameworks/cameraservice/device/PhysicalCaptureResultInfo.h>
+#include <aidl/android/frameworks/cameraservice/device/StreamConfigurationMode.h>
+#include <aidl/android/frameworks/cameraservice/device/SubmitInfo.h>
+#include <aidl/android/frameworks/cameraservice/service/CameraStatusAndId.h>
+#include <atomic>
+#include <camera/NdkCameraCaptureSession.h>
+#include <camera/NdkCameraManager.h>
+#include <fmq/AidlMessageQueue.h>
+#include <fmq/MessageQueue.h>
+#include <map>
+#include <media/stagefright/foundation/AHandler.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <memory>
+#include <set>
+#include <utility>
+#include <utils/List.h>
+#include <utils/Mutex.h>
+#include <utils/StrongPointer.h>
+#include <utils/Vector.h>
+#include <vector>
+
 namespace android {
 namespace acam {
 
-using ICameraDeviceCallback = frameworks::cameraservice::device::V2_0::ICameraDeviceCallback;
-using ICameraDeviceUser_2_0 = frameworks::cameraservice::device::V2_0::ICameraDeviceUser;
-using ICameraDeviceUser = frameworks::cameraservice::device::V2_1::ICameraDeviceUser;
-using CaptureResultExtras = frameworks::cameraservice::device::V2_0::CaptureResultExtras;
-using PhysicalCaptureResultInfo = frameworks::cameraservice::device::V2_0::PhysicalCaptureResultInfo;
-using PhysicalCameraSettings = frameworks::cameraservice::device::V2_0::PhysicalCameraSettings;
-using SubmitInfo = frameworks::cameraservice::device::V2_0::SubmitInfo;
-using CaptureResultExtras = frameworks::cameraservice::device::V2_0::CaptureResultExtras;
-using ErrorCode = frameworks::cameraservice::device::V2_0::ErrorCode;
-using FmqSizeOrMetadata = frameworks::cameraservice::device::V2_0::FmqSizeOrMetadata;
-using StreamConfigurationMode = frameworks::cameraservice::device::V2_0::StreamConfigurationMode;
-using Status = frameworks::cameraservice::common::V2_0::Status;
-using ResultMetadataQueue = hardware::MessageQueue<uint8_t, hardware::kSynchronizedReadWrite>;
-using RequestMetadataQueue = hardware::MessageQueue<uint8_t, hardware::kSynchronizedReadWrite>;
-using CameraStatusAndId = frameworks::cameraservice::service::V2_0::CameraStatusAndId;
+using ::aidl::android::frameworks::cameraservice::common::Status;
+using ::aidl::android::frameworks::cameraservice::device::BnCameraDeviceCallback;
+using ::aidl::android::frameworks::cameraservice::device::CaptureResultExtras;
+using ::aidl::android::frameworks::cameraservice::device::ErrorCode;
+using ::aidl::android::frameworks::cameraservice::device::CaptureMetadataInfo;
+using ::aidl::android::frameworks::cameraservice::device::ICameraDeviceCallback;
+using ::aidl::android::frameworks::cameraservice::device::ICameraDeviceUser;
+using ::aidl::android::frameworks::cameraservice::device::OutputConfiguration;
+using ::aidl::android::frameworks::cameraservice::device::PhysicalCameraSettings;
+using ::aidl::android::frameworks::cameraservice::device::PhysicalCaptureResultInfo;
+using ::aidl::android::frameworks::cameraservice::device::StreamConfigurationMode;
+using ::aidl::android::frameworks::cameraservice::device::SubmitInfo;
+using ::aidl::android::frameworks::cameraservice::service::CameraStatusAndId;
+using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
+using ::android::AidlMessageQueue;
+using ::android::acam::utils::native_handle_ptr_wrapper;
 
-using hardware::hidl_vec;
-using hardware::hidl_string;
-using utils::native_handle_ptr_wrapper;
+
+using ResultMetadataQueue = AidlMessageQueue<int8_t, SynchronizedReadWrite>;
+using RequestMetadataQueue = AidlMessageQueue<int8_t, SynchronizedReadWrite>;
+
 using utils::CaptureRequest;
-using utils::OutputConfigurationWrapper;
 
 // Wrap ACameraCaptureFailure so it can be ref-counted
 struct CameraCaptureFailure : public RefBase, public ACameraCaptureFailure { };
@@ -83,13 +92,16 @@
     int64_t mFrameNumber;
 };
 
-class CameraDevice final : public RefBase {
+class CameraDevice final : public std::enable_shared_from_this<CameraDevice> {
   public:
     CameraDevice(const char* id, ACameraDevice_StateCallbacks* cb,
                   sp<ACameraMetadata> chars,
                   ACameraDevice* wrapper);
     ~CameraDevice();
 
+    // Called to initialize fields that require shared_ptr to `this`
+    void init();
+
     inline const char* getId() const { return mCameraId.c_str(); }
 
     camera_status_t createCaptureRequest(
@@ -107,30 +119,36 @@
             const ACaptureSessionOutputContainer* sessionOutputContainer) const;
 
     // Callbacks from camera service
-    class ServiceCallback : public ICameraDeviceCallback {
+    class ServiceCallback : public BnCameraDeviceCallback {
       public:
-        explicit ServiceCallback(CameraDevice* device) : mDevice(device) {}
-        android::hardware::Return<void> onDeviceError(ErrorCode errorCode,
-                           const CaptureResultExtras& resultExtras) override;
-        android::hardware::Return<void> onDeviceIdle() override;
-        android::hardware::Return<void> onCaptureStarted(const CaptureResultExtras& resultExtras,
-                              uint64_t timestamp) override;
-        android::hardware::Return<void> onResultReceived(const FmqSizeOrMetadata& result,
-                              const CaptureResultExtras& resultExtras,
-                              const hidl_vec<PhysicalCaptureResultInfo>& physicalResultInfos) override;
-        android::hardware::Return<void> onRepeatingRequestError(uint64_t lastFrameNumber,
-                int32_t stoppedSequenceId) override;
+        explicit ServiceCallback(std::weak_ptr<CameraDevice> device) :
+              mDevice(std::move(device)) {}
+
+        ndk::ScopedAStatus onDeviceError(ErrorCode in_errorCode,
+                                         const CaptureResultExtras& in_resultExtras) override;
+        ndk::ScopedAStatus onDeviceIdle() override;
+
+        ndk::ScopedAStatus onCaptureStarted(const CaptureResultExtras& in_resultExtras,
+                                            int64_t in_timestamp) override;
+        ndk::ScopedAStatus onPrepared(int32_t in_streamId) override;
+        ndk::ScopedAStatus onRepeatingRequestError(int64_t in_lastFrameNumber,
+                                                   int32_t in_repeatingRequestId) override;
+        ndk::ScopedAStatus onResultReceived(const CaptureMetadataInfo& in_result,
+                                            const CaptureResultExtras& in_resultExtras,
+                                            const std::vector<PhysicalCaptureResultInfo>&
+                                                    in_physicalCaptureResultInfos) override;
+
       private:
-        camera_status_t readOneResultMetadata(const FmqSizeOrMetadata& fmqSizeOrMetadata,
+        camera_status_t readOneResultMetadata(const CaptureMetadataInfo& captureMetadataInfo,
                 ResultMetadataQueue* metadataQueue, CameraMetadata* metadata);
-        const wp<CameraDevice> mDevice;
+        const std::weak_ptr<CameraDevice> mDevice;
     };
-    inline sp<ICameraDeviceCallback> getServiceCallback() {
+    inline std::shared_ptr<BnCameraDeviceCallback> getServiceCallback() {
         return mServiceCallback;
     };
 
     // Camera device is only functional after remote being set
-    void setRemoteDevice(sp<ICameraDeviceUser> remote);
+    void setRemoteDevice(std::shared_ptr<ICameraDeviceUser> remote);
 
     bool setDeviceMetadataQueues();
     inline ACameraDevice* getWrapper() const { return mWrapper; };
@@ -179,6 +197,8 @@
 
     camera_status_t updateOutputConfigurationLocked(ACaptureSessionOutput *output);
 
+    camera_status_t prepareLocked(ACameraWindowType *window);
+
     // Since this writes to ICameraDeviceUser's fmq, clients must take care that:
     //   a) This function is called serially.
     //   b) This function is called in accordance with ICameraDeviceUser.submitRequestList,
@@ -208,15 +228,15 @@
     void postSessionMsgAndCleanup(sp<AMessage>& msg);
 
     mutable Mutex mDeviceLock;
-    const hidl_string mCameraId;                          // Camera ID
+    const std::string mCameraId;                          // Camera ID
     const ACameraDevice_StateCallbacks mAppCallbacks; // Callback to app
     const sp<ACameraMetadata> mChars;    // Camera characteristics
-    const sp<ServiceCallback> mServiceCallback;
+    std::shared_ptr<ServiceCallback> mServiceCallback;
     ACameraDevice* mWrapper;
 
     // stream id -> pair of (ACameraWindowType* from application, OutputConfiguration used for
     // camera service)
-    std::map<int, std::pair<native_handle_ptr_wrapper, OutputConfigurationWrapper>> mConfiguredOutputs;
+    std::map<int, std::pair<native_handle_ptr_wrapper, OutputConfiguration>> mConfiguredOutputs;
 
     // TODO: maybe a bool will suffice for synchronous implementation?
     std::atomic_bool mClosing;
@@ -232,7 +252,7 @@
     // This will avoid a busy session being deleted before it's back to idle state
     sp<ACameraCaptureSession> mBusySession;
 
-    sp<ICameraDeviceUser> mRemote;
+    std::shared_ptr<ICameraDeviceUser> mRemote;
 
     // Looper thread to handle callback to app
     sp<ALooper> mCbLooper;
@@ -252,7 +272,8 @@
         kWhatLogicalCaptureFail, // onLogicalCameraCaptureFailed
         kWhatCaptureSeqEnd,    // onCaptureSequenceCompleted
         kWhatCaptureSeqAbort,  // onCaptureSequenceAborted
-        kWhatCaptureBufferLost,// onCaptureBufferLost
+        kWhatCaptureBufferLost, // onCaptureBufferLost
+        kWhatPreparedCb, // onPrepared
         // Internal cleanup
         kWhatCleanUpSessions   // Cleanup cached sp<ACameraCaptureSession>
     };
@@ -281,7 +302,7 @@
         // This handler will cache all capture session sp until kWhatCleanUpSessions
         // is processed. This is used to guarantee the last session reference is always
         // being removed in callback thread without holding camera device lock
-        Vector<sp<ACameraCaptureSession>> mCachedSessions;
+        std::vector<sp<ACameraCaptureSession>> mCachedSessions;
     };
     sp<CallbackHandler> mHandler;
 
@@ -303,19 +324,19 @@
 
     struct CallbackHolder {
         CallbackHolder(sp<ACameraCaptureSession>          session,
-                       const Vector<sp<CaptureRequest>>&  requests,
+                       std::vector<sp<CaptureRequest>>   requests,
                        bool                               isRepeating,
                        ACameraCaptureSession_captureCallbacks* cbs);
         CallbackHolder(sp<ACameraCaptureSession>          session,
-                       const Vector<sp<CaptureRequest>>&  requests,
+                       std::vector<sp<CaptureRequest>>   requests,
                        bool                               isRepeating,
                        ACameraCaptureSession_logicalCamera_captureCallbacks* lcbs);
         CallbackHolder(sp<ACameraCaptureSession>          session,
-                       const Vector<sp<CaptureRequest> >& requests,
+                       std::vector<sp<CaptureRequest> >  requests,
                        bool                               isRepeating,
                        ACameraCaptureSession_captureCallbacksV2* cbs);
         CallbackHolder(sp<ACameraCaptureSession>          session,
-                       const Vector<sp<CaptureRequest> >& requests,
+                       std::vector<sp<CaptureRequest> >  requests,
                        bool                               isRepeating,
                        ACameraCaptureSession_logicalCamera_captureCallbacksV2* lcbs);
         void clearCallbacks() {
@@ -359,7 +380,7 @@
         }
 
         sp<ACameraCaptureSession>   mSession;
-        Vector<sp<CaptureRequest>>  mRequests;
+        std::vector<sp<CaptureRequest>>  mRequests;
         const bool                  mIsRepeating;
         const bool                  mIs2Callback;
         const bool                  mIsLogicalCameraCallback;
@@ -401,7 +422,7 @@
     // Misc variables
     int32_t mShadingMapSize[2];   // const after constructor
     int32_t mPartialResultCount;  // const after constructor
-    std::shared_ptr<ResultMetadataQueue> mCaptureRequestMetadataQueue = nullptr;
+    std::shared_ptr<RequestMetadataQueue> mCaptureRequestMetadataQueue = nullptr;
     std::shared_ptr<ResultMetadataQueue> mCaptureResultMetadataQueue = nullptr;
 };
 
@@ -415,7 +436,10 @@
 struct ACameraDevice {
     ACameraDevice(const char* id, ACameraDevice_StateCallbacks* cb,
                   sp<ACameraMetadata> chars) :
-            mDevice(new android::acam::CameraDevice(id, cb, std::move(chars), this)) {}
+            mDevice(std::make_shared<android::acam::CameraDevice>(id, cb,
+                                                                std::move(chars), this)) {
+        mDevice->init();
+    }
 
     ~ACameraDevice();
     /*******************
@@ -446,19 +470,20 @@
     /***********************
      * Device interal APIs *
      ***********************/
-    inline android::sp<android::acam::ICameraDeviceCallback> getServiceCallback() {
+    inline std::shared_ptr<android::acam::BnCameraDeviceCallback> getServiceCallback() {
         return mDevice->getServiceCallback();
     };
 
     // Camera device is only functional after remote being set
-    inline void setRemoteDevice(android::sp<android::acam::ICameraDeviceUser> remote) {
+    inline void setRemoteDevice(std::shared_ptr<
+            ::aidl::android::frameworks::cameraservice::device::ICameraDeviceUser> remote) {
         mDevice->setRemoteDevice(remote);
     }
     inline bool setDeviceMetadataQueues() {
         return mDevice->setDeviceMetadataQueues();
     }
   private:
-    android::sp<android::acam::CameraDevice> mDevice;
+    std::shared_ptr<android::acam::CameraDevice> mDevice;
 };
 
 #endif // _ACAMERA_DEVICE_H
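
Note: CameraDevice now derives from std::enable_shared_from_this and grows an init() method because weak_from_this()/shared_from_this() cannot be used inside the constructor. A minimal sketch of that two-phase pattern, with hypothetical Device/Callback stand-ins for CameraDevice and ServiceCallback:

    #include <memory>

    struct Device;

    struct Callback {
        explicit Callback(std::weak_ptr<Device> dev) : mDev(std::move(dev)) {}
        std::weak_ptr<Device> mDev;  // promoted with lock() before each use
    };

    struct Device : std::enable_shared_from_this<Device> {
        // Must run after std::make_shared<Device>(); weak_from_this() called from the
        // constructor would only produce an empty weak_ptr.
        void init() { mCb = std::make_shared<Callback>(weak_from_this()); }
        std::shared_ptr<Callback> mCb;
    };

    // usage: auto dev = std::make_shared<Device>(); dev->init();
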
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDeviceVendor.inc b/camera/ndk/ndk_vendor/impl/ACameraDeviceVendor.inc
index 8bd5a52..1e724eb 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDeviceVendor.inc
+++ b/camera/ndk/ndk_vendor/impl/ACameraDeviceVendor.inc
@@ -14,17 +14,14 @@
  * limitations under the License.
  */
 
-#include <vector>
-#include <inttypes.h>
-#include <android/frameworks/cameraservice/service/2.0/ICameraService.h>
-#include <android/frameworks/cameraservice/device/2.0/types.h>
-#include <CameraMetadata.h>
-
-#include "ndk_vendor/impl/ACameraDevice.h"
 #include "ACameraCaptureSession.h"
 #include "ACameraMetadata.h"
 #include "ACaptureRequest.h"
 #include "utils.h"
+#include <CameraMetadata.h>
+#include <inttypes.h>
+#include <ndk_vendor/impl/ACameraDevice.h>
+#include <vector>
 
 using namespace android;
 
@@ -32,22 +29,22 @@
 namespace acam {
 
 template<class T>
-camera_status_t
-CameraDevice::captureLocked(
+camera_status_t CameraDevice::captureLocked(
         sp<ACameraCaptureSession> session,
         /*optional*/T* cbs,
-        int numRequests, ACaptureRequest** requests,
+        int numRequests,
+        ACaptureRequest** requests,
         /*optional*/int* captureSequenceId) {
     return submitRequestsLocked(
             session, cbs, numRequests, requests, captureSequenceId, /*isRepeating*/false);
 }
 
 template<class T>
-camera_status_t
-CameraDevice::setRepeatingRequestsLocked(
+camera_status_t CameraDevice::setRepeatingRequestsLocked(
         sp<ACameraCaptureSession> session,
         /*optional*/T* cbs,
-        int numRequests, ACaptureRequest** requests,
+        int numRequests,
+        ACaptureRequest** requests,
         /*optional*/int* captureSequenceId) {
     return submitRequestsLocked(
             session, cbs, numRequests, requests, captureSequenceId, /*isRepeating*/true);
@@ -56,11 +53,10 @@
 template<class T>
 camera_status_t CameraDevice::submitRequestsLocked(
         sp<ACameraCaptureSession> session,
-        /*optional*/T* cbs,
-        int numRequests, ACaptureRequest** requests,
+        /*optional*/T* cbs, int numRequests,
+        ACaptureRequest** requests,
         /*out*/int* captureSequenceId,
-        bool isRepeating)
-{
+        bool isRepeating) {
     camera_status_t ret = checkCameraClosedOrErrorLocked();
     if (ret != ACAMERA_OK) {
         ALOGE("Camera %s submit capture request failed! ret %d", getId(), ret);
@@ -68,9 +64,10 @@
     }
 
     // Form two vectors of capture request, one for internal tracking
-    std::vector<frameworks::cameraservice::device::V2_0::CaptureRequest> requestList;
-    Vector<sp<CaptureRequest>> requestsV;
-    requestsV.setCapacity(numRequests);
+    std::vector<::aidl::android::frameworks::cameraservice::device::CaptureRequest> requestList;
+    std::vector<sp<CaptureRequest>> requestsV;
+    requestsV.reserve(numRequests);
     for (int i = 0; i < numRequests; i++) {
         sp<CaptureRequest> req;
         ret = allocateCaptureRequestLocked(requests[i], req);
@@ -87,7 +84,7 @@
             ALOGE("Capture request without output target cannot be submitted!");
             return ACAMERA_ERROR_INVALID_PARAMETER;
         }
-        requestList.push_back(utils::convertToHidl(req.get()));
+        requestList.push_back(utils::convertToAidl(req.get()));
         requestsV.push_back(req);
     }
     if (isRepeating) {
@@ -100,18 +97,20 @@
 
     SubmitInfo info;
     Status status;
-    auto remoteRet = mRemote->submitRequestList(requestList, isRepeating,
-                                                [&status, &info](auto s, auto &submitInfo) {
-                                                    status = s;
-                                                    info = submitInfo;
-                                                });
-    if (!remoteRet.isOk()) {
-        ALOGE("%s: Transaction error for submitRequestList call: %s", __FUNCTION__,
-              remoteRet.description().c_str());
+    ndk::ScopedAStatus remoteRet = mRemote->submitRequestList(requestList, isRepeating, &info);
+    if (!remoteRet.isOk()) {
+        if (remoteRet.getExceptionCode() == EX_SERVICE_SPECIFIC) {
+            Status errStatus = static_cast<Status>(remoteRet.getServiceSpecificError());
+            ALOGE("%s: submitRequestList call failed: %s",
+                  __FUNCTION__, toString(errStatus).c_str());
+            return utils::convertFromAidl(errStatus);
+        } else {
+            ALOGE("%s: Transaction error for submitRequestList call: %d", __FUNCTION__,
+                  remoteRet.getExceptionCode());
+            return ACAMERA_ERROR_UNKNOWN;
+        }
     }
-    if (status != Status::NO_ERROR) {
-        return utils::convertFromHidl(status);
-    }
+
     int32_t sequenceId = info.requestId;
     int64_t lastFrameNumber = info.lastFrameNumber;
     if (sequenceId < 0) {
diff --git a/camera/ndk/ndk_vendor/impl/ACameraManager.cpp b/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
index bb4ef56..3aa7817 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
@@ -17,28 +17,29 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "ACameraManagerVendor"
 
-#include <memory>
-#include "ndk_vendor/impl/ACameraManager.h"
 #include "ACameraMetadata.h"
 #include "ndk_vendor/impl/ACameraDevice.h"
+#include "ndk_vendor/impl/ACameraManager.h"
 #include "utils.h"
+
 #include <CameraMetadata.h>
-#include <camera_metadata_hidden.h>
-
-#include <utils/Vector.h>
-#include <cutils/properties.h>
-#include <stdlib.h>
-
 #include <VendorTagDescriptor.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+#include <camera_metadata_hidden.h>
+#include <cutils/properties.h>
+#include <memory>
+#include <utils/Vector.h>
 
 using namespace android::acam;
 
 namespace android {
 namespace acam {
 
-using frameworks::cameraservice::common::V2_0::ProviderIdAndVendorTagSections;
-using android::hardware::camera::common::V1_0::helper::VendorTagDescriptor;
-using android::hardware::camera::common::V1_0::helper::VendorTagDescriptorCache;
+using ::aidl::android::frameworks::cameraservice::common::ProviderIdAndVendorTagSections;
+using ::android::hardware::camera::common::V1_0::helper::VendorTagDescriptor;
+using ::android::hardware::camera::common::V1_0::helper::VendorTagDescriptorCache;
+using ::ndk::ScopedAStatus;
 
 // Static member definitions
 const char* CameraManagerGlobal::kCameraIdKey   = "CameraId";
@@ -47,52 +48,55 @@
 const char* CameraManagerGlobal::kContextKey    = "CallbackContext";
 const nsecs_t CameraManagerGlobal::kCallbackDrainTimeout = 5000000; // 5 ms
 Mutex                CameraManagerGlobal::sLock;
-CameraManagerGlobal* CameraManagerGlobal::sInstance = nullptr;
+std::weak_ptr<CameraManagerGlobal> CameraManagerGlobal::sInstance =
+        std::weak_ptr<CameraManagerGlobal>();
 
 /**
- * The vendor tag descriptor class that takes HIDL vendor tag information as
+ * The vendor tag descriptor class that takes AIDL vendor tag information as
  * input. Not part of vendor available VendorTagDescriptor class because that class is used by
  * default HAL implementation code as well.
+ *
+ * This is a class instead of a free-standing function because VendorTagDescriptor has some
+ * protected fields that need to be initialized during conversion.
  */
-class HidlVendorTagDescriptor : public VendorTagDescriptor {
+class AidlVendorTagDescriptor : public VendorTagDescriptor {
 public:
     /**
-     * Create a VendorTagDescriptor object from the HIDL VendorTagSection
+     * Create a VendorTagDescriptor object from the AIDL VendorTagSection
      * vector.
      *
      * Returns OK on success, or a negative error code.
      */
-    static status_t createDescriptorFromHidl(const hidl_vec<VendorTagSection>& vts,
+    static status_t createDescriptorFromAidl(const std::vector<VendorTagSection>& vts,
                                              /*out*/ sp<VendorTagDescriptor> *descriptor);
 };
 
-status_t HidlVendorTagDescriptor::createDescriptorFromHidl(const hidl_vec<VendorTagSection> &vts,
-                                                           sp<VendorTagDescriptor> *descriptor) {
-    int tagCount = 0;
+status_t AidlVendorTagDescriptor::createDescriptorFromAidl(const std::vector<VendorTagSection>& vts,
+                                                           sp<VendorTagDescriptor>* descriptor) {
+    size_t tagCount = 0;
 
     for (size_t s = 0; s < vts.size(); s++) {
         tagCount += vts[s].tags.size();
     }
 
     if (tagCount < 0 || tagCount > INT32_MAX) {
-        ALOGE("%s: tag count %d from vendor tag sections is invalid.", __FUNCTION__, tagCount);
+        ALOGE("%s: tag count %zu from vendor tag sections is invalid.", __FUNCTION__, tagCount);
         return BAD_VALUE;
     }
 
-    Vector<uint32_t> tagArray;
-    LOG_ALWAYS_FATAL_IF(tagArray.resize(tagCount) != tagCount,
-            "%s: too many (%u) vendor tags defined.", __FUNCTION__, tagCount);
+    std::vector<int64_t> tagArray;
+    tagArray.resize(tagCount);
 
-    sp<HidlVendorTagDescriptor> desc = new HidlVendorTagDescriptor();
+    sp<AidlVendorTagDescriptor> desc = new AidlVendorTagDescriptor();
     desc->mTagCount = tagCount;
 
-    KeyedVector<uint32_t, String8> tagToSectionMap;
+    std::map<int64_t, std::string> tagToSectionMap;
 
     int idx = 0;
     for (size_t s = 0; s < vts.size(); s++) {
         const VendorTagSection& section = vts[s];
         const char *sectionName = section.sectionName.c_str();
-        if (sectionName == NULL) {
+        if (sectionName == nullptr) {
             ALOGE("%s: no section name defined for vendor tag section %zu.", __FUNCTION__, s);
             return BAD_VALUE;
         }
@@ -106,15 +110,15 @@
                 return BAD_VALUE;
             }
 
-            tagArray.editItemAt(idx++) = section.tags[j].tagId;
+            tagArray[idx++] = section.tags[j].tagId;
 
             const char *tagName = section.tags[j].tagName.c_str();
-            if (tagName == NULL) {
+            if (tagName == nullptr) {
                 ALOGE("%s: no tag name defined for vendor tag %d.", __FUNCTION__, tag);
                 return BAD_VALUE;
             }
             desc->mTagToNameMap.add(tag, String8(tagName));
-            tagToSectionMap.add(tag, sectionString);
+            tagToSectionMap.insert({tag, section.sectionName});
 
             int tagType = (int) section.tags[j].tagType;
             if (tagType < 0 || tagType >= NUM_TYPES) {
@@ -127,8 +131,12 @@
 
     for (size_t i = 0; i < tagArray.size(); ++i) {
         uint32_t tag = tagArray[i];
-        String8 sectionString = tagToSectionMap.valueFor(tag);
-
+        auto itr = tagToSectionMap.find(tag);
+        if (itr == tagToSectionMap.end()) {
+            ALOGE("%s: Couldn't find previously added tag in map.", __FUNCTION__);
+            return UNKNOWN_ERROR;
+        }
+        String8 sectionString = String8(itr->second.c_str());
         // Set up tag to section index map
         ssize_t index = desc->mSections.indexOf(sectionString);
         LOG_ALWAYS_FATAL_IF(index < 0, "index %zd must be non-negative", index);
@@ -147,38 +155,37 @@
     return OK;
 }
 
-CameraManagerGlobal&
-CameraManagerGlobal::getInstance() {
+std::shared_ptr<CameraManagerGlobal> CameraManagerGlobal::getInstance() {
     Mutex::Autolock _l(sLock);
-    CameraManagerGlobal* instance = sInstance;
+    std::shared_ptr<CameraManagerGlobal> instance = sInstance.lock();
     if (instance == nullptr) {
-        instance = new CameraManagerGlobal();
+        instance = std::make_shared<CameraManagerGlobal>();
         sInstance = instance;
     }
-    return *instance;
+    return instance;
 }
 
 CameraManagerGlobal::~CameraManagerGlobal() {
-    // clear sInstance so next getInstance call knows to create a new one
     Mutex::Autolock _sl(sLock);
-    sInstance = nullptr;
     Mutex::Autolock _l(mLock);
     if (mCameraService != nullptr) {
-        mCameraService->unlinkToDeath(mDeathNotifier);
+        AIBinder_unlinkToDeath(mCameraService->asBinder().get(),
+                               mDeathRecipient.get(), this);
         auto stat = mCameraService->removeListener(mCameraServiceListener);
         if (!stat.isOk()) {
-            ALOGE("Failed to remove listener to camera service %s", stat.description().c_str());
+            ALOGE("Failed to remove listener to camera service %d:%d", stat.getExceptionCode(),
+                  stat.getServiceSpecificError());
         }
     }
-    mDeathNotifier.clear();
+
     if (mCbLooper != nullptr) {
         mCbLooper->unregisterHandler(mHandler->id());
         mCbLooper->stop();
     }
     mCbLooper.clear();
     mHandler.clear();
-    mCameraServiceListener.clear();
-    mCameraService.clear();
+    mCameraServiceListener.reset();
+    mCameraService.reset();
 }
 
 static bool isCameraServiceDisabled() {
@@ -191,23 +198,28 @@
     sp<VendorTagDescriptorCache> tagCache = new VendorTagDescriptorCache();
     Status status = Status::NO_ERROR;
     std::vector<ProviderIdAndVendorTagSections> providerIdsAndVts;
-    auto remoteRet = mCameraService->getCameraVendorTagSections([&status, &providerIdsAndVts]
-                                                                 (Status s,
-                                                                  auto &IdsAndVts) {
-                                                         status = s;
-                                                         providerIdsAndVts = IdsAndVts; });
+    ScopedAStatus remoteRet = mCameraService->getCameraVendorTagSections(&providerIdsAndVts);
 
-    if (!remoteRet.isOk() || status != Status::NO_ERROR) {
-        ALOGE("Failed to retrieve VendorTagSections %s", remoteRet.description().c_str());
+    if (!remoteRet.isOk()) {
+        if (remoteRet.getExceptionCode() == EX_SERVICE_SPECIFIC) {
+            Status errStatus = static_cast<Status>(remoteRet.getServiceSpecificError());
+            ALOGE("%s: Failed to retrieve VendorTagSections %s",
+                __FUNCTION__, toString(errStatus).c_str());
+        } else {
+            ALOGE("%s: Binder error when retrieving VendorTagSections: %d", __FUNCTION__,
+                remoteRet.getExceptionCode());
+        }
         return false;
     }
+
     // Convert each providers VendorTagSections into a VendorTagDescriptor and
     // add it to the cache
     for (auto &providerIdAndVts : providerIdsAndVts) {
         sp<VendorTagDescriptor> vendorTagDescriptor;
-        if (HidlVendorTagDescriptor::createDescriptorFromHidl(providerIdAndVts.vendorTagSections,
-                                                              &vendorTagDescriptor) != OK) {
-            ALOGE("Failed to convert from Hidl: VendorTagDescriptor");
+        status_t ret = AidlVendorTagDescriptor::createDescriptorFromAidl(
+                providerIdAndVts.vendorTagSections, &vendorTagDescriptor);
+        if (ret != OK) {
+            ALOGE("Failed to convert VendorTagDescriptor from AIDL: %d", ret);
             return false;
         }
         tagCache->addVendorDescriptor(providerIdAndVts.providerId, vendorTagDescriptor);
@@ -216,101 +228,125 @@
     return true;
 }
 
-sp<ICameraService> CameraManagerGlobal::getCameraService() {
+std::shared_ptr<ICameraService> CameraManagerGlobal::getCameraService() {
     Mutex::Autolock _l(mLock);
-    if (mCameraService.get() == nullptr) {
-        if (isCameraServiceDisabled()) {
-            return mCameraService;
-        }
 
-        sp<ICameraService> cameraServiceBinder;
-        do {
-            cameraServiceBinder = ICameraService::getService();
-            if (cameraServiceBinder != nullptr) {
-                break;
-            }
-            ALOGW("CameraService not published, waiting...");
-            usleep(kCameraServicePollDelay);
-        } while(true);
-        if (mDeathNotifier == nullptr) {
-            mDeathNotifier = new DeathNotifier(this);
-        }
-        cameraServiceBinder->linkToDeath(mDeathNotifier, 0);
-        mCameraService = cameraServiceBinder;
+    if (mCameraService != nullptr) {
+        // Camera service already set up. Return existing value.
+        return mCameraService;
+    }
 
-        // Setup looper thread to perfrom availiability callbacks
-        if (mCbLooper == nullptr) {
-            mCbLooper = new ALooper;
-            mCbLooper->setName("C2N-mgr-looper");
-            status_t err = mCbLooper->start(
-                    /*runOnCallingThread*/false,
-                    /*canCallJava*/       true,
-                    PRIORITY_DEFAULT);
-            if (err != OK) {
-                ALOGE("%s: Unable to start camera service listener looper: %s (%d)",
-                        __FUNCTION__, strerror(-err), err);
-                mCbLooper.clear();
-                return nullptr;
-            }
-            if (mHandler == nullptr) {
-                mHandler = new CallbackHandler(this);
-            }
-            mCbLooper->registerHandler(mHandler);
-        }
+    if (isCameraServiceDisabled()) {
+        // Camera service is disabled. Return nullptr.
+        return mCameraService;
+    }
 
-        // register ICameraServiceListener
-        if (mCameraServiceListener == nullptr) {
-            mCameraServiceListener = new CameraServiceListener(this);
-        }
-        hidl_vec<frameworks::cameraservice::service::V2_1::CameraStatusAndId> cameraStatuses{};
-        Status status = Status::NO_ERROR;
-        auto remoteRet = mCameraService->addListener_2_1(mCameraServiceListener,
-                                                     [&status, &cameraStatuses](Status s,
-                                                                                auto &retStatuses) {
-                                                         status = s;
-                                                         cameraStatuses = retStatuses;
-                                                     });
-        if (!remoteRet.isOk() || status != Status::NO_ERROR) {
-            ALOGE("Failed to add listener to camera service %s", remoteRet.description().c_str());
-        }
+    std::string serviceName = ICameraService::descriptor;
+    serviceName += "/default";
 
-        // Setup vendor tags
-        if (!setupVendorTags()) {
-            ALOGE("Unable to set up vendor tags");
+    bool isDeclared = AServiceManager_isDeclared(serviceName.c_str());
+    if (!isDeclared) {
+        ALOGE("%s: No ICameraService instance declared: %s", __FUNCTION__, serviceName.c_str());
+        return nullptr;
+    }
+
+    // Before doing anything else, make sure there is a binder threadpool alive.
+    // This is a no-op if the binder threadpool was already started by this process.
+    ABinderProcess_startThreadPool();
+
+    std::shared_ptr<ICameraService> cameraService =
+            ICameraService::fromBinder(ndk::SpAIBinder(
+                    AServiceManager_waitForService(serviceName.c_str())));
+    if (cameraService == nullptr) {
+        ALOGE("%s: Could not get ICameraService instance.", __FUNCTION__);
+        return nullptr;
+    }
+
+    if (mDeathRecipient.get() == nullptr) {
+        mDeathRecipient = ndk::ScopedAIBinder_DeathRecipient(
+                AIBinder_DeathRecipient_new(CameraManagerGlobal::binderDeathCallback));
+    }
+    AIBinder_linkToDeath(cameraService->asBinder().get(),
+                         mDeathRecipient.get(), /*cookie=*/ this);
+
+    mCameraService = cameraService;
+
+    // Setup looper thread to perform availability callbacks
+    if (mCbLooper == nullptr) {
+        mCbLooper = new ALooper;
+        mCbLooper->setName("C2N-mgr-looper");
+        status_t err = mCbLooper->start(
+                /*runOnCallingThread*/false,
+                /*canCallJava*/       true,
+                PRIORITY_DEFAULT);
+        if (err != OK) {
+            ALOGE("%s: Unable to start camera service listener looper: %s (%d)",
+                    __FUNCTION__, strerror(-err), err);
+            mCbLooper.clear();
             return nullptr;
         }
+        if (mHandler == nullptr) {
+            mHandler = new CallbackHandler(weak_from_this());
+        }
+        mCbLooper->registerHandler(mHandler);
+    }
 
-        for (auto& c : cameraStatuses) {
-            onStatusChangedLocked(c.v2_0);
+    // register ICameraServiceListener
+    if (mCameraServiceListener == nullptr) {
+        mCameraServiceListener = ndk::SharedRefBase::make<CameraServiceListener>(weak_from_this());
+    }
 
-            for (auto& unavailablePhysicalId : c.unavailPhysicalCameraIds) {
-                PhysicalCameraStatusAndId statusAndId;
-                statusAndId.deviceStatus = CameraDeviceStatus::STATUS_NOT_PRESENT;
-                statusAndId.cameraId = c.v2_0.cameraId;
-                statusAndId.physicalCameraId = unavailablePhysicalId;
-                onStatusChangedLocked(statusAndId);
-            }
+    std::vector<CameraStatusAndId> cameraStatuses;
+    ScopedAStatus remoteRet = mCameraService->addListener(mCameraServiceListener,
+                                                          &cameraStatuses);
+
+    if (!remoteRet.isOk()) {
+        if (remoteRet.getExceptionCode() == EX_SERVICE_SPECIFIC) {
+            Status errStatus = static_cast<Status>(remoteRet.getServiceSpecificError());
+            ALOGE("%s: Failed to add listener to camera service: %s", __FUNCTION__,
+                toString(errStatus).c_str());
+        } else {
+            ALOGE("%s: Transaction failed when adding listener to camera service: %d",
+                __FUNCTION__, remoteRet.getExceptionCode());
+        }
+    }
+
+    // Setup vendor tags
+    if (!setupVendorTags()) {
+        ALOGE("Unable to set up vendor tags");
+        return nullptr;
+    }
+
+    for (auto& csi : cameraStatuses) {
+        onStatusChangedLocked(csi.deviceStatus, csi.cameraId);
+
+        for (auto& unavailablePhysicalId : csi.unavailPhysicalCameraIds) {
+            onStatusChangedLocked(CameraDeviceStatus::STATUS_NOT_PRESENT,
+                                  csi.cameraId, unavailablePhysicalId);
         }
     }
     return mCameraService;
 }
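
Note: getCameraService() above replaces the HIDL getService() polling loop with the stable AIDL service-manager calls. A condensed sketch of that lookup, using a placeholder interface IFoo (the camera code applies the same calls to ICameraService):

    #include <android/binder_manager.h>
    #include <android/binder_process.h>

    std::shared_ptr<IFoo> connectToFoo() {
        std::string instance = std::string(IFoo::descriptor) + "/default";
        if (!AServiceManager_isDeclared(instance.c_str())) {
            return nullptr;  // not declared in the device manifest
        }
        ABinderProcess_startThreadPool();  // needed so incoming binder callbacks can run
        return IFoo::fromBinder(ndk::SpAIBinder(
                AServiceManager_waitForService(instance.c_str())));
    }
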
 
-void CameraManagerGlobal::DeathNotifier::serviceDied(uint64_t cookie, const wp<IBase> &who) {
-    (void) cookie;
-    (void) who;
+void CameraManagerGlobal::binderDeathCallback(void* /*cookie*/) {
+    AutoMutex _l(sLock);
+
     ALOGE("Camera service binderDied!");
-    sp<CameraManagerGlobal> cm = mCameraManager.promote();
-    if (cm != nullptr) {
-        AutoMutex lock(cm->mLock);
-        for (auto& pair : cm->mDeviceStatusMap) {
-            CameraStatusAndId cameraStatusAndId;
-            cameraStatusAndId.cameraId = pair.first;
-            cameraStatusAndId.deviceStatus = pair.second.getStatus();
-            cm->onStatusChangedLocked(cameraStatusAndId);
-        }
-        cm->mCameraService.clear();
-        // TODO: consider adding re-connect call here?
+    std::shared_ptr<CameraManagerGlobal> instance = sInstance.lock();
+    if (instance == nullptr) {
+        return;
     }
+
+    // Remove cameraService from the static instance
+    AutoMutex lock(instance->mLock);
+    for (auto& pair : instance->mDeviceStatusMap) {
+        const auto &cameraId = pair.first;
+        const auto &deviceStatus = pair.second.getStatus();
+        instance->onStatusChangedLocked(deviceStatus, cameraId);
+    }
+    instance->mCameraService.reset();
+    // TODO: consider adding re-connect call here?
 }
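
Note: the HIDL DeathNotifier is replaced by the C-style AIBinder death-recipient API used in getCameraService() and binderDeathCallback() above. A reduced sketch of the registration half (the libbinder_ndk calls are real; the function names and cookie are illustrative):

    #include <android/binder_auto_utils.h>
    #include <android/binder_ibinder.h>

    static void onServiceDied(void* cookie) {
        // `cookie` is whatever was passed to AIBinder_linkToDeath below.
    }

    // The returned recipient must be kept alive (CameraManagerGlobal stores it as
    // mDeathRecipient); destroying it unlinks the notification.
    ndk::ScopedAIBinder_DeathRecipient watchBinder(const ndk::SpAIBinder& binder, void* cookie) {
        ndk::ScopedAIBinder_DeathRecipient recipient(
                AIBinder_DeathRecipient_new(onServiceDied));
        AIBinder_linkToDeath(binder.get(), recipient.get(), cookie);
        return recipient;
    }
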
 
 void CameraManagerGlobal::registerAvailabilityCallback(
@@ -362,41 +398,45 @@
 void CameraManagerGlobal::registerAvailCallback(const T *callback) {
     Mutex::Autolock _l(mLock);
     Callback cb(callback);
-    auto pair = mCallbacks.insert(cb);
+    auto res = mCallbacks.insert(cb);
+    if (!res.second) {
+        ALOGE("%s: Failed to register callback. Couldn't insert in map.", __FUNCTION__);
+        return;
+    }
     // Send initial callbacks if callback is newly registered
-    if (pair.second) {
-        for (auto& pair : mDeviceStatusMap) {
-            const hidl_string& cameraId = pair.first;
-            CameraDeviceStatus status = pair.second.getStatus();
+    for (auto& pair : mDeviceStatusMap) {
+        const std::string& cameraId = pair.first;
+        CameraDeviceStatus status = pair.second.getStatus();
 
+        {
             // Camera available/unavailable callback
             sp<AMessage> msg = new AMessage(kWhatSendSingleCallback, mHandler);
             ACameraManager_AvailabilityCallback cbFunc = isStatusAvailable(status) ?
-                    cb.mAvailable : cb.mUnavailable;
+                                                         cb.mAvailable : cb.mUnavailable;
             msg->setPointer(kCallbackFpKey, (void *) cbFunc);
             msg->setPointer(kContextKey, cb.mContext);
             msg->setString(kCameraIdKey, AString(cameraId.c_str()));
             mPendingCallbackCnt++;
             msg->post();
+        }
 
-            // Physical camera unavailable callback
-            std::set<hidl_string> unavailPhysicalIds = pair.second.getUnavailablePhysicalIds();
-            for (const auto& physicalCameraId : unavailPhysicalIds) {
-                sp<AMessage> msg = new AMessage(kWhatSendSinglePhysicalCameraCallback, mHandler);
-                ACameraManager_PhysicalCameraAvailabilityCallback cbFunc =
-                        cb.mPhysicalCamUnavailable;
-                msg->setPointer(kCallbackFpKey, (void *) cbFunc);
-                msg->setPointer(kContextKey, cb.mContext);
-                msg->setString(kCameraIdKey, AString(cameraId.c_str()));
-                msg->setString(kPhysicalCameraIdKey, AString(physicalCameraId.c_str()));
-                mPendingCallbackCnt++;
-                msg->post();
-            }
+        // Physical camera unavailable callback
+        std::set<std::string> unavailPhysicalIds = pair.second.getUnavailablePhysicalIds();
+        for (const auto& physicalCameraId : unavailPhysicalIds) {
+            sp<AMessage> msg = new AMessage(kWhatSendSinglePhysicalCameraCallback, mHandler);
+            ACameraManager_PhysicalCameraAvailabilityCallback cbFunc =
+                    cb.mPhysicalCamUnavailable;
+            msg->setPointer(kCallbackFpKey, (void *) cbFunc);
+            msg->setPointer(kContextKey, cb.mContext);
+            msg->setString(kCameraIdKey, AString(cameraId.c_str()));
+            msg->setString(kPhysicalCameraIdKey, AString(physicalCameraId.c_str()));
+            mPendingCallbackCnt++;
+            msg->post();
         }
     }
 }
 
-void CameraManagerGlobal::getCameraIdList(std::vector<hidl_string>* cameraIds) {
+void CameraManagerGlobal::getCameraIdList(std::vector<std::string>* cameraIds) {
     // Ensure that we have initialized/refreshed the list of available devices
     auto cs = getCameraService();
     Mutex::Autolock _l(mLock);
@@ -507,33 +547,31 @@
 }
 
 void CameraManagerGlobal::CallbackHandler::notifyParent() {
-    sp<CameraManagerGlobal> parent = mParent.promote();
+    std::shared_ptr<CameraManagerGlobal> parent = mParent.lock();
     if (parent != nullptr) {
         parent->onCallbackCalled();
     }
 }
 
-hardware::Return<void> CameraManagerGlobal::CameraServiceListener::onStatusChanged(
-        const CameraStatusAndId &statusAndId) {
-    sp<CameraManagerGlobal> cm = mCameraManager.promote();
+ScopedAStatus CameraManagerGlobal::CameraServiceListener::onStatusChanged(
+        CameraDeviceStatus status, const std::string &cameraId) {
+    std::shared_ptr<CameraManagerGlobal> cm = mCameraManager.lock();
     if (cm != nullptr) {
-        cm->onStatusChanged(statusAndId);
+        cm->onStatusChanged(status, cameraId);
     } else {
         ALOGE("Cannot deliver status change. Global camera manager died");
     }
-    return Void();
+    return ScopedAStatus::ok();
 }
 
 void CameraManagerGlobal::onStatusChanged(
-        const CameraStatusAndId &statusAndId) {
+        const CameraDeviceStatus &status, const std::string &cameraId) {
     Mutex::Autolock _l(mLock);
-    onStatusChangedLocked(statusAndId);
+    onStatusChangedLocked(status, cameraId);
 }
 
 void CameraManagerGlobal::onStatusChangedLocked(
-        const CameraStatusAndId &statusAndId) {
-    hidl_string cameraId = statusAndId.cameraId;
-    CameraDeviceStatus status = statusAndId.deviceStatus;
+        const CameraDeviceStatus &status, const std::string &cameraId) {
     if (!validStatus(status)) {
         ALOGE("%s: Invalid status %d", __FUNCTION__, status);
         return;
@@ -567,28 +605,28 @@
     }
 }
 
-hardware::Return<void> CameraManagerGlobal::CameraServiceListener::onPhysicalCameraStatusChanged(
-        const PhysicalCameraStatusAndId &statusAndId) {
-    sp<CameraManagerGlobal> cm = mCameraManager.promote();
+ScopedAStatus CameraManagerGlobal::CameraServiceListener::onPhysicalCameraStatusChanged(
+        CameraDeviceStatus in_status, const std::string& in_cameraId,
+        const std::string& in_physicalCameraId) {
+    std::shared_ptr<CameraManagerGlobal> cm = mCameraManager.lock();
     if (cm != nullptr) {
-        cm->onStatusChanged(statusAndId);
+        cm->onStatusChanged(in_status, in_cameraId, in_physicalCameraId);
     } else {
         ALOGE("Cannot deliver status change. Global camera manager died");
     }
-    return Void();
+    return ScopedAStatus::ok();
 }
 
 void CameraManagerGlobal::onStatusChanged(
-        const PhysicalCameraStatusAndId &statusAndId) {
+        const CameraDeviceStatus &status, const std::string& cameraId,
+        const std::string& physicalCameraId) {
     Mutex::Autolock _l(mLock);
-    onStatusChangedLocked(statusAndId);
+    onStatusChangedLocked(status, cameraId, physicalCameraId);
 }
 
 void CameraManagerGlobal::onStatusChangedLocked(
-        const PhysicalCameraStatusAndId &statusAndId) {
-    hidl_string cameraId = statusAndId.cameraId;
-    hidl_string physicalCameraId = statusAndId.physicalCameraId;
-    CameraDeviceStatus status = statusAndId.deviceStatus;
+        const CameraDeviceStatus &status, const std::string& cameraId,
+        const std::string& physicalCameraId) {
     if (!validStatus(status)) {
         ALOGE("%s: Invalid status %d", __FUNCTION__, status);
         return;
@@ -642,20 +680,20 @@
 }
 
 bool CameraManagerGlobal::CameraStatus::addUnavailablePhysicalId(
-        const hidl_string& physicalCameraId) {
+        const std::string& physicalCameraId) {
     std::lock_guard<std::mutex> lock(mLock);
     auto result = unavailablePhysicalIds.insert(physicalCameraId);
     return result.second;
 }
 
 bool CameraManagerGlobal::CameraStatus::removeUnavailablePhysicalId(
-        const hidl_string& physicalCameraId) {
+        const std::string& physicalCameraId) {
     std::lock_guard<std::mutex> lock(mLock);
     auto count = unavailablePhysicalIds.erase(physicalCameraId);
     return count > 0;
 }
 
-std::set<hidl_string> CameraManagerGlobal::CameraStatus::getUnavailablePhysicalIds() {
+std::set<std::string> CameraManagerGlobal::CameraStatus::getUnavailablePhysicalIds() {
     std::lock_guard<std::mutex> lock(mLock);
     return unavailablePhysicalIds;
 }
@@ -666,16 +704,15 @@
 /**
  * ACameraManger Implementation
  */
-camera_status_t
-ACameraManager::getCameraIdList(ACameraIdList** cameraIdList) {
+camera_status_t ACameraManager::getCameraIdList(ACameraIdList** cameraIdList) {
     Mutex::Autolock _l(mLock);
 
-    std::vector<hidl_string> idList;
-    CameraManagerGlobal::getInstance().getCameraIdList(&idList);
+    std::vector<std::string> idList;
+    CameraManagerGlobal::getInstance()->getCameraIdList(&idList);
 
     int numCameras = idList.size();
     ACameraIdList *out = new ACameraIdList;
-    if (!out) {
+    if (out == nullptr) {
         ALOGE("Allocate memory for ACameraIdList failed!");
         return ACAMERA_ERROR_NOT_ENOUGH_MEMORY;
     }
@@ -717,33 +754,37 @@
     }
 }
 
-camera_status_t ACameraManager::getCameraCharacteristics(
-        const char *cameraIdStr, sp<ACameraMetadata> *characteristics) {
+camera_status_t ACameraManager::getCameraCharacteristics(const char *cameraIdStr,
+                                                         sp<ACameraMetadata> *characteristics) {
+    using AidlCameraMetadata = ::aidl::android::frameworks::cameraservice::device::CameraMetadata;
     Mutex::Autolock _l(mLock);
 
-    sp<ICameraService> cs = CameraManagerGlobal::getInstance().getCameraService();
+    std::shared_ptr<ICameraService> cs = CameraManagerGlobal::getInstance()->getCameraService();
     if (cs == nullptr) {
         ALOGE("%s: Cannot reach camera service!", __FUNCTION__);
         return ACAMERA_ERROR_CAMERA_DISCONNECTED;
     }
-    CameraMetadata rawMetadata;
-    Status status = Status::NO_ERROR;
-    auto serviceRet =
-        cs->getCameraCharacteristics(cameraIdStr,
-                                     [&status, &rawMetadata] (auto s ,
-                                                              const hidl_vec<uint8_t> &metadata) {
-                                          status = s;
-                                          if (status == Status::NO_ERROR) {
-                                              utils::convertFromHidlCloned(metadata, &rawMetadata);
-                                          }
-                                     });
-    if (!serviceRet.isOk() || status != Status::NO_ERROR) {
-        ALOGE("Get camera characteristics from camera service failed");
+    AidlCameraMetadata rawMetadata;
+    ScopedAStatus serviceRet = cs->getCameraCharacteristics(cameraIdStr, &rawMetadata);
+
+    if (!serviceRet.isOk()) {
+        if (serviceRet.getExceptionCode() == EX_SERVICE_SPECIFIC) {
+            Status errStatus = static_cast<Status>(serviceRet.getServiceSpecificError());
+            ALOGE("%s: Get camera characteristics from camera service failed: %s",
+                __FUNCTION__, toString(errStatus).c_str());
+        } else {
+            ALOGE("%s: Transaction error when getting camera "
+                  "characteristics from camera service: %d",
+                __FUNCTION__, serviceRet.getExceptionCode());
+        }
         return ACAMERA_ERROR_UNKNOWN; // should not reach here
     }
 
-    *characteristics = new ACameraMetadata(
-            rawMetadata.release(), ACameraMetadata::ACM_CHARACTERISTICS);
+    camera_metadata_t* metadataBuffer;
+    ::android::acam::utils::cloneFromAidl(rawMetadata, &metadataBuffer);
+
+    *characteristics = new ACameraMetadata(metadataBuffer,
+                                           ACameraMetadata::ACM_CHARACTERISTICS);
     return ACAMERA_OK;
 }
 
@@ -763,42 +804,41 @@
 
     ACameraDevice* device = new ACameraDevice(cameraId, callback, std::move(rawChars));
 
-    sp<ICameraService> cs = CameraManagerGlobal::getInstance().getCameraService();
+    std::shared_ptr<ICameraService> cs = CameraManagerGlobal::getInstance()->getCameraService();
     if (cs == nullptr) {
         ALOGE("%s: Cannot reach camera service!", __FUNCTION__);
         delete device;
         return ACAMERA_ERROR_CAMERA_DISCONNECTED;
     }
 
-    sp<ICameraDeviceCallback> callbacks = device->getServiceCallback();
-    sp<ICameraDeviceUser_2_0> deviceRemote_2_0;
+    std::shared_ptr<BnCameraDeviceCallback> deviceCallback = device->getServiceCallback();
+    std::shared_ptr<ICameraDeviceUser> deviceRemote;
 
     // No way to get package name from native.
     // Send a zero length package name and let camera service figure it out from UID
-    Status status = Status::NO_ERROR;
-    auto serviceRet = cs->connectDevice(
-            callbacks, cameraId, [&status, &deviceRemote_2_0](auto s, auto &device) {
-                                     status = s;
-                                     deviceRemote_2_0 = device;
-                                 });
-
-    if (!serviceRet.isOk() || status != Status::NO_ERROR) {
-        ALOGE("%s: connect camera device failed", __FUNCTION__);
-        delete device;
-        return utils::convertFromHidl(status);
+    ScopedAStatus serviceRet = cs->connectDevice(deviceCallback,
+                                                 std::string(cameraId), &deviceRemote);
+    if (!serviceRet.isOk()) {
+        if (serviceRet.getExceptionCode() == EX_SERVICE_SPECIFIC) {
+            Status errStatus = static_cast<Status>(serviceRet.getServiceSpecificError());
+            ALOGE("%s: connect camera device failed: %s",
+                  __FUNCTION__, toString(errStatus).c_str());
+            delete device;
+            return utils::convertFromAidl(errStatus);
+        } else {
+            ALOGE("%s: Transaction failed when connecting camera device: %d",
+                __FUNCTION__, serviceRet.getExceptionCode());
+            delete device;
+            return ACAMERA_ERROR_UNKNOWN;
+        }
     }
-    if (deviceRemote_2_0 == nullptr) {
+
+    if (deviceRemote == nullptr) {
         ALOGE("%s: connect camera device failed! remote device is null", __FUNCTION__);
         delete device;
         return ACAMERA_ERROR_CAMERA_DISCONNECTED;
     }
-    auto castResult = ICameraDeviceUser::castFrom(deviceRemote_2_0);
-    if (!castResult.isOk()) {
-        ALOGE("%s: failed to cast remote device to version 2.1", __FUNCTION__);
-        delete device;
-        return ACAMERA_ERROR_CAMERA_DISCONNECTED;
-    }
-    sp<ICameraDeviceUser> deviceRemote = castResult;
+
     device->setRemoteDevice(deviceRemote);
     device->setDeviceMetadataQueues();
     *outDevice = device;
@@ -821,7 +861,7 @@
     sp<VendorTagDescriptorCache> vtCache = VendorTagDescriptorCache::getGlobalVendorTagCache();
     sp<VendorTagDescriptor> vTags = nullptr;
     vtCache->getVendorTagDescriptor(vendorTagId, &vTags);
-    status_t status= metadata.getTagFromName(name, vTags.get(), tag);
+    status_t status = CameraMetadata::getTagFromName(name, vTags.get(), tag);
     return status == OK ? ACAMERA_OK : ACAMERA_ERROR_METADATA_NOT_FOUND;
 }
 
diff --git a/camera/ndk/ndk_vendor/impl/ACameraManager.h b/camera/ndk/ndk_vendor/impl/ACameraManager.h
index 4663529..85acee7 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraManager.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraManager.h
@@ -17,39 +17,35 @@
 #ifndef _ACAMERA_MANAGER_H
 #define _ACAMERA_MANAGER_H
 
-#include <camera/NdkCameraManager.h>
-
-#include <android-base/parseint.h>
-#include <android/frameworks/cameraservice/service/2.0/ICameraService.h>
-#include <android/frameworks/cameraservice/service/2.1/ICameraService.h>
-#include <android/frameworks/cameraservice/service/2.2/ICameraService.h>
-#include <android/frameworks/cameraservice/service/2.1/ICameraServiceListener.h>
-
 #include <CameraMetadata.h>
-#include <utils/StrongPointer.h>
-#include <utils/Mutex.h>
-
-#include <media/stagefright/foundation/ALooper.h>
-#include <media/stagefright/foundation/AHandler.h>
-#include <media/stagefright/foundation/AMessage.h>
-
-#include <set>
+#include <aidl/android/frameworks/cameraservice/common/Status.h>
+#include <aidl/android/frameworks/cameraservice/common/VendorTag.h>
+#include <aidl/android/frameworks/cameraservice/common/VendorTagSection.h>
+#include <aidl/android/frameworks/cameraservice/service/BnCameraServiceListener.h>
+#include <aidl/android/frameworks/cameraservice/service/CameraDeviceStatus.h>
+#include <aidl/android/frameworks/cameraservice/service/CameraStatusAndId.h>
+#include <aidl/android/frameworks/cameraservice/service/ICameraService.h>
+#include <android-base/parseint.h>
+#include <camera/NdkCameraManager.h>
 #include <map>
+#include <media/stagefright/foundation/AHandler.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <set>
+#include <utility>
+#include <utils/Mutex.h>
+#include <utils/StrongPointer.h>
 
 namespace android {
 namespace acam {
 
-using ICameraService = frameworks::cameraservice::service::V2_2::ICameraService;
-using CameraDeviceStatus = frameworks::cameraservice::service::V2_0::CameraDeviceStatus;
-using ICameraServiceListener = frameworks::cameraservice::service::V2_1::ICameraServiceListener;
-using PhysicalCameraStatusAndId = frameworks::cameraservice::service::V2_1::PhysicalCameraStatusAndId;
-using CameraStatusAndId = frameworks::cameraservice::service::V2_0::CameraStatusAndId;
-using Status = frameworks::cameraservice::common::V2_0::Status;
-using VendorTagSection = frameworks::cameraservice::common::V2_0::VendorTagSection;
-using VendorTag = frameworks::cameraservice::common::V2_0::VendorTag;
-using IBase = android::hidl::base::V1_0::IBase;
-using android::hardware::hidl_string;
-using hardware::Void;
+using ::aidl::android::frameworks::cameraservice::common::Status;
+using ::aidl::android::frameworks::cameraservice::common::VendorTag;
+using ::aidl::android::frameworks::cameraservice::common::VendorTagSection;
+using ::aidl::android::frameworks::cameraservice::service::BnCameraServiceListener;
+using ::aidl::android::frameworks::cameraservice::service::CameraDeviceStatus;
+using ::aidl::android::frameworks::cameraservice::service::CameraStatusAndId;
+using ::aidl::android::frameworks::cameraservice::service::ICameraService;
 
 /**
  * Per-process singleton instance of CameraManager. Shared by all ACameraManager
@@ -58,15 +54,18 @@
  *
  * TODO: maybe CameraManagerGlobal is better suited in libcameraclient?
  */
-class CameraManagerGlobal final : public RefBase {
+class CameraManagerGlobal final: public std::enable_shared_from_this<CameraManagerGlobal> {
   public:
-    static CameraManagerGlobal& getInstance();
-    sp<ICameraService> getCameraService();
+    static std::shared_ptr<CameraManagerGlobal> getInstance();
+    static void binderDeathCallback(void* cookie);
 
-    void registerAvailabilityCallback(
-            const ACameraManager_AvailabilityCallbacks *callback);
-    void unregisterAvailabilityCallback(
-            const ACameraManager_AvailabilityCallbacks *callback);
+    CameraManagerGlobal() {};
+    ~CameraManagerGlobal();
+
+    std::shared_ptr<ICameraService> getCameraService();
+
+    void registerAvailabilityCallback(const ACameraManager_AvailabilityCallbacks *callback);
+    void unregisterAvailabilityCallback(const ACameraManager_AvailabilityCallbacks *callback);
 
     void registerExtendedAvailabilityCallback(
             const ACameraManager_ExtendedAvailabilityCallbacks* callback);
@@ -76,35 +75,28 @@
     /**
      * Return camera IDs that support camera2
      */
-    void getCameraIdList(std::vector<hidl_string> *cameraIds);
+    void getCameraIdList(std::vector<std::string> *cameraIds);
 
   private:
-    sp<ICameraService> mCameraService;
+    std::shared_ptr<ICameraService> mCameraService;
     const int          kCameraServicePollDelay = 500000; // 0.5s
     Mutex              mLock;
-    class DeathNotifier : public android::hardware::hidl_death_recipient {
-      public:
-        explicit DeathNotifier(CameraManagerGlobal* cm) : mCameraManager(cm) {}
-      protected:
-        // IBinder::DeathRecipient implementation
-        virtual void serviceDied(uint64_t cookie, const wp<IBase> &who);
-      private:
-        const wp<CameraManagerGlobal> mCameraManager;
-    };
-    sp<DeathNotifier> mDeathNotifier;
+    ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
 
-    class CameraServiceListener final : public ICameraServiceListener {
+    class CameraServiceListener final : public BnCameraServiceListener {
       public:
-        explicit CameraServiceListener(CameraManagerGlobal* cm) : mCameraManager(cm) {}
-        android::hardware::Return<void> onStatusChanged(
-            const CameraStatusAndId &statusAndId) override;
-        android::hardware::Return<void> onPhysicalCameraStatusChanged(
-            const PhysicalCameraStatusAndId &statusAndId) override;
+        explicit CameraServiceListener(std::weak_ptr<CameraManagerGlobal> cm) :
+              mCameraManager(std::move(cm)) {}
+        ndk::ScopedAStatus onPhysicalCameraStatusChanged(
+                CameraDeviceStatus in_status, const std::string& in_cameraId,
+                const std::string& in_physicalCameraId) override;
+        ndk::ScopedAStatus onStatusChanged(CameraDeviceStatus in_status,
+                                           const std::string& in_cameraId) override;
 
       private:
-        const wp<CameraManagerGlobal> mCameraManager;
+        const std::weak_ptr<CameraManagerGlobal> mCameraManager;
     };
-    sp<CameraServiceListener> mCameraServiceListener;
+    std::shared_ptr<CameraServiceListener> mCameraServiceListener;
 
     // Wrapper of ACameraManager_AvailabilityCallbacks so we can store it in std::set
     struct Callback {
@@ -180,20 +172,22 @@
     static const nsecs_t kCallbackDrainTimeout;
     class CallbackHandler : public AHandler {
       public:
-        CallbackHandler(wp<CameraManagerGlobal> parent) : mParent(parent) {}
+        CallbackHandler(std::weak_ptr<CameraManagerGlobal> parent) : mParent(std::move(parent)) {}
         void onMessageReceived(const sp<AMessage> &msg) override;
       private:
-        wp<CameraManagerGlobal> mParent;
+        std::weak_ptr<CameraManagerGlobal> mParent;
         void notifyParent();
         void onMessageReceivedInternal(const sp<AMessage> &msg);
     };
     sp<CallbackHandler> mHandler;
     sp<ALooper>         mCbLooper; // Looper thread where callbacks actually happen on
 
-    void onStatusChanged(const CameraStatusAndId &statusAndId);
-    void onStatusChangedLocked(const CameraStatusAndId &statusAndId);
-    void onStatusChanged(const PhysicalCameraStatusAndId &statusAndId);
-    void onStatusChangedLocked(const PhysicalCameraStatusAndId &statusAndId);
+    void onStatusChanged(const CameraDeviceStatus &status, const std::string &cameraId);
+    void onStatusChangedLocked(const CameraDeviceStatus &status, const std::string &cameraId);
+    void onStatusChanged(const CameraDeviceStatus &status, const std::string &cameraId,
+                         const std::string &physicalCameraId);
+    void onStatusChangedLocked(const CameraDeviceStatus &status, const std::string &cameraId,
+                               const std::string &physicalCameraId);
     bool setupVendorTags();
 
     // Utils for status
@@ -203,7 +197,7 @@
     // The sort logic must match the logic in
     // libcameraservice/common/CameraProviderManager.cpp::getAPI1CompatibleCameraDeviceIds
     struct CameraIdComparator {
-        bool operator()(const hidl_string& a, const hidl_string& b) const {
+        bool operator()(const std::string& a, const std::string& b) const {
             uint32_t aUint = 0, bUint = 0;
             bool aIsUint = base::ParseUint(a.c_str(), &aUint);
             bool bIsUint = base::ParseUint(b.c_str(), &bUint);
@@ -225,29 +219,29 @@
       private:
         CameraDeviceStatus status = CameraDeviceStatus::STATUS_NOT_PRESENT;
         mutable std::mutex mLock;
-        std::set<hidl_string> unavailablePhysicalIds;
+        std::set<std::string> unavailablePhysicalIds;
       public:
         CameraStatus(CameraDeviceStatus st): status(st) { };
         CameraStatus() = default;
 
-        bool addUnavailablePhysicalId(const hidl_string& physicalCameraId);
-        bool removeUnavailablePhysicalId(const hidl_string& physicalCameraId);
+        bool addUnavailablePhysicalId(const std::string& physicalCameraId);
+        bool removeUnavailablePhysicalId(const std::string& physicalCameraId);
         CameraDeviceStatus getStatus();
         void updateStatus(CameraDeviceStatus newStatus);
-        std::set<hidl_string> getUnavailablePhysicalIds();
+        std::set<std::string> getUnavailablePhysicalIds();
     };
 
     template <class T>
     void registerAvailCallback(const T *callback);
 
     // Map camera_id -> status
-    std::map<hidl_string, CameraStatus, CameraIdComparator> mDeviceStatusMap;
+    std::map<std::string, CameraStatus, CameraIdComparator> mDeviceStatusMap;
 
     // For the singleton instance
     static Mutex sLock;
-    static CameraManagerGlobal* sInstance;
-    CameraManagerGlobal() {};
-    ~CameraManagerGlobal();
+    // Static instance is stored in a weak pointer, so will only exist if there is at least one
+    // active consumer of CameraManagerGlobal
+    static std::weak_ptr<CameraManagerGlobal> sInstance;
 };
 
 } // namespace acam;
@@ -259,7 +253,7 @@
  */
 struct ACameraManager {
     ACameraManager() :
-            mGlobalManager(&(android::acam::CameraManagerGlobal::getInstance())) {}
+            mGlobalManager(android::acam::CameraManagerGlobal::getInstance()) {}
     ~ACameraManager();
     camera_status_t getCameraIdList(ACameraIdList** cameraIdList);
     static void     deleteCameraIdList(ACameraIdList* cameraIdList);
@@ -277,7 +271,7 @@
         kCameraIdListNotInit = -1
     };
     android::Mutex         mLock;
-    android::sp<android::acam::CameraManagerGlobal> mGlobalManager;
+    std::shared_ptr<android::acam::CameraManagerGlobal> mGlobalManager;
 };
 
 #endif //_ACAMERA_MANAGER_H
diff --git a/camera/ndk/ndk_vendor/impl/ACaptureRequestVendor.h b/camera/ndk/ndk_vendor/impl/ACaptureRequestVendor.h
index 5715d77..fcb7e34 100644
--- a/camera/ndk/ndk_vendor/impl/ACaptureRequestVendor.h
+++ b/camera/ndk/ndk_vendor/impl/ACaptureRequestVendor.h
@@ -16,6 +16,7 @@
 
 #include "utils.h"
 
+using ::android::acam::utils::native_handle_ptr_wrapper;
 struct ACameraOutputTarget {
     explicit ACameraOutputTarget(const native_handle_t* window) : mWindow(window) {};
 
@@ -32,5 +33,5 @@
         return mWindow > other.mWindow;
     }
 
-    android::acam::utils::native_handle_ptr_wrapper mWindow;
+    native_handle_ptr_wrapper mWindow;
 };
diff --git a/camera/ndk/ndk_vendor/impl/utils.cpp b/camera/ndk/ndk_vendor/impl/utils.cpp
index e4fb204..73a527b 100644
--- a/camera/ndk/ndk_vendor/impl/utils.cpp
+++ b/camera/ndk/ndk_vendor/impl/utils.cpp
@@ -16,66 +16,75 @@
 
 #define LOG_TAG "ACameraVendorUtils"
 
-#include <utils/Log.h>
-
 #include "utils.h"
 
+#include <aidlcommonsupport/NativeHandle.h>
+#include <utils/Log.h>
+
 namespace android {
 namespace acam {
 namespace utils {
 
-// Convert CaptureRequest wrappable by sp<> to hidl CaptureRequest.
-frameworks::cameraservice::device::V2_0::CaptureRequest
-convertToHidl(const CaptureRequest *captureRequest) {
-    frameworks::cameraservice::device::V2_0::CaptureRequest hCaptureRequest;
-    hCaptureRequest.physicalCameraSettings = captureRequest->mCaptureRequest.physicalCameraSettings;
-    hCaptureRequest.streamAndWindowIds = captureRequest->mCaptureRequest.streamAndWindowIds;
-    return hCaptureRequest;
+// Convert CaptureRequest wrappable by sp<> to aidl CaptureRequest.
+AidlCaptureRequest convertToAidl(const CaptureRequest *captureRequest) {
+    AidlCaptureRequest aidlCaptureRequest;
+    aidlCaptureRequest.physicalCameraSettings =
+            captureRequest->mCaptureRequest.physicalCameraSettings;
+    aidlCaptureRequest.streamAndWindowIds = captureRequest->mCaptureRequest.streamAndWindowIds;
+    return aidlCaptureRequest;
 }
 
-HRotation convertToHidl(int rotation) {
-    HRotation hRotation = HRotation::R0;
+OutputConfiguration::Rotation convertToAidl(int rotation) {
+    using AidlRotation = OutputConfiguration::Rotation;
+
+    AidlRotation aRot = AidlRotation::R0;
     switch(rotation) {
         case CAMERA3_STREAM_ROTATION_90:
-            hRotation = HRotation::R90;
+            aRot = AidlRotation::R90;
             break;
         case CAMERA3_STREAM_ROTATION_180:
-            hRotation = HRotation::R180;
+            aRot = AidlRotation::R180;
             break;
         case CAMERA3_STREAM_ROTATION_270:
-            hRotation = HRotation::R270;
+            aRot = AidlRotation::R270;
             break;
         default:
             break;
     }
-    return hRotation;
+    return aRot;
 }
 
-bool convertFromHidlCloned(const HCameraMetadata &metadata, CameraMetadata *rawMetadata) {
-    const camera_metadata *buffer = (camera_metadata_t*)(metadata.data());
-    size_t expectedSize = metadata.size();
+bool cloneFromAidl(const AidlCameraMetadata& srcMetadata, camera_metadata_t** dst) {
+    const camera_metadata *buffer = (camera_metadata_t*)(srcMetadata.metadata.data());
+    size_t expectedSize = srcMetadata.metadata.size();
     int ret = validate_camera_metadata_structure(buffer, &expectedSize);
-    if (ret == OK || ret == CAMERA_METADATA_VALIDATION_SHIFTED) {
-        *rawMetadata = buffer;
-    } else {
-        ALOGE("%s: Malformed camera metadata received from caller", __FUNCTION__);
+    if (ret != OK && ret != CAMERA_METADATA_VALIDATION_SHIFTED) {
+        ALOGE("%s: Malformed camera srcMetadata received from caller", __FUNCTION__);
         return false;
     }
-    return true;
+
+    camera_metadata_t* clonedBuffer = clone_camera_metadata(buffer);
+    if (clonedBuffer != nullptr) {
+        *dst = clonedBuffer;
+        return true;
+    }
+
+    ALOGE("%s: Failed to clone srcMetadata buffer.", __FUNCTION__);
+    return false;
 }
 
-// Note: existing data in dst will be gone. dst owns memory if shouldOwn is set
-//       to true.
-void convertToHidl(const camera_metadata_t *src, HCameraMetadata* dst, bool shouldOwn) {
+// Note: existing data in dst will be gone.
+void convertToAidl(const camera_metadata_t *src, AidlCameraMetadata* dst) {
     if (src == nullptr) {
         return;
     }
     size_t size = get_camera_metadata_size(src);
-    dst->setToExternal((uint8_t *) src, size, shouldOwn);
-    return;
+    uint8_t* metadataStart = (uint8_t*)src;
+    uint8_t* metadataEnd = metadataStart + size;
+    dst->metadata.assign(metadataStart, metadataEnd);
 }
 
-TemplateId convertToHidl(ACameraDevice_request_template templateId) {
+TemplateId convertToAidl(ACameraDevice_request_template templateId) {
     switch(templateId) {
         case TEMPLATE_STILL_CAPTURE:
             return TemplateId::STILL_CAPTURE;
@@ -92,7 +101,7 @@
     }
 }
 
-camera_status_t convertFromHidl(Status status) {
+camera_status_t convertFromAidl(Status status) {
     camera_status_t ret = ACAMERA_OK;
     switch(status) {
         case Status::NO_ERROR:
@@ -146,6 +155,14 @@
     return true;
 }
 
+bool isWindowNativeHandleEqual(const native_handle_t *nh1,
+                               const aidl::android::hardware::common::NativeHandle& nh2) {
+    native_handle_t* tempNh = makeFromAidl(nh2);
+    bool equal = isWindowNativeHandleEqual(nh1, tempNh);
+    native_handle_delete(tempNh);
+    return equal;
+}
+
 bool isWindowNativeHandleLessThan(const native_handle_t *nh1, const native_handle_t *nh2) {
     if (isWindowNativeHandleEqual(nh1, nh2)) {
         return false;
@@ -166,32 +183,6 @@
     return !isWindowNativeHandleLessThan(nh1, nh2) && !isWindowNativeHandleEqual(nh1, nh2);
 }
 
-bool areWindowNativeHandlesEqual(hidl_vec<hidl_handle> handles1, hidl_vec<hidl_handle> handles2) {
-    if (handles1.size() != handles2.size()) {
-        return false;
-    }
-    for (int i = 0; i < handles1.size(); i++) {
-        if (!isWindowNativeHandleEqual(handles1[i], handles2[i])) {
-            return false;
-        }
-    }
-    return true;
-}
-
-bool areWindowNativeHandlesLessThan(hidl_vec<hidl_handle> handles1, hidl_vec<hidl_handle>handles2) {
-    if (handles1.size() != handles2.size()) {
-        return handles1.size() < handles2.size();
-    }
-    for (int i = 0; i < handles1.size(); i++) {
-        const native_handle_t *handle1 = handles1[i].getNativeHandle();
-        const native_handle_t *handle2 = handles2[i].getNativeHandle();
-        if (!isWindowNativeHandleEqual(handle1, handle2)) {
-            return isWindowNativeHandleLessThan(handle1, handle2);
-        }
-    }
-    return false;
-}
-
 } // namespace utils
 } // namespace acam
 } // namespace android
diff --git a/camera/ndk/ndk_vendor/impl/utils.h b/camera/ndk/ndk_vendor/impl/utils.h
index 62779a4..7ad74ad 100644
--- a/camera/ndk/ndk_vendor/impl/utils.h
+++ b/camera/ndk/ndk_vendor/impl/utils.h
@@ -14,46 +14,39 @@
  * limitations under the License.
  */
 
-#include <android/frameworks/cameraservice/service/2.0/ICameraService.h>
-#include <android/frameworks/cameraservice/device/2.0/ICameraDeviceUser.h>
-#include <android/frameworks/cameraservice/device/2.0/types.h>
-#include <camera/NdkCameraDevice.h>
+#ifndef CAMERA_NDK_VENDOR_UTILS_H
+#define CAMERA_NDK_VENDOR_UTILS_H
+
 #include <CameraMetadata.h>
+#include <aidl/android/frameworks/cameraservice/common/Status.h>
+#include <aidl/android/frameworks/cameraservice/device/CameraMetadata.h>
+#include <aidl/android/frameworks/cameraservice/device/CaptureRequest.h>
+#include <aidl/android/frameworks/cameraservice/device/ICameraDeviceUser.h>
+#include <aidl/android/frameworks/cameraservice/device/OutputConfiguration.h>
+#include <aidl/android/frameworks/cameraservice/device/PhysicalCameraSettings.h>
+#include <aidl/android/frameworks/cameraservice/device/TemplateId.h>
+#include <aidl/android/frameworks/cameraservice/service/ICameraService.h>
+#include <camera/NdkCameraDevice.h>
 #include <hardware/camera3.h>
-
-#ifndef CAMERA_NDK_VENDOR_H
-#define CAMERA_NDK_VENDOR_H
-
-using android::hardware::hidl_vec;
-using android::hardware::hidl_handle;
+#include <utils/RefBase.h>
 
 namespace android {
 namespace acam {
 namespace utils {
 
-using CameraMetadata = hardware::camera::common::V1_0::helper::CameraMetadata;
-using HCameraMetadata  = frameworks::cameraservice::service::V2_0::CameraMetadata;
-using Status = frameworks::cameraservice::common::V2_0::Status;
-using TemplateId = frameworks::cameraservice::device::V2_0::TemplateId;
-using PhysicalCameraSettings = frameworks::cameraservice::device::V2_0::PhysicalCameraSettings;
-using HRotation = frameworks::cameraservice::device::V2_0::OutputConfiguration::Rotation;
-using OutputConfiguration = frameworks::cameraservice::device::V2_0::OutputConfiguration;
-
-// Utility class so that CaptureRequest can be stored by sp<>
-struct CaptureRequest : public RefBase {
-  frameworks::cameraservice::device::V2_0::CaptureRequest mCaptureRequest;
-  std::vector<const native_handle_t *> mSurfaceList;
-  //Physical camera settings metadata is stored here, since the capture request
-  //might not contain it. That's since, fmq might have consumed it.
-  hidl_vec<PhysicalCameraSettings> mPhysicalCameraSettings;
-};
-
-bool areWindowNativeHandlesEqual(hidl_vec<hidl_handle> handles1, hidl_vec<hidl_handle>handles2);
-
-bool areWindowNativeHandlesLessThan(hidl_vec<hidl_handle> handles1, hidl_vec<hidl_handle>handles2);
+using ::aidl::android::frameworks::cameraservice::common::Status;
+using ::aidl::android::frameworks::cameraservice::device::OutputConfiguration;
+using ::aidl::android::frameworks::cameraservice::device::PhysicalCameraSettings;
+using ::aidl::android::frameworks::cameraservice::device::TemplateId;
+using ::aidl::android::hardware::common::NativeHandle;
+using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
+using AidlCameraMetadata = ::aidl::android::frameworks::cameraservice::device::CameraMetadata;
+using AidlCaptureRequest = ::aidl::android::frameworks::cameraservice::device::CaptureRequest;
 
 bool isWindowNativeHandleEqual(const native_handle_t *nh1, const native_handle_t *nh2);
 
+bool isWindowNativeHandleEqual(const native_handle_t* nh1, const NativeHandle& nh2);
+
 bool isWindowNativeHandleLessThan(const native_handle_t *nh1, const native_handle_t *nh2);
 
 // Convenience wrapper over isWindowNativeHandleLessThan and isWindowNativeHandleEqual
@@ -88,117 +81,30 @@
 
 };
 
-// Wrapper around OutputConfiguration. This is needed since HIDL
-// OutputConfiguration is auto-generated and marked final. Therefore, operator
-// overloads outside the class, will not get picked by clang while trying to
-// store OutputConfiguration in maps/sets.
-struct OutputConfigurationWrapper {
-    OutputConfiguration mOutputConfiguration;
-
-    operator const OutputConfiguration &() const {
-        return mOutputConfiguration;
-    }
-
-    OutputConfigurationWrapper() {
-        mOutputConfiguration.rotation = OutputConfiguration::Rotation::R0;
-        // The ndk currently doesn't support deferred surfaces
-        mOutputConfiguration.isDeferred = false;
-        mOutputConfiguration.width = 0;
-        mOutputConfiguration.height = 0;
-        // ndk doesn't support inter OutputConfiguration buffer sharing.
-        mOutputConfiguration.windowGroupId = -1;
-    };
-
-    OutputConfigurationWrapper(const OutputConfigurationWrapper &other) {
-        *this = other;
-    }
-
-    // Needed to make sure that OutputConfiguration in
-    // OutputConfigurationWrapper, when copied doesn't call hidl_handle's
-    // assignment operator / copy constructor, which will lead to native handle
-    // cloning, which is not what we want for app callbacks which have the native
-    // handle as parameter.
-    OutputConfigurationWrapper &operator=(const OutputConfigurationWrapper &other) {
-        const OutputConfiguration &outputConfiguration = other.mOutputConfiguration;
-        mOutputConfiguration.rotation = outputConfiguration.rotation;
-        mOutputConfiguration.isDeferred = outputConfiguration.isDeferred;
-        mOutputConfiguration.width = outputConfiguration.width;
-        mOutputConfiguration.height = outputConfiguration.height;
-        mOutputConfiguration.windowGroupId = outputConfiguration.windowGroupId;
-        mOutputConfiguration.windowHandles.resize(outputConfiguration.windowHandles.size());
-        mOutputConfiguration.physicalCameraId = outputConfiguration.physicalCameraId;
-        size_t i = 0;
-        for (const auto &handle : outputConfiguration.windowHandles) {
-            mOutputConfiguration.windowHandles[i++] = handle.getNativeHandle();
-        }
-        return *this;
-    }
-
-    bool operator ==(const OutputConfiguration &other) const {
-        const OutputConfiguration &self = mOutputConfiguration;
-        return self.rotation == other.rotation && self.windowGroupId == other.windowGroupId &&
-                self.physicalCameraId == other.physicalCameraId && self.width == other.width &&
-                self.height == other.height && self.isDeferred == other.isDeferred &&
-                areWindowNativeHandlesEqual(self.windowHandles, other.windowHandles);
-    }
-
-    bool operator < (const OutputConfiguration &other) const {
-        if (*this == other) {
-            return false;
-        }
-        const OutputConfiguration &self = mOutputConfiguration;
-        if (self.windowGroupId != other.windowGroupId) {
-            return self.windowGroupId < other.windowGroupId;
-        }
-
-        if (self.width != other.width) {
-            return self.width < other.width;
-        }
-
-        if (self.height != other.height) {
-            return self.height < other.height;
-        }
-
-        if (self.rotation != other.rotation) {
-            return static_cast<uint32_t>(self.rotation) < static_cast<uint32_t>(other.rotation);
-        }
-
-        if (self.isDeferred != other.isDeferred) {
-            return self.isDeferred < other.isDeferred;
-        }
-
-        if (self.physicalCameraId != other.physicalCameraId) {
-            return self.physicalCameraId < other.physicalCameraId;
-        }
-        return areWindowNativeHandlesLessThan(self.windowHandles, other.windowHandles);
-    }
-
-    bool operator != (const OutputConfiguration &other) const {
-        return !(*this == other);
-    }
-
-    bool operator > (const OutputConfiguration &other) const {
-        return (*this != other) && !(*this < other);
-    }
+// Utility class so that CaptureRequest can be stored by sp<>
+struct CaptureRequest: public RefBase {
+  AidlCaptureRequest mCaptureRequest;
+  std::vector<native_handle_ptr_wrapper> mSurfaceList;
+  // Physical camera settings metadata is stored here, since the capture request
+  // might not contain it once the FMQ has consumed it.
+  std::vector<PhysicalCameraSettings> mPhysicalCameraSettings;
 };
 
-// Convert CaptureRequest wrappable by sp<> to hidl CaptureRequest.
-frameworks::cameraservice::device::V2_0::CaptureRequest convertToHidl(
-    const CaptureRequest *captureRequest);
+AidlCaptureRequest convertToAidl(const CaptureRequest *captureRequest);
 
-HRotation convertToHidl(int rotation);
+OutputConfiguration::Rotation convertToAidl(int rotation);
 
-bool convertFromHidlCloned(const HCameraMetadata &metadata, CameraMetadata *rawMetadata);
+bool cloneFromAidl(const AidlCameraMetadata& srcMetadata, camera_metadata_t** dst);
 
 // Note: existing data in dst will be gone.
-void convertToHidl(const camera_metadata_t *src, HCameraMetadata* dst, bool shouldOwn = false);
+void convertToAidl(const camera_metadata_t *src, AidlCameraMetadata* dst);
 
-TemplateId convertToHidl(ACameraDevice_request_template templateId);
+TemplateId convertToAidl(ACameraDevice_request_template templateId);
 
-camera_status_t convertFromHidl(Status status);
+camera_status_t convertFromAidl(Status status);
 
 } // namespace utils
 } // namespace acam
 } // namespace android
 
-#endif // CAMERA_NDK_VENDOR_H
+#endif // CAMERA_NDK_VENDOR_UTILS_H
diff --git a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
index 63cdb76..7f6ea9d 100644
--- a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
+++ b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
@@ -66,7 +66,7 @@
     // Retaining the error code in case the caller needs to analyze it.
     std::variant<int, ConfiguredWindows> initCamera(const native_handle_t* imgReaderAnw,
             const std::vector<PhysicalImgReaderInfo>& physicalImgReaders,
-            bool usePhysicalSettings) {
+            bool usePhysicalSettings, bool prepareWindows = false) {
         ConfiguredWindows configuredWindows;
         if (imgReaderAnw == nullptr) {
             ALOGE("Cannot initialize camera before image reader get initialized.");
@@ -142,7 +142,26 @@
             ALOGE("ACameraDevice_createCaptureSession failed, ret=%d", ret);
             return ret;
         }
-
+        if (prepareWindows) {
+            // Set window prepared callback
+            ACameraCaptureSession_setWindowPreparedCallback(mSession, /*context*/this,
+                    mPreparedCb);
+            // Prepare windows
+            for (auto &window : configuredWindows) {
+                ret = ACameraCaptureSession_prepareWindow(mSession, window);
+                if (ret != ACAMERA_OK) {
+                    ALOGE("%s: ACameraCaptureSession_prepareWindow failed", __FUNCTION__);
+                    return ret;
+                }
+                incPendingPrepared(window);
+            }
+            // Allow some time for the onPrepared callbacks to be delivered
+            usleep(configuredWindows.size() * 100000);
+            // Check that callbacks were received
+            if (!gotAllPreparedCallbacks()) {
+                return -1;
+            }
+        }
         // Create capture request
         if (usePhysicalSettings) {
             ret = ACameraDevice_createCaptureRequest_withPhysicalIds(mDevice,
@@ -254,20 +273,22 @@
                 &mLogicalCaptureCallbacksV2, 1, &mStillRequest, &seqId);
     }
 
-    bool checkCallbacks(int pictureCount) {
+    bool checkCallbacks(int pictureCount, bool printLog = false) {
         std::lock_guard<std::mutex> lock(mMutex);
         if (mCompletedCaptureCallbackCount != pictureCount) {
-            ALOGE("Completed capture callback count not as expected. expected %d actual %d",
-                  pictureCount, mCompletedCaptureCallbackCount);
+            ALOGE_IF(printLog,
+                     "Completed capture callback count not as expected. expected %d actual %d",
+                     pictureCount, mCompletedCaptureCallbackCount);
             return false;
         }
         return true;
     }
-    bool checkCallbacksV2(int pictureCount) {
+    bool checkCallbacksV2(int pictureCount, bool printLog = false) {
         std::lock_guard<std::mutex> lock(mMutex);
         if (mCaptureStartedCallbackCount != pictureCount) {
-            ALOGE("Capture started callback count not as expected. expected %d actual %d",
-                  pictureCount, mCaptureStartedCallbackCount);
+            ALOGE_IF(printLog,
+                     "Capture started callback count not as expected. expected %d actual %d",
+                     pictureCount, mCaptureStartedCallbackCount);
             return false;
         }
         return true;
@@ -275,9 +296,55 @@
 
 
    private:
+    static void onPreparedCb(void* obj, ACameraWindowType *anw, ACameraCaptureSession *session) {
+        CameraHelper* thiz = reinterpret_cast<CameraHelper*>(obj);
+        thiz->handlePrepared(anw, session);
+    }
+    bool gotAllPreparedCallbacks() {
+        std::lock_guard<std::mutex> lock(mMutex);
+        bool ret = (mPendingPreparedCbs.size() == 0);
+        if (!ret) {
+            ALOGE("%s: mPendingPreparedCbs has the following expected callbacks", __FUNCTION__);
+            for (auto pair : mPendingPreparedCbs) {
+                ALOGE("%s: ANW: %p : pending callbacks %d", __FUNCTION__, pair.first, pair.second);
+            }
+        }
+        return ret;
+    }
+
+    void handlePrepared(ACameraWindowType *anw, ACameraCaptureSession *session) {
+        // Reduce the pending prepared count of anw by 1. If the count reaches 0, remove the key.
+        std::lock_guard<std::mutex> lock(mMutex);
+        if (session != mSession) {
+            ALOGE("%s: Received callback for incorrect session ? mSession %p, session %p",
+                    __FUNCTION__, mSession, session);
+            return;
+        }
+        if (mPendingPreparedCbs.find(anw) == mPendingPreparedCbs.end()) {
+            ALOGE("%s: ANW %p was not being prepared at all?", __FUNCTION__, anw);
+            return;
+        }
+        mPendingPreparedCbs[anw]--;
+        if (mPendingPreparedCbs[anw] == 0) {
+            mPendingPreparedCbs.erase(anw);
+        }
+    }
+    void incPendingPrepared(ACameraWindowType *anw) {
+        std::lock_guard<std::mutex> lock(mMutex);
+        if ((mPendingPreparedCbs.find(anw) == mPendingPreparedCbs.end())) {
+            mPendingPreparedCbs[anw] = 1;
+            return;
+        }
+        mPendingPreparedCbs[anw]++;
+    }
+
+    // ANW -> pending prepared callbacks
+    std::unordered_map<ACameraWindowType *, int> mPendingPreparedCbs;
     ACameraDevice_StateCallbacks mDeviceCb{this, nullptr, nullptr};
     ACameraCaptureSession_stateCallbacks mSessionCb{ this, nullptr, nullptr, nullptr};
 
+    ACameraCaptureSession_prepareCallback mPreparedCb = &onPreparedCb;
+
     const native_handle_t* mImgReaderAnw = nullptr;  // not owned by us.
 
     // Camera device
@@ -626,7 +693,7 @@
     }
 
     bool takePictures(const char* id, uint64_t readerUsage, int readerMaxImages,
-            bool readerAsync, int pictureCount, bool v2 = false) {
+            bool readerAsync, int pictureCount, bool v2 = false, bool prepareSurfaces = false) {
         int ret = 0;
 
         ImageReaderTestCase testCase(
@@ -641,7 +708,7 @@
         CameraHelper cameraHelper(id, mCameraManager);
         std::variant<int, ConfiguredWindows> retInit =
                 cameraHelper.initCamera(testCase.getNativeWindow(), {}/*physicalImageReaders*/,
-                                        false/*usePhysicalSettings*/);
+                                        false/*usePhysicalSettings*/, prepareSurfaces);
         int *retp = std::get_if<int>(&retInit);
         if (retp) {
             ALOGE("Unable to initialize camera helper");
@@ -670,18 +737,25 @@
         // Sleep until all capture finished
         for (int i = 0; i < kCaptureWaitRetry * pictureCount; i++) {
             usleep(kCaptureWaitUs);
-            if (testCase.getAcquiredImageCount() == pictureCount) {
+            bool receivedAllCallbacks = v2 ? cameraHelper.checkCallbacksV2(pictureCount)
+                                           : cameraHelper.checkCallbacks(pictureCount);
+
+            bool acquiredAllImages = testCase.getAcquiredImageCount() == pictureCount;
+            if (acquiredAllImages) {
                 ALOGI("Session take ~%d ms to capture %d images", i * kCaptureWaitUs / 1000,
                       pictureCount);
+            }
+            // Wait for all images to be acquired and all callbacks to be processed
+            if (acquiredAllImages && receivedAllCallbacks) {
                 break;
             }
         }
         return testCase.getAcquiredImageCount() == pictureCount &&
-               v2 ? cameraHelper.checkCallbacksV2(pictureCount) :
-                    cameraHelper.checkCallbacks(pictureCount);
+               (v2 ? cameraHelper.checkCallbacksV2(pictureCount, /* printLog= */true)
+                   : cameraHelper.checkCallbacks(pictureCount, /* printLog= */true));
     }
 
-    bool testTakePicturesNative(const char* id) {
+    bool testTakePicturesNative(const char* id, bool prepareSurfaces) {
         for (auto& readerUsage :
              {AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN}) {
             for (auto& readerMaxImages : {1, 4, 8}) {
@@ -689,7 +763,7 @@
                     for (auto& pictureCount : {1, 4, 8}) {
                         for ( auto & v2 : {true, false}) {
                             if (!takePictures(id, readerUsage, readerMaxImages,
-                                    readerAsync, pictureCount, v2)) {
+                                    readerAsync, pictureCount, v2, prepareSurfaces)) {
                                 ALOGE("Test takePictures failed for test case usage=%" PRIu64
                                       ", maxImages=%d, async=%d, pictureCount=%d",
                                       readerUsage, readerMaxImages, readerAsync, pictureCount);
@@ -869,39 +943,46 @@
 
         ACameraMetadata_free(staticMetadata);
     }
+
+    void testBasicTakePictures(bool prepareSurfaces) {
+        // We always use the first camera.
+        const char* cameraId = mCameraIdList->cameraIds[0];
+        ASSERT_TRUE(cameraId != nullptr);
+
+        ACameraMetadata* staticMetadata = nullptr;
+        camera_status_t ret = ACameraManager_getCameraCharacteristics(
+                mCameraManager, cameraId, &staticMetadata);
+        ASSERT_EQ(ret, ACAMERA_OK);
+        ASSERT_NE(staticMetadata, nullptr);
+
+        bool isBC = isCapabilitySupported(staticMetadata,
+                ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
+
+        uint32_t namedTag = 0;
+        // Test that ACameraMetadata_getTagFromName works as expected for public tag
+        // names
+        camera_status_t status = ACameraManager_getTagFromName(mCameraManager, cameraId,
+                "android.control.aeMode", &namedTag);
+
+        ASSERT_EQ(status, ACAMERA_OK);
+        ASSERT_EQ(namedTag, ACAMERA_CONTROL_AE_MODE);
+
+        ACameraMetadata_free(staticMetadata);
+
+        if (!isBC) {
+            ALOGW("Camera does not support BACKWARD_COMPATIBLE.");
+            return;
+        }
+
+        EXPECT_TRUE(testTakePicturesNative(cameraId, prepareSurfaces));
+    }
 };
 
+
+
 TEST_F(AImageReaderVendorTest, CreateWindowNativeHandle) {
-    // We always use the first camera.
-    const char* cameraId = mCameraIdList->cameraIds[0];
-    ASSERT_TRUE(cameraId != nullptr);
-
-    ACameraMetadata* staticMetadata = nullptr;
-    camera_status_t ret = ACameraManager_getCameraCharacteristics(
-            mCameraManager, cameraId, &staticMetadata);
-    ASSERT_EQ(ret, ACAMERA_OK);
-    ASSERT_NE(staticMetadata, nullptr);
-
-    bool isBC = isCapabilitySupported(staticMetadata,
-            ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
-
-    uint32_t namedTag = 0;
-    // Test that ACameraMetadata_getTagFromName works as expected for public tag
-    // names
-    camera_status_t status = ACameraManager_getTagFromName(mCameraManager, cameraId,
-            "android.control.aeMode", &namedTag);
-
-    ASSERT_EQ(status, ACAMERA_OK);
-    ASSERT_EQ(namedTag, ACAMERA_CONTROL_AE_MODE);
-
-    ACameraMetadata_free(staticMetadata);
-
-    if (!isBC) {
-        ALOGW("Camera does not support BACKWARD_COMPATIBLE.");
-        return;
-    }
-
-    EXPECT_TRUE(testTakePicturesNative(cameraId));
+    testBasicTakePictures(/*prepareSurfaces*/ false);
+    testBasicTakePictures(/*prepareSurfaces*/ true);
 }
 
 TEST_F(AImageReaderVendorTest, LogicalCameraPhysicalStream) {
diff --git a/camera/tests/fuzzer/Android.bp b/camera/tests/fuzzer/Android.bp
new file mode 100644
index 0000000..bae8706
--- /dev/null
+++ b/camera/tests/fuzzer/Android.bp
@@ -0,0 +1,150 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_camera_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_camera_license"],
+}
+
+cc_defaults {
+    name: "camera_defaults",
+    static_libs: [
+        "libcamera_client",
+    ],
+    shared_libs: [
+        "libbase",
+        "libcutils",
+        "libutils",
+        "liblog",
+        "libbinder",
+        "libgui",
+        "libcamera_metadata",
+        "libnativewindow",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
+
+cc_fuzz {
+    name: "camera_fuzzer",
+    srcs: [
+        "camera_fuzzer.cpp",
+    ],
+    defaults: [
+        "camera_defaults",
+    ],
+}
+
+cc_fuzz {
+    name: "camera_c2CaptureRequest_fuzzer",
+    srcs: [
+        "camera_c2CaptureRequest_fuzzer.cpp",
+    ],
+    defaults: [
+        "camera_defaults",
+    ],
+}
+
+cc_fuzz {
+    name: "camera_c2ConcurrentCamera_fuzzer",
+    srcs: [
+        "camera_c2ConcurrentCamera_fuzzer.cpp",
+    ],
+    defaults: [
+        "camera_defaults",
+    ],
+}
+
+cc_fuzz {
+    name: "camera_c2SubmitInfo_fuzzer",
+    srcs: [
+        "camera_c2SubmitInfo_fuzzer.cpp",
+    ],
+    defaults: [
+        "camera_defaults",
+    ],
+}
+
+cc_fuzz {
+    name: "camera_c2SessionConfiguration_fuzzer",
+    srcs: [
+        "camera_c2SessionConfiguration_fuzzer.cpp",
+    ],
+    defaults: [
+        "camera_defaults",
+    ],
+}
+
+cc_fuzz {
+    name: "camera_c2OutputConfiguration_fuzzer",
+    srcs: [
+        "camera_c2OutputConfiguration_fuzzer.cpp",
+    ],
+    defaults: [
+        "camera_defaults",
+    ],
+}
+
+cc_fuzz {
+    name: "camera_vendorTagDescriptor_fuzzer",
+    srcs: [
+        "camera_vendorTagDescriptor_fuzzer.cpp",
+    ],
+    defaults: [
+        "camera_defaults",
+    ],
+    include_dirs: [
+        "system/media/camera/tests",
+        "system/media/private/camera/include",
+    ],
+}
+
+cc_fuzz {
+    name: "camera_Parameters_fuzzer",
+    srcs: [
+        "camera_Parameters_fuzzer.cpp",
+    ],
+    defaults: [
+        "camera_defaults",
+    ],
+}
+
+cc_fuzz {
+    name: "camera_SessionStats_fuzzer",
+    srcs: [
+        "camera_SessionStats_fuzzer.cpp",
+    ],
+    defaults: [
+        "camera_defaults",
+    ],
+}
+
+cc_fuzz {
+    name: "camera_captureResult_fuzzer",
+    srcs: [
+        "camera_captureResult_fuzzer.cpp",
+    ],
+    defaults: [
+        "camera_defaults",
+    ],
+}
diff --git a/camera/tests/fuzzer/README.md b/camera/tests/fuzzer/README.md
new file mode 100644
index 0000000..c07ac04
--- /dev/null
+++ b/camera/tests/fuzzer/README.md
@@ -0,0 +1,74 @@
+# Fuzzers for libcamera_client
+
+## Plugin Design Considerations
+The fuzzer plugins for libcamera_client are designed based on an understanding of the
+source code and aim to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzers.
+
+libcamera_client supports the following parameters:
+1. Command (parameter name: `cmd`)
+2. Video Buffer Mode (parameter name: `videoBufferMode`)
+3. Preview Callback Flag (parameter name: `previewCallbackFlag`)
+4. Facing (parameter name: `facing`)
+5. Orientation (parameter name: `orientation`)
+6. Format (parameter name: `format`)
+
+| Parameter | Valid Values | Configured Value |
+|------------- |-------------| ----- |
+| `cmd` | 0.`CAMERA_CMD_START_SMOOTH_ZOOM` 1.`CAMERA_CMD_STOP_SMOOTH_ZOOM` 2.`CAMERA_CMD_SET_DISPLAY_ORIENTATION` 3.`CAMERA_CMD_ENABLE_SHUTTER_SOUND` 4.`CAMERA_CMD_PLAY_RECORDING_SOUND` 5.`CAMERA_CMD_START_FACE_DETECTION` 6.`CAMERA_CMD_STOP_FACE_DETECTION` 7.`CAMERA_CMD_ENABLE_FOCUS_MOVE_MSG` 8.`CAMERA_CMD_PING` 9.`CAMERA_CMD_SET_VIDEO_BUFFER_COUNT` 10.`CAMERA_CMD_SET_VIDEO_FORMAT`| Value obtained from FuzzedDataProvider|
+| `videoBufferMode` |0. `ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV` 1.`ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA` 2.`ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE`| Value obtained from FuzzedDataProvider|
+| `previewCallbackFlag` | 0. `CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK` 1.`CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK` 2.`CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK` 3.`CAMERA_FRAME_CALLBACK_FLAG_NOOP` 4.`CAMERA_FRAME_CALLBACK_FLAG_CAMCORDER` 5.`CAMERA_FRAME_CALLBACK_FLAG_CAMERA` 6.`CAMERA_FRAME_CALLBACK_FLAG_BARCODE_SCANNER`| Value obtained from FuzzedDataProvider|
+| `facing` | 0.`android::hardware::CAMERA_FACING_BACK` 1.`android::hardware::CAMERA_FACING_FRONT`| Value obtained from FuzzedDataProvider|
+| `orientation` | 0.`0` 1.`90` 2.`180` 3.`270`| Value obtained from FuzzedDataProvider|
+| `format` | 0.`CameraParameters::PIXEL_FORMAT_YUV422SP` 1.`CameraParameters::PIXEL_FORMAT_YUV420SP` 2.`CameraParameters::PIXEL_FORMAT_YUV422I` 3.`CameraParameters::PIXEL_FORMAT_YUV420P` 4.`CameraParameters::PIXEL_FORMAT_RGB565` 5.`CameraParameters::PIXEL_FORMAT_RGBA8888` 6.`CameraParameters::PIXEL_FORMAT_JPEG` 7.`CameraParameters::PIXEL_FORMAT_BAYER_RGGB` 8.`CameraParameters::PIXEL_FORMAT_ANDROID_OPAQUE`| Value obtained from FuzzedDataProvider|
+
+This also ensures that the plugins are always deterministic for any given input.
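+
+For example, `camera_fuzzer` picks `cmd` either from the table of valid
+commands above or as an arbitrary integer supplied by `FuzzedDataProvider`
+(simplified sketch of the selection logic in camera_fuzzer.cpp):
+```
+  int32_t cmd = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidCMD)
+                                    : mFDP->ConsumeIntegral<int32_t>();
+  int32_t arg1 = mFDP->ConsumeIntegral<int32_t>();
+  int32_t arg2 = mFDP->ConsumeIntegral<int32_t>();
+  mCamera->sendCommand(cmd, arg1, arg2);
+```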
+
+##### Maximize utilization of input data
+The plugins feed the entire input data to the module.
+This ensures that the plugins tolerate any kind of input (empty, huge,
+malformed, etc.) and do not `exit()` on any input, thereby increasing the
+chance of identifying vulnerabilities.
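+
+Every plugin uses the standard libFuzzer entry point and wraps the entire
+input in a `FuzzedDataProvider` (sketch):
+```
+  extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+      FuzzedDataProvider fdp(data, size);  // all values are drawn from the input
+      // ... exercise the libcamera_client APIs with values drawn from fdp ...
+      return 0;
+  }
+```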
+
+## Build
+
+This describes steps to build camera_fuzzer, camera_c2CaptureRequest_fuzzer, camera_c2ConcurrentCamera_fuzzer, camera_c2SubmitInfo_fuzzer, camera_c2SessionConfiguration_fuzzer, camera_c2OutputConfiguration_fuzzer, camera_vendorTagDescriptor_fuzzer, camera_Parameters_fuzzer, camera_SessionStats_fuzzer and camera_captureResult_fuzzer binaries.
+
+### Android
+
+#### Steps to build
+Build the fuzzers:
+```
+  $ mm -j$(nproc) camera_fuzzer
+  $ mm -j$(nproc) camera_c2CaptureRequest_fuzzer
+  $ mm -j$(nproc) camera_c2ConcurrentCamera_fuzzer
+  $ mm -j$(nproc) camera_c2SubmitInfo_fuzzer
+  $ mm -j$(nproc) camera_c2SessionConfiguration_fuzzer
+  $ mm -j$(nproc) camera_c2OutputConfiguration_fuzzer
+  $ mm -j$(nproc) camera_vendorTagDescriptor_fuzzer
+  $ mm -j$(nproc) camera_Parameters_fuzzer
+  $ mm -j$(nproc) camera_SessionStats_fuzzer
+  $ mm -j$(nproc) camera_captureResult_fuzzer
+```
+#### Steps to run
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/${TARGET_ARCH}/camera_fuzzer/camera_fuzzer
+  $ adb shell /data/fuzz/${TARGET_ARCH}/camera_c2CaptureRequest_fuzzer/camera_c2CaptureRequest_fuzzer
+  $ adb shell /data/fuzz/${TARGET_ARCH}/camera_c2ConcurrentCamera_fuzzer/camera_c2ConcurrentCamera_fuzzer
+  $ adb shell /data/fuzz/${TARGET_ARCH}/camera_c2SubmitInfo_fuzzer/camera_c2SubmitInfo_fuzzer
+  $ adb shell /data/fuzz/${TARGET_ARCH}/camera_c2SessionConfiguration_fuzzer/camera_c2SessionConfiguration_fuzzer
+  $ adb shell /data/fuzz/${TARGET_ARCH}/camera_c2OutputConfiguration_fuzzer/camera_c2OutputConfiguration_fuzzer
+  $ adb shell /data/fuzz/${TARGET_ARCH}/camera_vendorTagDescriptor_fuzzer/camera_vendorTagDescriptor_fuzzer
+  $ adb shell /data/fuzz/${TARGET_ARCH}/camera_Parameters_fuzzer/camera_Parameters_fuzzer
+  $ adb shell /data/fuzz/${TARGET_ARCH}/camera_SessionStats_fuzzer/camera_SessionStats_fuzzer
+  $ adb shell /data/fuzz/${TARGET_ARCH}/camera_captureResult_fuzzer/camera_captureResult_fuzzer
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/camera/tests/fuzzer/camera2common.h b/camera/tests/fuzzer/camera2common.h
new file mode 100644
index 0000000..14a1b1b
--- /dev/null
+++ b/camera/tests/fuzzer/camera2common.h
@@ -0,0 +1,55 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef CAMERA2COMMON_H
+#define CAMERA2COMMON_H
+
+#include <binder/Parcel.h>
+
+using namespace android;
+
+// Exercise writeToParcel()/readFromParcel() against a null Parcel pointer.
+template <class type>
+void invokeReadWriteNullParcel(type* obj) {
+    Parcel* parcelNull = nullptr;
+    obj->writeToParcel(parcelNull);
+    obj->readFromParcel(parcelNull);
+}
+
+// Null-Parcel variant for objects held through a strong pointer (sp<>).
+template <class type>
+void invokeReadWriteNullParcelsp(sp<type> obj) {
+    Parcel* parcelNull = nullptr;
+    obj->writeToParcel(parcelNull);
+    obj->readFromParcel(parcelNull);
+}
+
+// Round-trip the object through a Parcel: write, rewind, then read back.
+template <class type>
+void invokeReadWriteParcel(type* obj) {
+    Parcel* parcel = new Parcel();
+    obj->writeToParcel(parcel);
+    parcel->setDataPosition(0);
+    obj->readFromParcel(parcel);
+    delete parcel;
+}
+
+// Parcel round-trip variant for objects held through a strong pointer (sp<>).
+template <class type>
+void invokeReadWriteParcelsp(sp<type> obj) {
+    Parcel* parcel = new Parcel();
+    obj->writeToParcel(parcel);
+    parcel->setDataPosition(0);
+    obj->readFromParcel(parcel);
+    delete parcel;
+}
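+
+// Typical usage from a fuzzer (sketch; see camera_SessionStats_fuzzer.cpp):
+//     invokeReadWriteParcel<CameraStreamStats>(cameraStreamStats);
+//     invokeReadWriteNullParcel<CameraStreamStats>(cameraStreamStats);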
+
+#endif  // CAMERA2COMMON_H
diff --git a/camera/tests/fuzzer/camera_Parameters_fuzzer.cpp b/camera/tests/fuzzer/camera_Parameters_fuzzer.cpp
new file mode 100644
index 0000000..07efc20
--- /dev/null
+++ b/camera/tests/fuzzer/camera_Parameters_fuzzer.cpp
@@ -0,0 +1,164 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <CameraParameters.h>
+#include <CameraParameters2.h>
+#include <fcntl.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <utils/String16.h>
+#include <camera/StringUtils.h>
+
+using namespace std;
+using namespace android;
+
+string kValidFormats[] = {
+        CameraParameters::PIXEL_FORMAT_YUV422SP,      CameraParameters::PIXEL_FORMAT_YUV420SP,
+        CameraParameters::PIXEL_FORMAT_YUV422I,       CameraParameters::PIXEL_FORMAT_YUV420P,
+        CameraParameters::PIXEL_FORMAT_RGB565,        CameraParameters::PIXEL_FORMAT_RGBA8888,
+        CameraParameters::PIXEL_FORMAT_JPEG,          CameraParameters::PIXEL_FORMAT_BAYER_RGGB,
+        CameraParameters::PIXEL_FORMAT_ANDROID_OPAQUE};
+
+class CameraParametersFuzzer {
+  public:
+    void process(const uint8_t* data, size_t size);
+    ~CameraParametersFuzzer() {
+        delete mCameraParameters;
+        delete mCameraParameters2;
+    }
+
+  private:
+    void invokeCameraParameters();
+    template <class type>
+    void initCameraParameters(type** obj);
+    template <class type>
+    void cameraParametersCommon(type* obj);
+    CameraParameters* mCameraParameters = nullptr;
+    CameraParameters2* mCameraParameters2 = nullptr;
+    FuzzedDataProvider* mFDP = nullptr;
+};
+
+template <class type>
+void CameraParametersFuzzer::initCameraParameters(type** obj) {
+    if (mFDP->ConsumeBool()) {
+        *obj = new type();
+    } else {
+        string params;
+        if (mFDP->ConsumeBool()) {
+            int32_t width = mFDP->ConsumeIntegral<int32_t>();
+            int32_t height = mFDP->ConsumeIntegral<int32_t>();
+            int32_t minFps = mFDP->ConsumeIntegral<int32_t>();
+            int32_t maxFps = mFDP->ConsumeIntegral<int32_t>();
+            params = CameraParameters::KEY_SUPPORTED_VIDEO_SIZES;
+            params += '=' + to_string(width) + 'x' + to_string(height) + ';';
+            if (mFDP->ConsumeBool()) {
+                params += CameraParameters::KEY_PREVIEW_FPS_RANGE;
+                params += '=' + to_string(minFps) + ',' + to_string(maxFps) + ';';
+            }
+            if (mFDP->ConsumeBool()) {
+                params += CameraParameters::KEY_SUPPORTED_PICTURE_SIZES;
+                params += '=' + to_string(width) + 'x' + to_string(height) + ';';
+            }
+            if (mFDP->ConsumeBool()) {
+                params += CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS;
+                params += '=' + mFDP->PickValueInArray(kValidFormats) + ';';
+            }
+        } else {
+            params = mFDP->ConsumeRandomLengthString();
+        }
+        *obj = new type(toString8(params));
+    }
+}
+
+template <class type>
+void CameraParametersFuzzer::cameraParametersCommon(type* obj) {
+    Vector<Size> supportedPreviewSizes;
+    obj->getSupportedPreviewSizes(supportedPreviewSizes);
+    int32_t previewWidth = mFDP->ConsumeIntegral<int32_t>();
+    int32_t previewHeight = mFDP->ConsumeIntegral<int32_t>();
+    obj->setPreviewSize(previewWidth, previewHeight);
+    obj->getPreviewSize(&previewWidth, &previewHeight);
+
+    Vector<Size> supportedVideoSizes;
+    obj->getSupportedVideoSizes(supportedVideoSizes);
+    if (supportedVideoSizes.size() != 0) {
+        int32_t videoWidth, videoHeight, preferredVideoWidth, preferredVideoHeight;
+        if (mFDP->ConsumeBool()) {
+            int32_t idx = mFDP->ConsumeIntegralInRange<int32_t>(0, supportedVideoSizes.size() - 1);
+            obj->setVideoSize(supportedVideoSizes[idx].width, supportedVideoSizes[idx].height);
+        } else {
+            videoWidth = mFDP->ConsumeIntegral<int32_t>();
+            videoHeight = mFDP->ConsumeIntegral<int32_t>();
+            obj->setVideoSize(videoWidth, videoHeight);
+        }
+        obj->getVideoSize(&videoWidth, &videoHeight);
+        obj->getPreferredPreviewSizeForVideo(&preferredVideoWidth, &preferredVideoHeight);
+    }
+
+    int32_t fps = mFDP->ConsumeIntegral<int32_t>();
+    obj->setPreviewFrameRate(fps);
+    obj->getPreviewFrameRate();
+    string previewFormat = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFormats)
+                                               : mFDP->ConsumeRandomLengthString();
+    obj->setPreviewFormat(previewFormat.c_str());
+
+    int32_t pictureWidth = mFDP->ConsumeIntegral<int32_t>();
+    int32_t pictureHeight = mFDP->ConsumeIntegral<int32_t>();
+    Vector<Size> supportedPictureSizes;
+    obj->setPictureSize(pictureWidth, pictureHeight);
+    obj->getPictureSize(&pictureWidth, &pictureHeight);
+    obj->getSupportedPictureSizes(supportedPictureSizes);
+    string pictureFormat = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFormats)
+                                               : mFDP->ConsumeRandomLengthString();
+    obj->setPictureFormat(pictureFormat.c_str());
+    obj->getPictureFormat();
+
+    if (mFDP->ConsumeBool()) {
+        obj->dump();
+    } else {
+        int32_t fd = open("/dev/null", O_CLOEXEC | O_RDWR | O_CREAT);
+        Vector<String16> args = {};
+        obj->dump(fd, args);
+        close(fd);
+    }
+}
+
+void CameraParametersFuzzer::invokeCameraParameters() {
+    initCameraParameters<CameraParameters>(&mCameraParameters);
+    cameraParametersCommon<CameraParameters>(mCameraParameters);
+    initCameraParameters<CameraParameters2>(&mCameraParameters2);
+    cameraParametersCommon<CameraParameters2>(mCameraParameters2);
+
+    int32_t minFPS, maxFPS;
+    mCameraParameters->getPreviewFpsRange(&minFPS, &maxFPS);
+    string format = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFormats)
+                                        : mFDP->ConsumeRandomLengthString();
+    mCameraParameters->previewFormatToEnum(format.c_str());
+    mCameraParameters->isEmpty();
+    Vector<int32_t> formats;
+    mCameraParameters->getSupportedPreviewFormats(formats);
+}
+
+void CameraParametersFuzzer::process(const uint8_t* data, size_t size) {
+    mFDP = new FuzzedDataProvider(data, size);
+    invokeCameraParameters();
+    delete mFDP;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    CameraParametersFuzzer cameraParametersFuzzer;
+    cameraParametersFuzzer.process(data, size);
+    return 0;
+}
diff --git a/camera/tests/fuzzer/camera_SessionStats_fuzzer.cpp b/camera/tests/fuzzer/camera_SessionStats_fuzzer.cpp
new file mode 100644
index 0000000..c9bb20c
--- /dev/null
+++ b/camera/tests/fuzzer/camera_SessionStats_fuzzer.cpp
@@ -0,0 +1,170 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <CameraSessionStats.h>
+#include <binder/Parcel.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <camera/StringUtils.h>
+#include "camera2common.h"
+
+using namespace std;
+using namespace android;
+using namespace android::hardware;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+    CameraStreamStats* cameraStreamStats = nullptr;
+    Parcel parcelCamStreamStats;
+
+    if (fdp.ConsumeBool()) {
+        cameraStreamStats = new CameraStreamStats();
+    } else {
+        int32_t width = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt32(width);
+        }
+        int32_t height = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt32(height);
+        }
+        int32_t format = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt32(format);
+        }
+        float maxPreviewFps = fdp.ConsumeFloatingPoint<float>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeFloat(maxPreviewFps);
+        }
+        int32_t dataSpace = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt32(dataSpace);
+        }
+        int64_t usage = fdp.ConsumeIntegral<int64_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt64(usage);
+        }
+        int64_t requestCount = fdp.ConsumeIntegral<int64_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt64(requestCount);
+        }
+        int64_t errorCount = fdp.ConsumeIntegral<int64_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt64(errorCount);
+        }
+        int32_t maxHalBuffers = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt32(maxHalBuffers);
+        }
+        int32_t maxAppBuffers = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt32(maxAppBuffers);
+        }
+        int32_t dynamicRangeProfile = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt32(dynamicRangeProfile);
+        }
+        int32_t streamUseCase = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt32(streamUseCase);
+        }
+        int32_t colorSpace = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt32(colorSpace);
+        }
+
+        cameraStreamStats = new CameraStreamStats(width, height, format, maxPreviewFps, dataSpace,
+                                                  usage, maxHalBuffers, maxAppBuffers,
+                                                  dynamicRangeProfile, streamUseCase, colorSpace);
+    }
+
+    parcelCamStreamStats.setDataPosition(0);
+    cameraStreamStats->readFromParcel(&parcelCamStreamStats);
+    invokeReadWriteNullParcel<CameraStreamStats>(cameraStreamStats);
+    invokeReadWriteParcel<CameraStreamStats>(cameraStreamStats);
+
+    CameraSessionStats* cameraSessionStats = nullptr;
+    Parcel parcelCamSessionStats;
+
+    if (fdp.ConsumeBool()) {
+        cameraSessionStats = new CameraSessionStats();
+    } else {
+        string cameraId = fdp.ConsumeRandomLengthString();
+        if (fdp.ConsumeBool()) {
+            parcelCamSessionStats.writeString16(toString16(cameraId));
+        }
+        int32_t facing = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamSessionStats.writeInt32(facing);
+        }
+        int32_t newCameraState = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamSessionStats.writeInt32(newCameraState);
+        }
+        string clientName = fdp.ConsumeRandomLengthString();
+        if (fdp.ConsumeBool()) {
+            parcelCamSessionStats.writeString16(toString16(clientName));
+        }
+        int32_t apiLevel = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamSessionStats.writeInt32(apiLevel);
+        }
+        bool isNdk = fdp.ConsumeBool();
+        if (fdp.ConsumeBool()) {
+            parcelCamSessionStats.writeBool(isNdk);
+        }
+        int32_t latencyMs = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamSessionStats.writeInt32(latencyMs);
+        }
+
+        int64_t logId = fdp.ConsumeIntegral<int64_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamSessionStats.writeInt64(logId);
+        }
+
+        cameraSessionStats = new CameraSessionStats(cameraId, facing, newCameraState, clientName,
+                                                    apiLevel, isNdk, latencyMs, logId);
+    }
+
+    if (fdp.ConsumeBool()) {
+        int32_t internalReconfigure = fdp.ConsumeIntegral<int32_t>();
+        parcelCamSessionStats.writeInt32(internalReconfigure);
+    }
+
+    if (fdp.ConsumeBool()) {
+        int64_t requestCount = fdp.ConsumeIntegral<int64_t>();
+        parcelCamSessionStats.writeInt64(requestCount);
+    }
+
+    if (fdp.ConsumeBool()) {
+        int64_t resultErrorCount = fdp.ConsumeIntegral<int64_t>();
+        parcelCamSessionStats.writeInt64(resultErrorCount);
+    }
+
+    if (fdp.ConsumeBool()) {
+        bool deviceError = fdp.ConsumeBool();
+        parcelCamSessionStats.writeBool(deviceError);
+    }
+
+    parcelCamSessionStats.setDataPosition(0);
+    cameraSessionStats->readFromParcel(&parcelCamSessionStats);
+    invokeReadWriteNullParcel<CameraSessionStats>(cameraSessionStats);
+    invokeReadWriteParcel<CameraSessionStats>(cameraSessionStats);
+
+    delete cameraStreamStats;
+    delete cameraSessionStats;
+    return 0;
+}
diff --git a/camera/tests/fuzzer/camera_c2CaptureRequest_fuzzer.cpp b/camera/tests/fuzzer/camera_c2CaptureRequest_fuzzer.cpp
new file mode 100644
index 0000000..494ec1b
--- /dev/null
+++ b/camera/tests/fuzzer/camera_c2CaptureRequest_fuzzer.cpp
@@ -0,0 +1,129 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <CameraMetadata.h>
+#include <camera/StringUtils.h>
+#include <camera2/CaptureRequest.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <gui/Surface.h>
+#include <gui/SurfaceComposerClient.h>
+#include <gui/view/Surface.h>
+#include "camera2common.h"
+
+using namespace std;
+using namespace android;
+
+constexpr int32_t kNonZeroRangeMin = 0;
+constexpr int32_t kRangeMax = 1000;
+constexpr int32_t kSizeMin = 1;
+constexpr int32_t kSizeMax = 1000;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+
+    sp<CaptureRequest> captureRequest = new CaptureRequest();
+    Parcel parcelCamCaptureReq;
+
+    size_t physicalCameraSettingsSize =
+            fdp.ConsumeIntegralInRange<size_t>(kNonZeroRangeMin, kRangeMax);
+    if (fdp.ConsumeBool()) {
+        parcelCamCaptureReq.writeInt32(physicalCameraSettingsSize);
+    }
+
+    for (size_t idx = 0; idx < physicalCameraSettingsSize; ++idx) {
+        string id = fdp.ConsumeRandomLengthString();
+        if (fdp.ConsumeBool()) {
+            parcelCamCaptureReq.writeString16(toString16(id));
+        }
+        CameraMetadata cameraMetadata;
+        if (fdp.ConsumeBool()) {
+            cameraMetadata = CameraMetadata();
+        } else {
+            size_t entryCapacity = fdp.ConsumeIntegralInRange<size_t>(kNonZeroRangeMin, kRangeMax);
+            size_t dataCapacity = fdp.ConsumeIntegralInRange<size_t>(kNonZeroRangeMin, kRangeMax);
+            cameraMetadata = CameraMetadata(entryCapacity, dataCapacity);
+        }
+        captureRequest->mPhysicalCameraSettings.push_back({id, cameraMetadata});
+        if (fdp.ConsumeBool()) {
+            cameraMetadata.writeToParcel(&parcelCamCaptureReq);
+        }
+    }
+
+    captureRequest->mIsReprocess = fdp.ConsumeBool();
+    if (fdp.ConsumeBool()) {
+        parcelCamCaptureReq.writeInt32(captureRequest->mIsReprocess);
+    }
+
+    captureRequest->mSurfaceConverted = fdp.ConsumeBool();
+    if (fdp.ConsumeBool() && captureRequest->mSurfaceConverted) {
+        // 0-sized array
+        parcelCamCaptureReq.writeInt32(0);
+    }
+
+    if (!captureRequest->mSurfaceConverted) {
+        size_t surfaceListSize = fdp.ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
+        if (fdp.ConsumeBool()) {
+            parcelCamCaptureReq.writeInt32(surfaceListSize);
+        }
+        for (size_t idx = 0; idx < surfaceListSize; ++idx) {
+            sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
+            sp<SurfaceControl> surfaceControl = composerClient->createSurface(
+                    static_cast<String8>(fdp.ConsumeRandomLengthString().c_str()) /* name */,
+                    fdp.ConsumeIntegral<uint32_t>() /* width */,
+                    fdp.ConsumeIntegral<uint32_t>() /* height */,
+                    fdp.ConsumeIntegral<int32_t>() /* format */,
+                    fdp.ConsumeIntegral<int32_t>() /* flags */);
+            if (surfaceControl) {
+                sp<Surface> surface = surfaceControl->getSurface();
+                captureRequest->mSurfaceList.push_back(surface);
+                if (fdp.ConsumeBool()) {
+                    view::Surface surfaceShim;
+                    surfaceShim.name = String16((fdp.ConsumeRandomLengthString()).c_str());
+                    surfaceShim.graphicBufferProducer = surface->getIGraphicBufferProducer();
+                    surfaceShim.writeToParcel(&parcelCamCaptureReq);
+                }
+                surface.clear();
+            }
+            composerClient.clear();
+            surfaceControl.clear();
+        }
+    }
+
+    size_t indexListSize = fdp.ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
+    if (fdp.ConsumeBool()) {
+        parcelCamCaptureReq.writeInt32(indexListSize);
+    }
+
+    for (size_t idx = 0; idx < indexListSize; ++idx) {
+        int32_t streamIdx = fdp.ConsumeIntegral<int32_t>();
+        int32_t surfaceIdx = fdp.ConsumeIntegral<int32_t>();
+        captureRequest->mStreamIdxList.push_back(streamIdx);
+        captureRequest->mSurfaceIdxList.push_back(surfaceIdx);
+        if (fdp.ConsumeBool()) {
+            parcelCamCaptureReq.writeInt32(streamIdx);
+        }
+        if (fdp.ConsumeBool()) {
+            parcelCamCaptureReq.writeInt32(surfaceIdx);
+        }
+    }
+
+    invokeReadWriteParcelsp<CaptureRequest>(captureRequest);
+    invokeReadWriteNullParcelsp<CaptureRequest>(captureRequest);
+    parcelCamCaptureReq.setDataPosition(0);
+    captureRequest->readFromParcel(&parcelCamCaptureReq);
+    captureRequest.clear();
+    return 0;
+}
diff --git a/camera/tests/fuzzer/camera_c2ConcurrentCamera_fuzzer.cpp b/camera/tests/fuzzer/camera_c2ConcurrentCamera_fuzzer.cpp
new file mode 100644
index 0000000..12b5bc3
--- /dev/null
+++ b/camera/tests/fuzzer/camera_c2ConcurrentCamera_fuzzer.cpp
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <camera2/ConcurrentCamera.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include "camera2common.h"
+
+using namespace std;
+using namespace android;
+using namespace android::hardware::camera2::utils;
+
+constexpr int32_t kRangeMin = 0;
+constexpr int32_t kRangeMax = 1000;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+    ConcurrentCameraIdCombination camIdCombination;
+
+    if (fdp.ConsumeBool()) {
+        size_t concurrentCameraIdSize = fdp.ConsumeIntegralInRange<size_t>(kRangeMin, kRangeMax);
+        for (size_t idx = 0; idx < concurrentCameraIdSize; ++idx) {
+            string concurrentCameraId = fdp.ConsumeRandomLengthString();
+            camIdCombination.mConcurrentCameraIds.push_back(concurrentCameraId);
+        }
+    }
+
+    invokeReadWriteNullParcel<ConcurrentCameraIdCombination>(&camIdCombination);
+    invokeReadWriteParcel<ConcurrentCameraIdCombination>(&camIdCombination);
+
+    CameraIdAndSessionConfiguration camIdAndSessionConfig;
+
+    if (fdp.ConsumeBool()) {
+        camIdAndSessionConfig.mCameraId = fdp.ConsumeRandomLengthString();
+        if (fdp.ConsumeBool()) {
+            camIdAndSessionConfig.mSessionConfiguration = SessionConfiguration();
+        } else {
+            int32_t inputWidth = fdp.ConsumeIntegral<int32_t>();
+            int32_t inputHeight = fdp.ConsumeIntegral<int32_t>();
+            int32_t inputFormat = fdp.ConsumeIntegral<int32_t>();
+            int32_t operatingMode = fdp.ConsumeIntegral<int32_t>();
+            camIdAndSessionConfig.mSessionConfiguration =
+                    SessionConfiguration(inputWidth, inputHeight, inputFormat, operatingMode);
+        }
+    }
+
+    invokeReadWriteNullParcel<CameraIdAndSessionConfiguration>(&camIdAndSessionConfig);
+    invokeReadWriteParcel<CameraIdAndSessionConfiguration>(&camIdAndSessionConfig);
+    return 0;
+}
diff --git a/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp b/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp
new file mode 100644
index 0000000..2fe9a94
--- /dev/null
+++ b/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp
@@ -0,0 +1,110 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <camera2/OutputConfiguration.h>
+#include <camera2/SessionConfiguration.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <gui/IGraphicBufferProducer.h>
+#include <gui/Surface.h>
+#include <gui/SurfaceComposerClient.h>
+#include "camera2common.h"
+
+using namespace std;
+using namespace android;
+using namespace android::hardware::camera2::params;
+
+constexpr int32_t kSizeMin = 0;
+constexpr int32_t kSizeMax = 1000;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+
+    OutputConfiguration* outputConfiguration = nullptr;
+
+    if (fdp.ConsumeBool()) {
+        outputConfiguration = new OutputConfiguration();
+    } else {
+        int32_t rotation = fdp.ConsumeIntegral<int32_t>();
+        string physicalCameraId = fdp.ConsumeRandomLengthString();
+        int32_t surfaceSetID = fdp.ConsumeIntegral<int32_t>();
+        bool isShared = fdp.ConsumeBool();
+
+        if (fdp.ConsumeBool()) {
+            sp<IGraphicBufferProducer> iGBP = nullptr;
+            sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
+            sp<SurfaceControl> surfaceControl = composerClient->createSurface(
+                    static_cast<String8>(fdp.ConsumeRandomLengthString().c_str()) /* name */,
+                    fdp.ConsumeIntegral<uint32_t>() /* width */,
+                    fdp.ConsumeIntegral<uint32_t>() /* height */,
+                    fdp.ConsumeIntegral<int32_t>() /* format */,
+                    fdp.ConsumeIntegral<int32_t>() /* flags */);
+            if (surfaceControl) {
+                sp<Surface> surface = surfaceControl->getSurface();
+                iGBP = surface->getIGraphicBufferProducer();
+            }
+            outputConfiguration = new OutputConfiguration(iGBP, rotation, physicalCameraId,
+                                                          surfaceSetID, isShared);
+            iGBP.clear();
+            composerClient.clear();
+            surfaceControl.clear();
+        } else {
+            size_t iGBPSize = fdp.ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
+            vector<sp<IGraphicBufferProducer>> iGBPs;
+            for (size_t idx = 0; idx < iGBPSize; ++idx) {
+                sp<IGraphicBufferProducer> iGBP = nullptr;
+                sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
+                sp<SurfaceControl> surfaceControl = composerClient->createSurface(
+                        static_cast<String8>(fdp.ConsumeRandomLengthString().c_str()) /* name */,
+                        fdp.ConsumeIntegral<uint32_t>() /* width */,
+                        fdp.ConsumeIntegral<uint32_t>() /* height */,
+                        fdp.ConsumeIntegral<int32_t>() /* format */,
+                        fdp.ConsumeIntegral<int32_t>() /* flags */);
+                if (surfaceControl) {
+                    sp<Surface> surface = surfaceControl->getSurface();
+                    iGBP = surface->getIGraphicBufferProducer();
+                    iGBPs.push_back(iGBP);
+                }
+                iGBP.clear();
+                composerClient.clear();
+                surfaceControl.clear();
+            }
+            outputConfiguration = new OutputConfiguration(iGBPs, rotation, physicalCameraId,
+                                                          surfaceSetID, isShared);
+        }
+    }
+
+    outputConfiguration->getRotation();
+    outputConfiguration->getSurfaceSetID();
+    outputConfiguration->getSurfaceType();
+    outputConfiguration->getWidth();
+    outputConfiguration->getHeight();
+    outputConfiguration->isDeferred();
+    outputConfiguration->isShared();
+    outputConfiguration->getPhysicalCameraId();
+
+    OutputConfiguration outputConfiguration2;
+    outputConfiguration->gbpsEqual(outputConfiguration2);
+    outputConfiguration->sensorPixelModesUsedEqual(outputConfiguration2);
+    outputConfiguration->gbpsLessThan(outputConfiguration2);
+    outputConfiguration->sensorPixelModesUsedLessThan(outputConfiguration2);
+    outputConfiguration->getGraphicBufferProducers();
+    sp<IGraphicBufferProducer> gbp;
+    outputConfiguration->addGraphicProducer(gbp);
+    invokeReadWriteNullParcel<OutputConfiguration>(outputConfiguration);
+    invokeReadWriteParcel<OutputConfiguration>(outputConfiguration);
+    delete outputConfiguration;
+    return 0;
+}
diff --git a/camera/tests/fuzzer/camera_c2SessionConfiguration_fuzzer.cpp b/camera/tests/fuzzer/camera_c2SessionConfiguration_fuzzer.cpp
new file mode 100644
index 0000000..7cd0e59
--- /dev/null
+++ b/camera/tests/fuzzer/camera_c2SessionConfiguration_fuzzer.cpp
@@ -0,0 +1,88 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <camera2/OutputConfiguration.h>
+#include <camera2/SessionConfiguration.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <gui/IGraphicBufferProducer.h>
+#include <gui/Surface.h>
+#include <gui/SurfaceComposerClient.h>
+#include "camera2common.h"
+
+using namespace std;
+using namespace android;
+using namespace android::hardware::camera2::params;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+
+    SessionConfiguration* sessionConfiguration = nullptr;
+
+    if (fdp.ConsumeBool()) {
+        sessionConfiguration = new SessionConfiguration();
+    } else {
+        int32_t inputWidth = fdp.ConsumeIntegral<int32_t>();
+        int32_t inputHeight = fdp.ConsumeIntegral<int32_t>();
+        int32_t inputFormat = fdp.ConsumeIntegral<int32_t>();
+        int32_t operatingMode = fdp.ConsumeIntegral<int32_t>();
+        sessionConfiguration =
+                new SessionConfiguration(inputWidth, inputHeight, inputFormat, operatingMode);
+    }
+
+    sessionConfiguration->getInputWidth();
+    sessionConfiguration->getInputHeight();
+    sessionConfiguration->getInputFormat();
+    sessionConfiguration->getOperatingMode();
+
+    OutputConfiguration* outputConfiguration = nullptr;
+
+    if (fdp.ConsumeBool()) {
+        outputConfiguration = new OutputConfiguration();
+        sessionConfiguration->addOutputConfiguration(*outputConfiguration);
+    } else {
+        sp<IGraphicBufferProducer> iGBP = nullptr;
+        sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
+        sp<SurfaceControl> surfaceControl = composerClient->createSurface(
+                static_cast<String8>(fdp.ConsumeRandomLengthString().c_str()),
+                fdp.ConsumeIntegral<uint32_t>(), fdp.ConsumeIntegral<uint32_t>(),
+                fdp.ConsumeIntegral<int32_t>(), fdp.ConsumeIntegral<int32_t>());
+        if (surfaceControl) {
+            sp<Surface> surface = surfaceControl->getSurface();
+            iGBP = surface->getIGraphicBufferProducer();
+            surface.clear();
+        }
+        int32_t rotation = fdp.ConsumeIntegral<int32_t>();
+        string physicalCameraId = fdp.ConsumeRandomLengthString();
+        int32_t surfaceSetID = fdp.ConsumeIntegral<int32_t>();
+        bool isShared = fdp.ConsumeBool();
+        outputConfiguration =
+                new OutputConfiguration(iGBP, rotation, physicalCameraId, surfaceSetID, isShared);
+        sessionConfiguration->addOutputConfiguration(*outputConfiguration);
+    }
+
+    sessionConfiguration->getOutputConfigurations();
+    SessionConfiguration sessionConfiguration2;
+    sessionConfiguration->outputsEqual(sessionConfiguration2);
+    sessionConfiguration->outputsLessThan(sessionConfiguration2);
+    sessionConfiguration->inputIsMultiResolution();
+
+    invokeReadWriteNullParcel<SessionConfiguration>(sessionConfiguration);
+    invokeReadWriteParcel<SessionConfiguration>(sessionConfiguration);
+
+    delete sessionConfiguration;
+    delete outputConfiguration;
+    return 0;
+}
diff --git a/camera/tests/fuzzer/camera_c2SubmitInfo_fuzzer.cpp b/camera/tests/fuzzer/camera_c2SubmitInfo_fuzzer.cpp
new file mode 100644
index 0000000..dc40b0f
--- /dev/null
+++ b/camera/tests/fuzzer/camera_c2SubmitInfo_fuzzer.cpp
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <camera2/SubmitInfo.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include "camera2common.h"
+
+using namespace std;
+using namespace android;
+using namespace android::hardware::camera2::utils;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+    SubmitInfo submitInfo;
+    submitInfo.mRequestId = fdp.ConsumeIntegral<int32_t>();
+    submitInfo.mLastFrameNumber = fdp.ConsumeIntegral<int64_t>();
+    invokeReadWriteParcel<SubmitInfo>(&submitInfo);
+    return 0;
+}
diff --git a/camera/tests/fuzzer/camera_captureResult_fuzzer.cpp b/camera/tests/fuzzer/camera_captureResult_fuzzer.cpp
new file mode 100644
index 0000000..dd857d4
--- /dev/null
+++ b/camera/tests/fuzzer/camera_captureResult_fuzzer.cpp
@@ -0,0 +1,68 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <CaptureResult.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include "camera2common.h"
+
+using namespace std;
+using namespace android;
+using namespace android::hardware::camera2::impl;
+
+constexpr int32_t kSizeMin = 0;
+constexpr int32_t kSizeMax = 1000;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+    PhysicalCaptureResultInfo* physicalCaptureResultInfo = nullptr;
+
+    if (fdp.ConsumeBool()) {
+        physicalCaptureResultInfo = new PhysicalCaptureResultInfo();
+    } else {
+        string cameraId = fdp.ConsumeRandomLengthString();
+        CameraMetadata cameraMetadata = CameraMetadata();
+        physicalCaptureResultInfo = new PhysicalCaptureResultInfo(cameraId, cameraMetadata);
+    }
+
+    invokeReadWriteParcel<PhysicalCaptureResultInfo>(physicalCaptureResultInfo);
+
+    CaptureResult* captureResult = new CaptureResult();
+
+    if (fdp.ConsumeBool()) {
+        captureResult->mMetadata = CameraMetadata();
+    }
+    if (fdp.ConsumeBool()) {
+        captureResult->mResultExtras = CaptureResultExtras();
+        captureResult->mResultExtras.errorPhysicalCameraId = fdp.ConsumeRandomLengthString();
+        captureResult->mResultExtras.isValid();
+        invokeReadWriteNullParcel<CaptureResultExtras>(&(captureResult->mResultExtras));
+    }
+    if (fdp.ConsumeBool()) {
+        size_t physicalMetadatasSize = fdp.ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
+        for (size_t idx = 0; idx < physicalMetadatasSize; ++idx) {
+            captureResult->mPhysicalMetadatas.push_back(PhysicalCaptureResultInfo());
+        }
+    }
+
+    invokeReadWriteNullParcel<CaptureResult>(captureResult);
+    invokeReadWriteParcel<CaptureResult>(captureResult);
+    CaptureResult captureResult2(*captureResult);
+    CaptureResult captureResult3(std::move(captureResult2));
+
+    delete captureResult;
+    delete physicalCaptureResultInfo;
+    return 0;
+}
diff --git a/camera/tests/fuzzer/camera_fuzzer.cpp b/camera/tests/fuzzer/camera_fuzzer.cpp
new file mode 100644
index 0000000..d09a6dd
--- /dev/null
+++ b/camera/tests/fuzzer/camera_fuzzer.cpp
@@ -0,0 +1,402 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <Camera.h>
+#include <CameraBase.h>
+#include <CameraMetadata.h>
+#include <CameraParameters.h>
+#include <CameraUtils.h>
+#include <VendorTagDescriptor.h>
+#include <binder/IMemory.h>
+#include <binder/MemoryDealer.h>
+#include <fcntl.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <gui/IGraphicBufferProducer.h>
+#include <gui/Surface.h>
+#include <gui/SurfaceComposerClient.h>
+#include <unistd.h>
+#include <utils/Log.h>
+#include "camera2common.h"
+#include <android/hardware/ICameraService.h>
+
+using namespace std;
+using namespace android;
+using namespace android::hardware;
+
+constexpr int32_t kFrameRateMin = 1;
+constexpr int32_t kFrameRateMax = 120;
+constexpr int32_t kCamIdMin = 0;
+constexpr int32_t kCamIdMax = 1;
+constexpr int32_t kNumMin = 0;
+constexpr int32_t kNumMax = 1024;
+constexpr int32_t kMemoryDealerSize = 1000;
+constexpr int32_t kRangeMin = 0;
+constexpr int32_t kRangeMax = 1000;
+constexpr int32_t kSizeMin = 0;
+constexpr int32_t kSizeMax = 1000;
+
+constexpr int32_t kValidCMD[] = {CAMERA_CMD_START_SMOOTH_ZOOM,
+                                 CAMERA_CMD_STOP_SMOOTH_ZOOM,
+                                 CAMERA_CMD_SET_DISPLAY_ORIENTATION,
+                                 CAMERA_CMD_ENABLE_SHUTTER_SOUND,
+                                 CAMERA_CMD_PLAY_RECORDING_SOUND,
+                                 CAMERA_CMD_START_FACE_DETECTION,
+                                 CAMERA_CMD_STOP_FACE_DETECTION,
+                                 CAMERA_CMD_ENABLE_FOCUS_MOVE_MSG,
+                                 CAMERA_CMD_PING,
+                                 CAMERA_CMD_SET_VIDEO_BUFFER_COUNT,
+                                 CAMERA_CMD_SET_VIDEO_FORMAT};
+
+constexpr int32_t kValidVideoBufferMode[] = {ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV,
+                                             ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA,
+                                             ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE};
+
+constexpr int32_t kValidPreviewCallbackFlag[] = {
+        CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK,    CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK,
+        CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK,  CAMERA_FRAME_CALLBACK_FLAG_NOOP,
+        CAMERA_FRAME_CALLBACK_FLAG_CAMCORDER,      CAMERA_FRAME_CALLBACK_FLAG_CAMERA,
+        CAMERA_FRAME_CALLBACK_FLAG_BARCODE_SCANNER};
+
+constexpr int32_t kValidFacing[] = {android::hardware::CAMERA_FACING_BACK,
+                                    android::hardware::CAMERA_FACING_FRONT};
+
+constexpr int32_t kValidOrientation[] = {0, 90, 180, 270};
+
+class TestCameraListener : public CameraListener {
+  public:
+    virtual ~TestCameraListener() = default;
+
+    void notify(int32_t /*msgType*/, int32_t /*ext1*/, int32_t /*ext2*/) override { return; };
+    void postData(int32_t /*msgType*/, const sp<IMemory>& /*dataPtr*/,
+                  camera_frame_metadata_t* /*metadata*/) override {
+        return;
+    };
+    void postDataTimestamp(nsecs_t /*timestamp*/, int32_t /*msgType*/,
+                           const sp<IMemory>& /*dataPtr*/) override {
+        return;
+    };
+    void postRecordingFrameHandleTimestamp(nsecs_t /*timestamp*/,
+                                           native_handle_t* /*handle*/) override {
+        return;
+    };
+    void postRecordingFrameHandleTimestampBatch(
+            const std::vector<nsecs_t>& /*timestamps*/,
+            const std::vector<native_handle_t*>& /*handles*/) override {
+        return;
+    };
+};
+
+class CameraFuzzer : public ::android::hardware::BnCameraClient {
+  public:
+    void process(const uint8_t* data, size_t size);
+    ~CameraFuzzer() {
+        delete mCameraMetadata;
+        mComposerClient.clear();
+        mSurfaceControl.clear();
+        mSurface.clear();
+        mCamera.clear();
+        mMemoryDealer.clear();
+        mIMem.clear();
+        mCameraListener.clear();
+        mCameraService.clear();
+    }
+
+  private:
+    bool initCamera();
+    void initCameraMetadata();
+    void invokeCamera();
+    void invokeCameraUtils();
+    void invokeCameraBase();
+    void invokeCameraMetadata();
+    void invokeSetParameters();
+    sp<Camera> mCamera = nullptr;
+    CameraMetadata* mCameraMetadata = nullptr;
+    sp<SurfaceComposerClient> mComposerClient = nullptr;
+    sp<SurfaceControl> mSurfaceControl = nullptr;
+    sp<Surface> mSurface = nullptr;
+    sp<MemoryDealer> mMemoryDealer = nullptr;
+    sp<IMemory> mIMem = nullptr;
+    sp<TestCameraListener> mCameraListener = nullptr;
+    sp<ICameraService> mCameraService = nullptr;
+    sp<ICamera> cameraDevice = nullptr;
+    FuzzedDataProvider* mFDP = nullptr;
+
+    // CameraClient interface
+    void notifyCallback(int32_t, int32_t, int32_t) override { return; };
+    void dataCallback(int32_t, const sp<IMemory>&, camera_frame_metadata_t*) override { return; };
+    void dataCallbackTimestamp(nsecs_t, int32_t, const sp<IMemory>&) override { return; };
+    void recordingFrameHandleCallbackTimestamp(nsecs_t, native_handle_t*) override { return; };
+    void recordingFrameHandleCallbackTimestampBatch(const std::vector<nsecs_t>&,
+                                                    const std::vector<native_handle_t*>&) override {
+        return;
+    };
+};
+
+bool CameraFuzzer::initCamera() {
+    ProcessState::self()->startThreadPool();
+    sp<IServiceManager> sm = defaultServiceManager();
+    sp<IBinder> binder = sm->getService(String16("media.camera"));
+    mCameraService = interface_cast<ICameraService>(binder);
+    mCameraService->connect(this, mFDP->ConsumeIntegral<int32_t>() /* cameraId */,
+                            "CAMERAFUZZ", hardware::ICameraService::USE_CALLING_UID,
+                            hardware::ICameraService::USE_CALLING_PID,
+                            /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
+                            /*overrideToPortrait*/false, /*forceSlowJpegMode*/false, &cameraDevice);
+    mCamera = Camera::create(cameraDevice);
+    if (!mCamera) {
+        return false;
+    }
+    return true;
+}
+
+void CameraFuzzer::invokeSetParameters() {
+    String8 s = mCamera->getParameters();
+    CameraParameters params(s);
+    int32_t width = mFDP->ConsumeIntegral<int32_t>();
+    int32_t height = mFDP->ConsumeIntegral<int32_t>();
+    params.setVideoSize(width, height);
+    int32_t frameRate = mFDP->ConsumeIntegralInRange<int32_t>(kFrameRateMin, kFrameRateMax);
+    params.setPreviewFrameRate(frameRate);
+    mCamera->setParameters(params.flatten());
+}
+
+void CameraFuzzer::invokeCamera() {
+    if (!initCamera()) {
+        return;
+    }
+
+    int32_t cameraId = mFDP->ConsumeIntegralInRange<int32_t>(kCamIdMin, kCamIdMax);
+    Camera::getNumberOfCameras();
+    CameraInfo cameraInfo;
+    cameraInfo.facing = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFacing)
+                                            : mFDP->ConsumeIntegral<int>();
+    cameraInfo.orientation = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidOrientation)
+                                                 : mFDP->ConsumeIntegral<int>();
+    Camera::getCameraInfo(cameraId, /*overrideToPortrait*/false, &cameraInfo);
+    mCamera->reconnect();
+
+    mComposerClient = new SurfaceComposerClient;
+    mSurfaceControl = mComposerClient->createSurface(
+            static_cast<String8>(mFDP->ConsumeRandomLengthString().c_str()) /* name */,
+            mFDP->ConsumeIntegral<uint32_t>() /* width */,
+            mFDP->ConsumeIntegral<uint32_t>() /* height */,
+            mFDP->ConsumeIntegral<int32_t>() /* format */,
+            mFDP->ConsumeIntegral<int32_t>() /* flags */);
+    if (mSurfaceControl) {
+        mSurface = mSurfaceControl->getSurface();
+        mCamera->setPreviewTarget(mSurface->getIGraphicBufferProducer());
+        mCamera->startPreview();
+        mCamera->stopPreview();
+        mCamera->previewEnabled();
+        mCamera->startRecording();
+        mCamera->stopRecording();
+    }
+
+    mCamera->lock();
+    mCamera->unlock();
+    mCamera->autoFocus();
+    mCamera->cancelAutoFocus();
+
+    int32_t msgType = mFDP->ConsumeIntegral<int32_t>();
+    mCamera->takePicture(msgType);
+    invokeSetParameters();
+    int32_t cmd;
+    if (mFDP->ConsumeBool()) {
+        cmd = mFDP->PickValueInArray(kValidCMD);
+    } else {
+        cmd = mFDP->ConsumeIntegral<int32_t>();
+    }
+    int32_t arg1 = mFDP->ConsumeIntegral<int32_t>();
+    int32_t arg2 = mFDP->ConsumeIntegral<int32_t>();
+    mCamera->sendCommand(cmd, arg1, arg2);
+
+    int32_t videoBufferMode = mFDP->PickValueInArray(kValidVideoBufferMode);
+    mCamera->setVideoBufferMode(videoBufferMode);
+    if (mSurfaceControl) {
+        mSurface = mSurfaceControl->getSurface();
+        mCamera->setVideoTarget(mSurface->getIGraphicBufferProducer());
+    }
+    mCameraListener = sp<TestCameraListener>::make();
+    mCamera->setListener(mCameraListener);
+    int32_t previewCallbackFlag;
+    if (mFDP->ConsumeBool()) {
+        previewCallbackFlag = mFDP->PickValueInArray(kValidPreviewCallbackFlag);
+    } else {
+        previewCallbackFlag = mFDP->ConsumeIntegral<int32_t>();
+    }
+    mCamera->setPreviewCallbackFlags(previewCallbackFlag);
+    if (mSurfaceControl) {
+        mSurface = mSurfaceControl->getSurface();
+        mCamera->setPreviewCallbackTarget(mSurface->getIGraphicBufferProducer());
+    }
+
+    mCamera->getRecordingProxy();
+    int32_t mode = mFDP->ConsumeIntegral<int32_t>();
+    mCamera->setAudioRestriction(mode);
+    mCamera->getGlobalAudioRestriction();
+    mCamera->recordingEnabled();
+
+    mMemoryDealer = new MemoryDealer(kMemoryDealerSize);
+    mIMem = mMemoryDealer->allocate(kMemoryDealerSize);
+    mCamera->releaseRecordingFrame(mIMem);
+
+    int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
+    int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
+    native_handle_t* handle = native_handle_create(numFds, numInts);
+    mCamera->releaseRecordingFrameHandle(handle);
+
+    int32_t msgTypeNC = mFDP->ConsumeIntegral<int32_t>();
+    int32_t ext = mFDP->ConsumeIntegral<int32_t>();
+    int32_t ext2 = mFDP->ConsumeIntegral<int32_t>();
+    mCamera->notifyCallback(msgTypeNC, ext, ext2);
+
+    int64_t timestamp = mFDP->ConsumeIntegral<int64_t>();
+    mCamera->dataCallbackTimestamp(timestamp, msgTypeNC, mIMem);
+    mCamera->recordingFrameHandleCallbackTimestamp(timestamp, handle);
+}
+
+void CameraFuzzer::invokeCameraUtils() {
+    CameraMetadata staticMetadata;
+    int32_t orientVal = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidOrientation)
+                                            : mFDP->ConsumeIntegral<int32_t>();
+    uint8_t facingVal = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFacing)
+                                            : mFDP->ConsumeIntegral<uint8_t>();
+    staticMetadata.update(ANDROID_SENSOR_ORIENTATION, &orientVal, 1);
+    staticMetadata.update(ANDROID_LENS_FACING, &facingVal, 1);
+    int32_t transform = 0;
+    CameraUtils::getRotationTransform(
+            staticMetadata, mFDP->ConsumeIntegral<int32_t>() /* mirrorMode */, &transform /*out*/);
+    CameraUtils::isCameraServiceDisabled();
+}
+
+void CameraFuzzer::invokeCameraBase() {
+    CameraInfo cameraInfo;
+    cameraInfo.facing = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFacing)
+                                            : mFDP->ConsumeIntegral<int>();
+    cameraInfo.orientation = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidOrientation)
+                                                 : mFDP->ConsumeIntegral<int>();
+    invokeReadWriteParcel<CameraInfo>(&cameraInfo);
+
+    CameraStatus* cameraStatus = nullptr;
+
+    if (mFDP->ConsumeBool()) {
+        cameraStatus = new CameraStatus();
+    } else {
+        string cid = mFDP->ConsumeRandomLengthString();
+        int32_t status = mFDP->ConsumeIntegral<int32_t>();
+        size_t unavailSubIdsSize = mFDP->ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
+        vector<std::string> unavailSubIds;
+        for (size_t idx = 0; idx < unavailSubIdsSize; ++idx) {
+            string subId = mFDP->ConsumeRandomLengthString();
+            unavailSubIds.push_back(subId);
+        }
+        string clientPackage = mFDP->ConsumeRandomLengthString();
+        cameraStatus = new CameraStatus(cid, status, unavailSubIds, clientPackage);
+    }
+
+    invokeReadWriteParcel<CameraStatus>(cameraStatus);
+    delete cameraStatus;
+}
+
+void CameraFuzzer::initCameraMetadata() {
+    if (mFDP->ConsumeBool()) {
+        mCameraMetadata = new CameraMetadata();
+    } else {
+        size_t entryCapacity = mFDP->ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
+        size_t dataCapacity = mFDP->ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
+        mCameraMetadata = new CameraMetadata(entryCapacity, dataCapacity);
+    }
+}
+
+void CameraFuzzer::invokeCameraMetadata() {
+    initCameraMetadata();
+
+    const camera_metadata_t* metadataBuffer = nullptr;
+    if (mFDP->ConsumeBool()) {
+        metadataBuffer = mCameraMetadata->getAndLock();
+    }
+
+    mCameraMetadata->entryCount();
+    mCameraMetadata->isEmpty();
+    mCameraMetadata->bufferSize();
+    mCameraMetadata->sort();
+
+    uint32_t tag = mFDP->ConsumeIntegral<uint32_t>();
+    uint8_t dataUint8 = mFDP->ConsumeIntegral<uint8_t>();
+    int32_t dataInt32 = mFDP->ConsumeIntegral<int32_t>();
+    int64_t dataInt64 = mFDP->ConsumeIntegral<int64_t>();
+    float dataFloat = mFDP->ConsumeFloatingPoint<float>();
+    double dataDouble = mFDP->ConsumeFloatingPoint<double>();
+    camera_metadata_rational dataRational;
+    dataRational.numerator = mFDP->ConsumeIntegral<int32_t>();
+    dataRational.denominator = mFDP->ConsumeIntegral<int32_t>();
+    string dataStr = mFDP->ConsumeRandomLengthString();
+    String8 dataString(dataStr.c_str());
+    size_t data_count = 1;
+    mCameraMetadata->update(tag, &dataUint8, data_count);
+    mCameraMetadata->update(tag, &dataInt32, data_count);
+    mCameraMetadata->update(tag, &dataFloat, data_count);
+    mCameraMetadata->update(tag, &dataInt64, data_count);
+    mCameraMetadata->update(tag, &dataRational, data_count);
+    mCameraMetadata->update(tag, &dataDouble, data_count);
+    mCameraMetadata->update(tag, dataString);
+
+    uint32_t tagExists = mFDP->ConsumeBool() ? tag : mFDP->ConsumeIntegral<uint32_t>();
+    mCameraMetadata->exists(tagExists);
+
+    uint32_t tagFind = mFDP->ConsumeBool() ? tag : mFDP->ConsumeIntegral<uint32_t>();
+    mCameraMetadata->find(tagFind);
+
+    uint32_t tagErase = mFDP->ConsumeBool() ? tag : mFDP->ConsumeIntegral<uint32_t>();
+    mCameraMetadata->erase(tagErase);
+
+    mCameraMetadata->unlock(metadataBuffer);
+    std::vector<int32_t> tagsRemoved;
+    uint64_t vendorId = mFDP->ConsumeIntegral<uint64_t>();
+    mCameraMetadata->removePermissionEntries(vendorId, &tagsRemoved);
+
+    string name = mFDP->ConsumeRandomLengthString();
+    VendorTagDescriptor vTags;
+    uint32_t tagName = mFDP->ConsumeIntegral<uint32_t>();
+    mCameraMetadata->getTagFromName(name.c_str(), &vTags, &tagName);
+
+    invokeReadWriteNullParcel<CameraMetadata>(mCameraMetadata);
+    invokeReadWriteParcel<CameraMetadata>(mCameraMetadata);
+
+    int32_t fd = open("/dev/null", O_CLOEXEC | O_RDWR | O_CREAT, 0644);
+    int32_t verbosity = mFDP->ConsumeIntegralInRange<int32_t>(kRangeMin, kRangeMax);
+    int32_t indentation = mFDP->ConsumeIntegralInRange<int32_t>(kRangeMin, kRangeMax);
+    mCameraMetadata->dump(fd, verbosity, indentation);
+
+    CameraMetadata metadataCopy(mCameraMetadata->release());
+    CameraMetadata otherCameraMetadata;
+    mCameraMetadata->swap(otherCameraMetadata);
+    close(fd);
+}
+
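+// Fuzzer entry: builds the data provider and drives each API group once per input.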
+void CameraFuzzer::process(const uint8_t* data, size_t size) {
+    mFDP = new FuzzedDataProvider(data, size);
+    invokeCamera();
+    invokeCameraUtils();
+    invokeCameraBase();
+    invokeCameraMetadata();
+    delete mFDP;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    sp<CameraFuzzer> cameraFuzzer = new CameraFuzzer();
+    cameraFuzzer->process(data, size);
+    cameraFuzzer.clear();
+    return 0;
+}
diff --git a/camera/tests/fuzzer/camera_vendorTagDescriptor_fuzzer.cpp b/camera/tests/fuzzer/camera_vendorTagDescriptor_fuzzer.cpp
new file mode 100644
index 0000000..e14d9ce
--- /dev/null
+++ b/camera/tests/fuzzer/camera_vendorTagDescriptor_fuzzer.cpp
@@ -0,0 +1,213 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <VendorTagDescriptor.h>
+#include <binder/Parcel.h>
+#include <camera_metadata_tests_fake_vendor.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <system/camera_vendor_tags.h>
+
+#include <camera_metadata_hidden.h>
+#include "camera2common.h"
+
+using namespace std;
+using namespace android;
+
+constexpr int32_t kRangeMin = 0;
+constexpr int32_t kRangeMax = 1000;
+constexpr int32_t kVendorTagDescriptorId = -1;
+
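+// Vendor tag op stubs: zero_get_tag_count reports an empty tag table; the default_* ops return the VENDOR_*_ERR error values.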
+extern "C" {
+
+static int zero_get_tag_count(const vendor_tag_ops_t*) {
+    return 0;
+}
+
+static int default_get_tag_count(const vendor_tag_ops_t*) {
+    return VENDOR_TAG_COUNT_ERR;
+}
+
+static void default_get_all_tags(const vendor_tag_ops_t*, uint32_t*) {}
+
+static const char* default_get_section_name(const vendor_tag_ops_t*, uint32_t) {
+    return VENDOR_SECTION_NAME_ERR;
+}
+
+static const char* default_get_tag_name(const vendor_tag_ops_t*, uint32_t) {
+    return VENDOR_TAG_NAME_ERR;
+}
+
+static int default_get_tag_type(const vendor_tag_ops_t*, uint32_t) {
+    return VENDOR_TAG_TYPE_ERR;
+}
+
+} /*extern "C"*/
+
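+// Populates a vendor_tag_ops_t with the error-returning stubs above.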
+static void FillWithDefaults(vendor_tag_ops_t* vOps) {
+    vOps->get_tag_count = default_get_tag_count;
+    vOps->get_all_tags = default_get_all_tags;
+    vOps->get_section_name = default_get_section_name;
+    vOps->get_tag_name = default_get_tag_name;
+    vOps->get_tag_type = default_get_tag_type;
+}
+
+class VendorTagDescriptorFuzzer {
+  public:
+    void process(const uint8_t* data, size_t size);
+    ~VendorTagDescriptorFuzzer() {
+        mVendorTagDescriptor.clear();
+        mVendorTagDescriptorCache.clear();
+    }
+
+  private:
+    void initVendorTagDescriptor();
+    void invokeVendorTagDescriptor();
+    void invokeVendorTagDescriptorCache();
+    void invokeVendorTagErrorConditions();
+    sp<VendorTagDescriptor> mVendorTagDescriptor = nullptr;
+    sp<VendorTagDescriptorCache> mVendorTagDescriptorCache = nullptr;
+    FuzzedDataProvider* mFDP = nullptr;
+};
+
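+// Creates mVendorTagDescriptor either empty or from the fake vendor ops table (fakevendor_ops).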
+void VendorTagDescriptorFuzzer::initVendorTagDescriptor() {
+    if (mFDP->ConsumeBool()) {
+        mVendorTagDescriptor = new VendorTagDescriptor();
+    } else {
+        const vendor_tag_ops_t* vOps = &fakevendor_ops;
+        VendorTagDescriptor::createDescriptorFromOps(vOps, mVendorTagDescriptor);
+    }
+}
+
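+// Exercises VendorTagDescriptor: copy, global registration, per-tag name/type/section lookups, lookupTag, dump, and parcel round-trip.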
+void VendorTagDescriptorFuzzer::invokeVendorTagDescriptor() {
+    initVendorTagDescriptor();
+
+    sp<VendorTagDescriptor> vdesc = new VendorTagDescriptor();
+    vdesc->copyFrom(*mVendorTagDescriptor);
+    VendorTagDescriptor::setAsGlobalVendorTagDescriptor(mVendorTagDescriptor);
+    VendorTagDescriptor::getGlobalVendorTagDescriptor();
+
+    int32_t tagCount = mVendorTagDescriptor->getTagCount();
+    if (tagCount > 0) {
+        uint32_t tagArray[tagCount];
+        mVendorTagDescriptor->getTagArray(tagArray);
+        uint32_t tag;
+        for (int32_t i = 0; i < tagCount; ++i) {
+            tag = tagArray[i];
+            get_local_camera_metadata_section_name_vendor_id(tag, kVendorTagDescriptorId);
+            get_local_camera_metadata_tag_name_vendor_id(tag, kVendorTagDescriptorId);
+            get_local_camera_metadata_tag_type_vendor_id(tag, kVendorTagDescriptorId);
+            mVendorTagDescriptor->getSectionIndex(tag);
+        }
+        mVendorTagDescriptor->getAllSectionNames();
+    }
+
+    String8 name((mFDP->ConsumeRandomLengthString()).c_str());
+    String8 section((mFDP->ConsumeRandomLengthString()).c_str());
+    uint32_t lookupTag;
+    mVendorTagDescriptor->lookupTag(name, section, &lookupTag);
+
+    int32_t fd = open("/dev/null", O_CLOEXEC | O_RDWR | O_CREAT, 0644);
+    int32_t verbosity = mFDP->ConsumeIntegralInRange<int32_t>(kRangeMin, kRangeMax);
+    int32_t indentation = mFDP->ConsumeIntegralInRange<int32_t>(kRangeMin, kRangeMax);
+    mVendorTagDescriptor->dump(fd, verbosity, indentation);
+
+    invokeReadWriteParcelsp<VendorTagDescriptor>(mVendorTagDescriptor);
+    VendorTagDescriptor::clearGlobalVendorTagDescriptor();
+    vdesc.clear();
+    close(fd);
+}
+
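+// Exercises VendorTagDescriptorCache: add and look up a descriptor by id, per-tag queries, dump, and parcel round-trip.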
+void VendorTagDescriptorFuzzer::invokeVendorTagDescriptorCache() {
+    mVendorTagDescriptorCache = new VendorTagDescriptorCache();
+    uint64_t id = mFDP->ConsumeIntegral<uint64_t>();
+    initVendorTagDescriptor();
+
+    mVendorTagDescriptorCache->addVendorDescriptor(id, mVendorTagDescriptor);
+    VendorTagDescriptorCache::setAsGlobalVendorTagCache(mVendorTagDescriptorCache);
+    VendorTagDescriptorCache::getGlobalVendorTagCache();
+    sp<VendorTagDescriptor> tagDesc;
+    mVendorTagDescriptorCache->getVendorTagDescriptor(id, &tagDesc);
+
+    int32_t tagCount = mVendorTagDescriptorCache->getTagCount(id);
+    if (tagCount > 0) {
+        uint32_t tagArray[tagCount];
+        mVendorTagDescriptorCache->getTagArray(tagArray, id);
+        uint32_t tag;
+        for (int32_t i = 0; i < tagCount; ++i) {
+            tag = tagArray[i];
+            get_local_camera_metadata_section_name_vendor_id(tag, id);
+            get_local_camera_metadata_tag_name_vendor_id(tag, id);
+            get_local_camera_metadata_tag_type_vendor_id(tag, id);
+        }
+    }
+
+    int32_t fd = open("/dev/null", O_CLOEXEC | O_RDWR | O_CREAT, 0644);
+    int32_t verbosity = mFDP->ConsumeIntegralInRange<int32_t>(kRangeMin, kRangeMax);
+    int32_t indentation = mFDP->ConsumeIntegralInRange<int32_t>(kRangeMin, kRangeMax);
+    mVendorTagDescriptorCache->dump(fd, verbosity, indentation);
+
+    invokeReadWriteParcelsp<VendorTagDescriptorCache>(mVendorTagDescriptorCache);
+    VendorTagDescriptorCache::isVendorCachePresent(id);
+    mVendorTagDescriptorCache->getVendorIdsAndTagDescriptors();
+    mVendorTagDescriptorCache->clearGlobalVendorTagCache();
+    tagDesc.clear();
+    close(fd);
+}
+
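+// Drives error paths: createDescriptorFromOps with a NULL ops table, or an ops table that reports zero tags, followed by lookups of an arbitrary tag.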
+void VendorTagDescriptorFuzzer::invokeVendorTagErrorConditions() {
+    sp<VendorTagDescriptor> vDesc;
+    vendor_tag_ops_t vOps;
+    FillWithDefaults(&vOps);
+    vOps.get_tag_count = zero_get_tag_count;
+
+    if (mFDP->ConsumeBool()) {
+        VendorTagDescriptor::createDescriptorFromOps(/*vOps*/ NULL, vDesc);
+    } else {
+        VendorTagDescriptor::createDescriptorFromOps(&vOps, vDesc);
+        int32_t tagCount = vDesc->getTagCount();
+        uint32_t badTag = mFDP->ConsumeIntegral<uint32_t>();
+        uint32_t badTagArray[tagCount + 1];
+        vDesc->getTagArray(badTagArray);
+        vDesc->getSectionName(badTag);
+        vDesc->getTagName(badTag);
+        vDesc->getTagType(badTag);
+        VendorTagDescriptor::clearGlobalVendorTagDescriptor();
+        VendorTagDescriptor::getGlobalVendorTagDescriptor();
+        VendorTagDescriptor::setAsGlobalVendorTagDescriptor(vDesc);
+        invokeReadWriteNullParcelsp<VendorTagDescriptor>(vDesc);
+        vDesc.clear();
+    }
+}
+
+void VendorTagDescriptorFuzzer::process(const uint8_t* data, size_t size) {
+    mFDP = new FuzzedDataProvider(data, size);
+    invokeVendorTagDescriptor();
+    invokeVendorTagDescriptorCache();
+    invokeVendorTagErrorConditions();
+    delete mFDP;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    VendorTagDescriptorFuzzer vendorTagDescriptorFuzzer;
+    vendorTagDescriptorFuzzer.process(data, size);
+    return 0;
+}