Merge "Do not include selected output in secondary output list." into main
diff --git a/camera/Android.bp b/camera/Android.bp
index 25b5e2c..71c1673 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -82,6 +82,8 @@
         include_dirs: [
             "frameworks/native/aidl/gui",
             "frameworks/native/libs/permission/aidl",
+            "hardware/interfaces/common/fmq/aidl",
+            "hardware/interfaces/common/aidl",
         ],
     },
 
@@ -112,6 +114,8 @@
     ],
 
     shared_libs: [
+        "android.hardware.common.fmq-V1-cpp",
+        "android.hardware.common-V2-cpp",
         "camera_platform_flags_c_lib",
         "framework-permission-aidl-cpp",
         "lib-platform-compat-native-api",
@@ -136,6 +140,8 @@
     ],
     export_shared_lib_headers: [
         "framework-permission-aidl-cpp",
+        "android.hardware.common.fmq-V1-cpp",
+        "android.hardware.common-V2-cpp",
         "libcamera_metadata",
         "libgui",
         "libnativewindow",
@@ -187,6 +193,7 @@
         "aidl/android/hardware/camera2/ICameraInjectionCallback.aidl",
         "aidl/android/hardware/camera2/ICameraInjectionSession.aidl",
         "aidl/android/hardware/camera2/ICameraOfflineSession.aidl",
+        "aidl/android/hardware/camera2/CameraMetadataInfo.aidl",
     ],
     path: "aidl",
 }
diff --git a/camera/CaptureResult.cpp b/camera/CaptureResult.cpp
index 9ff2578..254984f 100644
--- a/camera/CaptureResult.cpp
+++ b/camera/CaptureResult.cpp
@@ -98,7 +98,6 @@
     status_t res;
 
     mPhysicalCameraId = "";
-    mPhysicalCameraMetadata.clear();
 
     String16 physicalCameraId;
     if ((res = parcel->readString16(&physicalCameraId)) != OK) {
@@ -107,10 +106,11 @@
     }
     mPhysicalCameraId = toStdString(physicalCameraId);
 
-    if ((res = mPhysicalCameraMetadata.readFromParcel(parcel)) != OK) {
+    if ((res = mCameraMetadataInfo.readFromParcel(parcel)) != OK) {
         ALOGE("%s: Failed to read metadata from parcel: %d", __FUNCTION__, res);
         return res;
     }
+
     return OK;
 }
 
@@ -121,11 +121,13 @@
                 __FUNCTION__, res);
         return res;
     }
-    if ((res = mPhysicalCameraMetadata.writeToParcel(parcel)) != OK) {
+
+    if ((res = mCameraMetadataInfo.writeToParcel(parcel)) != OK) {
         ALOGE("%s: Failed to write physical camera metadata to parcel: %d",
                 __FUNCTION__, res);
         return res;
     }
+
     return OK;
 }
 
@@ -178,20 +180,12 @@
     }
 
     for (int32_t i = 0; i < physicalMetadataCount; i++) {
-        String16 cameraId;
-        if ((res = parcel->readString16(&cameraId)) != OK) {
-            ALOGE("%s: Failed to read camera id: %d", __FUNCTION__, res);
+        PhysicalCaptureResultInfo result;
+        if ((res = result.readFromParcel(parcel)) != OK) {
+            ALOGE("%s: Failed to read physical result from parcel: %d", __FUNCTION__, res);
             return res;
         }
-
-        CameraMetadata physicalMetadata;
-        if ((res = physicalMetadata.readFromParcel(parcel)) != OK) {
-            ALOGE("%s: Failed to read metadata from parcel: %d", __FUNCTION__, res);
-            return res;
-        }
-
-        mPhysicalMetadatas.emplace(mPhysicalMetadatas.end(), toStdString(cameraId),
-                physicalMetadata);
+        mPhysicalMetadatas.emplace(mPhysicalMetadatas.end(), result);
     }
     ALOGV("%s: Read physical metadata from parcel", __FUNCTION__);
 
@@ -232,13 +226,8 @@
         return BAD_VALUE;
     }
     for (const auto& physicalMetadata : mPhysicalMetadatas) {
-        if ((res = parcel->writeString16(toString16(physicalMetadata.mPhysicalCameraId))) != OK) {
-            ALOGE("%s: Failed to write physical camera ID to parcel: %d",
-                    __FUNCTION__, res);
-            return res;
-        }
-        if ((res = physicalMetadata.mPhysicalCameraMetadata.writeToParcel(parcel)) != OK) {
-            ALOGE("%s: Failed to write physical camera metadata to parcel: %d",
+        if ((res = physicalMetadata.writeToParcel(parcel)) != OK) {
+            ALOGE("%s: Failed to write physicalMetadata to parcel: %d",
                     __FUNCTION__, res);
             return res;
         }
diff --git a/camera/aidl/android/hardware/camera2/CameraMetadataInfo.aidl b/camera/aidl/android/hardware/camera2/CameraMetadataInfo.aidl
new file mode 100644
index 0000000..74c207e
--- /dev/null
+++ b/camera/aidl/android/hardware/camera2/CameraMetadataInfo.aidl
@@ -0,0 +1,25 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2;
+
+import android.hardware.camera2.impl.CameraMetadataNative;
+
+/** @hide */
+union CameraMetadataInfo {
+    long fmqSize;
+    CameraMetadataNative metadata;
+}
diff --git a/camera/aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl b/camera/aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl
index 49e9920..68e6354 100644
--- a/camera/aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl
+++ b/camera/aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl
@@ -16,6 +16,7 @@
 
 package android.hardware.camera2;
 
+import android.hardware.camera2.CameraMetadataInfo;
 import android.hardware.camera2.impl.CameraMetadataNative;
 import android.hardware.camera2.impl.CaptureResultExtras;
 import android.hardware.camera2.impl.PhysicalCaptureResultInfo;
@@ -36,7 +37,7 @@
     oneway void onDeviceError(int errorCode, in CaptureResultExtras resultExtras);
     oneway void onDeviceIdle();
     oneway void onCaptureStarted(in CaptureResultExtras resultExtras, long timestamp);
-    oneway void onResultReceived(in CameraMetadataNative result,
+    oneway void onResultReceived(in CameraMetadataInfo resultInfo,
                                  in CaptureResultExtras resultExtras,
                                  in PhysicalCaptureResultInfo[] physicalCaptureResultInfos);
     oneway void onPrepared(int streamId);
diff --git a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
index c1da126..a9191eb 100644
--- a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
+++ b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
@@ -23,6 +23,8 @@
 import android.hardware.camera2.params.OutputConfiguration;
 import android.hardware.camera2.params.SessionConfiguration;
 import android.hardware.camera2.utils.SubmitInfo;
+import android.hardware.common.fmq.MQDescriptor;
+import android.hardware.common.fmq.SynchronizedReadWrite;
 import android.view.Surface;
 
 /** @hide */
@@ -173,6 +175,7 @@
 
     void finalizeOutputConfigurations(int streamId, in OutputConfiguration outputConfiguration);
 
+    MQDescriptor<byte, SynchronizedReadWrite> getCaptureResultMetadataQueue();
 
     // Keep in sync with public API in
     // frameworks/base/core/java/android/hardware/camera2/CameraDevice.java
diff --git a/camera/camera2/CaptureRequest.cpp b/camera/camera2/CaptureRequest.cpp
index 071f34e..d2fcde6 100644
--- a/camera/camera2/CaptureRequest.cpp
+++ b/camera/camera2/CaptureRequest.cpp
@@ -24,6 +24,7 @@
 #include <camera/StringUtils.h>
 
 #include <binder/Parcel.h>
+#include <gui/Flags.h>  // remove with WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
 #include <gui/Surface.h>
 #include <gui/view/Surface.h>
 
@@ -112,11 +113,14 @@
             return err;
         }
 
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+        sp<Surface> surface = surfaceShim.toSurface();
+#else
         sp<Surface> surface;
         if (surfaceShim.graphicBufferProducer != NULL) {
             surface = new Surface(surfaceShim.graphicBufferProducer);
         }
-
+#endif
         mSurfaceList.push_back(surface);
     }
 
@@ -206,9 +210,13 @@
             parcel->writeString16(String16("android.view.Surface"));
 
             // Surface.writeToParcel
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+            view::Surface surfaceShim = view::Surface::fromSurface(mSurfaceList[i]);
+#else
             view::Surface surfaceShim;
             surfaceShim.name = String16("unknown_name");
             surfaceShim.graphicBufferProducer = mSurfaceList[i]->getIGraphicBufferProducer();
+#endif
             if ((err = surfaceShim.writeToParcel(parcel)) != OK) {
                 ALOGE("%s: Failed to write output target Surface %d to parcel: %s (%d)",
                         __FUNCTION__, i, strerror(-err), err);
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
index a89d7ca..f67214b 100644
--- a/camera/camera2/OutputConfiguration.cpp
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -37,9 +37,8 @@
 const int OutputConfiguration::ROTATION_0 = 0;
 const int OutputConfiguration::INVALID_SET_ID = -1;
 
-const std::vector<sp<IGraphicBufferProducer>>&
-        OutputConfiguration::getGraphicBufferProducers() const {
-    return mGbps;
+const std::vector<ParcelableSurfaceType>& OutputConfiguration::getSurfaces() const {
+    return mSurfaces;
 }
 
 int OutputConfiguration::getRotation() const {
@@ -102,24 +101,23 @@
     return mMirrorMode;
 }
 
-int OutputConfiguration::getMirrorMode(sp<IGraphicBufferProducer> surface) const {
+int OutputConfiguration::getMirrorMode(ParcelableSurfaceType surface) const {
     if (!flags::mirror_mode_shared_surfaces()) {
         return mMirrorMode;
     }
 
-    if (mGbps.size() != mMirrorModeForProducers.size()) {
-        ALOGE("%s: mGbps size doesn't match mMirrorModeForProducers: %zu vs %zu",
-                __FUNCTION__, mGbps.size(), mMirrorModeForProducers.size());
+    if (mSurfaces.size() != mMirrorModeForProducers.size()) {
+        ALOGE("%s: mSurfaces size doesn't match mMirrorModeForProducers: %zu vs %zu",
+                __FUNCTION__, mSurfaces.size(), mMirrorModeForProducers.size());
         return mMirrorMode;
     }
 
     // Use per-producer mirror mode if available.
-    for (size_t i = 0; i < mGbps.size(); i++) {
-        if (mGbps[i] == surface) {
+    for (size_t i = 0; i < mSurfaces.size(); i++) {
+        if (mSurfaces[i] == surface) {
             return mMirrorModeForProducers[i];
         }
     }
-
     // For surface that doesn't belong to this output configuration, use
     // mMirrorMode as default.
     ALOGW("%s: Surface doesn't belong to this OutputConfiguration!", __FUNCTION__);
@@ -144,9 +142,9 @@
 
 bool OutputConfiguration::isComplete() const {
     return !((mSurfaceType == SURFACE_TYPE_MEDIA_RECORDER ||
-             mSurfaceType == SURFACE_TYPE_MEDIA_CODEC ||
-             mSurfaceType == SURFACE_TYPE_IMAGE_READER) &&
-             mGbps.empty());
+              mSurfaceType == SURFACE_TYPE_MEDIA_CODEC ||
+              mSurfaceType == SURFACE_TYPE_IMAGE_READER) &&
+             mSurfaces.empty());
 }
 
 OutputConfiguration::OutputConfiguration() :
@@ -348,7 +346,7 @@
         ALOGV("%s: OutputConfiguration: %p, name %s", __FUNCTION__,
                 surface.graphicBufferProducer.get(),
                 toString8(surface.name).c_str());
-        mGbps.push_back(surface.graphicBufferProducer);
+        mSurfaces.push_back(flagtools::toParcelableSurfaceType(surface));
     }
 
     mSensorPixelModesUsed = std::move(sensorPixelModesUsed);
@@ -369,10 +367,10 @@
     return err;
 }
 
-OutputConfiguration::OutputConfiguration(sp<IGraphicBufferProducer>& gbp, int rotation,
+OutputConfiguration::OutputConfiguration(ParcelableSurfaceType& surface, int rotation,
         const std::string& physicalId,
         int surfaceSetID, bool isShared) {
-    mGbps.push_back(gbp);
+    mSurfaces.push_back(surface);
     mRotation = rotation;
     mSurfaceSetID = surfaceSetID;
     mIsDeferred = false;
@@ -392,17 +390,17 @@
 }
 
 OutputConfiguration::OutputConfiguration(
-        const std::vector<sp<IGraphicBufferProducer>>& gbps,
+        const std::vector<ParcelableSurfaceType>& surfaces,
     int rotation, const std::string& physicalCameraId, int surfaceSetID,  int surfaceType,
     int width, int height, bool isShared)
-  : mGbps(gbps), mRotation(rotation), mSurfaceSetID(surfaceSetID), mSurfaceType(surfaceType),
-    mWidth(width), mHeight(height), mIsDeferred(false), mIsShared(isShared),
-    mPhysicalCameraId(physicalCameraId), mIsMultiResolution(false),
+  : mSurfaces(surfaces), mRotation(rotation), mSurfaceSetID(surfaceSetID),
+    mSurfaceType(surfaceType), mWidth(width), mHeight(height), mIsDeferred(false),
+    mIsShared(isShared), mPhysicalCameraId(physicalCameraId), mIsMultiResolution(false),
     mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
     mColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
     mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
     mTimestampBase(TIMESTAMP_BASE_DEFAULT),
-    mMirrorMode(MIRROR_MODE_AUTO), mMirrorModeForProducers(gbps.size(), mMirrorMode),
+    mMirrorMode(MIRROR_MODE_AUTO), mMirrorModeForProducers(surfaces.size(), mMirrorMode),
     mUseReadoutTimestamp(false), mFormat(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED),
     mDataspace(0), mUsage(0) { }
 
@@ -432,14 +430,18 @@
     err = parcel->writeInt32(mIsShared ? 1 : 0);
     if (err != OK) return err;
 
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+    err = parcel->writeParcelableVector(mSurfaces);
+#else
     std::vector<view::Surface> surfaceShims;
-    for (auto& gbp : mGbps) {
+    for (auto& gbp : mSurfaces) {
         view::Surface surfaceShim;
         surfaceShim.name = String16("unknown_name"); // name of surface
         surfaceShim.graphicBufferProducer = gbp;
         surfaceShims.push_back(surfaceShim);
     }
     err = parcel->writeParcelableVector(surfaceShims);
+#endif
     if (err != OK) return err;
 
     String16 physicalCameraId = toString16(mPhysicalCameraId);
@@ -513,10 +515,9 @@
     return false;
 }
 
-bool OutputConfiguration::gbpsEqual(const OutputConfiguration& other) const {
-    const std::vector<sp<IGraphicBufferProducer> >& otherGbps =
-            other.getGraphicBufferProducers();
-    return simpleVectorsEqual(otherGbps, mGbps);
+bool OutputConfiguration::surfacesEqual(const OutputConfiguration& other) const {
+    const std::vector<ParcelableSurfaceType>& otherSurfaces = other.getSurfaces();
+    return simpleVectorsEqual(otherSurfaces, mSurfaces);
 }
 
 bool OutputConfiguration::sensorPixelModesUsedEqual(const OutputConfiguration& other) const {
@@ -527,7 +528,6 @@
 bool OutputConfiguration::mirrorModesEqual(const OutputConfiguration& other) const {
     const std::vector<int>& otherMirrorModes = other.getMirrorModes();
     return simpleVectorsEqual(otherMirrorModes, mMirrorModeForProducers);
-
 }
 
 bool OutputConfiguration::sensorPixelModesUsedLessThan(const OutputConfiguration& other) const {
@@ -540,17 +540,16 @@
     return simpleVectorsLessThan(mMirrorModeForProducers, otherMirrorModes);
 }
 
-bool OutputConfiguration::gbpsLessThan(const OutputConfiguration& other) const {
-    const std::vector<sp<IGraphicBufferProducer> >& otherGbps =
-            other.getGraphicBufferProducers();
+bool OutputConfiguration::surfacesLessThan(const OutputConfiguration& other) const {
+    const std::vector<ParcelableSurfaceType>& otherSurfaces = other.getSurfaces();
 
-    if (mGbps.size() !=  otherGbps.size()) {
-        return mGbps.size() < otherGbps.size();
+    if (mSurfaces.size() != otherSurfaces.size()) {
+        return mSurfaces.size() < otherSurfaces.size();
     }
 
-    for (size_t i = 0; i < mGbps.size(); i++) {
-        if (mGbps[i] != otherGbps[i]) {
-            return mGbps[i] < otherGbps[i];
+    for (size_t i = 0; i < mSurfaces.size(); i++) {
+        if (mSurfaces[i] != otherSurfaces[i]) {
+            return mSurfaces[i] < otherSurfaces[i];
         }
     }
 
diff --git a/camera/camera_platform.aconfig b/camera/camera_platform.aconfig
index 3b199b3..5682ad2 100644
--- a/camera/camera_platform.aconfig
+++ b/camera/camera_platform.aconfig
@@ -263,3 +263,13 @@
         purpose: PURPOSE_BUGFIX
     }
 }
+
+flag {
+    namespace: "camera_platform"
+    name: "fmq_metadata"
+    description: "Allow CameraMetadata transfer for ndk / sdk clients."
+    bug: "362791857"
+    metadata {
+        purpose: PURPOSE_BUGFIX
+    }
+}
diff --git a/camera/include/camera/CameraMetadata.h b/camera/include/camera/CameraMetadata.h
index 2903dfb..10ecc4f 100644
--- a/camera/include/camera/CameraMetadata.h
+++ b/camera/include/camera/CameraMetadata.h
@@ -247,6 +247,38 @@
      */
     metadata_vendor_id_t getVendorId() const;
 
+    // Needed for auto-generated code if CameraMetadata is used in
+    // parcelables in .aidl files.
+    inline bool operator == (const CameraMetadata& rhs) const {
+        return mBuffer == rhs.mBuffer;
+    }
+
+    inline bool operator < (const CameraMetadata& rhs) const {
+        return mBuffer < rhs.mBuffer;
+    }
+
+    inline bool operator != (const CameraMetadata& rhs) const {
+        return !(*this == rhs);
+    }
+
+    inline bool operator > (const CameraMetadata& rhs) const {
+        return rhs < *this;
+    }
+
+    inline bool operator >= (const CameraMetadata& rhs) const {
+        return !(*this < rhs);
+    }
+
+    inline bool operator <= (const CameraMetadata& rhs) const {
+        return !(rhs < *this);
+    }
+
+    inline std::string toString() const {
+        std::string descStr = "CameraMetadata";
+        return descStr;
+    }
+
+
   private:
     camera_metadata_t *mBuffer;
     mutable bool       mLocked;
@@ -265,7 +297,6 @@
      * Resize metadata buffer if needed by reallocating it and copying it over.
      */
     status_t resizeIfNeeded(size_t extraEntries, size_t extraData);
-
 };
 
 namespace hardware {
diff --git a/camera/include/camera/CaptureResult.h b/camera/include/camera/CaptureResult.h
index e08c9ca..cc6b529 100644
--- a/camera/include/camera/CaptureResult.h
+++ b/camera/include/camera/CaptureResult.h
@@ -20,7 +20,7 @@
 #include <utils/RefBase.h>
 #include <binder/Parcelable.h>
 #include <camera/CameraMetadata.h>
-
+#include <android/hardware/camera2/CameraMetadataInfo.h>
 
 namespace android {
 
@@ -145,19 +145,26 @@
 };
 
 struct PhysicalCaptureResultInfo : public android::Parcelable {
-
+    using CameraMetadataInfo = android::hardware::camera2::CameraMetadataInfo;
     PhysicalCaptureResultInfo()
         : mPhysicalCameraId(),
-          mPhysicalCameraMetadata() {
+          mCameraMetadataInfo() {
     }
     PhysicalCaptureResultInfo(const std::string& cameraId,
             const CameraMetadata& cameraMetadata)
-            : mPhysicalCameraId(cameraId),
-              mPhysicalCameraMetadata(cameraMetadata) {
+            : mPhysicalCameraId(cameraId) {
+        mCameraMetadataInfo.set<CameraMetadataInfo::metadata>(cameraMetadata);
+    }
+
+    PhysicalCaptureResultInfo(const std::string& cameraId,
+            uint64_t fmqSize)
+            : mPhysicalCameraId(cameraId) {
+        mCameraMetadataInfo.set<CameraMetadataInfo::fmqSize>(fmqSize);
     }
 
     std::string mPhysicalCameraId;
-    CameraMetadata mPhysicalCameraMetadata;
+
+    CameraMetadataInfo mCameraMetadataInfo;
 
     virtual status_t                readFromParcel(const android::Parcel* parcel) override;
     virtual status_t                writeToParcel(android::Parcel* parcel) const override;
diff --git a/camera/include/camera/android/hardware/ICamera.h b/camera/include/camera/android/hardware/ICamera.h
index eb887fb..3c1670a 100644
--- a/camera/include/camera/android/hardware/ICamera.h
+++ b/camera/include/camera/android/hardware/ICamera.h
@@ -34,8 +34,13 @@
 
 class ICameraClient;
 
-class ICamera: public android::IInterface
-{
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+typedef Surface ProducerType;
+#else
+typedef IGraphicBufferProducer ProducerType;
+#endif
+
+class ICamera : public android::IInterface {
     /**
      * Keep up-to-date with ICamera.aidl in frameworks/base
      */
diff --git a/camera/include/camera/camera2/OutputConfiguration.h b/camera/include/camera/camera2/OutputConfiguration.h
index 671d065..13bedb3 100644
--- a/camera/include/camera/camera2/OutputConfiguration.h
+++ b/camera/include/camera/camera2/OutputConfiguration.h
@@ -19,7 +19,12 @@
 
 #include <string>
 
+#include <gui/Flags.h>  // remove with WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+#include <gui/view/Surface.h>
+#else
 #include <gui/IGraphicBufferProducer.h>
+#endif
 #include <binder/Parcelable.h>
 
 namespace android {
@@ -59,7 +64,7 @@
         MIRROR_MODE_V = 3,
     };
 
-    const std::vector<sp<IGraphicBufferProducer>>& getGraphicBufferProducers() const;
+    const std::vector<ParcelableSurfaceType>& getSurfaces() const;
     int                        getRotation() const;
     int                        getSurfaceSetID() const;
     int                        getSurfaceType() const;
@@ -73,7 +78,7 @@
     bool                       isMultiResolution() const;
     int64_t                    getStreamUseCase() const;
     int                        getTimestampBase() const;
-    int                        getMirrorMode(sp<IGraphicBufferProducer> surface) const;
+    int                        getMirrorMode(ParcelableSurfaceType surface) const;
     int                        getMirrorMode() const;
     bool                       useReadoutTimestamp() const;
     int                        getFormat() const;
@@ -100,11 +105,11 @@
     // getSurfaceSetID will be INVALID_SET_ID if error occurred
     OutputConfiguration(const android::Parcel& parcel);
 
-    OutputConfiguration(sp<IGraphicBufferProducer>& gbp, int rotation,
+    OutputConfiguration(ParcelableSurfaceType& surface, int rotation,
             const std::string& physicalCameraId,
             int surfaceSetID = INVALID_SET_ID, bool isShared = false);
 
-    OutputConfiguration(const std::vector<sp<IGraphicBufferProducer>>& gbps,
+    OutputConfiguration(const std::vector<ParcelableSurfaceType>& surfaces,
                         int rotation, const std::string& physicalCameraId,
                         int surfaceSetID = INVALID_SET_ID,
                         int surfaceType = SURFACE_TYPE_UNKNOWN, int width = 0,
@@ -121,7 +126,7 @@
                 mHeight == other.mHeight &&
                 mIsDeferred == other.mIsDeferred &&
                 mIsShared == other.mIsShared &&
-                gbpsEqual(other) &&
+                surfacesEqual(other) &&
                 mPhysicalCameraId == other.mPhysicalCameraId &&
                 mIsMultiResolution == other.mIsMultiResolution &&
                 sensorPixelModesUsedEqual(other) &&
@@ -201,18 +206,21 @@
         if (mUsage != other.mUsage) {
             return mUsage < other.mUsage;
         }
-        return gbpsLessThan(other);
+        return surfacesLessThan(other);
     }
 
     bool operator > (const OutputConfiguration& other) const {
         return (*this != other && !(*this < other));
     }
 
-    bool gbpsEqual(const OutputConfiguration& other) const;
+    bool surfacesEqual(const OutputConfiguration& other) const;
     bool sensorPixelModesUsedEqual(const OutputConfiguration& other) const;
     bool sensorPixelModesUsedLessThan(const OutputConfiguration& other) const;
-    bool gbpsLessThan(const OutputConfiguration& other) const;
-    void addGraphicProducer(sp<IGraphicBufferProducer> gbp) {mGbps.push_back(gbp);}
+    bool surfacesLessThan(const OutputConfiguration& other) const;
+    void addSurface(ParcelableSurfaceType surface) { mSurfaces.push_back(surface); }
+#if !WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+    void addGraphicProducer(sp<IGraphicBufferProducer> gbp) {addSurface(gbp);}
+#endif
     bool mirrorModesEqual(const OutputConfiguration& other) const;
     bool mirrorModesLessThan(const OutputConfiguration& other) const;
     const std::vector<int32_t>& getMirrorModes() const {return mMirrorModeForProducers;}
@@ -239,7 +247,7 @@
     }
 
 private:
-    std::vector<sp<IGraphicBufferProducer>> mGbps;
+    std::vector<ParcelableSurfaceType>  mSurfaces;
     int                        mRotation;
     int                        mSurfaceSetID;
     int                        mSurfaceType;
diff --git a/camera/ndk/Android.bp b/camera/ndk/Android.bp
index 508808f..fc1e547 100644
--- a/camera/ndk/Android.bp
+++ b/camera/ndk/Android.bp
@@ -79,6 +79,8 @@
     shared_libs: [
         "android.companion.virtual.virtualdevice_aidl-cpp",
         "android.companion.virtualdevice.flags-aconfig-cc",
+        "android.hardware.common-V2-cpp",
+        "android.hardware.common.fmq-V1-cpp",
         "camera_platform_flags_c_lib",
         "framework-permission-aidl-cpp",
         "libandroid_runtime",
@@ -86,6 +88,7 @@
         "libcamera_client",
         "libcamera_metadata",
         "libcutils",
+        "libfmq",
         "libgui",
         "liblog",
         "libmediandk",
diff --git a/camera/ndk/impl/ACameraDevice.cpp b/camera/ndk/impl/ACameraDevice.cpp
index aed740f..4d21467 100644
--- a/camera/ndk/impl/ACameraDevice.cpp
+++ b/camera/ndk/impl/ACameraDevice.cpp
@@ -37,6 +37,8 @@
 namespace android {
 namespace acam {
 
+using android::hardware::common::fmq::MQDescriptor;
+
 // Static member definitions
 const char* CameraDevice::kContextKey        = "Context";
 const char* CameraDevice::kDeviceKey         = "Device";
@@ -231,25 +233,27 @@
     SessionConfiguration sessionConfiguration(0 /*inputWidth*/, 0 /*inputHeight*/,
             -1 /*inputFormat*/, CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE);
     for (const auto& output : sessionOutputContainer->mOutputs) {
-        sp<IGraphicBufferProducer> iGBP(nullptr);
-        ret = getIGBPfromAnw(output.mWindow, iGBP);
+        sp<SurfaceType> surface(nullptr);
+        ret = getSurfacefromAnw(output.mWindow, surface);
         if (ret != ACAMERA_OK) {
             ALOGE("Camera device %s failed to extract graphic producer from native window",
                     getId());
             return ret;
         }
 
-        OutputConfiguration outConfig(iGBP, output.mRotation, output.mPhysicalCameraId,
+        ParcelableSurfaceType pSurface = flagtools::convertSurfaceTypeToParcelable(surface);
+        OutputConfiguration outConfig(pSurface, output.mRotation, output.mPhysicalCameraId,
                 OutputConfiguration::INVALID_SET_ID, true);
 
         for (auto& anw : output.mSharedWindows) {
-            ret = getIGBPfromAnw(anw, iGBP);
+            ret = getSurfacefromAnw(anw, surface);
             if (ret != ACAMERA_OK) {
                 ALOGE("Camera device %s failed to extract graphic producer from native window",
                         getId());
                 return ret;
             }
-            outConfig.addGraphicProducer(iGBP);
+            pSurface = flagtools::convertSurfaceTypeToParcelable(surface);
+            outConfig.addSurface(pSurface);
         }
 
         sessionConfiguration.addOutputConfiguration(outConfig);
@@ -295,25 +299,27 @@
         return ACAMERA_ERROR_INVALID_PARAMETER;
     }
 
-    sp<IGraphicBufferProducer> iGBP(nullptr);
-    ret = getIGBPfromAnw(output->mWindow, iGBP);
+    sp<SurfaceType> surface(nullptr);
+    ret = getSurfacefromAnw(output->mWindow, surface);
     if (ret != ACAMERA_OK) {
         ALOGE("Camera device %s failed to extract graphic producer from native window",
                 getId());
         return ret;
     }
 
-    OutputConfiguration outConfig(iGBP, output->mRotation, output->mPhysicalCameraId,
-            OutputConfiguration::INVALID_SET_ID, true);
+    ParcelableSurfaceType pSurface = flagtools::convertSurfaceTypeToParcelable(surface);
+    OutputConfiguration outConfig(pSurface, output->mRotation, output->mPhysicalCameraId,
+                                  OutputConfiguration::INVALID_SET_ID, true);
 
     for (auto& anw : output->mSharedWindows) {
-        ret = getIGBPfromAnw(anw, iGBP);
+        ret = getSurfacefromAnw(anw, surface);
         if (ret != ACAMERA_OK) {
             ALOGE("Camera device %s failed to extract graphic producer from native window",
                     getId());
             return ret;
         }
-        outConfig.addGraphicProducer(iGBP);
+        pSurface = flagtools::convertSurfaceTypeToParcelable(surface);
+        outConfig.addSurface(pSurface);
     }
 
     auto remoteRet = mRemote->updateOutputConfiguration(streamId, outConfig);
@@ -427,9 +433,9 @@
         for (const auto& kvPair : mConfiguredOutputs) {
             int streamId = kvPair.first;
             const OutputConfiguration& outConfig = kvPair.second.second;
-            const auto& gbps = outConfig.getGraphicBufferProducers();
-            for (int surfaceId = 0; surfaceId < (int) gbps.size(); surfaceId++) {
-                if (gbps[surfaceId] == surface->getIGraphicBufferProducer()) {
+            const auto& surfaces = outConfig.getSurfaces();
+            for (int surfaceId = 0; surfaceId < (int)surfaces.size(); surfaceId++) {
+                if (surfaces[surfaceId] == flagtools::surfaceToSurfaceType(surface)) {
                     found = true;
                     req->mStreamIdxList.push_back(streamId);
                     req->mSurfaceIdxList.push_back(surfaceId);
@@ -634,16 +640,13 @@
     return ACAMERA_OK;
 }
 
-camera_status_t
-CameraDevice::getIGBPfromAnw(
-        ANativeWindow* anw,
-        sp<IGraphicBufferProducer>& out) {
+camera_status_t CameraDevice::getSurfacefromAnw(ANativeWindow* anw, sp<SurfaceType>& out) {
     sp<Surface> surface;
     camera_status_t ret = getSurfaceFromANativeWindow(anw, surface);
     if (ret != ACAMERA_OK) {
         return ret;
     }
-    out = surface->getIGraphicBufferProducer();
+    out = flagtools::surfaceToSurfaceType(surface);
     return ACAMERA_OK;
 }
 
@@ -681,14 +684,16 @@
     std::set<std::pair<ANativeWindow*, OutputConfiguration>> outputSet;
     for (const auto& outConfig : outputs->mOutputs) {
         ANativeWindow* anw = outConfig.mWindow;
-        sp<IGraphicBufferProducer> iGBP(nullptr);
-        ret = getIGBPfromAnw(anw, iGBP);
+        sp<SurfaceType> surface(nullptr);
+        ret = getSurfacefromAnw(anw, surface);
         if (ret != ACAMERA_OK) {
             return ret;
         }
+        ParcelableSurfaceType pSurface = flagtools::convertSurfaceTypeToParcelable(surface);
         outputSet.insert(std::make_pair(
-                anw, OutputConfiguration(iGBP, outConfig.mRotation, outConfig.mPhysicalCameraId,
-                        OutputConfiguration::INVALID_SET_ID, outConfig.mIsShared)));
+                anw,
+                OutputConfiguration(pSurface, outConfig.mRotation, outConfig.mPhysicalCameraId,
+                                    OutputConfiguration::INVALID_SET_ID, outConfig.mIsShared)));
     }
     auto addSet = outputSet;
     std::vector<int> deleteList;
@@ -788,6 +793,27 @@
     mRemote = remote;
 }
 
+bool CameraDevice::setDeviceMetadataQueues() {
+    if (mRemote == nullptr) {
+        ALOGE("mRemote must not be null while trying to fetch metadata queues");
+        return false;
+    }
+    MQDescriptor<int8_t, SynchronizedReadWrite> resMqDescriptor;
+    binder::Status ret = mRemote->getCaptureResultMetadataQueue(&resMqDescriptor);
+    if (!ret.isOk()) {
+        ALOGE("Transaction error trying to get capture result metadata queue");
+        return false;
+    }
+    mCaptureResultMetadataQueue = std::make_unique<ResultMetadataQueue>(resMqDescriptor);
+    if (!mCaptureResultMetadataQueue->isValid()) {
+        ALOGE("Empty fmq from cameraserver");
+        mCaptureResultMetadataQueue = nullptr;
+        return false;
+    }
+
+    return true;
+}
+
 camera_status_t
 CameraDevice::checkCameraClosedOrErrorLocked() const {
     if (mRemote == nullptr) {
@@ -885,10 +911,14 @@
             return;
         }
 
-        const auto& gbps = outputPairIt->second.second.getGraphicBufferProducers();
-        for (const auto& outGbp : gbps) {
+        const auto& outSurfaces = outputPairIt->second.second.getSurfaces();
+        for (const auto& outSurface : outSurfaces) {
             for (const auto& surface : request->mSurfaceList) {
-                if (surface->getIGraphicBufferProducer() == outGbp) {
+                if ( outSurface == surface
+#if not WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+                                ->getIGraphicBufferProducer()
+#endif
+                                            ) {
                     ANativeWindow* anw = static_cast<ANativeWindow*>(surface.get());
                     ALOGV("Camera %s Lost output buffer for ANW %p frame %" PRId64,
                             getId(), anw, frameNumber);
@@ -1247,7 +1277,9 @@
                         String8 physicalId8 = toString8(physicalResultInfo[i].mPhysicalCameraId);
                         physicalCameraIds.push_back(physicalId8.c_str());
 
-                        CameraMetadata clone = physicalResultInfo[i].mPhysicalCameraMetadata;
+                        CameraMetadata clone =
+                                physicalResultInfo[i].
+                                        mCameraMetadataInfo.get<CameraMetadataInfo::metadata>();
                         clone.update(ANDROID_SYNC_FRAME_NUMBER,
                                 &physicalResult->mFrameNumber, /*data_count*/1);
                         sp<ACameraMetadata> metadata =
@@ -1777,7 +1809,7 @@
 
 binder::Status
 CameraDevice::ServiceCallback::onResultReceived(
-        const CameraMetadata& metadata,
+        const CameraMetadataInfo &resultMetadata,
         const CaptureResultExtras& resultExtras,
         const std::vector<PhysicalCaptureResultInfo>& physicalResultInfos) {
     binder::Status ret = binder::Status::ok();
@@ -1786,11 +1818,11 @@
     if (dev == nullptr) {
         return ret; // device has been closed
     }
+
     int sequenceId = resultExtras.requestId;
     int64_t frameNumber = resultExtras.frameNumber;
     int32_t burstId = resultExtras.burstId;
     bool    isPartialResult = (resultExtras.partialResultCount < dev->mPartialResultCount);
-
     if (!isPartialResult) {
         ALOGV("SeqId %d frame %" PRId64 " result arrive.", sequenceId, frameNumber);
     }
@@ -1808,7 +1840,13 @@
         return ret;
     }
 
-    CameraMetadata metadataCopy = metadata;
+    CameraMetadata metadataCopy;
+    camera_status_t status = readOneResultMetadata(resultMetadata,
+            dev->mCaptureResultMetadataQueue.get(), &metadataCopy);
+    if (status != ACAMERA_OK) {
+        ALOGE("%s: result metadata couldn't be converted", __FUNCTION__);
+        return ret;
+    }
     metadataCopy.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE, dev->mShadingMapSize, /*data_count*/2);
     metadataCopy.update(ANDROID_SYNC_FRAME_NUMBER, &frameNumber, /*data_count*/1);
 
@@ -1824,8 +1862,24 @@
         sp<CaptureRequest> request = cbh.mRequests[burstId];
         sp<ACameraMetadata> result(new ACameraMetadata(
                 metadataCopy.release(), ACameraMetadata::ACM_RESULT));
+
+        std::vector<PhysicalCaptureResultInfo> localPhysicalResult;
+        localPhysicalResult.resize(physicalResultInfos.size());
+        for (size_t i = 0; i < physicalResultInfos.size(); i++) {
+            CameraMetadata physicalMetadata;
+            localPhysicalResult[i].mPhysicalCameraId = physicalResultInfos[i].mPhysicalCameraId;
+            status = readOneResultMetadata(physicalResultInfos[i].mCameraMetadataInfo,
+                    dev->mCaptureResultMetadataQueue.get(),
+                    &physicalMetadata);
+            if (status != ACAMERA_OK) {
+                ALOGE("%s: physical camera result metadata couldn't be converted", __FUNCTION__);
+                return ret;
+            }
+            localPhysicalResult[i].mCameraMetadataInfo.set<CameraMetadataInfo::metadata>(
+                    std::move(physicalMetadata));
+        }
         sp<ACameraPhysicalCaptureResultInfo> physicalResult(
-                new ACameraPhysicalCaptureResultInfo(physicalResultInfos, frameNumber));
+                new ACameraPhysicalCaptureResultInfo(localPhysicalResult, frameNumber));
 
         sp<AMessage> msg = new AMessage(
                 cbh.mIsLogicalCameraCallback ? kWhatLogicalCaptureResult : kWhatCaptureResult,
@@ -1946,5 +2000,28 @@
     }
 }
 
+camera_status_t CameraDevice::ServiceCallback::readOneResultMetadata(
+        const CameraMetadataInfo& resultInfo, ResultMetadataQueue* metadataQueue,
+        CameraMetadata* metadata) {
+    if (metadataQueue == nullptr || metadata == nullptr) {
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+    if (resultInfo.getTag() == CameraMetadataInfo::fmqSize) {
+        int64_t metadataSize = resultInfo.get<CameraMetadataInfo::fmqSize>();
+        auto metadataVec = std::make_unique<int8_t []>(metadataSize);
+        bool read = metadataQueue->read(reinterpret_cast<int8_t*>(metadataVec.get()), metadataSize);
+        if (!read) {
+            ALOGE("%s capture request settings could't be read from fmq", __FUNCTION__);
+            return ACAMERA_ERROR_UNKNOWN;
+        }
+        *metadata = CameraMetadata(reinterpret_cast<camera_metadata_t *>(metadataVec.release()));
+    } else {
+        *metadata =
+                resultInfo.get<CameraMetadataInfo::metadata>();
+    }
+
+    return ACAMERA_OK;
+}
+
 } // namespace acam
 } // namespace android
diff --git a/camera/ndk/impl/ACameraDevice.h b/camera/ndk/impl/ACameraDevice.h
index d3aed4b..ea7d9b6 100644
--- a/camera/ndk/impl/ACameraDevice.h
+++ b/camera/ndk/impl/ACameraDevice.h
@@ -37,15 +37,20 @@
 #include <camera/camera2/OutputConfiguration.h>
 #include <camera/camera2/SessionConfiguration.h>
 #include <camera/camera2/CaptureRequest.h>
+#include <fmq/AidlMessageQueueCpp.h>
 
-#include <camera/NdkCameraManager.h>
 #include <camera/NdkCameraCaptureSession.h>
+#include <camera/NdkCameraManager.h>
+#include <gui/Flags.h>  // remove with WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
 
 #include "ACameraMetadata.h"
 
 namespace android {
 namespace acam {
 
+using android::hardware::common::fmq::SynchronizedReadWrite;
+using ResultMetadataQueue = AidlMessageQueueCpp<int8_t, SynchronizedReadWrite>;
+
 // Wrap ACameraCaptureFailure so it can be ref-counted
 struct CameraCaptureFailure : public RefBase, public ACameraCaptureFailure {};
 
@@ -61,6 +66,8 @@
 
 class CameraDevice final : public RefBase {
   public:
+
+    using CameraMetadataInfo = android::hardware::camera2::CameraMetadataInfo;
     CameraDevice(const char* id, ACameraDevice_StateCallbacks* cb,
                   sp<ACameraMetadata> chars,
                   ACameraDevice* wrapper, bool sharedMode);
@@ -91,7 +98,7 @@
         binder::Status onDeviceIdle() override;
         binder::Status onCaptureStarted(const CaptureResultExtras& resultExtras,
                               int64_t timestamp) override;
-        binder::Status onResultReceived(const CameraMetadata& metadata,
+        binder::Status onResultReceived(const CameraMetadataInfo &resultInfo,
                               const CaptureResultExtras& resultExtras,
                               const std::vector<PhysicalCaptureResultInfo>& physicalResultInfos) override;
         binder::Status onPrepared(int streamId) override;
@@ -100,6 +107,9 @@
                 int32_t stoppedSequenceId) override;
         binder::Status onClientSharedAccessPriorityChanged(bool isPrimaryClient) override;
       private:
+        camera_status_t readOneResultMetadata(
+                const CameraMetadataInfo& resultInfo, ResultMetadataQueue* metadataQueue,
+                CameraMetadata* metadata);
         const wp<CameraDevice> mDevice;
     };
     inline sp<hardware::camera2::ICameraDeviceCallbacks> getServiceCallback() {
@@ -108,6 +118,7 @@
 
     // Camera device is only functional after remote being set
     void setRemoteDevice(sp<hardware::camera2::ICameraDeviceUser> remote);
+    bool setDeviceMetadataQueues();
 
     inline ACameraDevice* getWrapper() const { return mWrapper; };
 
@@ -178,8 +189,10 @@
     // Input message will be posted and cleared after this returns
     void postSessionMsgAndCleanup(sp<AMessage>& msg);
 
-    static camera_status_t getIGBPfromAnw(
-            ANativeWindow* anw, sp<IGraphicBufferProducer>& out);
+    // Only used when WB_LIBCAMERASERVICE_WITH_DEPENDENCIES is active
+    static ParcelableSurfaceType convertSurfaceTypeToView(sp<SurfaceType> surface);
+
+    static camera_status_t getSurfacefromAnw(ANativeWindow* anw, sp<SurfaceType>& out);
 
     static camera_status_t getSurfaceFromANativeWindow(
             ANativeWindow* anw, sp<Surface>& out);
@@ -399,6 +412,9 @@
     int32_t mPartialResultCount;  // const after constructor
     std::vector<std::string> mPhysicalIds; // const after constructor
 
+    // Metadata queue to write the result metadata to.
+    std::unique_ptr<ResultMetadataQueue> mCaptureResultMetadataQueue;
+
 };
 
 } // namespace acam;
@@ -452,6 +468,10 @@
         mDevice->setRemoteDevice(remote);
     }
 
+    inline bool setDeviceMetadataQueues() {
+        return mDevice->setDeviceMetadataQueues();
+    }
+
     inline void setPrimaryClient(bool isPrimary) {
         mDevice->setPrimaryClient(isPrimary);
     }
diff --git a/camera/ndk/impl/ACameraManager.cpp b/camera/ndk/impl/ACameraManager.cpp
index f9c1a8a..acd7917 100644
--- a/camera/ndk/impl/ACameraManager.cpp
+++ b/camera/ndk/impl/ACameraManager.cpp
@@ -971,6 +971,7 @@
         return ACAMERA_ERROR_CAMERA_DISCONNECTED;
     }
     device->setRemoteDevice(deviceRemote);
+    device->setDeviceMetadataQueues();
     if (flags::camera_multi_client() && sharedMode) {
         binder::Status remoteRet = deviceRemote->isPrimaryClient(primaryClient);
         if (!remoteRet.isOk()) {
diff --git a/camera/tests/CameraBinderTests.cpp b/camera/tests/CameraBinderTests.cpp
index 4384df9..5f7f2f6 100644
--- a/camera/tests/CameraBinderTests.cpp
+++ b/camera/tests/CameraBinderTests.cpp
@@ -63,6 +63,7 @@
 using namespace android;
 using ::android::hardware::ICameraService;
 using ::android::hardware::camera2::ICameraDeviceUser;
+using ::android::hardware::camera2::CameraMetadataInfo;
 
 #define ASSERT_NOT_NULL(x) \
     ASSERT_TRUE((x) != nullptr)
@@ -249,10 +250,10 @@
         return binder::Status::ok();
     }
 
-    virtual binder::Status onResultReceived(const CameraMetadata& metadata,
+    virtual binder::Status onResultReceived(const CameraMetadataInfo& resultInfo,
             const CaptureResultExtras& resultExtras,
             const std::vector<PhysicalCaptureResultInfo>& physicalResultInfos) {
-        (void) metadata;
+        (void) resultInfo;
         (void) resultExtras;
         (void) physicalResultInfos;
         Mutex::Autolock l(mLock);
diff --git a/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp b/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp
index b6fa817..22e04fc 100644
--- a/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp
@@ -19,6 +19,7 @@
 #include <fuzzer/FuzzedDataProvider.h>
 #include <gui/IGraphicBufferProducer.h>
 #include <gui/Surface.h>
+#include <gui/Flags.h>  // remove with WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
 #include <gui/SurfaceComposerClient.h>
 #include "camera2common.h"
 
@@ -37,11 +38,11 @@
   private:
     void invokeC2OutputConfigFuzzer();
     unique_ptr<OutputConfiguration> getC2OutputConfig();
-    sp<IGraphicBufferProducer> createIGraphicBufferProducer();
+    sp<SurfaceType> createSurface();
     FuzzedDataProvider* mFDP = nullptr;
 };
 
-sp<IGraphicBufferProducer> C2OutputConfigurationFuzzer::createIGraphicBufferProducer() {
+sp<SurfaceType> C2OutputConfigurationFuzzer::createSurface() {
     sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
     sp<SurfaceControl> surfaceControl = composerClient->createSurface(
             static_cast<String8>(mFDP->ConsumeRandomLengthString(kMaxBytes).c_str()) /* name */,
@@ -51,10 +52,9 @@
             mFDP->ConsumeIntegral<int32_t>() /* flags */);
     if (surfaceControl) {
         sp<Surface> surface = surfaceControl->getSurface();
-        return surface->getIGraphicBufferProducer();
+        return flagtools::surfaceToSurfaceType(surface);
     } else {
-        sp<IGraphicBufferProducer> gbp;
-        return gbp;
+        return nullptr;
     }
 }
 
@@ -69,9 +69,9 @@
                         string physicalCameraId = mFDP->ConsumeRandomLengthString(kMaxBytes);
                         int32_t surfaceSetID = mFDP->ConsumeIntegral<int32_t>();
                         bool isShared = mFDP->ConsumeBool();
-                        sp<IGraphicBufferProducer> iGBP = createIGraphicBufferProducer();
+                        sp<SurfaceType> surface = createSurface();
                         outputConfiguration = make_unique<OutputConfiguration>(
-                                iGBP, rotation, physicalCameraId, surfaceSetID, isShared);
+                                surface, rotation, physicalCameraId, surfaceSetID, isShared);
                     },
 
                     [&]() {
@@ -79,14 +79,15 @@
                         string physicalCameraId = mFDP->ConsumeRandomLengthString(kMaxBytes);
                         int32_t surfaceSetID = mFDP->ConsumeIntegral<int32_t>();
                         bool isShared = mFDP->ConsumeBool();
-                        size_t iGBPSize = mFDP->ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
-                        vector<sp<IGraphicBufferProducer>> iGBPs;
-                        for (size_t idx = 0; idx < iGBPSize; ++idx) {
-                            sp<IGraphicBufferProducer> iGBP = createIGraphicBufferProducer();
-                            iGBPs.push_back(iGBP);
+                        size_t surfaceSize =
+                                mFDP->ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
+                        vector<sp<SurfaceType>> surfaces;
+                        for (size_t idx = 0; idx < surfaceSize; ++idx) {
+                            sp<SurfaceType> surface = createSurface();
+                            surfaces.push_back(surface);
                         }
                         outputConfiguration = make_unique<OutputConfiguration>(
-                                iGBPs, rotation, physicalCameraId, surfaceSetID, isShared);
+                                surfaces, rotation, physicalCameraId, surfaceSetID, isShared);
                     },
             });
     selectOutputConfigurationConstructor();
@@ -107,22 +108,22 @@
                 [&]() { outputConfiguration->isDeferred(); },
                 [&]() { outputConfiguration->isShared(); },
                 [&]() { outputConfiguration->getPhysicalCameraId(); },
-                [&]() { outputConfiguration->gbpsEqual(*outputConfiguration2); },
+                [&]() { outputConfiguration->surfacesEqual(*outputConfiguration2); },
                 [&]() { outputConfiguration->sensorPixelModesUsedEqual(*outputConfiguration2); },
-                [&]() { outputConfiguration->gbpsLessThan(*outputConfiguration2); },
+                [&]() { outputConfiguration->surfacesLessThan(*outputConfiguration2); },
                 [&]() { outputConfiguration->sensorPixelModesUsedLessThan(*outputConfiguration2); },
-                [&]() { outputConfiguration->getGraphicBufferProducers(); },
+                [&]() { outputConfiguration->getSurfaces(); },
                 [&]() {
-                    sp<IGraphicBufferProducer> gbp = createIGraphicBufferProducer();
-                    outputConfiguration->addGraphicProducer(gbp);
+                    sp<SurfaceType> surface = createSurface();
+                    outputConfiguration->addSurface(surface);
                 },
                 [&]() { outputConfiguration->isMultiResolution(); },
                 [&]() { outputConfiguration->getColorSpace(); },
                 [&]() { outputConfiguration->getStreamUseCase(); },
                 [&]() { outputConfiguration->getTimestampBase(); },
                 [&]() {
-                    sp<IGraphicBufferProducer> gbp = createIGraphicBufferProducer();
-                    outputConfiguration->getMirrorMode(gbp);
+                    sp<SurfaceType> surface = createSurface();
+                    outputConfiguration->getMirrorMode(surface);
                 },
                 [&]() { outputConfiguration->useReadoutTimestamp(); },
         });
diff --git a/media/aconfig/codec_fwk.aconfig b/media/aconfig/codec_fwk.aconfig
index 96fb3e3..af82982 100644
--- a/media/aconfig/codec_fwk.aconfig
+++ b/media/aconfig/codec_fwk.aconfig
@@ -29,6 +29,13 @@
 }
 
 flag {
+  name: "codec_availability_support"
+  namespace: "codec_fwk"
+  description: "Feature flag for codec availability HAL API implementation"
+  bug: "363282971"
+}
+
+flag {
   name: "codec_buffer_state_cleanup"
   namespace: "codec_fwk"
   description: "Bugfix flag for more buffer state cleanup in MediaCodec"
diff --git a/media/aconfig/swcodec_flags.aconfig b/media/aconfig/swcodec_flags.aconfig
index a435a43..9dd1fdd 100644
--- a/media/aconfig/swcodec_flags.aconfig
+++ b/media/aconfig/swcodec_flags.aconfig
@@ -12,3 +12,12 @@
   description: "Feature flag for APV Software C2 codec"
   bug: "376770121"
 }
+
+flag {
+  name: "mpeg2_keep_threads_active"
+  is_exported: true
+  is_fixed_read_only: true
+  namespace: "codec_fwk"
+  description: "Enable keep_threads_active in mpeg2 decoder"
+  bug: "343793479"
+}
diff --git a/media/codec2/components/mpeg2/Android.bp b/media/codec2/components/mpeg2/Android.bp
index e644ee3..ed711ee 100644
--- a/media/codec2/components/mpeg2/Android.bp
+++ b/media/codec2/components/mpeg2/Android.bp
@@ -14,11 +14,10 @@
         "libcodec2_soft_sanitize_signed-defaults",
     ],
 
-    cflags: [
-        "-DKEEP_THREADS_ACTIVE=0",
-    ],
-
     srcs: ["C2SoftMpeg2Dec.cpp"],
 
-    static_libs: ["libmpeg2dec"],
+    static_libs: [
+        "libmpeg2dec",
+        "android.media.swcodec.flags-aconfig-cc",
+    ],
 }
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
index 52920c2..64e4bf0 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
@@ -16,11 +16,10 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "C2SoftMpeg2Dec"
-#ifndef KEEP_THREADS_ACTIVE
-#define KEEP_THREADS_ACTIVE 0
-#endif
 #include <log/log.h>
 
+#include <android_media_swcodec_flags.h>
+
 #include <media/stagefright/foundation/MediaDefs.h>
 
 #include <C2Debug.h>
@@ -320,14 +319,7 @@
         c2_node_id_t id,
         const std::shared_ptr<IntfImpl> &intfImpl)
     : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
-        mIntf(intfImpl),
-        mDecHandle(nullptr),
-        mMemRecords(nullptr),
-        mOutBufferDrain(nullptr),
-        mIvColorformat(IV_YUV_420P),
-        mWidth(320),
-        mHeight(240),
-        mOutIndex(0u) {
+        mIntf(intfImpl) {
     // If input dump is enabled, then open create an empty file
     GENERATE_FILE_NAMES();
     CREATE_DUMP_FILE(mInFile);
@@ -436,7 +428,7 @@
 
     s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_size = sizeof(ivdext_fill_mem_rec_ip_t);
     s_fill_mem_ip.u4_share_disp_buf = 0;
-    s_fill_mem_ip.u4_keep_threads_active = KEEP_THREADS_ACTIVE;
+    s_fill_mem_ip.u4_keep_threads_active = mKeepThreadsActive;
     s_fill_mem_ip.e_output_format = mIvColorformat;
     s_fill_mem_ip.u4_deinterlace = 1;
     s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.e_cmd = IV_CMD_FILL_NUM_MEM_REC;
@@ -478,7 +470,7 @@
     s_init_ip.s_ivd_init_ip_t.u4_frm_max_ht = mHeight;
     s_init_ip.u4_share_disp_buf = 0;
     s_init_ip.u4_deinterlace = 1;
-    s_init_ip.u4_keep_threads_active = KEEP_THREADS_ACTIVE;
+    s_init_ip.u4_keep_threads_active = mKeepThreadsActive;
     s_init_ip.s_ivd_init_ip_t.u4_num_mem_rec = mNumMemRecords;
     s_init_ip.s_ivd_init_ip_t.e_output_format = mIvColorformat;
     s_init_op.s_ivd_init_op_t.u4_size = sizeof(ivdext_init_op_t);
@@ -571,6 +563,7 @@
     status_t ret = getNumMemRecords();
     if (OK != ret) return ret;
 
+    mKeepThreadsActive = android::media::swcodec::flags::mpeg2_keep_threads_active();
     ret = fillMemRecords();
     if (OK != ret) return ret;
 
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h
index 3965bcc..6d09694 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h
@@ -144,21 +144,22 @@
     };
 
     std::shared_ptr<IntfImpl> mIntf;
-    iv_obj_t *mDecHandle;
-    iv_mem_rec_t *mMemRecords;
-    size_t mNumMemRecords;
+    iv_obj_t *mDecHandle = nullptr;
+    iv_mem_rec_t *mMemRecords = nullptr;
+    size_t mNumMemRecords = 0;
     std::shared_ptr<C2GraphicBlock> mOutBlock;
-    uint8_t *mOutBufferDrain;
+    uint8_t *mOutBufferDrain = nullptr;
 
-    size_t mNumCores;
-    IV_COLOR_FORMAT_T mIvColorformat;
+    size_t mNumCores = 1;
+    IV_COLOR_FORMAT_T mIvColorformat = IV_YUV_420P;
 
-    uint32_t mWidth;
-    uint32_t mHeight;
-    uint32_t mStride;
-    bool mSignalledOutputEos;
-    bool mSignalledError;
-    std::atomic_uint64_t mOutIndex;
+    uint32_t mWidth = 320;
+    uint32_t mHeight = 240;
+    uint32_t mStride = 0;
+    bool mSignalledOutputEos = false;
+    bool mSignalledError = false;
+    bool mKeepThreadsActive = false;
+    std::atomic_uint64_t mOutIndex = 0;
 
     // Color aspects. These are ISO values and are meant to detect changes in aspects to avoid
     // converting them to C2 values for each frame
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index 6c41198..c3e7583 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -895,6 +895,180 @@
 };
 typedef uint32_t aaudio_channel_mask_t;
 
+// The values are copied from JAVA SDK device types defined in android/media/AudioDeviceInfo.java
+// When a new value is added, it should be added here and handled by the conversion at
+// AAudioConvert_aaudioToAndroidDeviceType.
+typedef enum AAudio_DeviceType : int32_t {
+    /**
+     * A device type describing the attached earphone speaker.
+     */
+    AAUDIO_DEVICE_BUILTIN_EARPIECE = 1,
+
+    /**
+     * A device type describing the speaker system (i.e. a mono speaker or stereo speakers) built
+     * in a device.
+     */
+    AAUDIO_DEVICE_BUILTIN_SPEAKER = 2,
+
+    /**
+     * A device type describing a headset, which is the combination of headphones and a microphone.
+     */
+    AAUDIO_DEVICE_WIRED_HEADSET = 3,
+
+    /**
+     * A device type describing a pair of wired headphones.
+     */
+    AAUDIO_DEVICE_WIRED_HEADPHONES = 4,
+
+    /**
+     * A device type describing an analog line-level connection.
+     */
+    AAUDIO_DEVICE_LINE_ANALOG = 5,
+
+    /**
+     * A device type describing a digital line connection (e.g. SPDIF).
+     */
+    AAUDIO_DEVICE_LINE_DIGITAL = 6,
+
+    /**
+     * A device type describing a Bluetooth device typically used for telephony.
+     */
+    AAUDIO_DEVICE_BLUETOOTH_SCO = 7,
+
+    /**
+     * A device type describing a Bluetooth device supporting the A2DP profile.
+     */
+    AAUDIO_DEVICE_BLUETOOTH_A2DP = 8,
+
+    /**
+     * A device type describing an HDMI connection.
+     */
+    AAUDIO_DEVICE_HDMI = 9,
+
+    /**
+     * A device type describing the Audio Return Channel of an HDMI connection.
+     */
+    AAUDIO_DEVICE_HDMI_ARC = 10,
+
+    /**
+     * A device type describing a USB audio device.
+     */
+    AAUDIO_DEVICE_USB_DEVICE = 11,
+
+    /**
+     * A device type describing a USB audio device in accessory mode.
+     */
+    AAUDIO_DEVICE_USB_ACCESSORY = 12,
+
+    /**
+     * A device type describing the audio device associated with a dock.
+     * Starting at API 34, this device type only represents digital docks, while docks with an
+     * analog connection are represented with {@link #AAUDIO_DEVICE_DOCK_ANALOG}.
+     */
+    AAUDIO_DEVICE_DOCK = 13,
+
+    /**
+     * A device type associated with the transmission of audio signals over FM.
+     */
+    AAUDIO_DEVICE_FM = 14,
+
+    /**
+     * A device type describing the microphone(s) built in a device.
+     */
+    AAUDIO_DEVICE_BUILTIN_MIC = 15,
+
+    /**
+     * A device type for accessing the audio content transmitted over FM.
+     */
+    AAUDIO_DEVICE_FM_TUNER = 16,
+
+    /**
+     * A device type for accessing the audio content transmitted over the TV tuner system.
+     */
+    AAUDIO_DEVICE_TV_TUNER = 17,
+
+    /**
+     * A device type describing the transmission of audio signals over the telephony network.
+     */
+    AAUDIO_DEVICE_TELEPHONY = 18,
+
+    /**
+     * A device type describing the auxiliary line-level connectors.
+     */
+    AAUDIO_DEVICE_AUX_LINE = 19,
+
+    /**
+     * A device type connected over IP.
+     */
+    AAUDIO_DEVICE_IP = 20,
+
+    /**
+     * A type-agnostic device used for communication with external audio systems.
+     */
+    AAUDIO_DEVICE_BUS = 21,
+
+    /**
+     * A device type describing a USB audio headset.
+     */
+    AAUDIO_DEVICE_USB_HEADSET = 22,
+
+    /**
+     * A device type describing a Hearing Aid.
+     */
+    AAUDIO_DEVICE_HEARING_AID = 23,
+
+    /**
+     * A device type describing the speaker system (i.e. a mono speaker or stereo speakers) built
+     * in a device, that is specifically tuned for outputting sounds like notifications and alarms
+     * (i.e. sounds the user couldn't necessarily anticipate).
+     * <p>Note that this physical audio device may be the same as {@link #AAUDIO_DEVICE_BUILTIN_SPEAKER}
+     * but is driven differently to safely accommodate the different use case.</p>
+     */
+    AAUDIO_DEVICE_BUILTIN_SPEAKER_SAFE = 24,
+
+    /**
+     * A device type for rerouting audio within the Android framework between mixes and
+     * system applications.
+     */
+    AAUDIO_DEVICE_REMOTE_SUBMIX = 25,
+    /**
+     * A device type describing a Bluetooth Low Energy (BLE) audio headset or headphones.
+     * Headphones are grouped with headsets when the device is a sink:
+     * the features of headsets and headphones with regard to playback are the same.
+     */
+    AAUDIO_DEVICE_BLE_HEADSET = 26,
+
+    /**
+     * A device type describing a Bluetooth Low Energy (BLE) audio speaker.
+     */
+    AAUDIO_DEVICE_BLE_SPEAKER = 27,
+
+    /**
+     * A device type describing an Echo Canceller loopback Reference.
+     * This device is only used when capturing with MediaRecorder.AudioSource.ECHO_REFERENCE,
+     * which requires privileged permission
+     * {@link android.Manifest.permission#CAPTURE_AUDIO_OUTPUT}.
+     *
+     * Note that this is not exposed as it is a system API that requires privileged permission.
+     */
+    // AAUDIO_DEVICE_ECHO_REFERENCE = 28,
+
+    /**
+     * A device type describing the Enhanced Audio Return Channel of an HDMI connection.
+     */
+    AAUDIO_DEVICE_HDMI_EARC = 29,
+
+    /**
+     * A device type describing a Bluetooth Low Energy (BLE) broadcast group.
+     */
+    AAUDIO_DEVICE_BLE_BROADCAST = 30,
+
+    /**
+     * A device type describing the audio device associated with a dock using an analog connection.
+     */
+    AAUDIO_DEVICE_DOCK_ANALOG = 31
+} AAudio_DeviceType;
+
 typedef struct AAudioStreamStruct         AAudioStream;
 typedef struct AAudioStreamBuilderStruct  AAudioStreamBuilder;
 
@@ -1998,18 +2172,17 @@
  * Available since API level 26.
  *
  * @param stream reference provided by AAudioStreamBuilder_openStream()
- * @return actual device id. If there are multiple device ids used, the first device picked by
- *         the audio policy engine will be returned.
+ * @return actual device id. If there are multiple device ids used,
+ *         this will return the first device id from AAudioStream_getDeviceIds().
  */
 AAUDIO_API int32_t AAudioStream_getDeviceId(AAudioStream* _Nonnull stream) __INTRODUCED_IN(26);
 
 /**
- * Available since API level 36.
- *
  * Call this function after AAudioStreamBuilder_openStream().
- * This function will crash if stream is null.
  * An array of size 16 should generally be large enough to fit all device identifiers.
  *
+ * Available since API level 36.
+ *
  * @param stream reference provided by AAudioStreamBuilder_openStream().
  * @param ids reference to an array of ids.
  * @params numIds size allocated to the array of ids.
@@ -2022,7 +2195,7 @@
  *         Otherwise, if ids is null, return {@link #AAUDIO_ERROR_ILLEGAL_ARGUMENT}.
  */
 AAUDIO_API aaudio_result_t AAudioStream_getDeviceIds(AAudioStream* _Nonnull stream,
-        int32_t* _Nonnull ids, int32_t* _Nonnull numIds) __INTRODUCED_IN(36);
+        int32_t* _Nullable ids, int32_t* _Nullable numIds) __INTRODUCED_IN(36);
 
 /**
  * Available since API level 26.
@@ -2343,6 +2516,61 @@
 AAUDIO_API aaudio_result_t AAudioStream_setOffloadEndOfStream(AAudioStream* _Nonnull stream)
         __INTRODUCED_IN(36);
 
+/************************************************************************************
+ * Helper functions for AAudio MMAP.
+ * AAudio MMAP data path uses a memory region that is shared between the hardware and
+ * the audio software. The shared memory is referenced using a file descriptor that is
+ * generated by the ALSA driver. Apps can read/write directly from/to the shared
+ * memory, which helps improve the audio latency.
+ ************************************************************************************/
+
+/**
+ * When the audio is played/recorded via AAudio MMAP data path, the apps can write to/read from
+ * a shared memory that will also be accessed directly by hardware. That reduces the audio latency.
+ * The following values are used to describe how AAudio MMAP is supported.
+ */
+enum {
+    /**
+     * AAudio MMAP is disabled and never used.
+     */
+    AAUDIO_POLICY_NEVER = 1,
+
+    /**
+     * AAudio MMAP support depends on the device's availability. It will be used
+     * when possible, or will fall back to the normal path, where the audio data
+     * will be delivered via the audio framework data pipeline.
+     */
+    AAUDIO_POLICY_AUTO,
+
+    /**
+     * AAudio MMAP must be used or fail.
+     */
+    AAUDIO_POLICY_ALWAYS
+};
+typedef int32_t aaudio_policy_t;
+
+/**
+ * Query how aaudio mmap is supported for the given device type.
+ *
+ * @param device device type
+ * @param direction {@link AAUDIO_DIRECTION_OUTPUT} or {@link AAUDIO_DIRECTION_INPUT}
+ * @return the mmap policy or {@link #AAUDIO_ERROR_ILLEGAL_ARGUMENT} if the device or direction
+ *         is invalid or {@link #AAUDIO_ERROR_INTERNAL} if the audio HAL returns error.
+ */
+AAUDIO_API aaudio_policy_t AAudio_getPlatformMMapPolicy(
+        AAudio_DeviceType device, aaudio_direction_t direction) __INTRODUCED_IN(36);
+
+/**
+ * Query how aaudio exclusive mmap is supported for the given device type.
+ *
+ * @param device device type
+ * @param direction {@link AAUDIO_DIRECTION_OUTPUT} or {@link AAUDIO_DIRECTION_INPUT}
+ * @return the mmap exclusive policy or {@link #AAUDIO_ERROR_ILLEGAL_ARGUMENT} if the device
+ *         or direction is invalid or {@link #AAUDIO_ERROR_INTERNAL} if the audio HAL returns error.
+ */
+AAUDIO_API aaudio_policy_t AAudio_getPlatformMMapExclusivePolicy(
+        AAudio_DeviceType device, aaudio_direction_t direction) __INTRODUCED_IN(36);
+
 #ifdef __cplusplus
 }
 #endif
diff --git a/media/libaaudio/include/aaudio/AAudioTesting.h b/media/libaaudio/include/aaudio/AAudioTesting.h
index 16d6c33..8758e07 100644
--- a/media/libaaudio/include/aaudio/AAudioTesting.h
+++ b/media/libaaudio/include/aaudio/AAudioTesting.h
@@ -32,225 +32,6 @@
  ************************************************************************************/
 
 /**
- * When the audio is played/recorded via AAudio MMAP data path, the apps can write to/read from
- * a shared memory that will also be accessed directly by hardware. That reduces the audio latency.
- * The following values are used to describe how AAudio MMAP is supported.
- */
-enum {
-    /**
-     * AAudio MMAP is disabled and never used.
-     */
-    AAUDIO_POLICY_NEVER = 1,
-
-    /**
-     * AAudio MMAP support depends on device's availability. It will be used
-     * when it is possible or fallback to the normal path, where the audio data
-     * will be delivered via audio framework data pipeline.
-     */
-    AAUDIO_POLICY_AUTO,
-
-    /**
-     * AAudio MMAP must be used or fail.
-     */
-    AAUDIO_POLICY_ALWAYS
-};
-typedef int32_t aaudio_policy_t;
-
-// The values are copied from JAVA SDK device types defined in android/media/AudioDeviceInfo.java
-// When a new value is added, it should be added here and handled by the conversion at
-// AAudioConvert_aaudioToAndroidDeviceType.
-typedef enum AAudio_DeviceType : int32_t {
-    /**
-     * A device type describing the attached earphone speaker.
-     */
-    AAUDIO_DEVICE_BUILTIN_EARPIECE = 1,
-
-    /**
-     * A device type describing the speaker system (i.e. a mono speaker or stereo speakers) built
-     * in a device.
-     */
-    AAUDIO_DEVICE_BUILTIN_SPEAKER = 2,
-
-    /**
-     * A device type describing a headset, which is the combination of a headphones and microphone.
-     */
-    AAUDIO_DEVICE_WIRED_HEADSET = 3,
-
-    /**
-     * A device type describing a pair of wired headphones.
-     */
-    AAUDIO_DEVICE_WIRED_HEADPHONES = 4,
-
-    /**
-     * A device type describing an analog line-level connection.
-     */
-    AAUDIO_DEVICE_LINE_ANALOG = 5,
-
-    /**
-     * A device type describing a digital line connection (e.g. SPDIF).
-     */
-    AAUDIO_DEVICE_LINE_DIGITAL = 6,
-
-    /**
-     * A device type describing a Bluetooth device typically used for telephony.
-     */
-    AAUDIO_DEVICE_BLUETOOTH_SCO = 7,
-
-    /**
-     * A device type describing a Bluetooth device supporting the A2DP profile.
-     */
-    AAUDIO_DEVICE_BLUETOOTH_A2DP = 8,
-
-    /**
-     * A device type describing an HDMI connection .
-     */
-    AAUDIO_DEVICE_HDMI = 9,
-
-    /**
-     * A device type describing the Audio Return Channel of an HDMI connection.
-     */
-    AAUDIO_DEVICE_HDMI_ARC = 10,
-
-    /**
-     * A device type describing a USB audio device.
-     */
-    AAUDIO_DEVICE_USB_DEVICE = 11,
-
-    /**
-     * A device type describing a USB audio device in accessory mode.
-     */
-    AAUDIO_DEVICE_USB_ACCESSORY = 12,
-
-    /**
-     * A device type describing the audio device associated with a dock.
-     * Starting at API 34, this device type only represents digital docks, while docks with an
-     * analog connection are represented with {@link #AAUDIO_DEVICE_DOCK_ANALOG}.
-     */
-    AAUDIO_DEVICE_DOCK = 13,
-
-    /**
-     * A device type associated with the transmission of audio signals over FM.
-     */
-    AAUDIO_DEVICE_FM = 14,
-
-    /**
-     * A device type describing the microphone(s) built in a device.
-     */
-    AAUDIO_DEVICE_BUILTIN_MIC = 15,
-
-    /**
-     * A device type for accessing the audio content transmitted over FM.
-     */
-    AAUDIO_DEVICE_FM_TUNER = 16,
-
-    /**
-     * A device type for accessing the audio content transmitted over the TV tuner system.
-     */
-    AAUDIO_DEVICE_TV_TUNER = 17,
-
-    /**
-     * A device type describing the transmission of audio signals over the telephony network.
-     */
-    AAUDIO_DEVICE_TELEPHONY = 18,
-
-    /**
-     * A device type describing the auxiliary line-level connectors.
-     */
-    AAUDIO_DEVICE_AUX_LINE = 19,
-
-    /**
-     * A device type connected over IP.
-     */
-    AAUDIO_DEVICE_IP = 20,
-
-    /**
-     * A type-agnostic device used for communication with external audio systems.
-     */
-    AAUDIO_DEVICE_BUS = 21,
-
-    /**
-     * A device type describing a USB audio headset.
-     */
-    AAUDIO_DEVICE_USB_HEADSET = 22,
-
-    /**
-     * A device type describing a Hearing Aid.
-     */
-    AAUDIO_DEVICE_HEARING_AID = 23,
-
-    /**
-     * A device type describing the speaker system (i.e. a mono speaker or stereo speakers) built
-     * in a device, that is specifically tuned for outputting sounds like notifications and alarms
-     * (i.e. sounds the user couldn't necessarily anticipate).
-     * <p>Note that this physical audio device may be the same as {@link #TYPE_BUILTIN_SPEAKER}
-     * but is driven differently to safely accommodate the different use case.</p>
-     */
-    AAUDIO_DEVICE_BUILTIN_SPEAKER_SAFE = 24,
-
-    /**
-     * A device type for rerouting audio within the Android framework between mixes and
-     * system applications.
-     */
-    AAUDIO_DEVICE_REMOTE_SUBMIX = 25,
-    /**
-     * A device type describing a Bluetooth Low Energy (BLE) audio headset or headphones.
-     * Headphones are grouped with headsets when the device is a sink:
-     * the features of headsets and headphones with regard to playback are the same.
-     */
-    AAUDIO_DEVICE_BLE_HEADSET = 26,
-
-    /**
-     * A device type describing a Bluetooth Low Energy (BLE) audio speaker.
-     */
-    AAUDIO_DEVICE_BLE_SPEAKER = 27,
-
-    /**
-     * A device type describing an Echo Canceller loopback Reference.
-     * This device is only used when capturing with MediaRecorder.AudioSource.ECHO_REFERENCE,
-     * which requires privileged permission
-     * {@link android.Manifest.permission#CAPTURE_AUDIO_OUTPUT}.
-     *
-     * Note that this is not exposed as it is a system API that requires privileged permission.
-     */
-    // AAUDIO_DEVICE_ECHO_REFERENCE = 28,
-
-    /**
-     * A device type describing the Enhanced Audio Return Channel of an HDMI connection.
-     */
-    AAUDIO_DEVICE_HDMI_EARC = 29,
-
-    /**
-     * A device type describing a Bluetooth Low Energy (BLE) broadcast group.
-     */
-    AAUDIO_DEVICE_BLE_BROADCAST = 30,
-
-    /**
-     * A device type describing the audio device associated with a dock using an analog connection.
-     */
-    AAUDIO_DEVICE_DOCK_ANALOG = 31
-} AAudio_DeviceType;
-
-/**
- * Query how aaudio mmap is supported for the given device type.
- *
- * @param device device type
- * @param direction {@link AAUDIO_DIRECTION_OUTPUT} or {@link AAUDIO_DIRECTION_INPUT}
- * @return the mmap policy or negative error
- */
-AAUDIO_API aaudio_policy_t AAudio_getPlatformMMapPolicy(
-        AAudio_DeviceType device, aaudio_direction_t direction) __INTRODUCED_IN(36);
-
-/**
- * Query how aaudio exclusive mmap is supported for the given device type.
- *
- * @param device device type
- * @param direction {@link AAUDIO_DIRECTION_OUTPUT} or {@link AAUDIO_DIRECTION_INPUT}
- * @return the mmap exclusive policy or negative error
- */
-AAUDIO_API aaudio_policy_t AAudio_getPlatformMMapExclusivePolicy(
-        AAudio_DeviceType device, aaudio_direction_t direction) __INTRODUCED_IN(36);
-
-/**
  * Control whether AAudioStreamBuilder_openStream() will use the new MMAP data path
  * or the older "Legacy" data path.
  *
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index 6bc7dc2..33f152c 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -42,7 +42,6 @@
 #include "fifo/FifoBuffer.h"
 #include "utility/AudioClock.h"
 #include <media/AidlConversion.h>
-#include <com_android_media_aaudio.h>
 
 #include "AudioStreamInternal.h"
 
@@ -197,15 +196,6 @@
         setSampleRate(configurationOutput.getSampleRate());
     }
 
-    if (!com::android::media::aaudio::sample_rate_conversion()) {
-        if (getSampleRate() != getDeviceSampleRate()) {
-            ALOGD("%s - skipping sample rate converter. SR = %d, Device SR = %d", __func__,
-                    getSampleRate(), getDeviceSampleRate());
-            result = AAUDIO_ERROR_INVALID_RATE;
-            goto error;
-        }
-    }
-
     // Save device format so we can do format conversion and volume scaling together.
     setDeviceFormat(configurationOutput.getFormat());
     setDeviceSamplesPerFrame(configurationOutput.getSamplesPerFrame());
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index eb7edaf..f917aa2 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -6092,9 +6092,9 @@
         }
         if (csd->size() == 0) {
             ALOGW("csd-%zu size is 0", i);
+        } else {
+            mCSD.push_back(csd);
         }
-
-        mCSD.push_back(csd);
         ++i;
     }
 
diff --git a/media/libstagefright/data/media_codecs_google_c2_video.xml b/media/libstagefright/data/media_codecs_google_c2_video.xml
index 72a2551..2fb2d59 100644
--- a/media/libstagefright/data/media_codecs_google_c2_video.xml
+++ b/media/libstagefright/data/media_codecs_google_c2_video.xml
@@ -101,6 +101,7 @@
             <Limit name="bitrate" range="1-240000000"/>
             <Limit name="block-size" value="16x16" />
             <Limit name="block-count" range="1-32768" /> <!-- max 4096x2048 equivalent -->
+            <Feature name="dynamic-color-aspects" />
             <Attribute name="software-codec"/>
          </MediaCodec>
     </Decoders>
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index 20c97dc..c79ac5c 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -262,6 +262,7 @@
             <Limit name="block-size" value="16x16" />
             <Limit name="block-count" range="1-32768" /> <!-- max 4096x2048 equivalent -->
             <Feature name="adaptive-playback" />
+            <Feature name="dynamic-color-aspects" />
             <Attribute name="software-codec"/>
         </MediaCodec>
     </Decoders>
diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp
index 995c674..889ddad 100644
--- a/media/ndk/NdkImageReader.cpp
+++ b/media/ndk/NdkImageReader.cpp
@@ -678,6 +678,28 @@
     }
 }
 
+media_status_t
+AImageReader::setUsage(uint64_t usage) {
+    Mutex::Autolock _l(mLock);
+    if (!mIsOpen || mBufferItemConsumer == nullptr) {
+        ALOGE("not ready to perform setUsage()");
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+    if (mUsage == usage) {
+        return AMEDIA_OK;
+    }
+
+    uint64_t halUsage = AHardwareBuffer_convertToGrallocUsageBits(usage);
+    status_t ret = mBufferItemConsumer->setConsumerUsageBits(halUsage);
+    if (ret != OK) {
+        ALOGE("setConsumerUsageBits() failed %d", ret);
+        return AMEDIA_ERROR_UNKNOWN;
+    }
+    mUsage = usage;
+    mHalUsage = halUsage;
+    return AMEDIA_OK;
+}
+
 static
 media_status_t validateParameters(int32_t width, int32_t height, int32_t format,
                                   uint64_t usage, int32_t maxImages,
@@ -935,3 +957,14 @@
     reader->setBufferRemovedListener(listener);
     return AMEDIA_OK;
 }
+
+EXPORT
+media_status_t AImageReader_setUsage(
+    AImageReader *reader, uint64_t usage) {
+    ALOGV("%s", __FUNCTION__);
+    if (reader == nullptr) {
+        ALOGE("%s: invalid argument! reader %p", __FUNCTION__, reader);
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+    return reader->setUsage(usage);
+}
diff --git a/media/ndk/NdkImageReaderPriv.h b/media/ndk/NdkImageReaderPriv.h
index 985f42b..89a33f8 100644
--- a/media/ndk/NdkImageReaderPriv.h
+++ b/media/ndk/NdkImageReaderPriv.h
@@ -20,6 +20,7 @@
 #include <inttypes.h>
 
 #include <media/NdkImageReader.h>
+#include <media-vndk/VndkImageReader.h>
 
 #include <utils/List.h>
 #include <utils/Mutex.h>
@@ -68,6 +69,7 @@
 
     media_status_t setImageListener(AImageReader_ImageListener* listener);
     media_status_t setBufferRemovedListener(AImageReader_BufferRemovedListener* listener);
+    media_status_t setUsage(uint64_t usage);
 
     media_status_t acquireNextImage(/*out*/AImage** image, /*out*/int* fenceFd);
     media_status_t acquireLatestImage(/*out*/AImage** image, /*out*/int* fenceFd);
@@ -121,7 +123,7 @@
     const int32_t mWidth;
     const int32_t mHeight;
     int32_t mFormat;
-    const uint64_t mUsage;  // AHARDWAREBUFFER_USAGE_* flags.
+    uint64_t mUsage;  // AHARDWAREBUFFER_USAGE_* flags.
     const int32_t mMaxImages;
 
     // TODO(jwcai) Seems completely unused in AImageReader class.
diff --git a/media/ndk/include/media-vndk/VndkImageReader.h b/media/ndk/include/media-vndk/VndkImageReader.h
new file mode 100644
index 0000000..c67a38c
--- /dev/null
+++ b/media/ndk/include/media-vndk/VndkImageReader.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _VNDK_IMAGE_READER_H
+#define _VNDK_IMAGE_READER_H
+
+// vndk is a superset of the NDK
+#include <media/NdkImageReader.h>
+
+__BEGIN_DECLS
+
+/**
+ * Set the usage of this image reader.
+ *
+ * <p>Note that calling this method will replace the previously set usage.</p>
+ *
+ * <p>Note: This will trigger re-allocation, could cause producer failures mid-stream
+ * if the new usage combination isn't supported, and thus should be avoided as much as
+ * possible regardless.</p>
+ *
+ * Available since API level 36.
+ *
+ * @param reader The image reader of interest.
+ * @param usage specifies how the consumer will access the AImage.
+ *              See {@link AImageReader_newWithUsage} parameter description for more details.
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if reader is NULL.</li>
+ *         <li>{@link AMEDIA_ERROR_UNKNOWN} if the method fails for some other reasons.</li></ul>
+ *
+ * @see AImage_getHardwareBuffer
+ */
+media_status_t AImageReader_setUsage(
+        AImageReader* _Nonnull reader, uint64_t usage) __INTRODUCED_IN(36);
+
+__END_DECLS
+
+#endif //_VNDK_IMAGE_READER_H
diff --git a/media/ndk/libmediandk.map.txt b/media/ndk/libmediandk.map.txt
index 262c169..8fb203f 100644
--- a/media/ndk/libmediandk.map.txt
+++ b/media/ndk/libmediandk.map.txt
@@ -16,6 +16,7 @@
     AImageReader_newWithDataSpace; # introduced=UpsideDownCake
     AImageReader_setBufferRemovedListener; # introduced=26
     AImageReader_setImageListener; # introduced=24
+    AImageReader_setUsage; # introduced=36 llndk
     AImage_delete; # introduced=24
     AImage_deleteAsync; # introduced=26
     AImage_getCropRect; # introduced=24
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index b2edaf7..c67fa13 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -110,10 +110,6 @@
 
 static constexpr char kAudioServiceName[] = "audio";
 
-// In order to avoid invalidating offloaded tracks each time a Visualizer is turned on and off
-// we define a minimum time during which a global effect is considered enabled.
-static const nsecs_t kMinGlobalEffectEnabletimeNs = seconds(7200);
-
 // Keep a strong reference to media.log service around forever.
 // The service is within our parent process so it can never die in a way that we could observe.
 // These two variables are const after initialization.
@@ -5019,11 +5015,6 @@
 
 bool AudioFlinger::isNonOffloadableGlobalEffectEnabled_l() const
 {
-    if (mGlobalEffectEnableTime != 0 &&
-            ((systemTime() - mGlobalEffectEnableTime) < kMinGlobalEffectEnabletimeNs)) {
-        return true;
-    }
-
     for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
         const auto thread = mPlaybackThreads.valueAt(i);
         audio_utils::lock_guard l(thread->mutex());
@@ -5039,8 +5030,6 @@
 {
     audio_utils::lock_guard _l(mutex());
 
-    mGlobalEffectEnableTime = systemTime();
-
     for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
         const sp<IAfPlaybackThread> t = mPlaybackThreads.valueAt(i);
         if (t->type() == IAfThreadBase::OFFLOAD) {
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 133410e..e99c3ed 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -775,9 +775,6 @@
     std::atomic<size_t> mClientSharedHeapSize = kMinimumClientSharedHeapSizeBytes;
     static constexpr size_t kMinimumClientSharedHeapSizeBytes = 1024 * 1024; // 1MB
 
-    // when a global effect was last enabled
-    nsecs_t mGlobalEffectEnableTime GUARDED_BY(mutex()) = 0;
-
     /* const */ sp<IAfPatchPanel> mPatchPanel;
 
     const sp<EffectsFactoryHalInterface> mEffectsFactoryHal =
diff --git a/services/audioflinger/IAfThread.h b/services/audioflinger/IAfThread.h
index 3163d4c..8fef263 100644
--- a/services/audioflinger/IAfThread.h
+++ b/services/audioflinger/IAfThread.h
@@ -163,6 +163,7 @@
     virtual audio_channel_mask_t mixerChannelMask() const = 0;
     virtual audio_format_t format() const = 0;
     virtual uint32_t channelCount() const = 0;
+    virtual std::string flagsAsString() const = 0;
 
     // Called by AudioFlinger::frameCount(audio_io_handle_t output) and effects,
     // and returns the [normal mix] buffer's frame count.
diff --git a/services/audioflinger/IAfTrack.h b/services/audioflinger/IAfTrack.h
index d27d52a..c9c766f 100644
--- a/services/audioflinger/IAfTrack.h
+++ b/services/audioflinger/IAfTrack.h
@@ -205,6 +205,7 @@
 
     virtual const char* getTrackStateAsString() const = 0;
 
+    virtual const std::string& getTraceSuffix() const = 0;
     // Called by the PlaybackThread to indicate that the track is becoming active
     // and a new interval should start with a given device list.
     virtual void logBeginInterval(const std::string& devices) = 0;
@@ -212,6 +213,9 @@
     // Called by the PlaybackThread to indicate the track is no longer active.
     virtual void logEndInterval() = 0;
 
+    // Called by the PlaybackThread when ATRACE is enabled.
+    virtual void logRefreshInterval(const std::string& devices) = 0;
+
     // Called to tally underrun frames in playback.
     virtual void tallyUnderrunFrames(size_t frames) = 0;
 
diff --git a/services/audioflinger/MmapTracks.h b/services/audioflinger/MmapTracks.h
index 0210bc2..259136b 100644
--- a/services/audioflinger/MmapTracks.h
+++ b/services/audioflinger/MmapTracks.h
@@ -78,6 +78,7 @@
     float getPortVolume() const override { return mVolume; }
     bool getPortMute() const override { return mMutedFromPort; }
 
+    std::string trackFlagsAsString() const final { return {}; }
 private:
     DISALLOW_COPY_AND_ASSIGN(MmapTrack);
 
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 2c3212c..c335c70 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -231,6 +231,8 @@
     float getPortVolume() const override { return mVolume; }
     bool getPortMute() const override { return mMutedFromPort; }
 
+    std::string trackFlagsAsString() const final { return toString(mFlags); }
+
 protected:
 
     DISALLOW_COPY_AND_ASSIGN(Track);
diff --git a/services/audioflinger/RecordTracks.h b/services/audioflinger/RecordTracks.h
index 83cd024..000244e 100644
--- a/services/audioflinger/RecordTracks.h
+++ b/services/audioflinger/RecordTracks.h
@@ -98,6 +98,8 @@
         return mResamplerBufferProvider;
     }
 
+    std::string trackFlagsAsString() const final { return toString(mFlags); }
+
 private:
     DISALLOW_COPY_AND_ASSIGN(RecordTrack);
 
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 200175b..32690ff 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -33,6 +33,7 @@
 #include <afutils/Vibrator.h>
 #include <audio_utils/MelProcessor.h>
 #include <audio_utils/Metadata.h>
+#include <audio_utils/Trace.h>
 #include <com_android_media_audioserver.h>
 #ifdef DEBUG_CPU_USAGE
 #include <audio_utils/Statistics.h>
@@ -612,7 +613,7 @@
 // ----------------------------------------------------------------------------
 
 // static
-const char* ThreadBase::threadTypeToString(ThreadBase::type_t type)
+const char* IAfThreadBase::threadTypeToString(ThreadBase::type_t type)
 {
     switch (type) {
     case MIXER:
@@ -2184,6 +2185,7 @@
         mIsTimestampAdvancing(kMinimumTimeBetweenTimestampChecksNs)
 {
     snprintf(mThreadName, kThreadNameLength, "AudioOut_%X", id);
+    mFlagsAsString = toString(output->flags);
     mNBLogWriter = afThreadCallback->newWriter_l(kLogSize, mThreadName);
 
     // Assumes constructor is called by AudioFlinger with its mutex() held, but
@@ -4271,6 +4273,17 @@
                 track->updateTeePatches_l();
             }
 
+            // check if traces have been enabled.
+            bool atraceEnabled = ATRACE_ENABLED();
+            if (atraceEnabled != mAtraceEnabled) [[unlikely]] {
+                mAtraceEnabled = atraceEnabled;
+                if (atraceEnabled) {
+                    const auto devices = patchSinksToString(&mPatch);
+                    for (const auto& track : activeTracks) {
+                        track->logRefreshInterval(devices);
+                    }
+                }
+            }
             // signal actual start of output stream when the render position reported by
             // the kernel starts moving.
             if (!mHalStarted && ((isSuspended() && (mBytesWritten != 0)) || (!mStandby
@@ -5261,6 +5274,8 @@
         fastTrack->mHapticScale = os::HapticScale::none();
         fastTrack->mHapticMaxAmplitude = NAN;
         fastTrack->mGeneration++;
+        snprintf(fastTrack->mTraceName, sizeof(fastTrack->mTraceName),
+                 "%s.0.0.%d", AUDIO_TRACE_PREFIX_AUDIO_TRACK_FRDY, mId);
         state->mFastTracksGen++;
         state->mTrackMask = 1;
         // fast mixer will use the HAL output sink
@@ -5838,6 +5853,9 @@
                     fastTrack->mHapticScale = track->getHapticScale();
                     fastTrack->mHapticMaxAmplitude = track->getHapticMaxAmplitude();
                     fastTrack->mGeneration++;
+                    snprintf(fastTrack->mTraceName, sizeof(fastTrack->mTraceName),
+                             "%s%s", AUDIO_TRACE_PREFIX_AUDIO_TRACK_FRDY,
+                             track->getTraceSuffix().c_str());
                     state->mTrackMask |= 1 << j;
                     didModify = true;
                     // no acknowledgement required for newly active tracks
@@ -5980,11 +5998,9 @@
         }
 
         size_t framesReady = track->framesReady();
-        if (ATRACE_ENABLED()) {
-            // I wish we had formatted trace names
-            std::string traceName("nRdy");
-            traceName += std::to_string(trackId);
-            ATRACE_INT(traceName.c_str(), framesReady);
+        if (ATRACE_ENABLED()) [[unlikely]] {
+            ATRACE_INT(std::string(AUDIO_TRACE_PREFIX_AUDIO_TRACK_NRDY)
+                    .append(track->getTraceSuffix()).c_str(), framesReady);
         }
         if ((framesReady >= minFrames) && track->isReady() &&
                 !track->isPaused() && !track->isTerminated())
@@ -7011,10 +7027,9 @@
 
         const size_t framesReady = track->framesReady();
         const int trackId = track->id();
-        if (ATRACE_ENABLED()) {
-            std::string traceName("nRdy");
-            traceName += std::to_string(trackId);
-            ATRACE_INT(traceName.c_str(), framesReady);
+        if (ATRACE_ENABLED()) [[unlikely]] {
+            ATRACE_INT(std::string(AUDIO_TRACE_PREFIX_AUDIO_TRACK_NRDY)
+                    .append(track->getTraceSuffix()).c_str(), framesReady);
         }
         if ((framesReady >= minFrames) && track->isReady() && !track->isPaused() &&
                 !track->isStopping_2() && !track->isStopped())
@@ -7534,6 +7549,11 @@
             continue;
         }
 
+        const size_t framesReady = track->framesReady();
+        if (ATRACE_ENABLED()) [[unlikely]] {
+            ATRACE_INT(std::string(AUDIO_TRACE_PREFIX_AUDIO_TRACK_NRDY)
+                    .append(track->getTraceSuffix()).c_str(), framesReady);
+        }
         if (track->isPausePending()) {
             track->pauseAck();
             // It is possible a track might have been flushed or stopped.
@@ -7592,7 +7612,7 @@
                 // Do not handle new data in this iteration even if track->framesReady()
                 mixerStatus = MIXER_TRACKS_ENABLED;
             }
-        }  else if (track->framesReady() && track->isReady() &&
+        } else if (framesReady && track->isReady() &&
                 !track->isPaused() && !track->isTerminated() && !track->isStopping_2()) {
             ALOGVV("OffloadThread: track(%d) s=%08x [OK]", track->id(), cblk->mServer);
             if (track->fillingStatus() == IAfTrack::FS_FILLED) {
@@ -8237,6 +8257,7 @@
     , mBtNrecSuspended(false)
 {
     snprintf(mThreadName, kThreadNameLength, "AudioIn_%X", id);
+    mFlagsAsString = toString(input->flags);
     mNBLogWriter = afThreadCallback->newWriter_l(kLogSize, mThreadName);
 
     if (mInput->audioHwDev != nullptr) {
@@ -8574,7 +8595,6 @@
                     }
                     if (invalidate) {
                         activeTrack->invalidate();
-                        ALOG_ASSERT(fastTrackToRemove == 0);
                         fastTrackToRemove = activeTrack;
                         removeTrack_l(activeTrack);
                         mActiveTracks.remove(activeTrack);
@@ -8591,6 +8611,18 @@
 
             mActiveTracks.updatePowerState_l(this);
 
+            // check if traces have been enabled.
+            bool atraceEnabled = ATRACE_ENABLED();
+            if (atraceEnabled != mAtraceEnabled) [[unlikely]] {
+                mAtraceEnabled = atraceEnabled;
+                if (atraceEnabled) {
+                    const auto devices = patchSourcesToString(&mPatch);
+                    for (const auto& track : activeTracks) {
+                        track->logRefreshInterval(devices);
+                    }
+                }
+            }
+
             updateMetadata_l();
 
             if (allStopped) {
@@ -11172,6 +11204,7 @@
       mOutput(output)
 {
     snprintf(mThreadName, kThreadNameLength, "AudioMmapOut_%X", id);
+    mFlagsAsString = toString(output->flags);
     mChannelCount = audio_channel_count_from_out_mask(mChannelMask);
     mMasterVolume = afThreadCallback->masterVolume_l();
     mMasterMute = afThreadCallback->masterMute_l();
@@ -11515,6 +11548,7 @@
       mInput(input)
 {
     snprintf(mThreadName, kThreadNameLength, "AudioMmapIn_%X", id);
+    mFlagsAsString = toString(input->flags);
     mChannelCount = audio_channel_count_from_in_mask(mChannelMask);
 }
 
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 1d6e244..6784341 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -43,7 +43,6 @@
 
 class ThreadBase : public virtual IAfThreadBase, public Thread {
 public:
-    static const char *threadTypeToString(type_t type);
 
     // ThreadBase_ThreadLoop is a virtual mutex (always nullptr) that
     // guards methods and variables that ONLY run and are accessed
@@ -400,6 +399,8 @@
         }
     }
 
+    std::string flagsAsString() const final {  return mFlagsAsString; }
+
     sp<IAfEffectHandle> createEffect_l(
                                     const sp<Client>& client,
                                     const sp<media::IEffectClient>& effectClient,
@@ -681,6 +682,9 @@
                 ThreadMetrics           mThreadMetrics;
                 const bool              mIsOut;
 
+    std::string mFlagsAsString;                                     // set in constructor.
+    bool mAtraceEnabled GUARDED_BY(ThreadBase_ThreadLoop) = false;  // checked in threadLoop.
+
     // mThreadBusy is checked under the ThreadBase_Mutex to ensure that
     // TrackHandle operations do not proceed while the ThreadBase is busy
     // with the track.  mThreadBusy is only true if the track is active.
diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h
index cde7fc2..2b3d772 100644
--- a/services/audioflinger/TrackBase.h
+++ b/services/audioflinger/TrackBase.h
@@ -22,6 +22,7 @@
 
 #include <afutils/NBAIO_Tee.h>
 #include <android-base/macros.h>  // DISALLOW_COPY_AND_ASSIGN
+#include <audio_utils/Trace.h>
 #include <datapath/TrackMetrics.h>
 #include <mediautils/BatteryNotifier.h>
 #include <psh_utils/AudioPowerManager.h>
@@ -210,16 +211,16 @@
         }
     }
 
+    const std::string& getTraceSuffix() const final { return mTraceSuffix; }
     // Called by the PlaybackThread to indicate that the track is becoming active
     // and a new interval should start with a given device list.
-    void logBeginInterval(const std::string& devices) final {
-        mTrackMetrics.logBeginInterval(devices);
-    }
+    void logBeginInterval(const std::string& devices) final;
 
     // Called by the PlaybackThread to indicate the track is no longer active.
-    void logEndInterval() final {
-        mTrackMetrics.logEndInterval();
-    }
+    void logEndInterval() final;
+
+    // Called by the PlaybackThread when ATRACE is enabled.
+    void logRefreshInterval(const std::string& devices) final;
 
     // Called to tally underrun frames in playback.
     void tallyUnderrunFrames(size_t /* frames */) override {}
@@ -333,6 +334,10 @@
     void deferRestartIfDisabled();
     virtual void restartIfDisabled() {}
 
+    virtual std::string trackFlagsAsString() const = 0;
+
+    audio_utils::trace::Object createDeviceIntervalTrace(const std::string& devices);
+
     const wp<IAfThreadBase> mThread;
     const alloc_type     mAllocType;
     /*const*/ sp<Client> mClient;   // see explanation at ~TrackBase() why not const
@@ -384,6 +389,7 @@
 
     bool                mLogForceVolumeUpdate = true; // force volume update to TrackMetrics.
 
+    audio_utils::trace::Object mLastTrace;  // accessed by PlaybackThread or RecordThread
     TrackMetrics        mTrackMetrics;
 
     bool                mServerLatencySupported = false;
@@ -393,6 +399,10 @@
     const pid_t         mCreatorPid;  // can be different from mclient->pid() for instance
                                       // when created by NuPlayer on behalf of a client
 
+    const std::string mTraceSuffix;
+    const std::string mTraceActionId;
+    const std::string mTraceIntervalId;
+
     // If the last track change was notified to the client with readAndClearHasChanged
     std::atomic_flag    mChangeNotified = ATOMIC_FLAG_INIT;
     // RAII object for battery stats book-keeping
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 5fbe48c..78ba24d 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -28,6 +28,7 @@
 #include "IAfThread.h"
 #include "ResamplerBufferProvider.h"
 
+#include <audio_utils/StringUtils.h>
 #include <audio_utils/minifloat.h>
 #include <media/AudioValidator.h>
 #include <media/RecordBufferConverter.h>
@@ -124,7 +125,12 @@
         mPortId(portId),
         mIsInvalid(false),
         mTrackMetrics(std::move(metricsId), isOut, clientUid),
-        mCreatorPid(creatorPid)
+        mCreatorPid(creatorPid),
+        mTraceSuffix{std::to_string(mPortId).append(".").append(std::to_string(mId))
+                .append(".").append(std::to_string(mThreadIoHandle))},
+        mTraceActionId{std::string(AUDIO_TRACE_PREFIX_AUDIO_TRACK_ACTION).append(mTraceSuffix)},
+        mTraceIntervalId{std::string(AUDIO_TRACE_PREFIX_AUDIO_TRACK_INTERVAL)
+                .append(mTraceSuffix)}
 {
     const uid_t callingUid = IPCThreadState::self()->getCallingUid();
     if (!isAudioServerOrMediaServerUid(callingUid) || clientUid == AUDIO_UID_INVALID) {
@@ -337,6 +343,90 @@
     mTrackToken.reset();
 }
 
+audio_utils::trace::Object TrackBase::createDeviceIntervalTrace(const std::string& devices) {
+    audio_utils::trace::Object trace;
+
+    // Please do not modify any items without approval (look at git blame).
+    // Sanitize the device string to remove addresses.
+    std::string plainDevices;
+    if (devices.find(")") != std::string::npos) {
+        auto deviceAddrVector = audio_utils::stringutils::getDeviceAddressPairs(devices);
+        for (const auto& deviceAddr : deviceAddrVector) {
+            // "|" not compatible with ATRACE filtering so we use "+".
+            if (!plainDevices.empty()) plainDevices.append("+");
+            plainDevices.append(deviceAddr.first);
+        }
+    } else {
+        plainDevices = devices;
+    }
+
+    trace // the following key, value pairs should be alphabetical
+            .set(AUDIO_TRACE_OBJECT_KEY_CHANNEL_MASK, static_cast<int32_t>(mChannelMask))
+            .set(AUDIO_TRACE_OBJECT_KEY_CONTENT_TYPE, toString(mAttr.content_type))
+            .set(AUDIO_TRACE_OBJECT_KEY_DEVICES, plainDevices)
+            .set(AUDIO_TRACE_OBJECT_KEY_FLAGS, trackFlagsAsString())
+            .set(AUDIO_TRACE_OBJECT_KEY_FORMAT, IAfThreadBase::formatToString(mFormat))
+            .set(AUDIO_TRACE_OBJECT_KEY_FRAMECOUNT, static_cast<int64_t>(mFrameCount))
+            .set(AUDIO_TRACE_OBJECT_KEY_PID, static_cast<int32_t>(mClient->pid()))
+            .set(AUDIO_TRACE_OBJECT_KEY_SAMPLE_RATE, static_cast<int32_t>(sampleRate()));
+    if (const auto thread = mThread.promote()) {
+        trace // continue in alphabetical order
+                .set(AUDIO_TRACE_PREFIX_THREAD AUDIO_TRACE_OBJECT_KEY_CHANNEL_MASK,
+                        static_cast<int32_t>(thread->channelMask()))
+                .set(AUDIO_TRACE_PREFIX_THREAD AUDIO_TRACE_OBJECT_KEY_FLAGS,
+                        thread->flagsAsString())
+                .set(AUDIO_TRACE_PREFIX_THREAD AUDIO_TRACE_OBJECT_KEY_FORMAT,
+                        IAfThreadBase::formatToString(thread->format()))
+                .set(AUDIO_TRACE_PREFIX_THREAD AUDIO_TRACE_OBJECT_KEY_FRAMECOUNT,
+                        static_cast<int64_t>(thread->frameCount()))
+                .set(AUDIO_TRACE_PREFIX_THREAD AUDIO_TRACE_OBJECT_KEY_ID,
+                        static_cast<int32_t>(mThreadIoHandle))
+                .set(AUDIO_TRACE_PREFIX_THREAD AUDIO_TRACE_OBJECT_KEY_SAMPLE_RATE,
+                        static_cast<int32_t>(thread->sampleRate()))
+                .set(AUDIO_TRACE_PREFIX_THREAD AUDIO_TRACE_OBJECT_KEY_TYPE,
+                        IAfThreadBase::threadTypeToString(thread->type()));
+    }
+    trace // continue in alphabetical order
+            .set(AUDIO_TRACE_OBJECT_KEY_UID, static_cast<int32_t>(uid()))
+            .set(AUDIO_TRACE_OBJECT_KEY_USAGE, toString(mAttr.usage));
+    return trace;
+}
+
+void TrackBase::logBeginInterval(const std::string& devices) {
+    mTrackMetrics.logBeginInterval(devices);
+
+    if (ATRACE_ENABLED()) [[unlikely]] {
+        auto trace = createDeviceIntervalTrace(devices);
+        mLastTrace = trace;
+        ATRACE_INSTANT_FOR_TRACK(mTraceIntervalId.c_str(),
+                trace.set(AUDIO_TRACE_OBJECT_KEY_EVENT, AUDIO_TRACE_EVENT_BEGIN_INTERVAL)
+                        .toTrace().c_str());
+    }
+}
+
+void TrackBase::logEndInterval() {
+    if (!mLastTrace.empty()) {
+        if (ATRACE_ENABLED()) [[unlikely]] {
+            ATRACE_INSTANT_FOR_TRACK(mTraceIntervalId.c_str(),
+                    mLastTrace.set(AUDIO_TRACE_OBJECT_KEY_EVENT, AUDIO_TRACE_EVENT_END_INTERVAL)
+                            .toTrace().c_str());
+        }
+        mLastTrace.clear();
+    }
+    mTrackMetrics.logEndInterval();
+}
+
+void TrackBase::logRefreshInterval(const std::string& devices) {
+    if (ATRACE_ENABLED()) [[unlikely]] {
+        if (mLastTrace.empty()) mLastTrace = createDeviceIntervalTrace(devices);
+        auto trace = mLastTrace;
+        ATRACE_INSTANT_FOR_TRACK(mTraceIntervalId.c_str(),
+                trace.set(AUDIO_TRACE_OBJECT_KEY_EVENT,
+                               AUDIO_TRACE_EVENT_REFRESH_INTERVAL)
+                        .toTrace().c_str());
+    }
+}
+
 PatchTrackBase::PatchTrackBase(const sp<ClientProxy>& proxy,
         IAfThreadBase* thread, const Timeout& timeout)
     : mProxy(proxy)
@@ -1159,6 +1249,12 @@
         ALOGV("%s(%d): underrun,  framesReady(%zu) < framesDesired(%zd), state: %d",
                 __func__, mId, buf.mFrameCount, desiredFrames, (int)mState);
         mAudioTrackServerProxy->tallyUnderrunFrames(desiredFrames);
+        if (ATRACE_ENABLED()) [[unlikely]] {
+            ATRACE_INSTANT_FOR_TRACK(mTraceActionId.c_str(), audio_utils::trace::Object{}
+                    .set(AUDIO_TRACE_OBJECT_KEY_EVENT, AUDIO_TRACE_EVENT_UNDERRUN)
+                    .set(AUDIO_TRACE_OBJECT_KEY_FRAMECOUNT, desiredFrames)
+                    .toTrace().c_str());
+        }
     } else {
         mAudioTrackServerProxy->tallyUnderrunFrames(0);
     }
@@ -1271,6 +1367,11 @@
 status_t Track::start(AudioSystem::sync_event_t event __unused,
                                                     audio_session_t triggerSession __unused)
 {
+    if (ATRACE_ENABLED()) [[unlikely]] {
+        ATRACE_INSTANT_FOR_TRACK(mTraceActionId.c_str(), audio_utils::trace::Object{}
+                .set(AUDIO_TRACE_OBJECT_KEY_EVENT, AUDIO_TRACE_EVENT_START)
+                .toTrace().c_str());
+    }
     status_t status = NO_ERROR;
     ALOGV("%s(%d): calling pid %d session %d",
             __func__, mId, IPCThreadState::self()->getCallingPid(), mSessionId);
@@ -1415,6 +1516,11 @@
 void Track::stop()
 {
     ALOGV("%s(%d): calling pid %d", __func__, mId, IPCThreadState::self()->getCallingPid());
+    if (ATRACE_ENABLED()) [[unlikely]] {
+        ATRACE_INSTANT_FOR_TRACK(mTraceActionId.c_str(), audio_utils::trace::Object{}
+                .set(AUDIO_TRACE_OBJECT_KEY_EVENT, AUDIO_TRACE_EVENT_STOP)
+                .toTrace().c_str());
+    }
     const sp<IAfThreadBase> thread = mThread.promote();
     if (thread != 0) {
         audio_utils::unique_lock ul(thread->mutex());
@@ -1452,6 +1558,11 @@
 void Track::pause()
 {
     ALOGV("%s(%d): calling pid %d", __func__, mId, IPCThreadState::self()->getCallingPid());
+    if (ATRACE_ENABLED()) [[unlikely]] {
+        ATRACE_INSTANT_FOR_TRACK(mTraceActionId.c_str(), audio_utils::trace::Object{}
+                .set(AUDIO_TRACE_OBJECT_KEY_EVENT, AUDIO_TRACE_EVENT_PAUSE)
+                .toTrace().c_str());
+    }
     const sp<IAfThreadBase> thread = mThread.promote();
     if (thread != 0) {
         audio_utils::unique_lock ul(thread->mutex());
@@ -1491,6 +1602,11 @@
 void Track::flush()
 {
     ALOGV("%s(%d)", __func__, mId);
+    if (ATRACE_ENABLED()) [[unlikely]] {
+        ATRACE_INSTANT_FOR_TRACK(mTraceActionId.c_str(), audio_utils::trace::Object{}
+                .set(AUDIO_TRACE_OBJECT_KEY_EVENT, AUDIO_TRACE_EVENT_FLUSH)
+                .toTrace().c_str());
+    }
     const sp<IAfThreadBase> thread = mThread.promote();
     if (thread != 0) {
         audio_utils::unique_lock ul(thread->mutex());
@@ -2936,6 +3052,11 @@
 status_t RecordTrack::start(AudioSystem::sync_event_t event,
                                                         audio_session_t triggerSession)
 {
+    if (ATRACE_ENABLED()) [[unlikely]] {
+        ATRACE_INSTANT_FOR_TRACK(mTraceActionId.c_str(), audio_utils::trace::Object{}
+                .set(AUDIO_TRACE_OBJECT_KEY_EVENT, AUDIO_TRACE_EVENT_START)
+                .toTrace().c_str());
+    }
     const sp<IAfThreadBase> thread = mThread.promote();
     if (thread != 0) {
         auto* const recordThread = thread->asIAfRecordThread().get();
@@ -2948,6 +3069,11 @@
 
 void RecordTrack::stop()
 {
+    if (ATRACE_ENABLED()) [[unlikely]] {
+        ATRACE_INSTANT_FOR_TRACK(mTraceActionId.c_str(), audio_utils::trace::Object{}
+                .set(AUDIO_TRACE_OBJECT_KEY_EVENT, AUDIO_TRACE_EVENT_STOP)
+                .toTrace().c_str());
+    }
     const sp<IAfThreadBase> thread = mThread.promote();
     if (thread != 0) {
         auto* const recordThread = thread->asIAfRecordThread().get();
@@ -3619,11 +3745,21 @@
 status_t MmapTrack::start(AudioSystem::sync_event_t event __unused,
                                                     audio_session_t triggerSession __unused)
 {
+    if (ATRACE_ENABLED()) [[unlikely]] {
+        ATRACE_INSTANT_FOR_TRACK(mTraceActionId.c_str(), audio_utils::trace::Object{}
+                .set(AUDIO_TRACE_OBJECT_KEY_EVENT, AUDIO_TRACE_EVENT_START)
+                .toTrace().c_str());
+    }
     return NO_ERROR;
 }
 
 void MmapTrack::stop()
 {
+    if (ATRACE_ENABLED()) [[unlikely]] {
+        ATRACE_INSTANT_FOR_TRACK(mTraceActionId.c_str(), audio_utils::trace::Object{}
+                .set(AUDIO_TRACE_OBJECT_KEY_EVENT, AUDIO_TRACE_EVENT_STOP)
+                .toTrace().c_str());
+    }
 }
 
 // AudioBufferProvider interface
diff --git a/services/audioflinger/fastpath/FastMixer.cpp b/services/audioflinger/fastpath/FastMixer.cpp
index 1d41b3f..760ce1d 100644
--- a/services/audioflinger/fastpath/FastMixer.cpp
+++ b/services/audioflinger/fastpath/FastMixer.cpp
@@ -36,6 +36,7 @@
 #include <cpustats/ThreadCpuUsage.h>
 #endif
 #endif
+#include <audio_utils/Trace.h>
 #include <audio_utils/channels.h>
 #include <audio_utils/format.h>
 #include <audio_utils/mono_blend.h>
@@ -397,12 +398,7 @@
             // in the overall fast mix cycle being delayed.  Should use a non-blocking FIFO.
             const size_t framesReady = fastTrack->mBufferProvider->framesReady();
             if (ATRACE_ENABLED()) {
-                // I wish we had formatted trace names
-                char traceName[16];
-                strcpy(traceName, "fRdy");
-                traceName[4] = i + (i < 10 ? '0' : 'A' - 10);
-                traceName[5] = '\0';
-                ATRACE_INT(traceName, framesReady);
+                ATRACE_INT(fastTrack->mTraceName, framesReady);
             }
             FastTrackDump *ftDump = &dumpState->mTracks[i];
             FastTrackUnderruns underruns = ftDump->mUnderruns;
diff --git a/services/audioflinger/fastpath/FastMixerState.h b/services/audioflinger/fastpath/FastMixerState.h
index 0a56f92..f01dd4b 100644
--- a/services/audioflinger/fastpath/FastMixerState.h
+++ b/services/audioflinger/fastpath/FastMixerState.h
@@ -56,6 +56,7 @@
     bool                    mHapticPlaybackEnabled = false; // haptic playback is enabled or not
     os::HapticScale mHapticScale = os::HapticScale::mute(); // scale of haptic data
     float                   mHapticMaxAmplitude = NAN; // max amplitude allowed for haptic data
+    char mTraceName[32]{};
 };
 
 // No virtuals.
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 1f669e1..303f313 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -3805,19 +3805,21 @@
         }
     }
 
-    // update voice volume if the an active call route exists
-    if (mCallRxSourceClient != nullptr && mCallRxSourceClient->isConnected()
-            && (curSrcDevices.find(
-                Volume::getDeviceForVolume({mCallRxSourceClient->sinkDevice()->type()}))
-                != curSrcDevices.end())) {
-        bool isVoiceVolSrc;
-        bool isBtScoVolSrc;
-        if (isVolumeConsistentForCalls(vs, {mCallRxSourceClient->sinkDevice()->type()},
-                isVoiceVolSrc, isBtScoVolSrc, __func__)
-                && (isVoiceVolSrc || isBtScoVolSrc)) {
-            bool voiceVolumeManagedByHost = !isBtScoVolSrc &&
-                    !audio_is_ble_out_device(mCallRxSourceClient->sinkDevice()->type());
-            setVoiceVolume(index, curves, voiceVolumeManagedByHost, 0);
+    // update voice volume if an active call route exists and target device is same as current
+    if (mCallRxSourceClient != nullptr && mCallRxSourceClient->isConnected()) {
+        audio_devices_t rxSinkDevice = mCallRxSourceClient->sinkDevice()->type();
+        audio_devices_t curVoiceDevice = Volume::getDeviceForVolume({rxSinkDevice});
+        if (curVoiceDevice == device
+                && curSrcDevices.find(curVoiceDevice) != curSrcDevices.end()) {
+            bool isVoiceVolSrc;
+            bool isBtScoVolSrc;
+            if (isVolumeConsistentForCalls(vs, {rxSinkDevice},
+                    isVoiceVolSrc, isBtScoVolSrc, __func__)
+                    && (isVoiceVolSrc || isBtScoVolSrc)) {
+                bool voiceVolumeManagedByHost = !isBtScoVolSrc &&
+                        !audio_is_ble_out_device(rxSinkDevice);
+                setVoiceVolume(index, curves, voiceVolumeManagedByHost, 0);
+            }
         }
     }
 
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index b9c8206..b44f949 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -87,7 +87,9 @@
         "android.hardware.camera.device@3.6",
         "android.hardware.camera.device@3.7",
         "android.hardware.common-V2-ndk",
+        "android.hardware.common-V2-cpp",
         "android.hardware.common.fmq-V1-ndk",
+        "android.hardware.common.fmq-V1-cpp",
         "camera_platform_flags_c_lib",
         "com.android.window.flags.window-aconfig_flags_c_lib",
         "media_permission-aidl-cpp",
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 31a45c3..fdb5b7d 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -1492,6 +1492,7 @@
         int servicePid, std::pair<int, IPCTransport> deviceVersionAndTransport,
         apiLevel effectiveApiLevel, bool overrideForPerfClass, int rotationOverride,
         bool forceSlowJpegMode, const std::string& originalCameraId, bool sharedMode,
+        bool isVendorClient,
         /*out*/sp<BasicClient>* client) {
     // For HIDL devices
     if (deviceVersionAndTransport.second == IPCTransport::HIDL) {
@@ -1537,7 +1538,8 @@
                 cameraService, tmp, cameraService->mCameraServiceProxyWrapper,
                 cameraService->mAttributionAndPermissionUtils, clientAttribution, callingPid,
                 systemNativeClient, cameraId, facing, sensorOrientation, servicePid,
-                overrideForPerfClass, rotationOverride, originalCameraId, sharedMode);
+                overrideForPerfClass, rotationOverride, originalCameraId, sharedMode,
+                isVendorClient);
         ALOGI("%s: Camera2 API, rotationOverride %d", __FUNCTION__, rotationOverride);
     }
     return Status::ok();
@@ -1638,7 +1640,7 @@
                   /*rotationOverride*/
                   hardware::ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT,
                   /*forceSlowJpegMode*/ false, cameraIdStr, /*isNonSystemNdk*/ false,
-                  /*sharedMode*/false, /*out*/ tmp))
+                  /*sharedMode*/false, /*isVendorClient*/false,/*out*/ tmp))
                  .isOk()) {
         ALOGE("%s: Error initializing shim metadata: %s", __FUNCTION__, ret.toString8().c_str());
     }
@@ -2202,7 +2204,8 @@
             cameraClient, cameraIdStr, api1CameraId, resolvedClientAttribution,
             /*systemNativeClient*/ false, API_1,
             /*shimUpdateOnly*/ false, /*oomScoreOffset*/ 0, targetSdkVersion, rotationOverride,
-            forceSlowJpegMode, cameraIdStr, isNonSystemNdk, /*sharedMode*/false, /*out*/ client);
+            forceSlowJpegMode, cameraIdStr, isNonSystemNdk, /*sharedMode*/false,
+            /*isVendorClient*/ false, /*out*/ client);
 
     if (!ret.isOk()) {
         logRejected(cameraIdStr, getCallingPid(),
@@ -2286,7 +2289,32 @@
         const std::string& unresolvedCameraId,
         int oomScoreOffset, int targetSdkVersion,
         int rotationOverride, const AttributionSourceState& clientAttribution, int32_t devicePolicy,
-        bool sharedMode, /*out*/sp<hardware::camera2::ICameraDeviceUser>* device) {
+        bool sharedMode,
+        /*out*/sp<hardware::camera2::ICameraDeviceUser>* device) {
+    return connectDeviceImpl(cameraCb, unresolvedCameraId, oomScoreOffset, targetSdkVersion,
+            rotationOverride, clientAttribution, devicePolicy, sharedMode,
+            /*isVendorClient*/false, device);
+}
+
+Status CameraService::connectDeviceVendor(
+        const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
+        const std::string& unresolvedCameraId,
+        int oomScoreOffset, int targetSdkVersion,
+        int rotationOverride, const AttributionSourceState& clientAttribution, int32_t devicePolicy,
+        bool sharedMode,
+        /*out*/sp<hardware::camera2::ICameraDeviceUser>* device) {
+            return connectDeviceImpl(cameraCb, unresolvedCameraId, oomScoreOffset, targetSdkVersion,
+                    rotationOverride, clientAttribution, devicePolicy, sharedMode,
+                    /*isVendorClient*/true, device);
+}
+
+Status CameraService::connectDeviceImpl(
+        const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
+        const std::string& unresolvedCameraId,
+        int oomScoreOffset, int targetSdkVersion,
+        int rotationOverride, const AttributionSourceState& clientAttribution, int32_t devicePolicy,
+        bool sharedMode, bool isVendorClient,
+        /*out*/sp<hardware::camera2::ICameraDeviceUser>* device) {
     ATRACE_CALL();
     RunThreadWithRealtimePriority priorityBump;
     Status ret = Status::ok();
@@ -2367,7 +2395,7 @@
             cameraCb, cameraId, /*api1CameraId*/ -1, resolvedClientAttribution, systemNativeClient,
             API_2, /*shimUpdateOnly*/ false, oomScoreOffset, targetSdkVersion, rotationOverride,
             /*forceSlowJpegMode*/ false, unresolvedCameraId, isNonSystemNdk, sharedMode,
-            /*out*/ client);
+            isVendorClient, /*out*/ client);
 
     if (!ret.isOk()) {
         logRejected(cameraId, clientPid, clientPackageName, toStdString(ret.toString8()));
@@ -2447,7 +2475,8 @@
                                     bool shimUpdateOnly, int oomScoreOffset, int targetSdkVersion,
                                     int rotationOverride, bool forceSlowJpegMode,
                                     const std::string& originalCameraId, bool isNonSystemNdk,
-                                    bool sharedMode, /*out*/ sp<CLIENT>& device) {
+                                    bool sharedMode, bool isVendorClient,
+                                    /*out*/ sp<CLIENT>& device) {
     binder::Status ret = binder::Status::ok();
 
     nsecs_t openTimeNs = systemTime();
@@ -2546,7 +2575,7 @@
                                systemNativeClient, cameraId, api1CameraId, facing, orientation,
                                getpid(), deviceVersionAndTransport, effectiveApiLevel,
                                overrideForPerfClass, rotationOverride, forceSlowJpegMode,
-                               originalCameraId, sharedMode,
+                               originalCameraId, sharedMode, isVendorClient,
                                /*out*/ &tmp))
                      .isOk()) {
             return ret;
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 6f29ff4..c4d2d67 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -280,6 +280,14 @@
             std::vector<hardware::CameraStatus>* cameraStatuses, bool isVendor = false,
             bool isProcessLocalTest = false);
 
+    binder::Status  connectDeviceVendor(
+            const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
+            const std::string& cameraId, int scoreOffset, int targetSdkVersion,
+            int rotationOverride, const AttributionSourceState& clientAttribution,
+            int32_t devicePolicy, bool sharedMode,
+            /*out*/
+            sp<hardware::camera2::ICameraDeviceUser>* device);
+
     // Monitored UIDs availability notification
     void                notifyMonitoredUids();
     void                notifyMonitoredUids(const std::unordered_set<uid_t> &notifyUidSet);
@@ -996,7 +1004,16 @@
                                  bool shimUpdateOnly, int scoreOffset, int targetSdkVersion,
                                  int rotationOverride, bool forceSlowJpegMode,
                                  const std::string& originalCameraId, bool isNonSystemNdk,
-                                 bool sharedMode, /*out*/ sp<CLIENT>& device);
+                                 bool sharedMode, bool isVendorClient,
+                                 /*out*/ sp<CLIENT>& device);
+
+    binder::Status connectDeviceImpl(
+            const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
+            const std::string& cameraId, int scoreOffset, int targetSdkVersion,
+            int rotationOverride, const AttributionSourceState& clientAttribution,
+            int32_t devicePolicy, bool sharedMode, bool isVendorClient,
+            /*out*/
+            sp<hardware::camera2::ICameraDeviceUser>* device);
 
     // Lock guarding camera service state
     Mutex               mServiceLock;
@@ -1493,6 +1510,7 @@
                                      apiLevel effectiveApiLevel, bool overrideForPerfClass,
                                      int rotationOverride, bool forceSlowJpegMode,
                                      const std::string& originalCameraId, bool sharedMode,
+                                     bool isVendorClient,
                                      /*out*/ sp<BasicClient>* client);
 
     static std::string toString(std::set<userid_t> intSet);
diff --git a/services/camera/libcameraservice/FwkOnlyMetadataTags.h b/services/camera/libcameraservice/FwkOnlyMetadataTags.h
new file mode 100644
index 0000000..768afeb
--- /dev/null
+++ b/services/camera/libcameraservice/FwkOnlyMetadataTags.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <array>
+#pragma once
+
+/**
+ * ! Do not edit this file directly !
+ *
+ * Generated automatically from fwk_only_metadata_tags.mako. To be included in libcameraservice
+ * only by Camera3Device.cpp.
+ */
+
+namespace android {
+
+/**
+ * Framework only CaptureRequest keys. To be used for filtering out keys in CaptureRequest
+ * before sending to the HAL.
+ */
+constexpr std::array kFwkOnlyMetadataKeys = {
+        ANDROID_CONTROL_AF_REGIONS_SET,
+        ANDROID_CONTROL_AE_REGIONS_SET,
+        ANDROID_CONTROL_AWB_REGIONS_SET,
+        ANDROID_CONTROL_ZOOM_METHOD,
+        ANDROID_SCALER_CROP_REGION_SET,
+        ANDROID_EXTENSION_STRENGTH,
+};
+
+} //namespace android
diff --git a/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.cpp b/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.cpp
index 70647b4..950ea05 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.cpp
+++ b/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.cpp
@@ -174,11 +174,19 @@
 }
 
 binder::Status AidlCameraDeviceCallbacks::onResultReceived(
-    const CameraMetadataNative& result,
+    const CameraMetadataInfo &resultInfo,
     const UCaptureResultExtras& resultExtras,
     const ::std::vector<UPhysicalCaptureResultInfo>& physicalCaptureResultInfos) {
     // Wrap CameraMetadata, resultExtras and physicalCaptureResultInfos in on
     // sp<RefBase>-able structure and post it.
+    // We modify metadata - since we want to filter out tags based on the vndk
+    // version, and also this communication is an in process function call.
+    // So we don't use FMQ for the shim layer. FMQ is still used for VNDK IPC.
+    if (resultInfo.getTag() != CameraMetadataInfo::metadata) {
+        ALOGE("Vendor callbacks got metadata in fmq ? ");
+        return binder::Status::ok();
+    }
+    const CameraMetadataNative &result = resultInfo.get<CameraMetadataInfo::metadata>();
     sp<ResultWrapper> resultWrapper = new ResultWrapper(const_cast<CameraMetadataNative &>(result),
                                                         resultExtras, physicalCaptureResultInfos);
     sp<AMessage> msg = new AMessage(kWhatResultReceived, mHandler);
diff --git a/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.h b/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.h
index 07bf7d8..6504cdc 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.h
+++ b/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.h
@@ -46,6 +46,7 @@
 using ::android::frameworks::cameraservice::utils::DeathPipe;
 using ::android::hardware::camera2::impl::CameraMetadataNative;
 
+using CameraMetadataInfo = android::hardware::camera2::CameraMetadataInfo;
 using CaptureResultMetadataQueue = AidlMessageQueue<int8_t, SynchronizedReadWrite>;
 
 class AidlCameraDeviceCallbacks : public UBnCameraDeviceCallbacks {
@@ -65,7 +66,8 @@
                                     int64_t timestamp) override;
 
     binder::Status onResultReceived(
-            const CameraMetadataNative& result, const CaptureResultExtras& resultExtras,
+            const CameraMetadataInfo &resultInfo,
+            const CaptureResultExtras& resultExtras,
             const std::vector<PhysicalCaptureResultInfo>& physicalCaptureResultInfos) override;
 
     binder::Status onPrepared(int32_t streamId) override;
diff --git a/services/camera/libcameraservice/aidl/AidlCameraService.cpp b/services/camera/libcameraservice/aidl/AidlCameraService.cpp
index a2c431e..46e2280 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraService.cpp
+++ b/services/camera/libcameraservice/aidl/AidlCameraService.cpp
@@ -177,7 +177,7 @@
                     kDefaultDeviceId);
     clientAttribution.packageName = "";
     clientAttribution.attributionTag = std::nullopt;
-    binder::Status serviceRet = mCameraService->connectDevice(
+    binder::Status serviceRet = mCameraService->connectDeviceVendor(
             callbacks,
             in_cameraId,
             /* scoreOffset= */ 0,
diff --git a/services/camera/libcameraservice/aidl/AidlUtils.cpp b/services/camera/libcameraservice/aidl/AidlUtils.cpp
index 1ec5072..7f927f1 100644
--- a/services/camera/libcameraservice/aidl/AidlUtils.cpp
+++ b/services/camera/libcameraservice/aidl/AidlUtils.cpp
@@ -24,6 +24,7 @@
 #include <aidlcommonsupport/NativeHandle.h>
 #include <camera/StringUtils.h>
 #include <device3/Camera3StreamInterface.h>
+#include <gui/Flags.h>  // remove with WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
 #include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
 #include <mediautils/AImageReaderUtils.h>
 #include "utils/Utils.h"
@@ -32,6 +33,7 @@
 
 using aimg::AImageReader_getHGBPFromHandle;
 using hardware::graphics::bufferqueue::V1_0::utils::H2BGraphicBufferProducer;
+using CameraMetadataInfo = android::hardware::camera2::CameraMetadataInfo;
 
 // Note: existing data in dst will be gone. Caller still owns the memory of src
 void cloneToAidl(const camera_metadata_t* src, SCameraMetadata* dst) {
@@ -76,20 +78,25 @@
 }
 
 UOutputConfiguration convertFromAidl(const SOutputConfiguration &src) {
-    std::vector<sp<IGraphicBufferProducer>> iGBPs;
+    std::vector<ParcelableSurfaceType> pSurfaces;
     if (!src.surfaces.empty()) {
         auto& surfaces = src.surfaces;
-        iGBPs.reserve(surfaces.size());
+        pSurfaces.reserve(surfaces.size());
 
         for (auto& sSurface : surfaces) {
-            sp<IGraphicBufferProducer> igbp =
-                    Surface::getIGraphicBufferProducer(sSurface.get());
-            if (igbp == nullptr) {
-                ALOGE("%s: ANativeWindow (%p) not backed by a Surface.",
-                      __FUNCTION__, sSurface.get());
+            ParcelableSurfaceType pSurface;
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+            pSurface.graphicBufferProducer = Surface::getIGraphicBufferProducer(sSurface.get());
+            if (pSurface.isEmpty()) {
+#else
+            pSurface = Surface::getIGraphicBufferProducer(sSurface.get());
+            if (pSurface == nullptr) {
+#endif
+                ALOGE("%s: ANativeWindow (%p) not backed by a Surface.", __FUNCTION__,
+                      sSurface.get());
                 continue;
             }
-            iGBPs.push_back(igbp);
+            pSurfaces.push_back(pSurface);
         }
     } else {
 #pragma clang diagnostic push
@@ -100,7 +107,7 @@
         auto &windowHandles = src.windowHandles;
 #pragma clang diagnostic pop
 
-        iGBPs.reserve(windowHandles.size());
+        pSurfaces.reserve(windowHandles.size());
 
         for (auto &handle : windowHandles) {
             native_handle_t* nh = makeFromAidl(handle);
@@ -111,15 +118,20 @@
                 continue;
             }
 
-            iGBPs.push_back(new H2BGraphicBufferProducer(igbp));
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+            view::Surface viewSurface;
+            viewSurface.graphicBufferProducer = new H2BGraphicBufferProducer(igbp);
+            pSurfaces.push_back(viewSurface);
+#else
+            pSurfaces.push_back(new H2BGraphicBufferProducer(igbp));
+#endif
             native_handle_delete(nh);
         }
     }
 
     UOutputConfiguration outputConfiguration(
-        iGBPs, convertFromAidl(src.rotation), src.physicalCameraId,
-        src.windowGroupId, OutputConfiguration::SURFACE_TYPE_UNKNOWN, 0, 0,
-        (iGBPs.size() > 1));
+            pSurfaces, convertFromAidl(src.rotation), src.physicalCameraId, src.windowGroupId,
+            OutputConfiguration::SURFACE_TYPE_UNKNOWN, 0, 0, (pSurfaces.size() > 1));
     return outputConfiguration;
 }
 
@@ -254,7 +266,8 @@
     SPhysicalCaptureResultInfo dst;
     dst.physicalCameraId = src.mPhysicalCameraId;
 
-    const camera_metadata_t *rawMetadata = src.mPhysicalCameraMetadata.getAndLock();
+    const camera_metadata_t *rawMetadata =
+            src.mCameraMetadataInfo.get<CameraMetadataInfo::metadata>().getAndLock();
     // Try using fmq at first.
     size_t metadata_size = get_camera_metadata_size(rawMetadata);
     if ((metadata_size > 0) && (fmq->availableToWrite() > 0)) {
@@ -267,7 +280,7 @@
             dst.physicalCameraMetadata.set<SCaptureMetadataInfo::metadata>(std::move(metadata));
         }
     }
-    src.mPhysicalCameraMetadata.unlock(rawMetadata);
+    src.mCameraMetadataInfo.get<CameraMetadataInfo::metadata>().unlock(rawMetadata);
     return dst;
 }
 
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 8c30d54..5bcb8e8 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -16,17 +16,22 @@
 
 #define LOG_TAG "CameraDeviceClient"
 #define ATRACE_TAG ATRACE_TAG_CAMERA
+#ifdef LOG_NNDEBUG
+#define ALOGVV(...) ALOGV(__VA_ARGS__)
+#else
+#define ALOGVV(...) ((void)0)
+#endif
 //#define LOG_NDEBUG 0
 
+#include <camera/CameraUtils.h>
+#include <camera/StringUtils.h>
+#include <camera/camera2/CaptureRequest.h>
 #include <com_android_internal_camera_flags.h>
 #include <cutils/properties.h>
+#include <gui/Surface.h>
 #include <utils/Log.h>
 #include <utils/SessionConfigurationUtils.h>
 #include <utils/Trace.h>
-#include <gui/Surface.h>
-#include <camera/camera2/CaptureRequest.h>
-#include <camera/CameraUtils.h>
-#include <camera/StringUtils.h>
 
 #include "common/CameraDeviceBase.h"
 #include "device3/Camera3Device.h"
@@ -40,6 +45,7 @@
 #include "JpegRCompositeStream.h"
 
 // Convenience methods for constructing binder::Status objects for error returns
+constexpr int32_t METADATA_QUEUE_SIZE = 1 << 20;
 
 #define STATUS_ERROR(errorCode, errorString) \
     binder::Status::fromServiceSpecificError(errorCode, \
@@ -80,7 +86,7 @@
         const AttributionSourceState& clientAttribution, int callingPid, bool systemNativeClient,
         const std::string& cameraId, int cameraFacing, int sensorOrientation, int servicePid,
         bool overrideForPerfClass, int rotationOverride, const std::string& originalCameraId,
-        bool sharedMode)
+        bool sharedMode, bool isVendorClient)
     : Camera2ClientBase(cameraService, remoteCallback, cameraServiceProxyWrapper,
                         attributionAndPermissionUtils, clientAttribution, callingPid,
                         systemNativeClient, cameraId, /*API1 camera ID*/ -1, cameraFacing,
@@ -90,7 +96,8 @@
       mStreamingRequestId(REQUEST_ID_NONE),
       mRequestIdCounter(0),
       mOverrideForPerfClass(overrideForPerfClass),
-      mOriginalCameraId(originalCameraId) {
+      mOriginalCameraId(originalCameraId),
+      mIsVendorClient(isVendorClient) {
     ATRACE_CALL();
     ALOGI("CameraDeviceClient %s: Opened", cameraId.c_str());
 }
@@ -180,6 +187,14 @@
             mHighResolutionSensors.insert(physicalId);
         }
     }
+    int32_t resultMQSize =
+            property_get_int32("ro.vendor.camera.res.fmq.size", /*default*/METADATA_QUEUE_SIZE);
+    res = CreateMetadataQueue(&mResultMetadataQueue, resultMQSize);
+    if (res != OK) {
+        ALOGE("%s: Creating result metadata queue failed: %s(%d)", __FUNCTION__,
+            strerror(-res), res);
+        return res;
+    }
     return OK;
 }
 
@@ -195,10 +210,44 @@
     return submitRequestList(requestList, streaming, submitInfo);
 }
 
-binder::Status CameraDeviceClient::insertGbpLocked(const sp<IGraphicBufferProducer>& gbp,
+status_t CameraDeviceClient::getSurfaceKey(ParcelableSurfaceType surface, SurfaceKey* out) const {
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+    auto ret = surface.getUniqueId(out);
+    if (ret != OK) {
+        ALOGE("%s: Camera %s: Could not getUniqueId.", __FUNCTION__, mCameraIdStr.c_str());
+        return ret;
+    }
+    return OK;
+#else
+    *out = IInterface::asBinder(surface);
+    return OK;
+#endif
+}
+
+status_t CameraDeviceClient::getSurfaceKey(sp<Surface> surface, SurfaceKey* out) const {
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+    auto ret = surface->getUniqueId(out);
+    if (ret != OK) {
+        ALOGE("%s: Camera %s: Could not getUniqueId.", __FUNCTION__, mCameraIdStr.c_str());
+        return ret;
+    }
+    return OK;
+#else
+    *out = IInterface::asBinder(surface->getIGraphicBufferProducer());
+    return OK;
+#endif
+}
+
+binder::Status CameraDeviceClient::insertSurfaceLocked(const ParcelableSurfaceType& surface,
         SurfaceMap* outSurfaceMap, Vector<int32_t>* outputStreamIds, int32_t *currentStreamId) {
     int compositeIdx;
-    int idx = mStreamMap.indexOfKey(IInterface::asBinder(gbp));
+    SurfaceKey surfaceKey;
+    status_t ret = getSurfaceKey(surface, &surfaceKey);
+    if(ret != OK) {
+        ALOGE("%s: Camera %s: Could not get the SurfaceKey", __FUNCTION__, mCameraIdStr.c_str());
+        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, "Could not get the SurfaceKey");
+    }
+    int idx = mStreamMap.indexOfKey(surfaceKey);
 
     Mutex::Autolock l(mCompositeLock);
     // Trying to submit request with surface that wasn't created
@@ -208,7 +257,7 @@
                 __FUNCTION__, mCameraIdStr.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                 "Request targets Surface that is not part of current capture session");
-    } else if ((compositeIdx = mCompositeStreamMap.indexOfKey(IInterface::asBinder(gbp)))
+    } else if ((compositeIdx = mCompositeStreamMap.indexOfKey(surfaceKey))
             != NAME_NOT_FOUND) {
         mCompositeStreamMap.valueAt(compositeIdx)->insertGbp(outSurfaceMap, outputStreamIds,
                 currentStreamId);
@@ -332,8 +381,12 @@
                 if (surface == 0) continue;
 
                 int32_t streamId;
-                sp<IGraphicBufferProducer> gbp = surface->getIGraphicBufferProducer();
-                res = insertGbpLocked(gbp, &surfaceMap, &outputStreamIds, &streamId);
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+                ParcelableSurfaceType surface_type = view::Surface::fromSurface(surface);
+#else
+                ParcelableSurfaceType surface_type = surface->getIGraphicBufferProducer();
+#endif
+                res = insertSurfaceLocked(surface_type, &surfaceMap, &outputStreamIds, &streamId);
                 if (!res.isOk()) {
                     return res;
                 }
@@ -363,8 +416,8 @@
                             "Request targets Surface that is not part of current capture session");
                 }
 
-                const auto& gbps = mConfiguredOutputs.valueAt(index).getGraphicBufferProducers();
-                if ((size_t)surfaceIdx >= gbps.size()) {
+                const auto& surfaces = mConfiguredOutputs.valueAt(index).getSurfaces();
+                if ((size_t)surfaceIdx >= surfaces.size()) {
                     ALOGE("%s: Camera %s: Tried to submit a request with a surface that"
                             " we have not called createStream on: stream %d, surfaceIdx %d",
                             __FUNCTION__, mCameraIdStr.c_str(), streamId, surfaceIdx);
@@ -372,7 +425,9 @@
                             "Request targets Surface has invalid surface index");
                 }
 
-                res = insertGbpLocked(gbps[surfaceIdx], &surfaceMap, &outputStreamIds, nullptr);
+                res = insertSurfaceLocked(surfaces[surfaceIdx], &surfaceMap, &outputStreamIds,
+                                          nullptr);
+
                 if (!res.isOk()) {
                     return res;
                 }
@@ -787,7 +842,7 @@
     }
 
     bool isInput = false;
-    std::vector<sp<IBinder>> surfaces;
+    std::vector<SurfaceKey> surfaces;
     std::vector<size_t> removedSurfaceIds;
     ssize_t dIndex = NAME_NOT_FOUND;
     ssize_t compositeIndex  = NAME_NOT_FOUND;
@@ -899,13 +954,12 @@
                 "OutputConfiguration isn't valid!");
     }
 
-    const std::vector<sp<IGraphicBufferProducer>>& bufferProducers =
-            outputConfiguration.getGraphicBufferProducers();
-    size_t numBufferProducers = bufferProducers.size();
+    const std::vector<ParcelableSurfaceType>& surfaces = outputConfiguration.getSurfaces();
+    size_t numSurfaces = surfaces.size();
     bool deferredConsumer = outputConfiguration.isDeferred();
     bool isShared = outputConfiguration.isShared();
     const std::string &physicalCameraId = outputConfiguration.getPhysicalCameraId();
-    bool deferredConsumerOnly = deferredConsumer && numBufferProducers == 0;
+    bool deferredConsumerOnly = deferredConsumer && numSurfaces == 0;
     bool isMultiResolution = outputConfiguration.isMultiResolution();
     int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
     int64_t streamUseCase = outputConfiguration.getStreamUseCase();
@@ -913,7 +967,7 @@
     int32_t colorSpace = outputConfiguration.getColorSpace();
     bool useReadoutTimestamp = outputConfiguration.useReadoutTimestamp();
 
-    res = SessionConfigurationUtils::checkSurfaceType(numBufferProducers, deferredConsumer,
+    res = SessionConfigurationUtils::checkSurfaceType(numSurfaces, deferredConsumer,
             outputConfiguration.getSurfaceType(), /*isConfigurationComplete*/true);
     if (!res.isOk()) {
         return res;
@@ -928,8 +982,8 @@
         return res;
     }
 
-    std::vector<SurfaceHolder> surfaces;
-    std::vector<sp<IBinder>> binders;
+    std::vector<SurfaceHolder> surfaceHolders;
+    std::vector<SurfaceKey> surfaceKeys;
     std::vector<OutputStreamInfo> streamInfos;
     status_t err;
 
@@ -942,10 +996,19 @@
     bool isStreamInfoValid = false;
     const std::vector<int32_t> &sensorPixelModesUsed =
             outputConfiguration.getSensorPixelModesUsed();
-    for (auto& bufferProducer : bufferProducers) {
+
+    for (auto& surface : surfaces) {
         // Don't create multiple streams for the same target surface
-        sp<IBinder> binder = IInterface::asBinder(bufferProducer);
-        ssize_t index = mStreamMap.indexOfKey(binder);
+        SurfaceKey surfaceKey;
+        status_t ret = getSurfaceKey(surface, &surfaceKey);
+        if(ret != OK) {
+            ALOGE("%s: Camera %s: Could not get the SurfaceKey", __FUNCTION__,
+                mCameraIdStr.c_str());
+            return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION,
+                "Could not get the SurfaceKey");
+        }
+
+        ssize_t index = mStreamMap.indexOfKey(surfaceKey);
         if (index != NAME_NOT_FOUND) {
             std::string msg = std::string("Camera ") + mCameraIdStr
                     + ": Surface already has a stream created for it (ID "
@@ -954,10 +1017,14 @@
             return STATUS_ERROR(CameraService::ERROR_ALREADY_EXISTS, msg.c_str());
         }
 
-        int mirrorMode = outputConfiguration.getMirrorMode(bufferProducer);
-        sp<Surface> surface;
+        int mirrorMode = outputConfiguration.getMirrorMode(surface);
+        sp<Surface> outSurface;
         res = SessionConfigurationUtils::createSurfaceFromGbp(streamInfo,
-                isStreamInfoValid, surface, bufferProducer, mCameraIdStr,
+                isStreamInfoValid, outSurface, surface
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+                .graphicBufferProducer
+#endif
+                , mCameraIdStr,
                 mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
                 streamUseCase, timestampBase, mirrorMode, colorSpace, /*respectSurfaceSize*/false);
 
@@ -968,8 +1035,8 @@
             isStreamInfoValid = true;
         }
 
-        binders.push_back(IInterface::asBinder(bufferProducer));
-        surfaces.push_back({surface, mirrorMode});
+        surfaceKeys.push_back(surfaceKey);
+        surfaceHolders.push_back({outSurface, mirrorMode});
         if (flags::camera_multi_client() && mSharedMode) {
             streamInfos.push_back(streamInfo);
         }
@@ -980,15 +1047,15 @@
     if (flags::camera_multi_client() && mSharedMode) {
         err = mDevice->getSharedStreamId(outputConfiguration, &streamId);
         if (err == OK) {
-            err = mDevice->addSharedSurfaces(streamId, streamInfos, surfaces, &surfaceIds);
+            err = mDevice->addSharedSurfaces(streamId, streamInfos, surfaceHolders, &surfaceIds);
         }
     } else {
         bool isDepthCompositeStream =
-                camera3::DepthCompositeStream::isDepthCompositeStream(surfaces[0].mSurface);
+                camera3::DepthCompositeStream::isDepthCompositeStream(surfaceHolders[0].mSurface);
         bool isHeicCompositeStream = camera3::HeicCompositeStream::isHeicCompositeStream(
-                surfaces[0].mSurface);
+                surfaceHolders[0].mSurface);
         bool isJpegRCompositeStream =
-            camera3::JpegRCompositeStream::isJpegRCompositeStream(surfaces[0].mSurface) &&
+            camera3::JpegRCompositeStream::isJpegRCompositeStream(surfaceHolders[0].mSurface) &&
             !mDevice->isCompositeJpegRDisabled();
         if (isDepthCompositeStream || isHeicCompositeStream || isJpegRCompositeStream) {
             sp<CompositeStream> compositeStream;
@@ -999,7 +1066,7 @@
             } else {
                 compositeStream = new camera3::JpegRCompositeStream(mDevice, getRemoteCallback());
             }
-            err = compositeStream->createStream(surfaces, deferredConsumer, streamInfo.width,
+            err = compositeStream->createStream(surfaceHolders, deferredConsumer, streamInfo.width,
                 streamInfo.height, streamInfo.format,
                 static_cast<camera_stream_rotation_t>(outputConfiguration.getRotation()),
                 &streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
@@ -1008,12 +1075,18 @@
                 useReadoutTimestamp);
             if (err == OK) {
                 Mutex::Autolock l(mCompositeLock);
-                mCompositeStreamMap.add(
-                        IInterface::asBinder(surfaces[0].mSurface->getIGraphicBufferProducer()),
-                        compositeStream);
+                SurfaceKey surfaceKey;
+                status_t ret = getSurfaceKey(surfaceHolders[0].mSurface, &surfaceKey);
+                if(ret != OK) {
+                    ALOGE("%s: Camera %s: Could not get the SurfaceKey", __FUNCTION__,
+                        mCameraIdStr.c_str());
+                    return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION,
+                        "Could not get the SurfaceKey");
+                }
+                mCompositeStreamMap.add(surfaceKey, compositeStream);
             }
         } else {
-            err = mDevice->createStream(surfaces, deferredConsumer, streamInfo.width,
+            err = mDevice->createStream(surfaceHolders, deferredConsumer, streamInfo.width,
                     streamInfo.height, streamInfo.format, streamInfo.dataSpace,
                     static_cast<camera_stream_rotation_t>(outputConfiguration.getRotation()),
                     &streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
@@ -1030,10 +1103,15 @@
                 static_cast<int>(streamInfo.dataSpace), strerror(-err), err);
     } else {
         int i = 0;
-        for (auto& binder : binders) {
-            ALOGV("%s: mStreamMap add binder %p streamId %d, surfaceId %d",
-                    __FUNCTION__, binder.get(), streamId, i);
-            mStreamMap.add(binder, StreamSurfaceId(streamId, surfaceIds[i]));
+        for (auto& surfaceKey : surfaceKeys) {
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+            ALOGV("%s: mStreamMap add surfaceKey %llu streamId %d, surfaceId %d",
+                    __FUNCTION__, (unsigned long long)surfaceKey, streamId, i);
+#else
+            ALOGV("%s: mStreamMap add surfaceKey %p streamId %d, surfaceId %d",
+                    __FUNCTION__, surfaceKey.get(), streamId, i);
+#endif
+            mStreamMap.add(surfaceKey, StreamSurfaceId(streamId, surfaceIds[i]));
             i++;
         }
 
@@ -1257,40 +1335,52 @@
                 "OutputConfiguration isn't valid!");
     }
 
-    const std::vector<sp<IGraphicBufferProducer> >& bufferProducers =
-            outputConfiguration.getGraphicBufferProducers();
-    const std::string &physicalCameraId = outputConfiguration.getPhysicalCameraId();
+    const std::vector<ParcelableSurfaceType>& surfaces = outputConfiguration.getSurfaces();
+    const std::string& physicalCameraId = outputConfiguration.getPhysicalCameraId();
 
-    auto producerCount = bufferProducers.size();
+    auto producerCount = surfaces.size();
     if (producerCount == 0) {
-        ALOGE("%s: bufferProducers must not be empty", __FUNCTION__);
+        ALOGE("%s: surfaces must not be empty", __FUNCTION__);
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
-                "bufferProducers must not be empty");
+                            "surfaces must not be empty");
     }
 
     // The first output is the one associated with the output configuration.
     // It should always be present, valid and the corresponding stream id should match.
-    sp<IBinder> binder = IInterface::asBinder(bufferProducers[0]);
-    ssize_t index = mStreamMap.indexOfKey(binder);
+    SurfaceKey surfaceKey;
+    status_t ret = getSurfaceKey(surfaces[0], &surfaceKey);
+    if(ret != OK) {
+        ALOGE("%s: Camera %s: Could not get the SurfaceKey", __FUNCTION__, mCameraIdStr.c_str());
+        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, "Could not get the SurfaceKey");
+    }
+    ssize_t index = mStreamMap.indexOfKey(surfaceKey);
     if (index == NAME_NOT_FOUND) {
         ALOGE("%s: Outputconfiguration is invalid", __FUNCTION__);
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                 "OutputConfiguration is invalid");
     }
-    if (mStreamMap.valueFor(binder).streamId() != streamId) {
+    if (mStreamMap.valueFor(surfaceKey).streamId() != streamId) {
         ALOGE("%s: Stream Id: %d provided doesn't match the id: %d in the stream map",
-                __FUNCTION__, streamId, mStreamMap.valueFor(binder).streamId());
+                __FUNCTION__, streamId, mStreamMap.valueFor(surfaceKey).streamId());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                 "Stream id is invalid");
     }
 
     std::vector<size_t> removedSurfaceIds;
-    std::vector<sp<IBinder>> removedOutputs;
+    std::vector<SurfaceKey> removedOutputs;
     std::vector<SurfaceHolder> newOutputs;
     std::vector<OutputStreamInfo> streamInfos;
-    KeyedVector<sp<IBinder>, sp<IGraphicBufferProducer>> newOutputsMap;
-    for (auto &it : bufferProducers) {
-        newOutputsMap.add(IInterface::asBinder(it), it);
+    KeyedVector<SurfaceKey, ParcelableSurfaceType> newOutputsMap;
+    for (auto& surface : surfaces) {
+        SurfaceKey surfaceKey;
+        status_t ret = getSurfaceKey(surface, &surfaceKey);
+        if(ret != OK) {
+            ALOGE("%s: Camera %s: Could not get the SurfaceKey", __FUNCTION__,
+                 mCameraIdStr.c_str());
+            return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION,
+                 "Could not get the SurfaceKey");
+        }
+        newOutputsMap.add(surfaceKey, surface);
     }
 
     for (size_t i = 0; i < mStreamMap.size(); i++) {
@@ -1318,17 +1408,24 @@
 
     for (size_t i = 0; i < newOutputsMap.size(); i++) {
         OutputStreamInfo outInfo;
-        sp<Surface> surface;
+        sp<Surface> outSurface;
         int mirrorMode = outputConfiguration.getMirrorMode(newOutputsMap.valueAt(i));
-        res = SessionConfigurationUtils::createSurfaceFromGbp(outInfo,
-                /*isStreamInfoValid*/ false, surface, newOutputsMap.valueAt(i), mCameraIdStr,
-                mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
-                streamUseCase, timestampBase, mirrorMode, colorSpace, /*respectSurfaceSize*/false);
-        if (!res.isOk())
-            return res;
+        res = SessionConfigurationUtils::createSurfaceFromGbp(
+                outInfo,
+                /*isStreamInfoValid*/ false, outSurface,
+                newOutputsMap
+                        .valueAt(i)
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+                        .graphicBufferProducer
+#endif
+                ,
+                mCameraIdStr, mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed,
+                dynamicRangeProfile, streamUseCase, timestampBase, mirrorMode, colorSpace,
+                /*respectSurfaceSize*/ false);
+        if (!res.isOk()) return res;
 
         streamInfos.push_back(outInfo);
-        newOutputs.push_back({surface, mirrorMode});
+        newOutputs.push_back({outSurface, mirrorMode});
     }
 
     //Trivial case no changes required
@@ -1337,8 +1434,7 @@
     }
 
     KeyedVector<sp<Surface>, size_t> outputMap;
-    auto ret = mDevice->updateStream(streamId, newOutputs, streamInfos, removedSurfaceIds,
-            &outputMap);
+    ret = mDevice->updateStream(streamId, newOutputs, streamInfos, removedSurfaceIds, &outputMap);
     if (ret != OK) {
         switch (ret) {
             case NAME_NOT_FOUND:
@@ -1360,8 +1456,15 @@
         }
 
         for (size_t i = 0; i < outputMap.size(); i++) {
-            mStreamMap.add(IInterface::asBinder(outputMap.keyAt(i)->getIGraphicBufferProducer()),
-                    StreamSurfaceId(streamId, outputMap.valueAt(i)));
+            SurfaceKey surfaceKey;
+            status_t ret = getSurfaceKey(outputMap.keyAt(i), &surfaceKey);
+            if(ret != OK) {
+                ALOGE("%s: Camera %s: Could not get the SurfaceKey", __FUNCTION__,
+                     mCameraIdStr.c_str());
+                return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION,
+                     "Could not get the SurfaceKey");
+            }
+            mStreamMap.add(surfaceKey, StreamSurfaceId(streamId, outputMap.valueAt(i)));
         }
 
         mConfiguredOutputs.replaceValueFor(streamId, outputConfiguration);
@@ -1641,12 +1744,11 @@
                 "OutputConfiguration isn't valid!");
     }
 
-    const std::vector<sp<IGraphicBufferProducer> >& bufferProducers =
-            outputConfiguration.getGraphicBufferProducers();
-    const std::string &physicalId = outputConfiguration.getPhysicalCameraId();
+    const std::vector<ParcelableSurfaceType>& surfaces = outputConfiguration.getSurfaces();
+    const std::string& physicalId = outputConfiguration.getPhysicalCameraId();
 
-    if (bufferProducers.size() == 0) {
-        ALOGE("%s: bufferProducers must not be empty", __FUNCTION__);
+    if (surfaces.size() == 0) {
+        ALOGE("%s: surfaces must not be empty", __FUNCTION__);
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Target Surface is invalid");
     }
 
@@ -1686,34 +1788,47 @@
     }
 
     std::vector<SurfaceHolder> consumerSurfaceHolders;
-    const std::vector<int32_t> &sensorPixelModesUsed =
+    const std::vector<int32_t>& sensorPixelModesUsed =
             outputConfiguration.getSensorPixelModesUsed();
     int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
     int32_t colorSpace = outputConfiguration.getColorSpace();
     int64_t streamUseCase = outputConfiguration.getStreamUseCase();
     int timestampBase = outputConfiguration.getTimestampBase();
-    for (auto& bufferProducer : bufferProducers) {
+
+    for (auto& surface : surfaces) {
         // Don't create multiple streams for the same target surface
-        ssize_t index = mStreamMap.indexOfKey(IInterface::asBinder(bufferProducer));
+        SurfaceKey surfaceKey;
+        status_t ret = getSurfaceKey(surface, &surfaceKey);
+        if(ret != OK) {
+            ALOGE("%s: Camera %s: Could not get the SurfaceKey", __FUNCTION__,
+                 mCameraIdStr.c_str());
+            return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION,
+                 "Could not get the SurfaceKey");
+        }
+        ssize_t index = mStreamMap.indexOfKey(surfaceKey);
         if (index != NAME_NOT_FOUND) {
             ALOGV("Camera %s: Surface already has a stream created "
-                    " for it (ID %zd)", mCameraIdStr.c_str(), index);
+                  " for it (ID %zd)",
+                  mCameraIdStr.c_str(), index);
             continue;
         }
 
-        sp<Surface> surface;
-        int mirrorMode = outputConfiguration.getMirrorMode(bufferProducer);
-        res = SessionConfigurationUtils::createSurfaceFromGbp(mStreamInfoMap[streamId],
-                true /*isStreamInfoValid*/, surface, bufferProducer, mCameraIdStr,
-                mDevice->infoPhysical(physicalId), sensorPixelModesUsed, dynamicRangeProfile,
-                streamUseCase, timestampBase, mirrorMode, colorSpace, /*respectSurfaceSize*/false);
+        sp<Surface> outSurface;
+        int mirrorMode = outputConfiguration.getMirrorMode(surface);
+        res = SessionConfigurationUtils::createSurfaceFromGbp(
+                mStreamInfoMap[streamId], true /*isStreamInfoValid*/, outSurface,
+                surface
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+                .graphicBufferProducer
+#endif
+                , mCameraIdStr, mDevice->infoPhysical(physicalId),
+                sensorPixelModesUsed, dynamicRangeProfile, streamUseCase, timestampBase, mirrorMode,
+                colorSpace, /*respectSurfaceSize*/ false);
 
-        if (!res.isOk())
-            return res;
+        if (!res.isOk()) return res;
 
-        consumerSurfaceHolders.push_back({surface, mirrorMode});
+        consumerSurfaceHolders.push_back({outSurface, mirrorMode});
     }
-
     // Gracefully handle case where finalizeOutputConfigurations is called
     // without any new surface.
     if (consumerSurfaceHolders.size() == 0) {
@@ -1727,11 +1842,22 @@
     err = mDevice->setConsumerSurfaces(streamId, consumerSurfaceHolders, &consumerSurfaceIds);
     if (err == OK) {
         for (size_t i = 0; i < consumerSurfaceHolders.size(); i++) {
-            sp<IBinder> binder = IInterface::asBinder(
-                    consumerSurfaceHolders[i].mSurface->getIGraphicBufferProducer());
-            ALOGV("%s: mStreamMap add binder %p streamId %d, surfaceId %d", __FUNCTION__,
-                    binder.get(), streamId, consumerSurfaceIds[i]);
-            mStreamMap.add(binder, StreamSurfaceId(streamId, consumerSurfaceIds[i]));
+            SurfaceKey surfaceKey;
+            status_t ret = getSurfaceKey(consumerSurfaceHolders[i].mSurface, &surfaceKey);
+            if(ret != OK) {
+                ALOGE("%s: Camera %s: Could not get the SurfaceKey", __FUNCTION__,
+                     mCameraIdStr.c_str());
+                return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION,
+                     "Could not get the SurfaceKey");
+            }
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+            ALOGV("%s: mStreamMap add surface_key %llu streamId %d, surfaceId %d", __FUNCTION__,
+                    (unsigned long long)surfaceKey, streamId, consumerSurfaceIds[i]);
+#else
+            ALOGV("%s: mStreamMap add surface_key %p streamId %d, surfaceId %d", __FUNCTION__,
+                    surfaceKey.get(), streamId, consumerSurfaceIds[i]);
+#endif
+            mStreamMap.add(surfaceKey, StreamSurfaceId(streamId, consumerSurfaceIds[i]));
         }
         if (deferredStreamIndex != NAME_NOT_FOUND) {
             mDeferredStreams.removeItemsAt(deferredStreamIndex);
@@ -1768,6 +1894,34 @@
     return binder::Status::ok();
 }
 
+status_t CameraDeviceClient::CreateMetadataQueue(
+        std::unique_ptr<MetadataQueue>* metadata_queue, uint32_t default_size_bytes) {
+        if (metadata_queue == nullptr) {
+            ALOGE("%s: metadata_queue is nullptr", __FUNCTION__);
+            return BAD_VALUE;
+        }
+
+        int32_t size = default_size_bytes;
+
+        *metadata_queue =
+                std::make_unique<MetadataQueue>(static_cast<size_t>(size),
+                        /*configureEventFlagWord*/ false);
+        if (!(*metadata_queue)->isValid()) {
+            ALOGE("%s: Creating metadata queue (size %d) failed.", __FUNCTION__, size);
+            return NO_INIT;
+        }
+
+        return OK;
+}
+
+binder::Status CameraDeviceClient::getCaptureResultMetadataQueue(
+          android::hardware::common::fmq::MQDescriptor<
+          int8_t, android::hardware::common::fmq::SynchronizedReadWrite>* aidl_return) {
+
+    *aidl_return = mResultMetadataQueue->dupeDesc();
+    return binder::Status::ok();
+}
+
 binder::Status CameraDeviceClient::getGlobalAudioRestriction(/*out*/ int32_t* outMode) {
     ATRACE_CALL();
     binder::Status res;
@@ -1866,7 +2020,7 @@
 
     std::vector<int32_t> offlineStreamIds;
     offlineStreamIds.reserve(offlineOutputIds.size());
-    KeyedVector<sp<IBinder>, sp<CompositeStream>> offlineCompositeStreamMap;
+    KeyedVector<SurfaceKey, sp<CompositeStream>> offlineCompositeStreamMap;
     for (const auto& streamId : offlineOutputIds) {
         ssize_t index = mConfiguredOutputs.indexOfKey(streamId);
         if (index == NAME_NOT_FOUND) {
@@ -1885,24 +2039,37 @@
 
         Mutex::Autolock l(mCompositeLock);
         bool isCompositeStream = false;
-        for (const auto& gbp : mConfiguredOutputs.valueAt(index).getGraphicBufferProducers()) {
-            sp<Surface> s = new Surface(gbp, false /*controlledByApp*/);
+
+        for (const auto& surface : mConfiguredOutputs.valueAt(index).getSurfaces()) {
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+            sp<Surface> s = surface.toSurface();
+#else
+            sp<Surface> s = new Surface(surface, false /*controlledByApp*/);
+#endif
             isCompositeStream = camera3::DepthCompositeStream::isDepthCompositeStream(s) ||
-                camera3::HeicCompositeStream::isHeicCompositeStream(s) ||
-                (camera3::JpegRCompositeStream::isJpegRCompositeStream(s) &&
-                 !mDevice->isCompositeJpegRDisabled());
+                                camera3::HeicCompositeStream::isHeicCompositeStream(s) ||
+                                (camera3::JpegRCompositeStream::isJpegRCompositeStream(s) &&
+                                 !mDevice->isCompositeJpegRDisabled());
             if (isCompositeStream) {
-                auto compositeIdx = mCompositeStreamMap.indexOfKey(IInterface::asBinder(gbp));
+                SurfaceKey surfaceKey;
+                status_t ret = getSurfaceKey(surface, &surfaceKey);
+                if(ret != OK) {
+                    ALOGE("%s: Camera %s: Could not get the SurfaceKey", __FUNCTION__,
+                        mCameraIdStr.c_str());
+                    return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION,
+                        "Could not get the SurfaceKey");
+                }
+                auto compositeIdx = mCompositeStreamMap.indexOfKey(surfaceKey);
                 if (compositeIdx == NAME_NOT_FOUND) {
                     ALOGE("%s: Unknown composite stream", __FUNCTION__);
                     return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
-                            "Unknown composite stream");
+                                        "Unknown composite stream");
                 }
 
-                mCompositeStreamMap.valueAt(compositeIdx)->insertCompositeStreamIds(
-                        &offlineStreamIds);
+                mCompositeStreamMap.valueAt(compositeIdx)
+                        ->insertCompositeStreamIds(&offlineStreamIds);
                 offlineCompositeStreamMap.add(mCompositeStreamMap.keyAt(compositeIdx),
-                        mCompositeStreamMap.valueAt(compositeIdx));
+                                              mCompositeStreamMap.valueAt(compositeIdx));
                 break;
             }
         }
@@ -2190,16 +2357,76 @@
     mCameraServiceProxyWrapper->logClose(mCameraIdStr, closeLatencyMs, hasDeviceError);
 }
 
+size_t CameraDeviceClient::writeResultMetadataIntoResultQueue(
+        const CameraMetadata &resultMetadata) {
+    ATRACE_CALL();
+
+    const camera_metadata_t *resultMetadataP = resultMetadata.getAndLock();
+    size_t resultSize = get_camera_metadata_size(resultMetadataP);
+    if (mResultMetadataQueue != nullptr &&
+        mResultMetadataQueue->write(reinterpret_cast<const int8_t*>(resultMetadataP),
+                resultSize)) {
+        resultMetadata.unlock(resultMetadataP);
+        return resultSize;
+    }
+    resultMetadata.unlock(resultMetadataP);
+    ALOGE(" %s couldn't write metadata into result queue ", __FUNCTION__);
+    return 0;
+}
+
 /** Device-related methods */
+std::vector<PhysicalCaptureResultInfo> CameraDeviceClient::convertToFMQ(
+        const std::vector<PhysicalCaptureResultInfo> &physicalResults) {
+    std::vector<PhysicalCaptureResultInfo> retVal;
+    ALOGVV("%s E", __FUNCTION__);
+    for (const auto &srcPhysicalResult : physicalResults) {
+        size_t fmqSize = 0;
+        if (!mIsVendorClient && flags::fmq_metadata()) {
+            fmqSize = writeResultMetadataIntoResultQueue(
+                    srcPhysicalResult.mCameraMetadataInfo.get<CameraMetadataInfo::metadata>());
+        }
+        ALOGVV("%s physical metadata write size is %d", __FUNCTION__, (int)fmqSize);
+        if (fmqSize != 0) {
+            retVal.emplace_back(srcPhysicalResult.mPhysicalCameraId, fmqSize);
+        } else {
+            // The flag was off, we're serving a VNDK shim call, or the FMQ write failed.
+            retVal.emplace_back(srcPhysicalResult.mPhysicalCameraId,
+                    srcPhysicalResult.mCameraMetadataInfo.get<CameraMetadataInfo::metadata>());
+        }
+    }
+    ALOGVV("%s X", __FUNCTION__);
+    return retVal;
+}
+
 void CameraDeviceClient::onResultAvailable(const CaptureResult& result) {
     ATRACE_CALL();
-    ALOGV("%s", __FUNCTION__);
+    ALOGVV("%s E", __FUNCTION__);
 
     // Thread-safe. No lock necessary.
     sp<hardware::camera2::ICameraDeviceCallbacks> remoteCb = mRemoteCallback;
     if (remoteCb != NULL) {
-        remoteCb->onResultReceived(result.mMetadata, result.mResultExtras,
-                result.mPhysicalMetadatas);
+        // Write the result metadata into the FMQ result metadata queue.
+        size_t fmqMetadataSize = 0;
+        // Vendor clients need to modify the metadata, and this call is an in-process
+        // hop before results reach vendor clients over FMQ. So don't use FMQ here.
+        if (!mIsVendorClient && flags::fmq_metadata()) {
+            fmqMetadataSize = writeResultMetadataIntoResultQueue(result.mMetadata);
+        }
+        hardware::camera2::impl::CameraMetadataNative resultMetadata;
+        CameraMetadataInfo resultInfo;
+        if (fmqMetadataSize == 0) {
+            // The flag was off, we're serving a VNDK shim call, or the FMQ write failed.
+            resultMetadata = result.mMetadata;
+            resultInfo.set<CameraMetadataInfo::metadata>(resultMetadata);
+        } else {
+            resultInfo.set<CameraMetadataInfo::fmqSize>(fmqMetadataSize);
+        }
+
+        std::vector<PhysicalCaptureResultInfo> physicalMetadatas =
+                convertToFMQ(result.mPhysicalMetadatas);
+
+        remoteCb->onResultReceived(resultInfo, result.mResultExtras,
+                physicalMetadatas);
     }
 
     // Access to the composite stream map must be synchronized
@@ -2207,6 +2434,7 @@
     for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
         mCompositeStreamMap.valueAt(i)->onResultAvailable(result);
     }
+    ALOGVV("%s X", __FUNCTION__);
 }
 
 binder::Status CameraDeviceClient::checkPidStatus(const char* checkLocation) {
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index a8cf451..4ad3c49 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -23,6 +23,9 @@
 #include <camera/camera2/SessionConfiguration.h>
 #include <camera/camera2/SubmitInfo.h>
 #include <unordered_map>
+#include <gui/Flags.h>  // remove with WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+
+#include <fmq/AidlMessageQueueCpp.h>
 
 #include "CameraOfflineSessionClient.h"
 #include "CameraService.h"
@@ -37,6 +40,12 @@
 
 namespace android {
 
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+typedef uint64_t SurfaceKey;
+#else
+typedef sp<IBinder> SurfaceKey;
+#endif
+
 struct CameraDeviceClientBase :
          public CameraService::BasicClient,
          public hardware::camera2::BnCameraDeviceUser
@@ -161,6 +170,11 @@
 
     virtual binder::Status setCameraAudioRestriction(int32_t mode) override;
 
+    virtual binder::Status getCaptureResultMetadataQueue(
+          android::hardware::common::fmq::MQDescriptor<
+          int8_t, android::hardware::common::fmq::SynchronizedReadWrite>*
+          aidl_return) override;
+
     virtual binder::Status getGlobalAudioRestriction(/*out*/int32_t* outMode) override;
 
     virtual binder::Status switchToOffline(
@@ -182,7 +196,8 @@
                        const AttributionSourceState& clientAttribution, int callingPid,
                        bool clientPackageOverride, const std::string& cameraId, int cameraFacing,
                        int sensorOrientation, int servicePid, bool overrideForPerfClass,
-                       int rotationOverride, const std::string& originalCameraId, bool sharedMode);
+                       int rotationOverride, const std::string& originalCameraId, bool sharedMode,
+                       bool isVendorClient);
     virtual ~CameraDeviceClient();
 
     virtual status_t      initialize(sp<CameraProviderManager> manager,
@@ -233,6 +248,10 @@
      */
 protected:
     /** FilteredListener implementation **/
+
+    size_t writeResultMetadataIntoResultQueue(const CameraMetadata &result);
+    std::vector<PhysicalCaptureResultInfo> convertToFMQ(
+            const std::vector<PhysicalCaptureResultInfo> &physicalResults);
     virtual void          onResultAvailable(const CaptureResult& result);
     virtual void          detachDevice();
 
@@ -244,6 +263,11 @@
     const CameraMetadata &getStaticInfo(const std::string &cameraId);
 
 private:
+    using MetadataQueue = AidlMessageQueueCpp<
+            int8_t, android::hardware::common::fmq::SynchronizedReadWrite>;
+    using CameraMetadataInfo = android::hardware::camera2::CameraMetadataInfo;
+    status_t CreateMetadataQueue(
+            std::unique_ptr<MetadataQueue>* metadata_queue, uint32_t default_size);
     // StreamSurfaceId encapsulates streamId + surfaceId for a particular surface.
     // streamId specifies the index of the stream the surface belongs to, and the
     // surfaceId specifies the index of the surface within the stream. (one stream
@@ -293,12 +317,20 @@
             int* newStreamId = NULL);
 
     // Utility method to insert the surface into SurfaceMap
-    binder::Status insertGbpLocked(const sp<IGraphicBufferProducer>& gbp,
+    binder::Status insertSurfaceLocked(const ParcelableSurfaceType& surface,
             /*out*/SurfaceMap* surfaceMap, /*out*/Vector<int32_t>* streamIds,
             /*out*/int32_t*  currentStreamId);
 
+    // A ParcelableSurfaceType can be either a view::Surface or IGBP.
+    // We use this type of surface when we need a parcelable data type.
+    // view::Surface has helper functions to make converting between a regular Surface and a
+    // view::Surface easy.
+    status_t getSurfaceKey(ParcelableSurfaceType surface, SurfaceKey* out) const;
+    // Surface only
+    status_t getSurfaceKey(sp<Surface> surface, SurfaceKey* out) const;
+
     // IGraphicsBufferProducer binder -> Stream ID + Surface ID for output streams
-    KeyedVector<sp<IBinder>, StreamSurfaceId> mStreamMap;
+    KeyedVector<SurfaceKey, StreamSurfaceId> mStreamMap;
 
     // Stream ID -> OutputConfiguration. Used for looking up Surface by stream/surface index
     KeyedVector<int32_t, hardware::camera2::params::OutputConfiguration> mConfiguredOutputs;
@@ -322,6 +354,9 @@
 
     int32_t mRequestIdCounter;
 
+    // Metadata queue to write the result metadata to.
+    std::unique_ptr<MetadataQueue> mResultMetadataQueue;
+
     std::vector<std::string> mPhysicalCameraIds;
 
     // The list of output streams whose surfaces are deferred. We have to track them separately
@@ -340,7 +375,7 @@
 
     // Synchronize access to 'mCompositeStreamMap'
     Mutex mCompositeLock;
-    KeyedVector<sp<IBinder>, sp<CompositeStream>> mCompositeStreamMap;
+    KeyedVector<SurfaceKey, sp<CompositeStream>> mCompositeStreamMap;
 
     sp<CameraProviderManager> mProviderManager;
 
@@ -361,6 +396,8 @@
 
     // This only exists in case of camera ID Remapping.
     const std::string mOriginalCameraId;
+
+    bool mIsVendorClient = false;
 };
 
 }; // namespace android
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
index 71fd3ba..1e73d79 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
@@ -299,7 +299,10 @@
     ALOGV("%s", __FUNCTION__);
 
     if (mRemoteCallback.get() != NULL) {
-        mRemoteCallback->onResultReceived(result.mMetadata, result.mResultExtras,
+        using hardware::camera2::CameraMetadataInfo;
+        CameraMetadataInfo resultInfo;
+        resultInfo.set<CameraMetadataInfo::metadata>(result.mMetadata);
+        mRemoteCallback->onResultReceived(resultInfo, result.mResultExtras,
                 result.mPhysicalMetadatas);
     }
 
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
index 78a3055..3799ba3 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
@@ -30,6 +30,12 @@
 using android::hardware::camera2::ICameraDeviceCallbacks;
 using camera3::CompositeStream;
 
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+typedef uint64_t SurfaceKey;
+#else
+typedef sp<IBinder> SurfaceKey;
+#endif
+
 // Client for offline session. Note that offline session client does not affect camera service's
 // client arbitration logic. It is camera HAL's decision to decide whether a normal camera
 // client is conflicting with existing offline client(s).
@@ -45,7 +51,7 @@
 public:
     CameraOfflineSessionClient(
             const sp<CameraService>& cameraService, sp<CameraOfflineSessionBase> session,
-            const KeyedVector<sp<IBinder>, sp<CompositeStream>>& offlineCompositeStreamMap,
+            const KeyedVector<SurfaceKey, sp<CompositeStream>>& offlineCompositeStreamMap,
             const sp<ICameraDeviceCallbacks>& remoteCallback,
             std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
             const AttributionSourceState& clientAttribution, int callingPid,
@@ -135,7 +141,7 @@
     sp<camera2::FrameProcessorBase> mFrameProcessor;
 
     // Offline composite stream map, output surface -> composite stream
-    KeyedVector<sp<IBinder>, sp<CompositeStream>> mCompositeStreamMap;
+    KeyedVector<SurfaceKey, sp<CompositeStream>> mCompositeStreamMap;
 };
 
 } // namespace android
diff --git a/services/camera/libcameraservice/common/FrameProcessorBase.cpp b/services/camera/libcameraservice/common/FrameProcessorBase.cpp
index 2322def..31dcce2 100644
--- a/services/camera/libcameraservice/common/FrameProcessorBase.cpp
+++ b/services/camera/libcameraservice/common/FrameProcessorBase.cpp
@@ -29,6 +29,8 @@
 namespace android {
 namespace camera2 {
 
+using CameraMetadataInfo = android::hardware::camera2::CameraMetadataInfo;
+
 FrameProcessorBase::FrameProcessorBase(wp<FrameProducer> device) :
     Thread(/*canCallJava*/false),
     mDevice(device),
@@ -99,7 +101,7 @@
 
         for (const auto& physicalFrame : mLastPhysicalFrames) {
             lastPhysicalFrames.emplace(physicalFrame.mPhysicalCameraId,
-                    physicalFrame.mPhysicalCameraMetadata);
+                    physicalFrame.mCameraMetadataInfo.get<CameraMetadataInfo::metadata>());
         }
     }
     lastFrame.dump(fd, /*verbosity*/2, /*indentation*/6);
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 9e89a19..c050f1b 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -61,6 +61,7 @@
 #include <com_android_window_flags.h>
 
 #include "CameraService.h"
+#include "FwkOnlyMetadataTags.h"
 #include "aidl/android/hardware/graphics/common/Dataspace.h"
 #include "aidl/AidlUtils.h"
 #include "device3/Camera3Device.h"
@@ -3797,18 +3798,12 @@
 }
 
 status_t Camera3Device::removeFwkOnlyKeys(CameraMetadata *request) {
-    static const std::array<uint32_t, 5> kFwkOnlyKeys = {
-            ANDROID_CONTROL_AF_REGIONS_SET,
-            ANDROID_CONTROL_AE_REGIONS_SET,
-            ANDROID_CONTROL_AWB_REGIONS_SET,
-            ANDROID_SCALER_CROP_REGION_SET,
-            ANDROID_CONTROL_ZOOM_METHOD};
     if (request == nullptr) {
         ALOGE("%s request metadata nullptr", __FUNCTION__);
         return BAD_VALUE;
     }
     status_t res = OK;
-    for (const auto &key : kFwkOnlyKeys) {
+    for (const auto &key : kFwkOnlyMetadataKeys) {
         if (request->exists(key)) {
             res = request->erase(key);
             if (res != OK) {
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index ed11a96..66dcbc3 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -56,6 +56,7 @@
 using namespace android::camera3;
 using namespace android::camera3::SessionConfigurationUtils;
 using namespace android::hardware::camera;
+using CameraMetadataInfo = android::hardware::camera2::CameraMetadataInfo;
 namespace flags = com::android::internal::camera::flags;
 
 namespace android {
@@ -231,11 +232,12 @@
 
     // Update vendor tag id for physical metadata
     for (auto& physicalMetadata : result->mPhysicalMetadatas) {
-        camera_metadata_t *pmeta = const_cast<camera_metadata_t *>(
-                physicalMetadata.mPhysicalCameraMetadata.getAndLock());
+        auto &metadata =
+                physicalMetadata.mCameraMetadataInfo.get<CameraMetadataInfo::metadata>();
+        camera_metadata_t *pmeta = const_cast<camera_metadata_t *>(metadata.getAndLock());
         set_camera_metadata_vendor_id(pmeta, states.vendorTagId);
         correctMeteringRegions(pmeta);
-        physicalMetadata.mPhysicalCameraMetadata.unlock(pmeta);
+        metadata.unlock(pmeta);
     }
 
     // Valid result, insert into queue
@@ -362,7 +364,8 @@
 
     for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
         camera_metadata_entry timestamp =
-                physicalMetadata.mPhysicalCameraMetadata.find(ANDROID_SENSOR_TIMESTAMP);
+                physicalMetadata.mCameraMetadataInfo.get<CameraMetadataInfo::metadata>().
+                        find(ANDROID_SENSOR_TIMESTAMP);
         if (timestamp.count == 0) {
             SET_ERR("No timestamp provided by HAL for physical camera %s frame %d!",
                     physicalMetadata.mPhysicalCameraId.c_str(), frameNumber);
@@ -415,7 +418,8 @@
         return;
     }
     for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
-        res = fixupManualFlashStrengthControlTags(physicalMetadata.mPhysicalCameraMetadata);
+        res = fixupManualFlashStrengthControlTags(physicalMetadata.mCameraMetadataInfo.
+                get<CameraMetadataInfo::metadata>());
         if (res != OK) {
             SET_ERR("Failed to set flash strength level defaults in physical result"
                     " metadata: %s (%d)", strerror(-res), res);
@@ -431,7 +435,8 @@
         return;
     }
     for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
-        res = fixupAutoframingTags(physicalMetadata.mPhysicalCameraMetadata);
+        res = fixupAutoframingTags(physicalMetadata.mCameraMetadataInfo.
+                get<CameraMetadataInfo::metadata>());
         if (res != OK) {
             SET_ERR("Failed to set autoframing defaults in physical result metadata: %s (%d)",
                     strerror(-res), res);
@@ -444,7 +449,7 @@
         auto mapper = states.distortionMappers.find(cameraId);
         if (mapper != states.distortionMappers.end()) {
             res = mapper->second.correctCaptureResult(
-                    &physicalMetadata.mPhysicalCameraMetadata);
+                    &physicalMetadata.mCameraMetadataInfo.get<CameraMetadataInfo::metadata>());
             if (res != OK) {
                 SET_ERR("Unable to correct physical capture result metadata for frame %d: %s (%d)",
                         frameNumber, strerror(-res), res);
@@ -455,7 +460,8 @@
         // Note: Physical camera continues to use SCALER_CROP_REGION to reflect
         // zoom levels.
         res = states.zoomRatioMappers[cameraId].updateCaptureResult(
-                &physicalMetadata.mPhysicalCameraMetadata, /*zoomMethodIsRatio*/false,
+                &physicalMetadata.mCameraMetadataInfo.get<CameraMetadataInfo::metadata>(),
+                /*zoomMethodIsRatio*/false,
                 /*zoomRatioIs1*/false);
         if (res != OK) {
             SET_ERR("Failed to update camera %s's physical zoom ratio metadata for "
@@ -474,7 +480,7 @@
         const std::string &cameraId = physicalMetadata.mPhysicalCameraId;
         res = fixupMonochromeTags(states,
                 states.physicalDeviceInfoMap.at(cameraId),
-                physicalMetadata.mPhysicalCameraMetadata);
+                physicalMetadata.mCameraMetadataInfo.get<CameraMetadataInfo::metadata>());
         if (res != OK) {
             SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
             return;
@@ -484,7 +490,7 @@
     std::unordered_map<std::string, CameraMetadata> monitoredPhysicalMetadata;
     for (auto& m : physicalMetadatas) {
         monitoredPhysicalMetadata.emplace(m.mPhysicalCameraId,
-                CameraMetadata(m.mPhysicalCameraMetadata));
+                CameraMetadata(m.mCameraMetadataInfo.get<CameraMetadataInfo::metadata>()));
     }
     states.tagMonitor.monitorMetadata(TagMonitor::RESULT,
             frameNumber, sensorTimestamp, captureResult.mMetadata,
diff --git a/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.cpp b/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.cpp
index de51ffa..24d9a7e 100644
--- a/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.cpp
+++ b/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.cpp
@@ -167,11 +167,19 @@
 }
 
 binder::Status H2BCameraDeviceCallbacks::onResultReceived(
-    const CameraMetadataNative& result,
+    const CameraMetadataInfo &resultInfo,
     const CaptureResultExtras& resultExtras,
     const ::std::vector<PhysicalCaptureResultInfo>& physicalCaptureResultInfos) {
     // Wrap CameraMetadata, resultExtras and physicalCaptureResultInfos in on
     // sp<RefBase>-able structure and post it.
+    // We modify the metadata here - we want to filter out tags based on the vndk
+    // version, and this communication is an in-process function call.
+    // So we don't use FMQ for the shim layer. FMQ is still used for VNDK IPC.
+    if (resultInfo.getTag() != CameraMetadataInfo::metadata) {
+        ALOGE("Vendor callbacks got metadata in fmq ? ");
+        return binder::Status::ok();
+    }
+    const CameraMetadataNative &result = resultInfo.get<CameraMetadataInfo::metadata>();
     sp<ResultWrapper> resultWrapper = new ResultWrapper(const_cast<CameraMetadataNative &>(result),
                                                         resultExtras, physicalCaptureResultInfos);
     sp<AMessage> msg = new AMessage(kWhatResultReceived, mHandler);
diff --git a/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.h b/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.h
index 98a0dbb..e36c2ea 100644
--- a/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.h
+++ b/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.h
@@ -54,6 +54,7 @@
 using hardware::kSynchronizedReadWrite;
 using hardware::MessageQueue;
 using CaptureResultMetadataQueue = MessageQueue<uint8_t, kSynchronizedReadWrite>;
+using CameraMetadataInfo = android::hardware::camera2::CameraMetadataInfo;
 
 struct H2BCameraDeviceCallbacks :
     public H2BConverter<HCameraDeviceCallback, ICameraDeviceCallbacks, BnCameraDeviceCallbacks> {
@@ -72,7 +73,8 @@
                                             int64_t timestamp) override;
 
     virtual binder::Status onResultReceived(
-        const CameraMetadataNative& result, const CaptureResultExtras& resultExtras,
+        const CameraMetadataInfo &,
+        const CaptureResultExtras& resultExtras,
         const std::vector<PhysicalCaptureResultInfo>& physicalCaptureResultInfos) override;
 
     virtual binder::Status onPrepared(int32_t streamId) override;
diff --git a/services/camera/libcameraservice/hidl/HidlCameraService.cpp b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
index 9d140f2..9e66236 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraService.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
@@ -132,7 +132,7 @@
                     kDefaultDeviceId);
     clientAttribution.packageName = "";
     clientAttribution.attributionTag = std::nullopt;
-    binder::Status serviceRet = mAidlICameraService->connectDevice(
+    binder::Status serviceRet = mAidlICameraService->connectDeviceVendor(
             callbacks, cameraId, 0/*oomScoreOffset*/,
             /*targetSdkVersion*/__ANDROID_API_FUTURE__, ROTATION_OVERRIDE_NONE,
             clientAttribution, /*devicePolicy*/0, /*sharedMode*/false, /*out*/&deviceRemote);
diff --git a/services/camera/libcameraservice/hidl/Utils.cpp b/services/camera/libcameraservice/hidl/Utils.cpp
index d0302d0..786087d 100644
--- a/services/camera/libcameraservice/hidl/Utils.cpp
+++ b/services/camera/libcameraservice/hidl/Utils.cpp
@@ -14,11 +14,12 @@
  * limitations under the License.
  */
 
-#include <hidl/Utils.h>
-#include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
-#include <cutils/native_handle.h>
-#include <mediautils/AImageReaderUtils.h>
 #include <camera/StringUtils.h>
+#include <cutils/native_handle.h>
+#include <gui/Flags.h>  // remove with WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+#include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
+#include <hidl/Utils.h>
+#include <mediautils/AImageReaderUtils.h>
 
 namespace android {
 namespace hardware {
@@ -28,6 +29,7 @@
 
 using hardware::graphics::bufferqueue::V1_0::utils::H2BGraphicBufferProducer;
 using aimg::AImageReader_getHGBPFromHandle;
+using CameraMetadataInfo = android::hardware::camera2::CameraMetadataInfo;
 
 // Note: existing data in dst will be gone. Caller still owns the memory of src
 void convertToHidl(const camera_metadata_t *src, HCameraMetadata* dst) {
@@ -84,9 +86,9 @@
 
 hardware::camera2::params::OutputConfiguration convertFromHidl(
     const HOutputConfiguration &hOutputConfiguration) {
-    std::vector<sp<IGraphicBufferProducer>> iGBPs;
-    auto &windowHandles = hOutputConfiguration.windowHandles;
-    iGBPs.reserve(windowHandles.size());
+    std::vector<ParcelableSurfaceType> surfaces;
+    auto& windowHandles = hOutputConfiguration.windowHandles;
+    surfaces.reserve(windowHandles.size());
     for (auto &handle : windowHandles) {
         auto igbp = AImageReader_getHGBPFromHandle(handle);
         if (igbp == nullptr) {
@@ -94,10 +96,16 @@
                     __FUNCTION__, handle.getNativeHandle());
             continue;
         }
-        iGBPs.push_back(new H2BGraphicBufferProducer(igbp));
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+        view::Surface surface;
+        surface.graphicBufferProducer = new H2BGraphicBufferProducer(igbp);
+        surfaces.push_back(surface);
+#else
+        surfaces.push_back(new H2BGraphicBufferProducer(igbp));
+#endif
     }
     hardware::camera2::params::OutputConfiguration outputConfiguration(
-        iGBPs, convertFromHidl(hOutputConfiguration.rotation),
+        surfaces, convertFromHidl(hOutputConfiguration.rotation),
         hOutputConfiguration.physicalCameraId,
         hOutputConfiguration.windowGroupId, OutputConfiguration::SURFACE_TYPE_UNKNOWN, 0, 0,
         (windowHandles.size() > 1));
@@ -274,7 +282,8 @@
     hPhysicalCaptureResultInfo.physicalCameraId =
         toString8(physicalCaptureResultInfo.mPhysicalCameraId);
     const camera_metadata_t *rawMetadata =
-        physicalCaptureResultInfo.mPhysicalCameraMetadata.getAndLock();
+        physicalCaptureResultInfo.mCameraMetadataInfo.get<CameraMetadataInfo::metadata>().
+                getAndLock();
     // Try using fmq at first.
     size_t metadata_size = get_camera_metadata_size(rawMetadata);
     if ((metadata_size > 0) && (captureResultMetadataQueue->availableToWrite() > 0)) {
@@ -287,7 +296,8 @@
             hPhysicalCaptureResultInfo.physicalCameraMetadata.metadata(std::move(metadata));
         }
     }
-    physicalCaptureResultInfo.mPhysicalCameraMetadata.unlock(rawMetadata);
+    physicalCaptureResultInfo.mCameraMetadataInfo.get<CameraMetadataInfo::metadata>().
+            unlock(rawMetadata);
     return hPhysicalCaptureResultInfo;
 }
 
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
index 6c98837..8c7d39e 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
@@ -54,6 +54,7 @@
 
 using ICameraService::ROTATION_OVERRIDE_NONE;
 using ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT;
+using android::hardware::camera2::CameraMetadataInfo;
 
 const int32_t kPreviewThreshold = 8;
 const int32_t kNumRequestsTested = 8;
@@ -778,7 +779,7 @@
         return binder::Status::ok();
     }
 
-    virtual binder::Status onResultReceived(const CameraMetadata& /*metadata*/,
+    virtual binder::Status onResultReceived(const CameraMetadataInfo& /*metadata*/,
             const CaptureResultExtras& /*resultExtras*/,
             const std::vector<PhysicalCaptureResultInfo>& /*physicalResultInfos*/) {
         return binder::Status::ok();
diff --git a/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
index ff58c4a..2f035e7 100644
--- a/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
@@ -34,6 +34,7 @@
 
 using namespace android;
 using namespace android::hardware::camera;
+using android::hardware::camera2::CameraMetadataInfo;
 
 // Empty service listener.
 class TestCameraServiceListener : public hardware::BnCameraServiceListener {
@@ -107,7 +108,7 @@
         return binder::Status::ok();
     }
 
-    virtual binder::Status onResultReceived(const CameraMetadata& /*metadata*/,
+    virtual binder::Status onResultReceived(const CameraMetadataInfo& /*metadata*/,
             const CaptureResultExtras& /*resultExtras*/,
             const std::vector<PhysicalCaptureResultInfo>& /*physicalResultInfos*/) {
         return binder::Status::ok();
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
index 85bca6f..8f93ee0 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
@@ -18,13 +18,14 @@
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
 
+#include <binder/IServiceManager.h>
+#include <camera/StringUtils.h>
+#include <gui/Flags.h>  // remove with WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
 #include <gui/Surface.h>
 #include <inttypes.h>
+#include <system/window.h>
 #include <utils/Log.h>
 #include <utils/String16.h>
-#include <camera/StringUtils.h>
-#include <binder/IServiceManager.h>
-#include <system/window.h>
 
 #include "aidl/android/hardware/graphics/common/Dataspace.h"
 
@@ -265,16 +266,24 @@
             }
 
             // Check 4K
-            const auto& gbps = config.getGraphicBufferProducers();
+            const std::vector<ParcelableSurfaceType>& surfaces = config.getSurfaces();
             int32_t width = 0, height = 0;
-            if (gbps.size() > 0) {
-                if (gbps[0] == nullptr) {
+            if (surfaces.size() > 0) {
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+                if (surfaces[0].isEmpty()) {
+#else
+                if (surfaces[0] == nullptr) {
+#endif
                     ALOGE("%s: Failed to query size due to abandoned surface.",
                             __FUNCTION__);
                     return CameraFeatureCombinationStats::CAMERA_FEATURE_UNKNOWN;
                 }
 
-                sp<Surface> surface = new Surface(gbps[0], /*useAsync*/false);
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+                sp<Surface> surface = surfaces[0].toSurface();
+#else
+                sp<Surface> surface = new Surface(surfaces[0], /*useAsync*/false);
+#endif
                 ANativeWindow *anw = surface.get();
 
                 width = ANativeWindow_getWidth(anw);
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index 202ab96..fd877ed 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -18,21 +18,23 @@
 
 #include "SessionConfigurationUtils.h"
 #include <android/data_space.h>
+#include <camera/StringUtils.h>
+#include <gui/Flags.h>  // remove with WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+#include <ui/PublicFormat.h>
+#include "../CameraService.h"
 #include "../api2/DepthCompositeStream.h"
 #include "../api2/HeicCompositeStream.h"
+#include "SessionConfigurationUtils.h"
 #include "aidl/android/hardware/graphics/common/Dataspace.h"
 #include "api2/JpegRCompositeStream.h"
 #include "binder/Status.h"
 #include "common/CameraDeviceBase.h"
 #include "common/HalConversionsTemplated.h"
-#include "../CameraService.h"
-#include "device3/aidl/AidlCamera3Device.h"
-#include "device3/hidl/HidlCamera3Device.h"
 #include "device3/Camera3OutputStream.h"
 #include "device3/ZoomRatioMapper.h"
+#include "device3/aidl/AidlCamera3Device.h"
+#include "device3/hidl/HidlCamera3Device.h"
 #include "system/graphics-base-v1.1.h"
-#include <camera/StringUtils.h>
-#include <ui/PublicFormat.h>
 
 using android::camera3::OutputStreamInfo;
 using android::camera3::OutputStreamInfo;
@@ -827,8 +829,7 @@
     }
 
     for (const auto &it : outputConfigs) {
-        const std::vector<sp<IGraphicBufferProducer>>& bufferProducers =
-            it.getGraphicBufferProducers();
+        const std::vector<ParcelableSurfaceType>& surfaces = it.getSurfaces();
         bool deferredConsumer = it.isDeferred();
         bool isConfigurationComplete = it.isComplete();
         const std::string &physicalCameraId = it.getPhysicalCameraId();
@@ -841,12 +842,12 @@
         const CameraMetadata &metadataChosen =
                 physicalCameraId.size() > 0 ? physicalDeviceInfo : deviceInfo;
 
-        size_t numBufferProducers = bufferProducers.size();
+        size_t numSurfaces = surfaces.size();
         bool isStreamInfoValid = false;
         int32_t groupId = it.isMultiResolution() ? it.getSurfaceSetID() : -1;
         OutputStreamInfo streamInfo;
 
-        res = checkSurfaceType(numBufferProducers, deferredConsumer, it.getSurfaceType(),
+        res = checkSurfaceType(numSurfaces, deferredConsumer, it.getSurfaceType(),
                                isConfigurationComplete);
         if (!res.isOk()) {
             return res;
@@ -861,7 +862,7 @@
         int timestampBase = it.getTimestampBase();
         // If the configuration is a deferred consumer, or a not yet completed
         // configuration with no buffer producers attached.
-        if (deferredConsumer || (!isConfigurationComplete && numBufferProducers == 0)) {
+        if (deferredConsumer || (!isConfigurationComplete && numSurfaces == 0)) {
             streamInfo.width = it.getWidth();
             streamInfo.height = it.getHeight();
             auto surfaceType = it.getSurfaceType();
@@ -912,26 +913,31 @@
 
             isStreamInfoValid = true;
 
-            if (numBufferProducers == 0) {
+            if (numSurfaces == 0) {
                 continue;
             }
         }
 
-        for (auto& bufferProducer : bufferProducers) {
-            int mirrorMode = it.getMirrorMode(bufferProducer);
+        for (auto& surface_type : surfaces) {
             sp<Surface> surface;
-            res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
-                    logicalCameraId, metadataChosen, sensorPixelModesUsed, dynamicRangeProfile,
-                    streamUseCase, timestampBase, mirrorMode, colorSpace,
-                    /*respectSurfaceSize*/true);
+            int mirrorMode = it.getMirrorMode(surface_type);
+            res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface,
+                                       surface_type
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+                                       .graphicBufferProducer
+#endif
+                                       , logicalCameraId,
+                                       metadataChosen, sensorPixelModesUsed, dynamicRangeProfile,
+                                       streamUseCase, timestampBase, mirrorMode, colorSpace,
+                                       /*respectSurfaceSize*/ true);
 
-            if (!res.isOk())
-                return res;
+            if (!res.isOk()) return res;
 
             if (!isStreamInfoValid) {
-                auto status  = mapStream(streamInfo, isCompositeJpegRDisabled, deviceInfo,
-                        static_cast<camera_stream_rotation_t> (it.getRotation()), &streamIdx,
-                        physicalCameraId, groupId, logicalCameraId, streamConfiguration, earlyExit);
+                auto status = mapStream(streamInfo, isCompositeJpegRDisabled, deviceInfo,
+                                        static_cast<camera_stream_rotation_t>(it.getRotation()),
+                                        &streamIdx, physicalCameraId, groupId, logicalCameraId,
+                                        streamConfiguration, earlyExit);
                 if (*earlyExit || !status.isOk()) {
                     return status;
                 }
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.cpp b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
index 66918c1..6c31d2c 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.cpp
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
@@ -150,15 +150,9 @@
 
         // Try other formats if the config from APM is the same as our current config.
         // Some HALs may report its format support incorrectly.
-        if (previousConfig.format == config.format) {
-            if (previousConfig.sample_rate == config.sample_rate) {
-                config.format = getNextFormatToTry(config.format);
-            } else if (!com::android::media::aaudio::sample_rate_conversion()) {
-                ALOGI("%s() - AAudio SRC feature not enabled, different rates! %d != %d",
-                      __func__, previousConfig.sample_rate, config.sample_rate);
-                result = AAUDIO_ERROR_INVALID_RATE;
-                break;
-            }
+        if ((previousConfig.format == config.format) &&
+                (previousConfig.sample_rate == config.sample_rate)) {
+            config.format = getNextFormatToTry(config.format);
         }
 
         ALOGD("%s() %#x %d failed, perhaps due to format or sample rate. Try again with %#x %d",