Merge "Add effect proxy shared capability and parameters clamping" into main
diff --git a/apex/Android.bp b/apex/Android.bp
index b0d7c02..356bf03 100644
--- a/apex/Android.bp
+++ b/apex/Android.bp
@@ -173,6 +173,7 @@
         "mediaswcodec",
     ],
     native_shared_libs: [
+        "libapexcodecs",
         "libcodec2_hidl@1.0",
         "libcodec2_hidl@1.1",
         "libcodec2_hidl@1.2",
diff --git a/apex/ld.config.txt b/apex/ld.config.txt
index 4dc5fb1..c24d51f 100644
--- a/apex/ld.config.txt
+++ b/apex/ld.config.txt
@@ -33,7 +33,7 @@
 # TODO: replace the following when apex has a way to auto-generate this list
 # namespace.default.link.platform.shared_libs  = %LLNDK_LIBRARIES%
 # namespace.default.link.platform.shared_libs += %SANITIZER_RUNTIME_LIBRARIES%
-namespace.default.link.platform.shared_libs = libEGL.so:libGLESv1_CM.so:libGLESv2.so:libGLESv3.so:libandroid_net.so:libc.so:libcgrouprc.so:libclang_rt.asan-aarch64-android.so:libclang_rt.asan-arm-android.so:libclang_rt.hwasan-aarch64-android.so:libclang_rt.asan-i686-android.so:libclang_rt.asan-x86_64-android.so:libdl.so:libft2.so:liblog.so:libm.so:libmediandk.so:libnativewindow.so:libneuralnetworks.so:libsync.so:libvndksupport.so:libdl_android.so:libvulkan.so:libbinder_ndk.so
+namespace.default.link.platform.shared_libs = libEGL.so:libGLESv1_CM.so:libGLESv2.so:libGLESv3.so:libandroid_net.so:libc.so:libclang_rt.asan-aarch64-android.so:libclang_rt.asan-arm-android.so:libclang_rt.hwasan-aarch64-android.so:libclang_rt.asan-i686-android.so:libclang_rt.asan-x86_64-android.so:libdl.so:libft2.so:liblog.so:libm.so:libmediandk.so:libnativewindow.so:libneuralnetworks.so:libsync.so:libvndksupport.so:libdl_android.so:libvulkan.so:libbinder_ndk.so
 
 ###############################################################################
 # "platform" namespace
@@ -138,7 +138,7 @@
 # TODO: replace the following when apex has a way to auto-generate this list
 # namespace.sphal.link.platform.shared_libs  = %LLNDK_LIBRARIES%
 # namespace.sphal.link.platform.shared_libs += %SANITIZER_RUNTIME_LIBRARIES%
-namespace.sphal.link.platform.shared_libs = libEGL.so:libGLESv1_CM.so:libGLESv2.so:libGLESv3.so:libandroid_net.so:libc.so:libcgrouprc.so:libclang_rt.asan-aarch64-android.so:libclang_rt.asan-arm-android.so:libclang_rt.hwasan-aarch64-android.so:libclang_rt.asan-i686-android.so:libclang_rt.asan-x86_64-android.so:libdl.so:libft2.so:liblog.so:libm.so:libmediandk.so:libnativewindow.so:libneuralnetworks.so:libsync.so:libvndksupport.so:libvulkan.so:libbinder_ndk.so
+namespace.sphal.link.platform.shared_libs = libEGL.so:libGLESv1_CM.so:libGLESv2.so:libGLESv3.so:libandroid_net.so:libc.so:libclang_rt.asan-aarch64-android.so:libclang_rt.asan-arm-android.so:libclang_rt.hwasan-aarch64-android.so:libclang_rt.asan-i686-android.so:libclang_rt.asan-x86_64-android.so:libdl.so:libft2.so:liblog.so:libm.so:libmediandk.so:libnativewindow.so:libneuralnetworks.so:libsync.so:libvndksupport.so:libvulkan.so:libbinder_ndk.so
 
 # Add a link for libz.so which is llndk on devices where VNDK is not enforced.
 namespace.sphal.link.platform.shared_libs += libz.so
diff --git a/camera/CameraUtils.cpp b/camera/CameraUtils.cpp
index 3473780..ebb3305 100644
--- a/camera/CameraUtils.cpp
+++ b/camera/CameraUtils.cpp
@@ -32,7 +32,7 @@
 const char *kCameraServiceDisabledProperty = "config.disable_cameraservice";
 
 status_t CameraUtils::getRotationTransform(const CameraMetadata& staticInfo,
-        int mirrorMode, /*out*/int32_t* transform) {
+        int mirrorMode, bool enableTransformInverseDisplay, /*out*/int32_t* transform) {
     ALOGV("%s", __FUNCTION__);
 
     if (transform == NULL) {
@@ -128,7 +128,9 @@
      * aspect ratio, or the preview will end up looking non-uniformly
      * stretched.
      */
-    flags |= NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY;
+    if (enableTransformInverseDisplay) {
+        flags |= NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY;
+    }
 
     ALOGV("%s: final transform = 0x%x", __FUNCTION__, flags);
 
diff --git a/camera/aidl/android/hardware/ICameraService.aidl b/camera/aidl/android/hardware/ICameraService.aidl
index ce6c2d3..6431737 100644
--- a/camera/aidl/android/hardware/ICameraService.aidl
+++ b/camera/aidl/android/hardware/ICameraService.aidl
@@ -157,6 +157,7 @@
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
      *                     policy.
+     * @param sharedMode Specifies whether the camera should be opened in shared mode.
      */
     ICameraDeviceUser connectDevice(ICameraDeviceCallbacks callbacks,
             @utf8InCpp String cameraId,
@@ -164,7 +165,8 @@
             int targetSdkVersion,
             int rotationOverride,
             in AttributionSourceState clientAttribution,
-            int devicePolicy);
+            int devicePolicy,
+            boolean sharedMode);
 
     /**
      * Add listener for changes to camera device and flashlight state.
diff --git a/camera/aidl/android/hardware/ICameraServiceListener.aidl b/camera/aidl/android/hardware/ICameraServiceListener.aidl
index 9c8c88a..c0fd50e 100644
--- a/camera/aidl/android/hardware/ICameraServiceListener.aidl
+++ b/camera/aidl/android/hardware/ICameraServiceListener.aidl
@@ -105,5 +105,6 @@
      * will receive such callbacks.
      */
     oneway void onCameraOpened(@utf8InCpp String cameraId, @utf8InCpp String clientPackageId, int deviceId);
+    oneway void onCameraOpenedInSharedMode(@utf8InCpp String cameraId, @utf8InCpp String clientPackageId, int deviceId, boolean primaryClient);
     oneway void onCameraClosed(@utf8InCpp String cameraId, int deviceId);
 }
diff --git a/camera/aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl b/camera/aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl
index 58b19a3..49e9920 100644
--- a/camera/aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl
+++ b/camera/aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl
@@ -50,4 +50,15 @@
     oneway void onRepeatingRequestError(in long lastFrameNumber,
                                         in int repeatingRequestId);
     oneway void onRequestQueueEmpty();
+
+    /**
+     * Notify registered clients about client shared access priority changes when the camera device
+     * has been opened in shared mode.
+     *
+     * If the client priority changes from secondary to primary, then it can now
+     * create capture requests and change the capture request parameters. If the client priority
+     * changes from primary to secondary, that implies that another higher-priority client is also
+     * accessing the camera in shared mode and is now the primary client.
+     */
+    oneway void onClientSharedAccessPriorityChanged(boolean primaryClient);
 }
diff --git a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
index 8e1fcc0..c1da126 100644
--- a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
+++ b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
@@ -68,6 +68,17 @@
     const int CONSTRAINED_HIGH_SPEED_MODE = 1;
 
     /**
+     * The shared operating mode for a camera device.
+     *
+     * <p>
+     * When in shared mode, the camera device can be opened and accessed by multiple applications
+     * simultaneously.
+     * </p>
+     *
+     */
+    const int SHARED_MODE = 2;
+
+    /**
      * Start of custom vendor modes
      */
     const int VENDOR_MODE_START = 0x8000;
@@ -194,4 +205,12 @@
      */
     ICameraOfflineSession switchToOffline(in ICameraDeviceCallbacks callbacks,
             in int[] offlineOutputIds);
+
+    /**
+     * Get the client status as primary or secondary when the camera is opened in shared mode.
+     *
+     * @return true if this client is the primary client when the camera is opened in shared mode.
+     *         false if another higher-priority client with primary access is also using the camera.
+     */
+    boolean isPrimaryClient();
 }
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
index daa2656..a89d7ca 100644
--- a/camera/camera2/OutputConfiguration.cpp
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -34,6 +34,7 @@
 namespace android {
 
 const int OutputConfiguration::INVALID_ROTATION = -1;
+const int OutputConfiguration::ROTATION_0 = 0;
 const int OutputConfiguration::INVALID_SET_ID = -1;
 
 const std::vector<sp<IGraphicBufferProducer>>&
@@ -97,6 +98,10 @@
     return mTimestampBase;
 }
 
+int OutputConfiguration::getMirrorMode() const {
+    return mMirrorMode;
+}
+
 int OutputConfiguration::getMirrorMode(sp<IGraphicBufferProducer> surface) const {
     if (!flags::mirror_mode_shared_surfaces()) {
         return mMirrorMode;
@@ -164,6 +169,29 @@
         mUsage(0) {
 }
 
+OutputConfiguration::OutputConfiguration(int surfaceType, int width, int height, int format,
+        int32_t colorSpace, int mirrorMode, bool useReadoutTimestamp, int timestampBase,
+        int dataspace, int64_t usage, int64_t streamusecase, std::string physicalCamId):
+        mRotation(ROTATION_0),
+        mSurfaceSetID(INVALID_SET_ID),
+        mSurfaceType(surfaceType),
+        mWidth(width),
+        mHeight(height),
+        mIsDeferred(false),
+        mIsShared(false),
+        mPhysicalCameraId(physicalCamId),
+        mIsMultiResolution(false),
+        mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+        mColorSpace(colorSpace),
+        mStreamUseCase(streamusecase),
+        mTimestampBase(timestampBase),
+        mMirrorMode(mirrorMode),
+        mUseReadoutTimestamp(useReadoutTimestamp),
+        mFormat(format),
+        mDataspace(dataspace),
+        mUsage(usage) {
+}
+
 OutputConfiguration::OutputConfiguration(const android::Parcel& parcel) :
         mRotation(INVALID_ROTATION),
         mSurfaceSetID(INVALID_SET_ID) {
diff --git a/camera/camera_platform.aconfig b/camera/camera_platform.aconfig
index a03ccf5..2feebb4 100644
--- a/camera/camera_platform.aconfig
+++ b/camera/camera_platform.aconfig
@@ -175,16 +175,10 @@
 
 flag {
     namespace: "camera_platform"
-    name: "use_context_attribution_source"
-    description: "Use the context-provided AttributionSource when checking for client permissions"
+    name: "data_delivery_permission_checks"
+    description: "Pass the full AttributionSource chain to PermissionChecker for data delivery"
     bug: "190657833"
-}
-
-flag {
-    namespace: "camera_platform"
-    name: "check_full_attribution_source_chain"
-    description: "Pass the full AttributionSource chain to PermissionChecker"
-    bug: "190657833"
+    is_fixed_read_only: true
 }
 
 flag {
@@ -218,3 +212,47 @@
     description: "Indicates when to activate Night Mode Camera Extension"
     bug: "335902696"
 }
+
+flag {
+    namespace: "camera_platform"
+    name: "zoom_method"
+    is_exported: true
+    description: "Gives apps explicit control on reflects zoom via ZOOM_RATIO capture result"
+    bug: "298899993"
+}
+
+flag {
+    namespace: "camera_platform"
+    is_exported: true
+    name: "ae_priority"
+    description: "Add AE priority modes"
+    bug: "359944765"
+}
+
+flag {
+    namespace: "camera_platform"
+    name: "feature_combination_baklava"
+    description: "Add new feature combination query version for Baklava"
+    bug: "370778206"
+}
+
+flag {
+    namespace: "camera_platform"
+    name: "camera_multi_client"
+    is_exported: true
+    description: "add new feature to allow multiple clients to access the camera in shared mode"
+    bug: "265196098"
+    metadata {
+        purpose: PURPOSE_FEATURE
+    }
+}
+
+flag {
+    namespace: "camera_platform"
+    name: "query_process_state"
+    description: "In opChanged, query the process state from AM instead of relying on mUidPolicy"
+    bug: "378016494"
+    metadata {
+        purpose: PURPOSE_BUGFIX
+    }
+}
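For context, these aconfig flags are consumed from native code through generated accessors; a minimal C++ sketch of the pattern follows (the include and namespace alias mirror what the NDK changes below use; the helper function name is illustrative, not part of this change).

    #include <com_android_internal_camera_flags.h>

    namespace flags = com::android::internal::camera::flags;

    // Illustrative helper: gate shared-mode code paths on the runtime value of
    // the camera_multi_client aconfig flag.
    bool isSharedModeFeatureEnabled() {
        return flags::camera_multi_client();
    }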
diff --git a/camera/cameraserver/manifest_android.frameworks.cameraservice.service.xml b/camera/cameraserver/manifest_android.frameworks.cameraservice.service.xml
index 5d85909..d8264df 100644
--- a/camera/cameraserver/manifest_android.frameworks.cameraservice.service.xml
+++ b/camera/cameraserver/manifest_android.frameworks.cameraservice.service.xml
@@ -11,7 +11,7 @@
 
     <hal format="aidl">
         <name>android.frameworks.cameraservice.service</name>
-        <version>2</version>
+        <version>3</version>
         <interface>
             <name>ICameraService</name>
             <instance>default</instance>
diff --git a/camera/include/camera/CameraUtils.h b/camera/include/camera/CameraUtils.h
index d358407..766cac1 100644
--- a/camera/include/camera/CameraUtils.h
+++ b/camera/include/camera/CameraUtils.h
@@ -46,7 +46,7 @@
          * Returns OK on success, or a negative error code.
          */
         static status_t getRotationTransform(const CameraMetadata& staticInfo,
-                int mirrorMode, /*out*/int32_t* transform);
+                int mirrorMode, bool enableTransformInverseDisplay, /*out*/int32_t* transform);
 
         /**
          * Check if the image data is VideoNativeHandleMetadata, that contains a native handle.
diff --git a/camera/include/camera/camera2/OutputConfiguration.h b/camera/include/camera/camera2/OutputConfiguration.h
index 2049a31..671d065 100644
--- a/camera/include/camera/camera2/OutputConfiguration.h
+++ b/camera/include/camera/camera2/OutputConfiguration.h
@@ -33,6 +33,7 @@
 class OutputConfiguration : public android::Parcelable {
 public:
 
+    static const int ROTATION_0;
     static const int INVALID_ROTATION;
     static const int INVALID_SET_ID;
     enum SurfaceType {
@@ -73,6 +74,7 @@
     int64_t                    getStreamUseCase() const;
     int                        getTimestampBase() const;
     int                        getMirrorMode(sp<IGraphicBufferProducer> surface) const;
+    int                        getMirrorMode() const;
     bool                       useReadoutTimestamp() const;
     int                        getFormat() const;
     int                        getDataspace() const;
@@ -107,6 +109,9 @@
                         int surfaceSetID = INVALID_SET_ID,
                         int surfaceType = SURFACE_TYPE_UNKNOWN, int width = 0,
                         int height = 0, bool isShared = false);
+    OutputConfiguration(int surfaceType, int width, int height, int format, int32_t colorSpace,
+            int mirrorMode, bool useReadoutTimestamp, int timestampBase, int dataspace,
+            int64_t usage, int64_t streamusecase, std::string physicalCamId);
 
     bool operator == (const OutputConfiguration& other) const {
         return ( mRotation == other.mRotation &&
@@ -211,6 +216,28 @@
     bool mirrorModesEqual(const OutputConfiguration& other) const;
     bool mirrorModesLessThan(const OutputConfiguration& other) const;
     const std::vector<int32_t>& getMirrorModes() const {return mMirrorModeForProducers;}
+    bool sharedConfigEqual(const OutputConfiguration& other) const {
+        return (mRotation == other.mRotation &&
+                mSurfaceSetID == other.mSurfaceSetID &&
+                mSurfaceType == other.mSurfaceType &&
+                mWidth == other.mWidth &&
+                mHeight == other.mHeight &&
+                mIsDeferred == other.mIsDeferred &&
+                mIsShared == other.mIsShared &&
+                mPhysicalCameraId == other.mPhysicalCameraId &&
+                mIsMultiResolution == other.mIsMultiResolution &&
+                sensorPixelModesUsedEqual(other) &&
+                mDynamicRangeProfile == other.mDynamicRangeProfile &&
+                mColorSpace == other.mColorSpace &&
+                mStreamUseCase == other.mStreamUseCase &&
+                mTimestampBase == other.mTimestampBase &&
+                mMirrorMode == other.mMirrorMode &&
+                mUseReadoutTimestamp == other.mUseReadoutTimestamp &&
+                mFormat == other.mFormat &&
+                mDataspace == other.mDataspace &&
+                mUsage == other.mUsage);
+    }
+
 private:
     std::vector<sp<IGraphicBufferProducer>> mGbps;
     int                        mRotation;
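For illustration, a hedged C++ sketch of how the new surface-less OutputConfiguration constructor and sharedConfigEqual() might be combined to check a candidate stream against a predefined shared-session configuration; the surface type, color space, mirror mode, timestamp base, dataspace, usage, and stream use case values below are placeholders, not values taken from this change.

    #include <camera/camera2/OutputConfiguration.h>

    using android::OutputConfiguration;

    // Build a candidate configuration from bare stream parameters and compare it
    // against a predefined shared-session configuration. sharedConfigEqual()
    // compares every field except the buffer-producer list.
    static bool matchesSharedConfig(const OutputConfiguration& predefined,
                                    int width, int height, int format) {
        OutputConfiguration candidate(
                OutputConfiguration::SURFACE_TYPE_UNKNOWN,  // placeholder surface type
                width, height, format,
                /*colorSpace*/ 0,              // placeholder
                /*mirrorMode*/ 0,              // placeholder
                /*useReadoutTimestamp*/ false,
                /*timestampBase*/ 0,           // placeholder
                /*dataspace*/ 0,               // placeholder
                /*usage*/ 0,
                /*streamusecase*/ 0,
                /*physicalCamId*/ "");
        return predefined.sharedConfigEqual(candidate);
    }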
diff --git a/camera/ndk/Android.bp b/camera/ndk/Android.bp
index 379c0b5..508808f 100644
--- a/camera/ndk/Android.bp
+++ b/camera/ndk/Android.bp
@@ -79,6 +79,7 @@
     shared_libs: [
         "android.companion.virtual.virtualdevice_aidl-cpp",
         "android.companion.virtualdevice.flags-aconfig-cc",
+        "camera_platform_flags_c_lib",
         "framework-permission-aidl-cpp",
         "libandroid_runtime",
         "libbinder",
@@ -147,8 +148,8 @@
 
     shared_libs: [
         "android.frameworks.cameraservice.common-V1-ndk",
-        "android.frameworks.cameraservice.device-V2-ndk",
-        "android.frameworks.cameraservice.service-V2-ndk",
+        "android.frameworks.cameraservice.device-V3-ndk",
+        "android.frameworks.cameraservice.service-V3-ndk",
         "libbinder_ndk",
         "libcamera_metadata",
         "libcutils",
diff --git a/camera/ndk/NdkCameraCaptureSession.cpp b/camera/ndk/NdkCameraCaptureSession.cpp
index 92de1e4..58370e5 100644
--- a/camera/ndk/NdkCameraCaptureSession.cpp
+++ b/camera/ndk/NdkCameraCaptureSession.cpp
@@ -177,6 +177,34 @@
 }
 
 EXPORT
+camera_status_t ACameraCaptureSessionShared_startStreaming(
+    ACameraCaptureSession* /*session*/, ACameraCaptureSession_captureCallbacksV2* /*callbacks*/,
+    int /*numOutputWindows*/, ANativeWindow** /*window*/,
+    int* /*captureSequenceId*/) {
+    ATRACE_CALL();
+    // TODO: add implementation
+    return ACAMERA_OK;
+}
+
+EXPORT
+camera_status_t ACameraCaptureSessionShared_logicalCamera_startStreaming(
+    ACameraCaptureSession* /*session*/,
+    ACameraCaptureSession_logicalCamera_captureCallbacksV2* /*callbacks*/,
+    int /*numOutputWindows*/, ANativeWindow** /*windows*/,
+    int* /*captureSequenceId*/) {
+    ATRACE_CALL();
+    // TODO: add implementation
+    return ACAMERA_OK;
+}
+
+EXPORT
+camera_status_t ACameraCaptureSessionShared_stopStreaming(ACameraCaptureSession* /*session*/) {
+    ATRACE_CALL();
+    // TODO: add implementation
+    return ACAMERA_OK;
+}
+
+EXPORT
 camera_status_t ACameraCaptureSession_updateSharedOutput(ACameraCaptureSession* session,
         ACaptureSessionOutput* output) {
     ATRACE_CALL();
diff --git a/camera/ndk/NdkCameraManager.cpp b/camera/ndk/NdkCameraManager.cpp
index 1b3343e..28cc9af 100644
--- a/camera/ndk/NdkCameraManager.cpp
+++ b/camera/ndk/NdkCameraManager.cpp
@@ -27,6 +27,8 @@
 #include "ndk_vendor/impl/ACameraManager.h"
 #else
 #include "impl/ACameraManager.h"
+#include <com_android_internal_camera_flags.h>
+namespace flags = com::android::internal::camera::flags;
 #endif
 #include "impl/ACameraMetadata.h"
 
@@ -159,6 +161,23 @@
 }
 
 EXPORT
+camera_status_t ACameraManager_isCameraDeviceSharingSupported(ACameraManager *mgr,
+        const char *cameraId, bool *isSharingSupported) {
+    ATRACE_CALL();
+    #ifndef __ANDROID_VNDK__
+    if (!flags::camera_multi_client()) {
+        return ACAMERA_ERROR_UNSUPPORTED_OPERATION;
+    }
+    #endif
+    if (mgr == nullptr || cameraId == nullptr || isSharingSupported == nullptr) {
+        ALOGE("%s: invalid argument! mgr %p cameraId %p isSharingSupported %p",
+                __FUNCTION__, mgr, cameraId, isSharingSupported);
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+    return mgr->isCameraDeviceSharingSupported(cameraId, isSharingSupported);
+}
+
+EXPORT
 camera_status_t ACameraManager_getCameraCharacteristics(
         ACameraManager* mgr, const char* cameraId, ACameraMetadata** chars){
     ATRACE_CALL();
@@ -188,7 +207,27 @@
                 __FUNCTION__, mgr, cameraId, callback, device);
         return ACAMERA_ERROR_INVALID_PARAMETER;
     }
-    return mgr->openCamera(cameraId, callback, device);
+    bool primaryClient;
+    return mgr->openCamera(cameraId, /*sharedMode*/false, callback, device, &primaryClient);
+}
+
+EXPORT
+camera_status_t ACameraManager_openSharedCamera(
+        ACameraManager* mgr, const char* cameraId, ACameraDevice_StateCallbacks* callback,
+        /*out*/ACameraDevice** device, /*out*/bool* primaryClient) {
+    ATRACE_CALL();
+    #ifndef __ANDROID_VNDK__
+    if (!flags::camera_multi_client()) {
+        return ACAMERA_ERROR_UNSUPPORTED_OPERATION;
+    }
+    #endif
+    if (mgr == nullptr || cameraId == nullptr || callback == nullptr || device == nullptr ||
+            primaryClient == nullptr) {
+        ALOGE("%s: invalid argument! mgr %p cameraId %p callback %p device %p primary %p",
+                __FUNCTION__, mgr, cameraId, callback, device, primaryClient);
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+    return mgr->openCamera(cameraId, /*sharedMode*/true, callback, device, primaryClient);
 }
 
 #ifdef __ANDROID_VNDK__
diff --git a/camera/ndk/impl/ACameraDevice.cpp b/camera/ndk/impl/ACameraDevice.cpp
index 1fa71f4..aed740f 100644
--- a/camera/ndk/impl/ACameraDevice.cpp
+++ b/camera/ndk/impl/ACameraDevice.cpp
@@ -26,6 +26,9 @@
 #include "ACameraMetadata.h"
 #include "ACaptureRequest.h"
 #include "ACameraCaptureSession.h"
+#include <com_android_internal_camera_flags.h>
+
+namespace flags = com::android::internal::camera::flags;
 
 ACameraDevice::~ACameraDevice() {
     mDevice->stopLooperAndDisconnect();
@@ -57,12 +60,13 @@
         const char* id,
         ACameraDevice_StateCallbacks* cb,
         sp<ACameraMetadata> chars,
-        ACameraDevice* wrapper) :
+        ACameraDevice* wrapper, bool sharedMode) :
         mCameraId(id),
         mAppCallbacks(*cb),
         mChars(chars),
         mServiceCallback(new ServiceCallback(this)),
         mWrapper(wrapper),
+        mSharedMode(sharedMode),
         mInError(false),
         mError(ACAMERA_OK),
         mIdle(true),
@@ -970,6 +974,7 @@
         case kWhatCaptureSeqAbort:
         case kWhatCaptureBufferLost:
         case kWhatPreparedCb:
+        case kWhatClientSharedAccessPriorityChanged:
             ALOGV("%s: Received msg %d", __FUNCTION__, msg->what());
             break;
         case kWhatCleanUpSessions:
@@ -1007,6 +1012,32 @@
             (*onDisconnected)(context, dev);
             break;
         }
+
+        case kWhatClientSharedAccessPriorityChanged:
+        {
+            if (!flags::camera_multi_client()) {
+                break;
+            }
+            ACameraDevice* dev;
+            found = msg->findPointer(kDeviceKey, (void**) &dev);
+            if (!found || dev == nullptr) {
+                ALOGE("%s: Cannot find device pointer!", __FUNCTION__);
+                return;
+            }
+            ACameraDevice_ClientSharedAccessPriorityChangedCallback
+                    onClientSharedAccessPriorityChanged;
+            found = msg->findPointer(kCallbackFpKey, (void**) &onClientSharedAccessPriorityChanged);
+            if (!found) {
+                ALOGE("%s: Cannot find onClientSharedAccessPriorityChanged!", __FUNCTION__);
+                return;
+            }
+            if (onClientSharedAccessPriorityChanged == nullptr) {
+                return;
+            }
+            (*onClientSharedAccessPriorityChanged)(context, dev, dev->isPrimaryClient());
+            break;
+        }
+
         case kWhatOnError:
         {
             ACameraDevice* dev;
@@ -1624,6 +1655,31 @@
 }
 
 binder::Status
+CameraDevice::ServiceCallback::onClientSharedAccessPriorityChanged(bool primaryClient) {
+    ALOGV("onClientSharedAccessPriorityChanged received. primaryClient = %d", primaryClient);
+    binder::Status ret = binder::Status::ok();
+    if (!flags::camera_multi_client()) {
+        return ret;
+    }
+    sp<CameraDevice> dev = mDevice.promote();
+    if (dev == nullptr) {
+        return ret; // device has been closed
+    }
+    Mutex::Autolock _l(dev->mDeviceLock);
+    if (dev->isClosed() || dev->mRemote == nullptr) {
+        return ret;
+    }
+    dev->setPrimaryClient(primaryClient);
+    sp<AMessage> msg = new AMessage(kWhatClientSharedAccessPriorityChanged, dev->mHandler);
+    msg->setPointer(kContextKey, dev->mAppCallbacks.context);
+    msg->setPointer(kDeviceKey, (void*) dev->getWrapper());
+    msg->setPointer(kCallbackFpKey, (void*) dev->mAppCallbacks.onClientSharedAccessPriorityChanged);
+    msg->post();
+
+    return binder::Status::ok();
+}
+
+binder::Status
 CameraDevice::ServiceCallback::onDeviceIdle() {
     ALOGV("Camera is now idle");
     binder::Status ret = binder::Status::ok();
diff --git a/camera/ndk/impl/ACameraDevice.h b/camera/ndk/impl/ACameraDevice.h
index 2b9f327..d3aed4b 100644
--- a/camera/ndk/impl/ACameraDevice.h
+++ b/camera/ndk/impl/ACameraDevice.h
@@ -63,7 +63,7 @@
   public:
     CameraDevice(const char* id, ACameraDevice_StateCallbacks* cb,
                   sp<ACameraMetadata> chars,
-                  ACameraDevice* wrapper);
+                  ACameraDevice* wrapper, bool sharedMode);
     ~CameraDevice();
 
     inline const char* getId() const { return mCameraId.c_str(); }
@@ -98,6 +98,7 @@
         binder::Status onRequestQueueEmpty() override;
         binder::Status onRepeatingRequestError(int64_t lastFrameNumber,
                 int32_t stoppedSequenceId) override;
+        binder::Status onClientSharedAccessPriorityChanged(bool isPrimaryClient) override;
       private:
         const wp<CameraDevice> mDevice;
     };
@@ -113,6 +114,9 @@
     // Stop the looper thread and unregister the handler
     void stopLooperAndDisconnect();
 
+    void setPrimaryClient(bool isPrimary) { mIsPrimaryClient = isPrimary; }
+    bool isPrimaryClient() { return mIsPrimaryClient; }
+
   private:
     friend ACameraCaptureSession;
     camera_status_t checkCameraClosedOrErrorLocked() const;
@@ -186,6 +190,8 @@
     const sp<ACameraMetadata> mChars;                 // Camera characteristics
     const sp<ServiceCallback> mServiceCallback;
     ACameraDevice* mWrapper;
+    bool mSharedMode;
+    bool mIsPrimaryClient;
 
     // stream id -> pair of (ANW* from application, OutputConfiguration used for camera service)
     std::map<int, std::pair<ANativeWindow*, OutputConfiguration>> mConfiguredOutputs;
@@ -227,7 +233,8 @@
         kWhatCaptureBufferLost, // onCaptureBufferLost
         kWhatPreparedCb, // onWindowPrepared
         // Internal cleanup
-        kWhatCleanUpSessions   // Cleanup cached sp<ACameraCaptureSession>
+        kWhatCleanUpSessions,   // Cleanup cached sp<ACameraCaptureSession>
+        kWhatClientSharedAccessPriorityChanged
     };
     static const char* kContextKey;
     static const char* kDeviceKey;
@@ -403,8 +410,8 @@
  */
 struct ACameraDevice {
     ACameraDevice(const char* id, ACameraDevice_StateCallbacks* cb,
-                  sp<ACameraMetadata> chars) :
-            mDevice(new android::acam::CameraDevice(id, cb, chars, this)) {}
+                  sp<ACameraMetadata> chars, bool sharedMode) :
+            mDevice(new android::acam::CameraDevice(id, cb, chars, this, sharedMode)) {}
 
     ~ACameraDevice();
 
@@ -445,6 +452,14 @@
         mDevice->setRemoteDevice(remote);
     }
 
+    inline void setPrimaryClient(bool isPrimary) {
+        mDevice->setPrimaryClient(isPrimary);
+    }
+
+    inline bool isPrimaryClient() {
+        return mDevice->isPrimaryClient();
+    }
+
   private:
     android::sp<android::acam::CameraDevice> mDevice;
 };
diff --git a/camera/ndk/impl/ACameraManager.cpp b/camera/ndk/impl/ACameraManager.cpp
index 53c4489..f9c1a8a 100644
--- a/camera/ndk/impl/ACameraManager.cpp
+++ b/camera/ndk/impl/ACameraManager.cpp
@@ -28,9 +28,11 @@
 #include <memory>
 #include "ACameraDevice.h"
 #include "ACameraMetadata.h"
+#include <com_android_internal_camera_flags.h>
 
 using namespace android::acam;
 namespace vd_flags = android::companion::virtualdevice::flags;
+namespace flags = com::android::internal::camera::flags;
 
 namespace android {
 namespace acam {
@@ -860,10 +862,36 @@
 }
 
 camera_status_t
-ACameraManager::openCamera(
+ACameraManager::isCameraDeviceSharingSupported(
         const char* cameraId,
+        /*out*/bool* isSharingSupported) {
+    if (!flags::camera_multi_client()) {
+        return ACAMERA_ERROR_UNSUPPORTED_OPERATION;
+    }
+    sp<ACameraMetadata> spChars;
+    camera_status_t ret = getCameraCharacteristics(cameraId, &spChars);
+    if (ret != ACAMERA_OK) {
+        ALOGE("%s: cannot get camera characteristics for camera %s. err %d",
+                __FUNCTION__, cameraId, ret);
+        return ret;
+    }
+
+    ACameraMetadata* chars = spChars.get();
+    ACameraMetadata_const_entry entry;
+    ret = ACameraMetadata_getConstEntry(chars, ANDROID_SHARED_SESSION_OUTPUT_CONFIGURATIONS,
+            &entry);
+    if (ret != ACAMERA_OK) {
+        return ret;
+    }
+    *isSharingSupported = (entry.count > 0);
+    return ACAMERA_OK;
+}
+
+camera_status_t
+ACameraManager::openCamera(
+        const char* cameraId, bool sharedMode,
         ACameraDevice_StateCallbacks* callback,
-        /*out*/ACameraDevice** outDevice) {
+        /*out*/ACameraDevice** outDevice, /*out*/bool* primaryClient) {
     sp<ACameraMetadata> chars;
     camera_status_t ret = getCameraCharacteristics(cameraId, &chars);
     Mutex::Autolock _l(mLock);
@@ -873,7 +901,7 @@
         return ACAMERA_ERROR_INVALID_PARAMETER;
     }
 
-    ACameraDevice* device = new ACameraDevice(cameraId, callback, chars);
+    ACameraDevice* device = new ACameraDevice(cameraId, callback, chars, sharedMode);
 
     sp<hardware::ICameraService> cs = mGlobalManager->getCameraService();
     if (cs == nullptr) {
@@ -892,13 +920,14 @@
     clientAttribution.deviceId = mDeviceContext.deviceId;
     clientAttribution.packageName = "";
     clientAttribution.attributionTag = std::nullopt;
+    clientAttribution.token = sp<BBinder>::make();
 
     // No way to get package name from native.
     // Send a zero length package name and let camera service figure it out from UID
     binder::Status serviceRet = cs->connectDevice(
             callbacks, cameraId, /*oomScoreOffset*/0,
             targetSdkVersion, /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
-            clientAttribution, static_cast<int32_t>(mDeviceContext.policy),
+            clientAttribution, static_cast<int32_t>(mDeviceContext.policy), sharedMode,
             /*out*/&deviceRemote);
 
     if (!serviceRet.isOk()) {
@@ -942,6 +971,14 @@
         return ACAMERA_ERROR_CAMERA_DISCONNECTED;
     }
     device->setRemoteDevice(deviceRemote);
+    if (flags::camera_multi_client() && sharedMode) {
+        binder::Status remoteRet = deviceRemote->isPrimaryClient(primaryClient);
+        if (!remoteRet.isOk()) {
+            delete device;
+            return ACAMERA_ERROR_UNKNOWN;
+        }
+        device->setPrimaryClient(*primaryClient);
+    }
     *outDevice = device;
     return ACAMERA_OK;
 }
diff --git a/camera/ndk/impl/ACameraManager.h b/camera/ndk/impl/ACameraManager.h
index cb7a4ff..fffe037 100644
--- a/camera/ndk/impl/ACameraManager.h
+++ b/camera/ndk/impl/ACameraManager.h
@@ -138,6 +138,10 @@
         virtual binder::Status onCameraOpened(const std::string&, const std::string&, int32_t) {
             return binder::Status::ok();
         }
+        virtual binder::Status onCameraOpenedInSharedMode(const std::string&, const std::string&,
+                int32_t, bool) {
+            return binder::Status::ok();
+        }
         virtual binder::Status onCameraClosed(const std::string&, int32_t) {
             return binder::Status::ok();
         }
@@ -327,16 +331,17 @@
 
     camera_status_t getCameraCharacteristics(
             const char* cameraId, android::sp<ACameraMetadata>* characteristics);
-    camera_status_t openCamera(const char* cameraId,
+    camera_status_t openCamera(const char* cameraId, bool sharedMode,
                                ACameraDevice_StateCallbacks* callback,
-                               /*out*/ACameraDevice** device);
+                               /*out*/ACameraDevice** device, /*out*/bool* primaryClient);
     void registerAvailabilityCallback(const ACameraManager_AvailabilityCallbacks* callback);
     void unregisterAvailabilityCallback(const ACameraManager_AvailabilityCallbacks* callback);
     void registerExtendedAvailabilityCallback(
             const ACameraManager_ExtendedAvailabilityCallbacks* callback);
     void unregisterExtendedAvailabilityCallback(
             const ACameraManager_ExtendedAvailabilityCallbacks* callback);
-
+    camera_status_t isCameraDeviceSharingSupported(
+            const char* cameraId, bool* isSharingSupported);
   private:
     enum {
         kCameraIdListNotInit = -1
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index fef6443..32e2f3d 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -562,6 +562,8 @@
         case ACAMERA_CONTROL_ZOOM_RATIO:
         case ACAMERA_CONTROL_SETTINGS_OVERRIDE:
         case ACAMERA_CONTROL_AUTOFRAMING:
+        case ACAMERA_CONTROL_ZOOM_METHOD:
+        case ACAMERA_CONTROL_AE_PRIORITY_MODE:
         case ACAMERA_EDGE_MODE:
         case ACAMERA_FLASH_MODE:
         case ACAMERA_FLASH_STRENGTH_LEVEL:
diff --git a/camera/ndk/include/camera/NdkCameraCaptureSession.h b/camera/ndk/include/camera/NdkCameraCaptureSession.h
index 1400121..e73222b 100644
--- a/camera/ndk/include/camera/NdkCameraCaptureSession.h
+++ b/camera/ndk/include/camera/NdkCameraCaptureSession.h
@@ -1099,6 +1099,92 @@
 camera_status_t ACameraCaptureSession_prepareWindow(
     ACameraCaptureSession* session,
     ANativeWindow *window) __INTRODUCED_IN(34);
+
+/**
+ * Request continuous streaming of a sequence of images for a shared capture session, where
+ * more than one client can open the same camera in shared mode by calling
+ * {@link ACameraManager_openSharedCamera}. In a shared session, only the primary client can create
+ * capture requests and change capture parameters. Secondary clients can only request streaming of
+ * images by calling this API, {@link ACameraCaptureSessionShared_startStreaming}. Calling this API
+ * for a normal session, where {@link ACameraManager_openCamera} was used to open the camera,
+ * returns {@link ACAMERA_ERROR_INVALID_OPERATION}.
+ *
+ * <p>With this method, the camera device will continually capture images, cycling through the
+ * settings in the list of {@link ACaptureRequest} specified by the primary client. If the primary
+ * client does not have an ongoing repeating request, the camera service will use a capture request
+ * with default parameters for the preview template.</p>
+ *
+ * <p>To stop the continuous streaming, call {@link ACameraCaptureSessionShared_stopStreaming}.</p>
+ *
+ * <p>Calling this method will replace an existing continuous streaming request.</p>
+ *
+ * @param sharedSession the shared capture session when camera is opened in
+ *        shared mode.
+ * @param callbacks the {@link ACameraCaptureSession_captureCallbacksV2} to be associated with
+ *        this capture sequence. No capture callback will be fired if callbacks is set to NULL.
+ * @param numOutputWindows number of native windows to be used for streaming. Must be at least 1.
+ * @param windows an array of {@link ANativeWindow} to be used for streaming. Length must be at
+ *        least numOutputWindows.
+ * @param captureSequenceId the capture sequence ID associated with this capture method invocation
+ *        will be stored here if this argument is not NULL and the method call succeeds.
+ *        When this argument is set to NULL, the capture sequence ID will not be returned.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method succeeds. captureSequenceId will be filled
+ *             if it is not NULL.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if sharedSession or windows is NULL, or
+ *             if numOutputWindows < 1</li>
+ *         <li>{@link ACAMERA_ERROR_SESSION_CLOSED} if the capture session has been closed</li>
+ *         <li>{@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if the camera device is closed</li>
+ *         <li>{@link ACAMERA_ERROR_CAMERA_DEVICE} if the camera device encounters fatal error</li>
+ *         <li>{@link ACAMERA_ERROR_CAMERA_SERVICE} if the camera service encounters fatal error
+ *         </li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_OPERATION} if the session passed is not a shared
+ *              session</li>
+ *         <li>{@link ACAMERA_ERROR_UNKNOWN} if the method fails for some other reasons</li>
+ *         </ul>
+ */
+camera_status_t ACameraCaptureSessionShared_startStreaming(
+    ACameraCaptureSession* sharedSession, ACameraCaptureSession_captureCallbacksV2 *callbacks,
+    int numOutputWindows, ANativeWindow **window,
+    int *captureSequenceId) __INTRODUCED_IN(36);
+
+/**
+ * This has the same functionality as {@link ACameraCaptureSessionShared_startStreaming}, with
+ * added support for logical multi-camera, where the capture callbacks support result metadata
+ * for physical cameras.
+ */
+camera_status_t ACameraCaptureSessionShared_logicalCamera_startStreaming(
+    ACameraCaptureSession* sharedSession,
+    ACameraCaptureSession_logicalCamera_captureCallbacksV2 *callbacks,
+    int numOutputWindows, ANativeWindow **windows,
+    int *captureSequenceId) __INTRODUCED_IN(36);
+
+/**
+ * Cancel any ongoing streaming started by {@link ACameraCaptureSessionShared_startStreaming}.
+ * Calling this API does not affect streaming requests submitted by other clients that have
+ * opened the camera in shared mode. Calling this API for a normal session, where
+ * {@link ACameraManager_openCamera} was used to open the camera, returns
+ * {@link ACAMERA_ERROR_INVALID_OPERATION}.
+ *
+ * @param sharedSession the capture session of interest
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method succeeds.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if session is NULL.</li>
+ *         <li>{@link ACAMERA_ERROR_SESSION_CLOSED} if the capture session has been closed</li>
+ *         <li>{@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if the camera device is closed</li>
+ *         <li>{@link ACAMERA_ERROR_CAMERA_DEVICE} if the camera device encounters fatal error</li>
+ *         <li>{@link ACAMERA_ERROR_CAMERA_SERVICE} if the camera service encounters fatal error
+ *         </li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_OPERATION} if the session passed is not a shared
+ *              session</li>
+ *         <li>{@link ACAMERA_ERROR_UNKNOWN} if the method fails for some other reasons</li>
+ *         </ul>
+ */
+camera_status_t ACameraCaptureSessionShared_stopStreaming(
+    ACameraCaptureSession* sharedSession
+) __INTRODUCED_IN(36);
 __END_DECLS
 
 #endif /* _NDK_CAMERA_CAPTURE_SESSION_H */
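To make the new shared-session streaming entry points concrete, a minimal C++ sketch of a secondary client starting and stopping streaming on a session obtained after ACameraManager_openSharedCamera(); the session and preview window are assumed to already exist, callbacks are omitted (NULL is allowed per the documentation), and error handling is abbreviated.

    #include <camera/NdkCameraCaptureSession.h>
    #include <android/native_window.h>

    // Start streaming on a shared session, then stop it again.
    camera_status_t streamOnSharedSession(ACameraCaptureSession* sharedSession,
                                          ANativeWindow* previewWindow) {
        ANativeWindow* windows[] = { previewWindow };
        int sequenceId = 0;
        camera_status_t status = ACameraCaptureSessionShared_startStreaming(
                sharedSession, /*callbacks*/ nullptr,
                /*numOutputWindows*/ 1, windows, &sequenceId);
        if (status != ACAMERA_OK) {
            return status;
        }
        // ... consume frames from previewWindow ...
        return ACameraCaptureSessionShared_stopStreaming(sharedSession);
    }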
diff --git a/camera/ndk/include/camera/NdkCameraDevice.h b/camera/ndk/include/camera/NdkCameraDevice.h
index fbd0ee1..8c7eb1f 100644
--- a/camera/ndk/include/camera/NdkCameraDevice.h
+++ b/camera/ndk/include/camera/NdkCameraDevice.h
@@ -125,6 +125,18 @@
 typedef void (*ACameraDevice_ErrorStateCallback)(void* context, ACameraDevice* device, int error);
 
 /**
+ * Callback for client shared access priority changes, to be used in
+ * {@link ACameraDevice_StateCallbacks} when the camera is opened in shared mode.
+ *
+ * @param context The optional context in {@link ACameraDevice_StateCallbacks} will be passed to
+ *                this callback.
+ * @param device The {@link ACameraDevice} whose access priority has changed.
+ * @param isPrimaryClient whether the client is now the primary client.
+ */
+typedef void (*ACameraDevice_ClientSharedAccessPriorityChangedCallback)(void* context,
+        ACameraDevice* device, bool isPrimaryClient);
+
+/**
  * Applications' callbacks for camera device state changes, register with
  * {@link ACameraManager_openCamera}.
  */
@@ -163,6 +175,17 @@
      *
      */
     ACameraDevice_ErrorStateCallback  onError;
+
+    /**
+     * Notify registered clients about client shared access priority changes when the camera device
+     * has been opened in shared mode.
+     *
+     * If the client priority changes from secondary to primary, then it can now
+     * create capture requests and change the capture request parameters. If the client priority
+     * changes from primary to secondary, that implies that another higher-priority client is also
+     * accessing the camera in shared mode and is now the primary client.
+     */
+    ACameraDevice_ClientSharedAccessPriorityChangedCallback onClientSharedAccessPriorityChanged;
 } ACameraDevice_StateCallbacks;
 
 /**
@@ -671,7 +694,9 @@
  *         <li>{@link ACAMERA_OK} if the method call succeeds. The created capture session will be
  *                                filled in session argument.</li>
  *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if any of device, outputs, callbacks or
- *                                session is NULL.</li>
+ *                                session is NULL, or if the outputs do not match the predefined
+ *                                shared session configuration when the camera is opened in shared mode.
+ *                                </li>
  *         <li>{@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if the camera device is closed.</li>
  *         <li>{@link ACAMERA_ERROR_CAMERA_DEVICE} if the camera device encounters fatal error.</li>
  *         <li>{@link ACAMERA_ERROR_CAMERA_SERVICE} if the camera service encounters fatal error.</li>
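A short, hedged C++ sketch of how an application might wire the new onClientSharedAccessPriorityChanged member into ACameraDevice_StateCallbacks; the disconnect and error handlers are stubs and the function names are illustrative.

    #include <camera/NdkCameraDevice.h>

    static void onPriorityChanged(void* /*context*/, ACameraDevice* /*device*/,
                                  bool isPrimaryClient) {
        // When promoted to primary, the client may submit its own capture
        // requests; as secondary it can only request streaming.
        (void)isPrimaryClient;
    }

    static void onDisconnected(void* /*context*/, ACameraDevice* /*device*/) {}
    static void onError(void* /*context*/, ACameraDevice* /*device*/, int /*error*/) {}

    ACameraDevice_StateCallbacks makeSharedStateCallbacks(void* context) {
        ACameraDevice_StateCallbacks cbs = {};
        cbs.context = context;
        cbs.onDisconnected = onDisconnected;
        cbs.onError = onError;
        cbs.onClientSharedAccessPriorityChanged = onPriorityChanged;
        return cbs;
    }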
diff --git a/camera/ndk/include/camera/NdkCameraManager.h b/camera/ndk/include/camera/NdkCameraManager.h
index b4f3bf1..a9b0174 100644
--- a/camera/ndk/include/camera/NdkCameraManager.h
+++ b/camera/ndk/include/camera/NdkCameraManager.h
@@ -293,6 +293,46 @@
         /*out*/ACameraDevice** device) __INTRODUCED_IN(24);
 
 /**
+ * Open a shared connection to a camera with the given ID. The opened camera device will be
+ * returned in the `device` parameter. The behavior of this method matches that of
+ * {@link ACameraManager_openCamera(ACameraManager*, const char*, ACameraDevice_StateCallbacks*,
+ * ACameraDevice**)} except that it opens the camera in shared mode so that more
+ * than one client can access the camera at the same time.
+ *
+ * <p>Processes need to have android.permission.SYSTEM_CAMERA in addition to
+ * android.permission.CAMERA in order to connect to this camera device in shared
+ * mode.</p>
+ *
+ * @param manager the {@link ACameraManager} of interest.
+ * @param cameraId the ID string of the camera device to be opened.
+ * @param callback the {@link ACameraDevice_StateCallbacks} associated with the opened camera
+ *                 device.
+ * @param device the opened {@link ACameraDevice} will be filled here if the method call succeeds.
+ * @param isPrimaryClient will be set to true if this client is the primary client.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if manager, cameraId, callback, device,
+ *                  or isPrimaryClient is NULL, or cameraId does not match any camera devices connected.</li>
+ *         <li>{@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if connection to camera service fails.</li>
+ *         <li>{@link ACAMERA_ERROR_NOT_ENOUGH_MEMORY} if allocating memory fails.</li>
+ *         <li>{@link ACAMERA_ERROR_CAMERA_IN_USE} if camera device is being used by a higher
+ *                   priority camera API client.</li>
+ *         <li>{@link ACAMERA_ERROR_MAX_CAMERA_IN_USE} if the system-wide limit for number of open
+ *                   cameras or camera resources has been reached, and more camera devices cannot be
+ *                   opened until previous instances are closed.</li>
+ *         <li>{@link ACAMERA_ERROR_CAMERA_DISABLED} if the camera is disabled due to a device
+ *                   policy, and cannot be opened.</li>
+ *         <li>{@link ACAMERA_ERROR_PERMISSION_DENIED} if the application does not have permission
+ *                   to open camera.</li>
+ *         <li>{@link ACAMERA_ERROR_UNKNOWN} if the method fails for some other reasons.</li></ul>
+ */
+camera_status_t ACameraManager_openSharedCamera(
+        ACameraManager* manager, const char* cameraId,
+        ACameraDevice_StateCallbacks* callback,
+        /*out*/ACameraDevice** device, /*out*/bool* isPrimaryClient) __INTRODUCED_IN(36);
+
+/**
  * Definition of camera access permission change callback.
  *
  * <p>Notification that camera access priorities have changed and the camera may
@@ -397,6 +437,27 @@
         ACameraManager* manager,
         const ACameraManager_ExtendedAvailabilityCallbacks* callback) __INTRODUCED_IN(29);
 
+
+/**
+ * Checks if a camera can be opened in shared mode by multiple clients.
+ *
+ * @param manager the {@link ACameraManager} of interest.
+ * @param cameraId the ID string of the camera device of interest.
+ * @param isSharingSupported output will be filled here if the method succeeds.
+ *        This will be set to true if the camera can be opened in shared mode,
+ *        false otherwise.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if any parameter is not
+ *         valid.</li>
+ *         </ul>
+ */
+camera_status_t ACameraManager_isCameraDeviceSharingSupported(
+        ACameraManager *manager,
+        const char *cameraId,
+        bool *isSharingSupported) __INTRODUCED_IN(36);
+
 #ifdef __ANDROID_VNDK__
 /**
  * Retrieve the tag value, given the tag name and camera id.
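A hedged C++ sketch combining the two new manager entry points declared above: query whether a camera supports shared mode and, if so, open it in shared mode; the state callbacks are assumed to be prepared by the caller (see the state-callbacks sketch earlier) and the helper name is illustrative.

    #include <camera/NdkCameraManager.h>
    #include <camera/NdkCameraDevice.h>

    camera_status_t openCameraShared(ACameraManager* manager, const char* cameraId,
                                     ACameraDevice_StateCallbacks* stateCallbacks,
                                     /*out*/ ACameraDevice** outDevice,
                                     /*out*/ bool* outIsPrimary) {
        bool sharingSupported = false;
        camera_status_t status = ACameraManager_isCameraDeviceSharingSupported(
                manager, cameraId, &sharingSupported);
        if (status != ACAMERA_OK) {
            return status;
        }
        if (!sharingSupported) {
            // Shared mode not advertised for this camera.
            return ACAMERA_ERROR_UNSUPPORTED_OPERATION;
        }
        return ACameraManager_openSharedCamera(manager, cameraId, stateCallbacks,
                                               outDevice, outIsPrimary);
    }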
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 6c88288..fc6b932 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -91,6 +91,7 @@
     ACAMERA_AUTOMOTIVE_LENS,
     ACAMERA_EXTENSION,
     ACAMERA_JPEGR,
+    ACAMERA_SHARED_SESSION,
     ACAMERA_SECTION_COUNT,
 
     ACAMERA_VENDOR = 0x8000
@@ -138,6 +139,7 @@
     ACAMERA_AUTOMOTIVE_LENS_START  = ACAMERA_AUTOMOTIVE_LENS   << 16,
     ACAMERA_EXTENSION_START        = ACAMERA_EXTENSION         << 16,
     ACAMERA_JPEGR_START            = ACAMERA_JPEGR             << 16,
+    ACAMERA_SHARED_SESSION_START   = ACAMERA_SHARED_SESSION    << 16,
     ACAMERA_VENDOR_START           = ACAMERA_VENDOR            << 16
 } acamera_metadata_section_start_t;
 
@@ -563,7 +565,9 @@
      * application's selected exposure time, sensor sensitivity,
      * and frame duration (ACAMERA_SENSOR_EXPOSURE_TIME,
      * ACAMERA_SENSOR_SENSITIVITY, and
-     * ACAMERA_SENSOR_FRAME_DURATION). If one of the FLASH modes
+     * ACAMERA_SENSOR_FRAME_DURATION). If ACAMERA_CONTROL_AE_PRIORITY_MODE is
+     * enabled, the relevant priority CaptureRequest settings will not be overridden.
+     * See ACAMERA_CONTROL_AE_PRIORITY_MODE for more details. If one of the FLASH modes
      * is selected, the camera device's flash unit controls are
      * also overridden.</p>
      * <p>The FLASH modes are only available if the camera device
@@ -583,6 +587,7 @@
      * different ACAMERA_FLASH_STRENGTH_LEVEL.</p>
      *
      * @see ACAMERA_CONTROL_AE_MODE
+     * @see ACAMERA_CONTROL_AE_PRIORITY_MODE
      * @see ACAMERA_CONTROL_MODE
      * @see ACAMERA_FLASH_INFO_AVAILABLE
      * @see ACAMERA_FLASH_MODE
@@ -2401,6 +2406,95 @@
      */
     ACAMERA_CONTROL_LOW_LIGHT_BOOST_STATE =                     // byte (acamera_metadata_enum_android_control_low_light_boost_state_t)
             ACAMERA_CONTROL_START + 59,
+    /**
+     * <p>Whether the application uses ACAMERA_SCALER_CROP_REGION or ACAMERA_CONTROL_ZOOM_RATIO
+     * to control zoom levels.</p>
+     *
+     * @see ACAMERA_CONTROL_ZOOM_RATIO
+     * @see ACAMERA_SCALER_CROP_REGION
+     *
+     * <p>Type: byte (acamera_metadata_enum_android_control_zoom_method_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+     *   <li>ACaptureRequest</li>
+     * </ul></p>
+     *
+     * <p>If set to AUTO, the camera device detects which capture request key the application uses
+     * to do zoom, ACAMERA_SCALER_CROP_REGION or ACAMERA_CONTROL_ZOOM_RATIO. If
+     * the application doesn't set ACAMERA_CONTROL_ZOOM_RATIO or sets it to 1.0 in the capture
+     * request, the effective zoom level is reflected in ACAMERA_SCALER_CROP_REGION in capture
+     * results. If ACAMERA_CONTROL_ZOOM_RATIO is set to values other than 1.0, the effective
+     * zoom level is reflected in ACAMERA_CONTROL_ZOOM_RATIO. AUTO is the default value
+     * for this control, and also the behavior of the OS before Android version
+     * <a href="https://developer.android.com/reference/android/os/Build.VERSION_CODES.html#BAKLAVA">BAKLAVA</a>.</p>
+     * <p>If set to ZOOM_RATIO, the application explicitly specifies that the zoom level be controlled
+     * by ACAMERA_CONTROL_ZOOM_RATIO, and the effective zoom level is reflected in
+     * ACAMERA_CONTROL_ZOOM_RATIO in capture results. This addresses an ambiguity with AUTO,
+     * with which the camera device cannot know if the application is using cropRegion or
+     * zoomRatio at 1.0x.</p>
+     *
+     * @see ACAMERA_CONTROL_ZOOM_RATIO
+     * @see ACAMERA_SCALER_CROP_REGION
+     */
+    ACAMERA_CONTROL_ZOOM_METHOD =                               // byte (acamera_metadata_enum_android_control_zoom_method_t)
+            ACAMERA_CONTROL_START + 60,
+    /**
+     * <p>Turn on AE priority mode.</p>
+     *
+     * <p>Type: byte (acamera_metadata_enum_android_control_ae_priority_mode_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+     *   <li>ACaptureRequest</li>
+     * </ul></p>
+     *
+     * <p>This control is only effective if ACAMERA_CONTROL_MODE is
+     * AUTO and ACAMERA_CONTROL_AE_MODE is set to one of its
+     * ON modes, with the exception of ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY.</p>
+     * <p>When a priority mode is enabled, the camera device's
+     * auto-exposure routine will maintain the application's
+     * selected parameters relevant to the priority mode while overriding
+     * the remaining exposure parameters
+     * (ACAMERA_SENSOR_EXPOSURE_TIME, ACAMERA_SENSOR_SENSITIVITY, and
+     * ACAMERA_SENSOR_FRAME_DURATION). For example, if
+     * SENSOR_SENSITIVITY_PRIORITY mode is enabled, the camera device will
+     * maintain the application-selected ACAMERA_SENSOR_SENSITIVITY
+     * while adjusting ACAMERA_SENSOR_EXPOSURE_TIME
+     * and ACAMERA_SENSOR_FRAME_DURATION. The overridden fields for a
+     * given capture will be available in its CaptureResult.</p>
+     *
+     * @see ACAMERA_CONTROL_AE_MODE
+     * @see ACAMERA_CONTROL_MODE
+     * @see ACAMERA_SENSOR_EXPOSURE_TIME
+     * @see ACAMERA_SENSOR_FRAME_DURATION
+     * @see ACAMERA_SENSOR_SENSITIVITY
+     */
+    ACAMERA_CONTROL_AE_PRIORITY_MODE =                          // byte (acamera_metadata_enum_android_control_ae_priority_mode_t)
+            ACAMERA_CONTROL_START + 61,
+    /**
+     * <p>List of auto-exposure priority modes for ACAMERA_CONTROL_AE_PRIORITY_MODE
+     * that are supported by this camera device.</p>
+     *
+     * @see ACAMERA_CONTROL_AE_PRIORITY_MODE
+     *
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>This entry lists the valid modes for
+     * ACAMERA_CONTROL_AE_PRIORITY_MODE for this camera device.
+     * If no AE priority modes are available for a device, this will only list OFF.</p>
+     *
+     * @see ACAMERA_CONTROL_AE_PRIORITY_MODE
+     */
+    ACAMERA_CONTROL_AE_AVAILABLE_PRIORITY_MODES =               // byte[n]
+            ACAMERA_CONTROL_START + 62,
     ACAMERA_CONTROL_END,
 
     /**
@@ -4887,9 +4981,12 @@
      * duration exposed to the nearest possible value (rather than expose longer).
      * The final exposure time used will be available in the output capture result.</p>
      * <p>This control is only effective if ACAMERA_CONTROL_AE_MODE or ACAMERA_CONTROL_MODE is set to
-     * OFF; otherwise the auto-exposure algorithm will override this value.</p>
+     * OFF; otherwise the auto-exposure algorithm will override this value. However, in the
+     * case that ACAMERA_CONTROL_AE_PRIORITY_MODE is set to SENSOR_EXPOSURE_TIME_PRIORITY, this
+     * control will be effective and not controlled by the auto-exposure algorithm.</p>
      *
      * @see ACAMERA_CONTROL_AE_MODE
+     * @see ACAMERA_CONTROL_AE_PRIORITY_MODE
      * @see ACAMERA_CONTROL_MODE
      */
     ACAMERA_SENSOR_EXPOSURE_TIME =                              // int64
@@ -4998,7 +5095,9 @@
      * value. The final sensitivity used will be available in the
      * output capture result.</p>
      * <p>This control is only effective if ACAMERA_CONTROL_AE_MODE or ACAMERA_CONTROL_MODE is set to
-     * OFF; otherwise the auto-exposure algorithm will override this value.</p>
+     * OFF; otherwise the auto-exposure algorithm will override this value. However, in the
+     * case that ACAMERA_CONTROL_AE_PRIORITY_MODE is set to SENSOR_SENSITIVITY_PRIORITY, this
+     * control will be effective and not controlled by the auto-exposure algorithm.</p>
      * <p>Note that for devices supporting postRawSensitivityBoost, the total sensitivity applied
      * to the final processed image is the combination of ACAMERA_SENSOR_SENSITIVITY and
      * ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST. In case the application uses the sensor
@@ -5007,6 +5106,7 @@
      * set postRawSensitivityBoost.</p>
      *
      * @see ACAMERA_CONTROL_AE_MODE
+     * @see ACAMERA_CONTROL_AE_PRIORITY_MODE
      * @see ACAMERA_CONTROL_MODE
      * @see ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST
      * @see ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE
@@ -9652,6 +9752,64 @@
 
 } acamera_metadata_enum_android_control_low_light_boost_state_t;
 
+// ACAMERA_CONTROL_ZOOM_METHOD
+typedef enum acamera_metadata_enum_acamera_control_zoom_method {
+    /**
+     * <p>The camera device automatically detects whether the application does zoom with
+     * ACAMERA_SCALER_CROP_REGION or ACAMERA_CONTROL_ZOOM_RATIO, and in turn decides which
+     * metadata tag reflects the effective zoom level.</p>
+     *
+     * @see ACAMERA_CONTROL_ZOOM_RATIO
+     * @see ACAMERA_SCALER_CROP_REGION
+     */
+    ACAMERA_CONTROL_ZOOM_METHOD_AUTO                                 = 0,
+
+    /**
+     * <p>The application intends to control zoom via ACAMERA_CONTROL_ZOOM_RATIO, and
+     * the effective zoom level is reflected by ACAMERA_CONTROL_ZOOM_RATIO in capture results.</p>
+     *
+     * @see ACAMERA_CONTROL_ZOOM_RATIO
+     */
+    ACAMERA_CONTROL_ZOOM_METHOD_ZOOM_RATIO                           = 1,
+
+} acamera_metadata_enum_android_control_zoom_method_t;
+
+// ACAMERA_CONTROL_AE_PRIORITY_MODE
+typedef enum acamera_metadata_enum_acamera_control_ae_priority_mode {
+    /**
+     * <p>Disable AE priority mode. This is the default value.</p>
+     */
+    ACAMERA_CONTROL_AE_PRIORITY_MODE_OFF                             = 0,
+
+    /**
+     * <p>The camera device's auto-exposure routine is active and
+     * prioritizes the application-selected ISO (ACAMERA_SENSOR_SENSITIVITY).</p>
+     * <p>The application has control over ACAMERA_SENSOR_SENSITIVITY while
+     * the application's values for ACAMERA_SENSOR_EXPOSURE_TIME and
+     * ACAMERA_SENSOR_FRAME_DURATION are ignored.</p>
+     *
+     * @see ACAMERA_SENSOR_EXPOSURE_TIME
+     * @see ACAMERA_SENSOR_FRAME_DURATION
+     * @see ACAMERA_SENSOR_SENSITIVITY
+     */
+    ACAMERA_CONTROL_AE_PRIORITY_MODE_SENSOR_SENSITIVITY_PRIORITY     = 1,
+
+    /**
+     * <p>The camera device's auto-exposure routine is active and
+     * prioritizes the application-selected exposure time
+     * (ACAMERA_SENSOR_EXPOSURE_TIME).</p>
+     * <p>The application has control over ACAMERA_SENSOR_EXPOSURE_TIME while
+     * the application's values for ACAMERA_SENSOR_SENSITIVITY and
+     * ACAMERA_SENSOR_FRAME_DURATION are ignored.</p>
+     *
+     * @see ACAMERA_SENSOR_EXPOSURE_TIME
+     * @see ACAMERA_SENSOR_FRAME_DURATION
+     * @see ACAMERA_SENSOR_SENSITIVITY
+     */
+    ACAMERA_CONTROL_AE_PRIORITY_MODE_SENSOR_EXPOSURE_TIME_PRIORITY   = 2,
+
+} acamera_metadata_enum_android_control_ae_priority_mode_t;
+
 
 
 // ACAMERA_EDGE_MODE
@@ -11923,6 +12081,7 @@
 
 
 
+
 __END_DECLS
 
 #endif /* _NDK_CAMERA_METADATA_TAGS_H */
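As an illustration of the new ACAMERA_CONTROL_ZOOM_METHOD and ACAMERA_CONTROL_AE_PRIORITY_MODE tags, a minimal C++ sketch that sets both on an existing capture request via ACaptureRequest_setEntry_u8(); the request is assumed to have been created by the (primary) client, and availability of the chosen modes should first be checked against the camera characteristics.

    #include <stdint.h>
    #include <camera/NdkCaptureRequest.h>
    #include <camera/NdkCameraMetadataTags.h>

    camera_status_t applyNewControls(ACaptureRequest* request) {
        // Explicitly reflect zoom through ACAMERA_CONTROL_ZOOM_RATIO in results.
        uint8_t zoomMethod = ACAMERA_CONTROL_ZOOM_METHOD_ZOOM_RATIO;
        camera_status_t status = ACaptureRequest_setEntry_u8(
                request, ACAMERA_CONTROL_ZOOM_METHOD, 1, &zoomMethod);
        if (status != ACAMERA_OK) {
            return status;
        }
        // Prioritize the application-selected ISO; exposure time and frame
        // duration remain under auto-exposure control.
        uint8_t aePriority = ACAMERA_CONTROL_AE_PRIORITY_MODE_SENSOR_SENSITIVITY_PRIORITY;
        return ACaptureRequest_setEntry_u8(
                request, ACAMERA_CONTROL_AE_PRIORITY_MODE, 1, &aePriority);
    }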
diff --git a/camera/ndk/libcamera2ndk.map.txt b/camera/ndk/libcamera2ndk.map.txt
index 7d7868b..60d4775 100644
--- a/camera/ndk/libcamera2ndk.map.txt
+++ b/camera/ndk/libcamera2ndk.map.txt
@@ -28,6 +28,8 @@
     ACameraManager_getCameraCharacteristics;
     ACameraManager_getCameraIdList;
     ACameraManager_openCamera;
+    ACameraManager_openSharedCamera; # systemapi introduced=36
+    ACameraManager_isCameraDeviceSharingSupported; # systemapi introduced=36
     ACameraManager_registerAvailabilityCallback;
     ACameraManager_unregisterAvailabilityCallback;
     ACameraManager_registerExtendedAvailabilityCallback; # introduced=29
@@ -72,6 +74,9 @@
     ACaptureSessionSharedOutput_remove; # introduced=28
     ACaptureSessionPhysicalOutput_create; # introduced=29
     ACaptureSessionOutput_free;
+    ACameraCaptureSessionShared_startStreaming; # systemapi introduced=36
+    ACameraCaptureSessionShared_logicalCamera_startStreaming; # systemapi introduced=36
+    ACameraCaptureSessionShared_stopStreaming; # systemapi introduced=36
   local:
     *;
 };
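
The shared-camera entry points above are exported as system APIs, but their C signatures are not shown in this change. The sketch below is therefore an assumption, modeled on the vendor-side ACameraManager::openCamera(cameraId, sharedMode, callback, &device, &isPrimaryClient) plumbing added further down in this patch:

    #include <camera/NdkCameraManager.h>

    // Hypothetical helper; the two systemapi signatures are assumed, not confirmed here.
    static ACameraDevice* openSharedIfSupported(ACameraManager* mgr, const char* cameraId,
                                                ACameraDevice_StateCallbacks* cb) {
        bool sharingSupported = false;
        if (ACameraManager_isCameraDeviceSharingSupported(mgr, cameraId, &sharingSupported)
                != ACAMERA_OK || !sharingSupported) {
            return nullptr;  // fall back to ACameraManager_openCamera()
        }
        ACameraDevice* device = nullptr;
        bool isPrimaryClient = false;
        if (ACameraManager_openSharedCamera(mgr, cameraId, cb, &device, &isPrimaryClient)
                != ACAMERA_OK) {
            return nullptr;
        }
        // Later primary/secondary changes arrive via onClientSharedAccessPriorityChanged.
        return device;
    }
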
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
index 3325da6..d3a8e0d 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
@@ -85,11 +85,12 @@
         const char* id,
         ACameraDevice_StateCallbacks* cb,
         sp<ACameraMetadata> chars,
-        ACameraDevice* wrapper) :
+        ACameraDevice* wrapper, bool sharedMode) :
         mCameraId(id),
         mAppCallbacks(*cb),
         mChars(std::move(chars)),
         mWrapper(wrapper),
+        mSharedMode(sharedMode),
         mInError(false),
         mError(ACAMERA_OK),
         mIdle(true),
@@ -960,6 +961,7 @@
         case kWhatCaptureSeqAbort:
         case kWhatCaptureBufferLost:
         case kWhatPreparedCb:
+        case kWhatClientSharedAccessPriorityChanged:
             ALOGV("%s: Received msg %d", __FUNCTION__, msg->what());
             break;
         case kWhatCleanUpSessions:
@@ -997,6 +999,28 @@
             (*onDisconnected)(context, dev);
             break;
         }
+        case kWhatClientSharedAccessPriorityChanged:
+        {
+            ACameraDevice* dev;
+            found = msg->findPointer(kDeviceKey, (void**) &dev);
+            if (!found || dev == nullptr) {
+                ALOGE("%s: Cannot find device pointer!", __FUNCTION__);
+                return;
+            }
+            ACameraDevice_ClientSharedAccessPriorityChangedCallback
+                    onClientSharedAccessPriorityChanged;
+            found = msg->findPointer(kCallbackFpKey, (void**) &onClientSharedAccessPriorityChanged);
+            if (!found) {
+                ALOGE("%s: Cannot find onClientSharedAccessPriorityChanged!", __FUNCTION__);
+                return;
+            }
+            if (onClientSharedAccessPriorityChanged == nullptr) {
+                return;
+            }
+            (*onClientSharedAccessPriorityChanged)(context, dev, dev->isPrimaryClient());
+            break;
+        }
+
         case kWhatOnError:
         {
             ACameraDevice* dev;
@@ -1614,6 +1638,28 @@
     return ScopedAStatus::ok();
 }
 
+ScopedAStatus CameraDevice::ServiceCallback::onClientSharedAccessPriorityChanged(
+        bool primaryClient) {
+    ALOGV("onClientSharedAccessPriorityChanged received. primaryClient = %d", primaryClient);
+    ScopedAStatus ret = ScopedAStatus::ok();
+    std::shared_ptr<CameraDevice> dev = mDevice.lock();
+    if (dev == nullptr) {
+        return ret; // device has been closed
+    }
+    Mutex::Autolock _l(dev->mDeviceLock);
+    if (dev->isClosed() || dev->mRemote == nullptr) {
+        return ret;
+    }
+    dev->setPrimaryClient(primaryClient);
+    sp<AMessage> msg = new AMessage(kWhatClientSharedAccessPriorityChanged, dev->mHandler);
+    msg->setPointer(kContextKey, dev->mAppCallbacks.context);
+    msg->setPointer(kDeviceKey, (void*) dev->getWrapper());
+    msg->setPointer(kCallbackFpKey, (void*) dev->mAppCallbacks.onClientSharedAccessPriorityChanged);
+    msg->post();
+
+    return ScopedAStatus::ok();
+}
+
 ScopedAStatus CameraDevice::ServiceCallback::onDeviceIdle() {
     ALOGV("Camera is now idle");
 
@@ -1684,8 +1730,9 @@
                     __FUNCTION__, burstId, cbh.mRequests.size());
             dev->setCameraDeviceErrorLocked(ACAMERA_ERROR_CAMERA_SERVICE);
         }
+
         sp<CaptureRequest> request = cbh.mRequests[burstId];
-        ALOGE("%s: request = %p", __FUNCTION__, request.get());
+        ALOGV("%s: request = %p", __FUNCTION__, request.get());
         sp<AMessage> msg = nullptr;
         if (v2Callback) {
             msg = new AMessage(kWhatCaptureStart2, dev->mHandler);
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.h b/camera/ndk/ndk_vendor/impl/ACameraDevice.h
index b771d47..6ba30bb 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.h
@@ -95,7 +95,7 @@
   public:
     CameraDevice(const char* id, ACameraDevice_StateCallbacks* cb,
                   sp<ACameraMetadata> chars,
-                  ACameraDevice* wrapper);
+                  ACameraDevice* wrapper, bool sharedMode);
     ~CameraDevice();
 
     // Called to initialize fields that require shared_ptr to `this`
@@ -136,6 +136,7 @@
                                             const CaptureResultExtras& in_resultExtras,
                                             const std::vector<PhysicalCaptureResultInfo>&
                                                     in_physicalCaptureResultInfos) override;
+         ndk::ScopedAStatus onClientSharedAccessPriorityChanged(bool isPrimaryClient) override;
 
       private:
         camera_status_t readOneResultMetadata(const CaptureMetadataInfo& captureMetadataInfo,
@@ -154,6 +155,8 @@
 
     // Stop the looper thread and unregister the handler
     void stopLooperAndDisconnect();
+    void setPrimaryClient(bool isPrimary) { mIsPrimaryClient = isPrimary; }
+    bool isPrimaryClient() { return mIsPrimaryClient; }
 
   private:
     friend ACameraCaptureSession;
@@ -232,6 +235,8 @@
     const sp<ACameraMetadata> mChars;    // Camera characteristics
     std::shared_ptr<ServiceCallback> mServiceCallback;
     ACameraDevice* mWrapper;
+    bool mSharedMode;
+    bool mIsPrimaryClient = false;
 
     // stream id -> pair of (ACameraWindowType* from application, OutputConfiguration used for
     // camera service)
@@ -274,7 +279,8 @@
         kWhatCaptureBufferLost, // onCaptureBufferLost
         kWhatPreparedCb, // onPrepared
         // Internal cleanup
-        kWhatCleanUpSessions   // Cleanup cached sp<ACameraCaptureSession>
+        kWhatCleanUpSessions,   // Cleanup cached sp<ACameraCaptureSession>
+        kWhatClientSharedAccessPriorityChanged // onClientSharedAccessPriorityChanged
     };
     static const char* kContextKey;
     static const char* kDeviceKey;
@@ -434,9 +440,9 @@
  */
 struct ACameraDevice {
     ACameraDevice(const char* id, ACameraDevice_StateCallbacks* cb,
-                  sp<ACameraMetadata> chars) :
+                  sp<ACameraMetadata> chars, bool sharedMode) :
             mDevice(std::make_shared<android::acam::CameraDevice>(id, cb,
-                                                                std::move(chars), this)) {
+            std::move(chars), this, sharedMode)) {
         mDevice->init();
     }
 
@@ -481,6 +487,13 @@
     inline bool setDeviceMetadataQueues() {
         return mDevice->setDeviceMetadataQueues();
     }
+    inline void setPrimaryClient(bool isPrimary) {
+        mDevice->setPrimaryClient(isPrimary);
+    }
+    inline bool isPrimaryClient() {
+        return mDevice->isPrimaryClient();
+    }
+
   private:
     std::shared_ptr<android::acam::CameraDevice> mDevice;
 };
diff --git a/camera/ndk/ndk_vendor/impl/ACameraManager.cpp b/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
index 5b69f5c..c34c4bd 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
@@ -789,10 +789,33 @@
 }
 
 camera_status_t
-ACameraManager::openCamera(
+ACameraManager::isCameraDeviceSharingSupported(
         const char* cameraId,
+        /*out*/bool* isSharingSupported) {
+    sp<ACameraMetadata> spChars;
+    camera_status_t ret = getCameraCharacteristics(cameraId, &spChars);
+    if (ret != ACAMERA_OK) {
+        ALOGE("%s: cannot get camera characteristics for camera %s. err %d",
+                __FUNCTION__, cameraId, ret);
+        return ret;
+    }
+
+    ACameraMetadata* chars = spChars.get();
+    ACameraMetadata_const_entry entry;
+    ret = ACameraMetadata_getConstEntry(chars, ANDROID_SHARED_SESSION_OUTPUT_CONFIGURATIONS,
+            &entry);
+    if (ret != ACAMERA_OK) {
+        return ret;
+    }
+    *isSharingSupported = (entry.count > 0);
+    return ACAMERA_OK;
+}
+
+camera_status_t
+ACameraManager::openCamera(
+        const char* cameraId, bool sharedMode,
         ACameraDevice_StateCallbacks* callback,
-        /*out*/ACameraDevice** outDevice) {
+        /*out*/ACameraDevice** outDevice, /*out*/bool* isPrimaryClient) {
     sp<ACameraMetadata> rawChars;
     camera_status_t ret = getCameraCharacteristics(cameraId, &rawChars);
     Mutex::Autolock _l(mLock);
@@ -802,7 +825,7 @@
         return ACAMERA_ERROR_INVALID_PARAMETER;
     }
 
-    ACameraDevice* device = new ACameraDevice(cameraId, callback, std::move(rawChars));
+    ACameraDevice* device = new ACameraDevice(cameraId, callback, std::move(rawChars), sharedMode);
 
     std::shared_ptr<ICameraService> cs = CameraManagerGlobal::getInstance()->getCameraService();
     if (cs == nullptr) {
@@ -813,11 +836,18 @@
 
     std::shared_ptr<BnCameraDeviceCallback> deviceCallback = device->getServiceCallback();
     std::shared_ptr<ICameraDeviceUser> deviceRemote;
+    ScopedAStatus serviceRet;
 
     // No way to get package name from native.
     // Send a zero length package name and let camera service figure it out from UID
-    ScopedAStatus serviceRet = cs->connectDevice(deviceCallback,
-                                                 std::string(cameraId), &deviceRemote);
+    if (sharedMode) {
+        serviceRet = cs->connectDeviceV2(deviceCallback,
+                std::string(cameraId), sharedMode, &deviceRemote);
+    } else {
+        serviceRet = cs->connectDevice(deviceCallback,
+                std::string(cameraId), &deviceRemote);
+    }
+
     if (!serviceRet.isOk()) {
         if (serviceRet.getExceptionCode() == EX_SERVICE_SPECIFIC) {
             Status errStatus = static_cast<Status>(serviceRet.getServiceSpecificError());
@@ -840,6 +870,13 @@
     }
 
     device->setRemoteDevice(deviceRemote);
+    if (sharedMode) {
+        ScopedAStatus remoteRet = deviceRemote->isPrimaryClient(isPrimaryClient);
+        if (!remoteRet.isOk()) {
+            return ACAMERA_ERROR_UNKNOWN;
+        }
+        device->setPrimaryClient(*isPrimaryClient);
+    }
     device->setDeviceMetadataQueues();
     *outDevice = device;
     return ACAMERA_OK;
diff --git a/camera/ndk/ndk_vendor/impl/ACameraManager.h b/camera/ndk/ndk_vendor/impl/ACameraManager.h
index 5688e76..e9973e6 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraManager.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraManager.h
@@ -261,9 +261,9 @@
     camera_status_t getCameraCharacteristics(
             const char* cameraId, android::sp<ACameraMetadata>* characteristics);
 
-    camera_status_t openCamera(const char* cameraId,
-                               ACameraDevice_StateCallbacks* callback,
-                               /*out*/ACameraDevice** device);
+    camera_status_t openCamera(const char* cameraId, bool sharedMode,
+            ACameraDevice_StateCallbacks* callback, /*out*/ACameraDevice** device,
+            /*out*/bool* primaryClient);
     camera_status_t getTagFromName(const char *cameraId, const char *name, uint32_t *tag);
     void registerAvailabilityCallback(const ACameraManager_AvailabilityCallbacks* callback);
     void unregisterAvailabilityCallback(const ACameraManager_AvailabilityCallbacks* callback);
@@ -271,6 +271,8 @@
             const ACameraManager_ExtendedAvailabilityCallbacks* callback);
     void unregisterExtendedAvailabilityCallback(
             const ACameraManager_ExtendedAvailabilityCallbacks* callback);
+    camera_status_t isCameraDeviceSharingSupported(const char *cameraId,
+            bool *isSharingSupported);
 
   private:
     enum {
diff --git a/camera/tests/CameraBinderTests.cpp b/camera/tests/CameraBinderTests.cpp
index 5135b5d..4384df9 100644
--- a/camera/tests/CameraBinderTests.cpp
+++ b/camera/tests/CameraBinderTests.cpp
@@ -130,6 +130,15 @@
         return binder::Status::ok();
     }
 
+    virtual binder::Status onCameraOpenedInSharedMode(
+            [[maybe_unused]] const std::string& /*cameraId*/,
+            [[maybe_unused]] const std::string& /*clientPackageName*/,
+            [[maybe_unused]] int32_t /*deviceId*/,
+            [[maybe_unused]] bool /*isPrimaryClient*/) override {
+        // No op
+        return binder::Status::ok();
+    }
+
     bool waitForNumCameras(size_t num) const {
         Mutex::Autolock l(mLock);
 
@@ -281,6 +290,12 @@
         return binder::Status::ok();
     }
 
+    virtual binder::Status onClientSharedAccessPriorityChanged(
+            [[maybe_unused]] bool /*isPrimaryClient*/) {
+        // No-op
+        return binder::Status::ok();
+    }
+
     // Test helper functions:
 
     bool hadError() const {
@@ -402,7 +417,8 @@
         res = service->connectDevice(callbacks, cameraId,
                 /*oomScoreOffset*/ 0,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__,
-                /*overrideToPortrait*/false, clientAttribution, /*devicePolicy*/0, /*out*/&device);
+                /*overrideToPortrait*/false, clientAttribution, /*devicePolicy*/0,
+                /*sharedMode*/false, /*out*/&device);
         EXPECT_TRUE(res.isOk()) << res;
         ASSERT_NE(nullptr, device.get());
         device->disconnect();
@@ -451,7 +467,7 @@
                     /*oomScoreOffset*/ 0,
                     /*targetSdkVersion*/__ANDROID_API_FUTURE__,
                     /*overrideToPortrait*/false, clientAttribution, /*devicePolicy*/0,
-                    /*out*/&device);
+                    /*sharedMode*/false, /*out*/&device);
             EXPECT_TRUE(res.isOk()) << res;
         }
         auto p = std::make_pair(callbacks, device);
diff --git a/camera/tests/fuzzer/camera_utils_fuzzer.cpp b/camera/tests/fuzzer/camera_utils_fuzzer.cpp
index c816f82..ca0a06f 100644
--- a/camera/tests/fuzzer/camera_utils_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_utils_fuzzer.cpp
@@ -75,6 +75,7 @@
 
                     CameraUtils::getRotationTransform(
                             staticMetadata, mFDP->ConsumeIntegral<int32_t>() /* mirrorMode */,
+                            true /*enableTransformInverseDisplay*/,
                             &transform /*out*/);
                 },
                 [&]() { CameraUtils::isCameraServiceDisabled(); },
diff --git a/drm/libmediadrm/DrmHalHidl.cpp b/drm/libmediadrm/DrmHalHidl.cpp
index c8c6e8e..33ea5ea 100644
--- a/drm/libmediadrm/DrmHalHidl.cpp
+++ b/drm/libmediadrm/DrmHalHidl.cpp
@@ -339,7 +339,7 @@
             DrmUtils::LOG2BI("makeDrmFactories: using default passthrough drm instance");
             factories.push_back(passthrough);
         } else {
-            DrmUtils::LOG2BE("Failed to find passthrough drm factories");
+            DrmUtils::LOG2BW("Failed to find passthrough drm factories");
         }
     }
     return factories;
diff --git a/media/aconfig/codec_fwk.aconfig b/media/aconfig/codec_fwk.aconfig
index ee697ee..c820a2c 100644
--- a/media/aconfig/codec_fwk.aconfig
+++ b/media/aconfig/codec_fwk.aconfig
@@ -21,6 +21,13 @@
 }
 
 flag {
+  name: "codec_availability"
+  namespace: "codec_fwk"
+  description: "Feature flag for codec availability HAL API support"
+  bug: "363282971"
+}
+
+flag {
   name: "codec_buffer_state_cleanup"
   namespace: "codec_fwk"
   description: "Bugfix flag for more buffer state cleanup in MediaCodec"
diff --git a/media/audio/aconfig/Android.bp b/media/audio/aconfig/Android.bp
index 2da6758..cab126f 100644
--- a/media/audio/aconfig/Android.bp
+++ b/media/audio/aconfig/Android.bp
@@ -120,6 +120,7 @@
         "//frameworks/base/api",
         "//frameworks/base/core/res",
     ],
+    exportable: true,
 }
 
 aconfig_declarations {
@@ -152,6 +153,20 @@
 }
 
 java_aconfig_library {
+    name: "android.media.audio-aconfig-exported-java",
+    aconfig_declarations: "android.media.audio-aconfig",
+    defaults: ["framework-minus-apex-aconfig-java-defaults"],
+    min_sdk_version: "Tiramisu",
+    mode: "exported",
+    apex_available: [
+        "com.android.btservices",
+    ],
+    visibility: [
+        "//packages/modules/Bluetooth:__subpackages__",
+    ],
+}
+
+java_aconfig_library {
     name: "android.media.audiopolicy-aconfig-java",
     aconfig_declarations: "android.media.audiopolicy-aconfig",
     defaults: ["framework-minus-apex-aconfig-java-defaults"],
diff --git a/media/audio/aconfig/audio_framework.aconfig b/media/audio/aconfig/audio_framework.aconfig
index 7ee1360..1450417 100644
--- a/media/audio/aconfig/audio_framework.aconfig
+++ b/media/audio/aconfig/audio_framework.aconfig
@@ -120,6 +120,14 @@
 }
 
 flag {
+    name: "hardening_permission_spa"
+    is_exported: true
+    namespace: "media_audio"
+    description: "Flag for special app access impl for hardening."
+    bug: "376480814"
+}
+
+flag {
     name: "iamf_definitions_api"
     is_exported: true
     namespace: "media_audio"
@@ -199,6 +207,13 @@
 }
 
 flag {
+    name: "spatial_audio_settings_versioning"
+    namespace: "media_audio"
+    description: "introduce versioning of spatial audio settings"
+    bug: "377977731"
+}
+
+flag {
     name: "spatializer_capabilities"
     namespace: "media_audio"
     description: "spatializer reports effective channel masks"
diff --git a/media/codec2/components/apv/C2SoftApvDec.cpp b/media/codec2/components/apv/C2SoftApvDec.cpp
index 0617d06..77305ce 100644
--- a/media/codec2/components/apv/C2SoftApvDec.cpp
+++ b/media/codec2/components/apv/C2SoftApvDec.cpp
@@ -279,6 +279,10 @@
         if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
             pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
         }
+        if (isHalPixelFormatSupported((AHardwareBuffer_Format)AHARDWAREBUFFER_FORMAT_YCbCr_P210)) {
+            pixelFormats.push_back(AHARDWAREBUFFER_FORMAT_YCbCr_P210);
+        }
+
         // If color format surface isn't added to supported formats, there is no way to know
         // when the color-format is configured to surface. This is necessary to be able to
         // choose 10-bit format while decoding 10-bit clips in surface mode.
@@ -374,6 +378,8 @@
         (void)mayBlock;
         ALOGV("%s", __FUNCTION__);
         // take default values for all unspecified fields, and coded values for specified ones
+        ALOGV("%s - coded range: %u, primaries: %u, transfer: %u, matrix: %u",
+             __func__, coded.v.range, coded.v.primaries, coded.v.transfer, coded.v.matrix);
         me.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range;
         me.set().primaries =
                 coded.v.primaries == PRIMARIES_UNSPECIFIED ? def.v.primaries : coded.v.primaries;
@@ -493,10 +499,13 @@
       mOutBufferFlush(nullptr),
       mIvColorformat(IV_YUV_420P),
       mOutputDelay(kDefaultOutputDelay),
+      mHeaderDecoded(false),
+      mOutIndex(0u),
+      mHalPixelFormat(HAL_PIXEL_FORMAT_YV12),
       mWidth(320),
       mHeight(240),
-      mHeaderDecoded(false),
-      mOutIndex(0u) {
+      mSignalledOutputEos(false),
+      mSignalledError(false) {
     oapvdHandle = NULL;
     oapvmHandle = NULL;
     outputCsp = OUTPUT_CSP_NATIVE;
@@ -714,6 +723,22 @@
     }
 }
 
+static void copyBufferP210(uint16_t *dstY, uint16_t *dstUV, const uint16_t *srcY,
+            const uint16_t *srcUV, size_t srcYStride, size_t srcUVStride, size_t dstYStride,
+            size_t dstUVStride, size_t width, size_t height) {
+    for (size_t y = 0; y < height; ++y) {
+        memcpy(dstY, srcY, width * sizeof(uint16_t));
+        srcY += srcYStride;
+        dstY += dstYStride;
+    }
+
+    for (size_t y = 0; y < height; ++y) {
+        memcpy(dstUV, srcUV, width * sizeof(uint16_t));
+        srcUV += srcUVStride;
+        dstUV += dstUVStride;
+    }
+}
+
 static void copyBufferFromYUV422ToYV12(uint8_t* dstY, uint8_t* dstU, uint8_t* dstV,
                                        const uint8_t* srcY, const uint8_t* srcU,
                                        const uint8_t* srcV, size_t srcYStride, size_t srcUStride,
@@ -989,28 +1014,81 @@
     }
 }
 
+void C2SoftApvDec::getVuiParams(VuiColorAspects* buffer) {
+    VuiColorAspects vuiColorAspects;
+    vuiColorAspects.primaries = buffer->primaries;
+    vuiColorAspects.transfer = buffer->transfer;
+    vuiColorAspects.coeffs = buffer->coeffs;
+    vuiColorAspects.fullRange = buffer->fullRange;
+
+    // convert vui aspects to C2 values if changed
+    if (!(vuiColorAspects == mBitstreamColorAspects)) {
+        mBitstreamColorAspects = vuiColorAspects;
+        ColorAspects sfAspects;
+        C2StreamColorAspectsInfo::input codedAspects = { 0u };
+        ColorUtils::convertIsoColorAspectsToCodecAspects(
+                vuiColorAspects.primaries, vuiColorAspects.transfer, vuiColorAspects.coeffs,
+                vuiColorAspects.fullRange, sfAspects);
+        if (!C2Mapper::map(sfAspects.mPrimaries, &codedAspects.primaries)) {
+            codedAspects.primaries = C2Color::PRIMARIES_UNSPECIFIED;
+        }
+        if (!C2Mapper::map(sfAspects.mRange, &codedAspects.range)) {
+            codedAspects.range = C2Color::RANGE_UNSPECIFIED;
+        }
+        if (!C2Mapper::map(sfAspects.mMatrixCoeffs, &codedAspects.matrix)) {
+            codedAspects.matrix = C2Color::MATRIX_UNSPECIFIED;
+        }
+        if (!C2Mapper::map(sfAspects.mTransfer, &codedAspects.transfer)) {
+            codedAspects.transfer = C2Color::TRANSFER_UNSPECIFIED;
+        }
+        ALOGV("colorAspects: primaries:%d, transfer:%d, coeffs:%d, fullRange:%d",
+                codedAspects.primaries, codedAspects.transfer, codedAspects.matrix,
+                codedAspects.range);
+        std::vector<std::unique_ptr<C2SettingResult>> failures;
+        mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures);
+    }
+}
+
 status_t C2SoftApvDec::outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
                                     const std::unique_ptr<C2Work>& work) {
     if (!(work && pool)) return BAD_VALUE;
 
-    oapv_imgb_t* imgbOutput;
+    oapv_imgb_t* imgbOutput = nullptr;
     std::shared_ptr<C2GraphicBlock> block;
 
     if (ofrms.num_frms > 0) {
-        oapv_frm_t* frm = &ofrms.frm[0];
-        imgbOutput = frm->imgb;
+        for (int i = 0; i < ofrms.num_frms; i++) {
+            oapv_frm_t* frm = &ofrms.frm[i];
+            if (frm->pbu_type == OAPV_PBU_TYPE_PRIMARY_FRAME) {
+                imgbOutput = frm->imgb;
+                break;
+            }
+        }
+        if (imgbOutput == nullptr) {
+            ALOGW("No OAPV primary frame");
+            return false;
+        }
     } else {
         ALOGW("No output frames");
         return false;
     }
     bool isMonochrome = OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CS_YCBCR400;
 
+    // TODO: use bitstream color aspect after vui parsing
+    VuiColorAspects colorAspect;
+    colorAspect.primaries = 2;
+    colorAspect.transfer = 2;
+    colorAspect.coeffs = 2;
+    colorAspect.fullRange = 1;
+    getVuiParams(&colorAspect);
+
     uint32_t format = HAL_PIXEL_FORMAT_YV12;
     std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects;
     if (OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs) == 10 &&
         mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
         IntfImpl::Lock lock = mIntf->lock();
         codedColorAspects = mIntf->getColorAspects_l();
+
         bool allowRGBA1010102 = false;
         if (codedColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
             codedColorAspects->matrix == C2Color::MATRIX_BT2020 &&
@@ -1067,7 +1145,34 @@
     size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
     size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
 
-    if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
+    if (format == AHARDWAREBUFFER_FORMAT_YCbCr_P210) {
+        if (OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs) == 10) {
+            const uint16_t *srcY = (const uint16_t *)imgbOutput->a[0];
+            const uint16_t *srcU = (const uint16_t *)imgbOutput->a[1];
+            const uint16_t *srcV = (const uint16_t *)imgbOutput->a[2];
+            size_t srcYStride = imgbOutput->s[0] / 2;
+            size_t srcUStride = imgbOutput->s[1] / 2;
+            size_t srcVStride = imgbOutput->s[2] / 2;
+            dstYStride /= 2;
+            dstUStride /= 2;
+            dstVStride /= 2;
+            ALOGV("OAPV_CS_P210 buffer");
+            copyBufferP210((uint16_t *)dstY, (uint16_t *)dstU, srcY, srcU,
+                            srcYStride, srcUStride, dstYStride, dstUStride, mWidth, mHeight);
+        } else {
+            ALOGE("Not supported convder from bd:%d, format: %d(%s), to format: %d(%s)",
+                OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs),
+                OAPV_CS_GET_FORMAT(imgbOutput->cs),
+                OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR420 ?
+                    "YUV420" : (OAPV_CS_GET_FORMAT(imgbOutput->cs) == OAPV_CF_YCBCR422 ?
+                                 "YUV422" : "UNKNOWN"),
+                format,
+                format == HAL_PIXEL_FORMAT_YCBCR_P010 ?
+                    "P010" : (format == HAL_PIXEL_FORMAT_YCBCR_420_888 ?
+                         "YUV420" : (format == HAL_PIXEL_FORMAT_YV12 ? "YV12" : "UNKNOWN"))
+                );
+        }
+    } else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
         if (OAPV_CS_GET_BIT_DEPTH(imgbOutput->cs) == 10) {
             const uint16_t* srcY = (const uint16_t*)imgbOutput->a[0];
             const uint16_t* srcU = (const uint16_t*)imgbOutput->a[1];
diff --git a/media/codec2/components/apv/C2SoftApvDec.h b/media/codec2/components/apv/C2SoftApvDec.h
index 22bfcff..05afdb2 100644
--- a/media/codec2/components/apv/C2SoftApvDec.h
+++ b/media/codec2/components/apv/C2SoftApvDec.h
@@ -66,7 +66,7 @@
 
 namespace android {
 
-struct C2SoftApvDec : public SimpleC2Component {
+struct C2SoftApvDec final : public SimpleC2Component {
     class IntfImpl;
 
     C2SoftApvDec(const char* name, c2_node_id_t id, const std::shared_ptr<IntfImpl>& intfImpl);
@@ -118,6 +118,26 @@
     uint32_t mHeight;
     bool mSignalledOutputEos;
     bool mSignalledError;
+    // Color aspects. These are ISO values and are meant to detect changes in aspects to avoid
+    // converting them to C2 values for each frame
+    struct VuiColorAspects {
+        uint8_t primaries;
+        uint8_t transfer;
+        uint8_t coeffs;
+        uint8_t fullRange;
+
+        // default color aspects
+        VuiColorAspects()
+            : primaries(C2Color::PRIMARIES_UNSPECIFIED),
+            transfer(C2Color::TRANSFER_UNSPECIFIED),
+            coeffs(C2Color::MATRIX_UNSPECIFIED),
+            fullRange(C2Color::RANGE_UNSPECIFIED) { }
+
+        bool operator==(const VuiColorAspects &o) {
+            return primaries == o.primaries && transfer == o.transfer && coeffs == o.coeffs
+                && fullRange == o.fullRange;
+        }
+    } mBitstreamColorAspects;
 
     oapvd_t oapvdHandle;
     oapvm_t oapvmHandle;
@@ -126,6 +146,8 @@
 
     int outputCsp;
 
+    void getVuiParams(VuiColorAspects* buffer);
+
     C2_DO_NOT_COPY(C2SoftApvDec);
 };
 
diff --git a/media/codec2/components/apv/C2SoftApvEnc.cpp b/media/codec2/components/apv/C2SoftApvEnc.cpp
index 999c08d..d6a9597 100644
--- a/media/codec2/components/apv/C2SoftApvEnc.cpp
+++ b/media/codec2/components/apv/C2SoftApvEnc.cpp
@@ -221,11 +221,14 @@
                              .withSetter(CodedColorAspectsSetter, mColorAspects)
                              .build());
         std::vector<uint32_t> pixelFormats = {
-                HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
+            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
         };
         if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
             pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
         }
+        if (isHalPixelFormatSupported((AHardwareBuffer_Format)AHARDWAREBUFFER_FORMAT_YCbCr_P210)) {
+            pixelFormats.push_back(AHARDWAREBUFFER_FORMAT_YCbCr_P210);
+        }
         addParameter(DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
                              .withDefault(new C2StreamPixelFormatInfo::input(
                                      0u, HAL_PIXEL_FORMAT_YCBCR_P010))
@@ -505,6 +508,8 @@
     mReceivedFrames = 0;
     mReceivedFirstFrame = false;
     mColorFormat = OAPV_CF_PLANAR2;
+    memset(&mInputFrames, 0, sizeof(mInputFrames));
+    memset(&mReconFrames, 0, sizeof(mReconFrames));
     return C2_OK;
 }
 
@@ -512,6 +517,7 @@
     for (int32_t i = 0; i < MAX_NUM_FRMS; i++) {
         if (mInputFrames.frm[i].imgb != nullptr) {
             imgb_release(mInputFrames.frm[i].imgb);
+            mInputFrames.frm[i].imgb = nullptr;
         }
     }
 
@@ -620,12 +626,6 @@
         return C2_NO_MEMORY;
     }
 
-    /* Calculate SDR to HDR mapping values */
-    mSdrToHdrMapping.clear();
-    for (int32_t i = 0; i < 256; i++) {
-        mSdrToHdrMapping.push_back((uint16_t)(i * 1023 / 255 + 0.5));
-    }
-
     mStarted = true;
     mInitEncoder = true;
     return C2_OK;
@@ -689,26 +689,48 @@
 
     switch (layout.type) {
         case C2PlanarLayout::TYPE_RGB:
-            [[fallthrough]];
-        case C2PlanarLayout::TYPE_RGBA: {
-            // TODO: Add RGBA1010102 support
             ALOGE("Not supported RGB color format");
             return C2_BAD_VALUE;
+        case C2PlanarLayout::TYPE_RGBA:
+            [[fallthrough]];
+        case C2PlanarLayout::TYPE_YUVA: {
+            ALOGV("Convert from ABGR2101010 to P210");
+            uint16_t *dstY, *dstU, *dstV;
+            dstY = (uint16_t*)inputFrames->frm[0].imgb->a[0];
+            dstU = (uint16_t*)inputFrames->frm[0].imgb->a[1];
+            dstV = (uint16_t*)inputFrames->frm[0].imgb->a[2];
+            convertRGBA1010102ToYUV420Planar16(dstY, dstU, dstV, (uint32_t*)(input->data()[0]),
+                                                layout.planes[layout.PLANE_Y].rowInc / 4, width,
+                                                height, mColorAspects->matrix,
+                                                mColorAspects->range);
+            break;
         }
         case C2PlanarLayout::TYPE_YUV: {
             if (IsP010(*input)) {
                 if (mColorFormat == OAPV_CF_YCBCR422) {
                     ColorConvertP010ToYUV422P10le(input, inputFrames->frm[0].imgb);
                 } else if (mColorFormat == OAPV_CF_PLANAR2) {
-                    ColorConvertP010ToP210(input, inputFrames->frm[0].imgb);
+                    uint16_t *srcY  = (uint16_t*)(input->data()[0]);
+                    uint16_t *srcUV = (uint16_t*)(input->data()[1]);
+                    uint16_t *dstY  = (uint16_t*)inputFrames->frm[0].imgb->a[0];
+                    uint16_t *dstUV = (uint16_t*)inputFrames->frm[0].imgb->a[1];
+                    convertP010ToP210(dstY, dstUV, srcY, srcUV,
+                                      input->width(), input->width(), input->width(),
+                                      input->height());
                 } else {
                     ALOGE("Not supported color format. %d", mColorFormat);
                     return C2_BAD_VALUE;
                 }
             } else if (IsNV12(*input)) {
-                ColorConvertNv12ToP210(input, inputFrames->frm[0].imgb);
-            } else if (IsNV21(*input)) {
-                ColorConvertNv12ToP210(input, inputFrames->frm[0].imgb);
+                uint8_t  *srcY  = (uint8_t*)input->data()[0];
+                uint8_t  *srcUV = (uint8_t*)input->data()[1];
+                uint16_t *dstY  = (uint16_t*)inputFrames->frm[0].imgb->a[0];
+                uint16_t *dstUV = (uint16_t*)inputFrames->frm[0].imgb->a[1];
+                convertSemiPlanar8ToP210(dstY, dstUV, srcY, srcUV,
+                                         input->width(), input->width(), input->width(),
+                                         input->width(), input->width(), input->height(),
+                                         CONV_FORMAT_I420);
             } else if (IsYUV420(*input)) {
                 return C2_BAD_VALUE;
             } else if (IsI420(*input)) {
@@ -728,46 +750,6 @@
     return C2_OK;
 }
 
-void C2SoftApvEnc::ColorConvertNv12ToP210(const C2GraphicView* const input, oapv_imgb_t* imgb) {
-    auto width = input->width();
-    auto height = input->height();
-
-    auto* yPlane = (uint8_t*)input->data()[0];
-    auto* uvPlane = (uint8_t*)input->data()[1];
-
-    auto* dst = (uint16_t*)imgb->a[0];
-    int32_t lumaOffset = 0;
-    for (int32_t y = 0; y < height; ++y) {
-        for (int32_t x = 0; x < width; ++x) {
-            lumaOffset = y * width + x;
-            dst[lumaOffset] = (mSdrToHdrMapping[yPlane[lumaOffset]] << 6) |
-                              ((mSdrToHdrMapping[yPlane[lumaOffset]] & 0x300) >> 3);
-        }
-    }
-
-    auto* dst_uv = (uint16_t*)imgb->a[1];
-    uint32_t uvDstStride = width;
-    int32_t srcOffset = 0;
-    int32_t dstOffset1 = 0, dstOffset2 = 0;
-    int32_t tmp1 = 0, tmp2 = 0;
-    for (int32_t y = 0; y < height / 2; ++y) {
-        for (int32_t x = 0; x < width; x += 2) {
-            srcOffset = y * width + x;
-            dstOffset1 = (y * 2) * width + x;
-            dstOffset2 = ((y * 2) + 1) * width + x;
-
-            tmp1 = (mSdrToHdrMapping[uvPlane[srcOffset]] << 6) |
-                   ((mSdrToHdrMapping[uvPlane[srcOffset]] & 0x300) >> 3);
-            tmp2 = (mSdrToHdrMapping[uvPlane[srcOffset + 1]] << 6) |
-                   ((mSdrToHdrMapping[uvPlane[srcOffset + 1]] & 0x300) >> 3);
-            dst_uv[dstOffset1] = (uint16_t)tmp1;
-            dst_uv[dstOffset1 + 1] = (uint16_t)tmp2;
-            dst_uv[dstOffset2] = (uint16_t)tmp1;
-            dst_uv[dstOffset2 + 1] = (uint16_t)tmp2;
-        }
-    }
-}
-
 C2Config::level_t C2SoftApvEnc::decisionApvLevel(int32_t width, int32_t height, int32_t fps,
                                                  int32_t bitrate, int32_t band) {
     C2Config::level_t level = C2Config::LEVEL_APV_1_BAND_0;
@@ -882,30 +864,6 @@
     return level;
 }
 
-void C2SoftApvEnc::ColorConvertP010ToP210(const C2GraphicView* const input, oapv_imgb_t* imgb) {
-    auto width = input->width();
-    auto height = input->height();
-
-    auto* yPlane = (uint8_t*)input->data()[0];
-    auto* uvPlane = (uint8_t*)input->data()[1];
-    uint32_t uvStride = width * 2;
-
-    auto* src = yPlane;
-    auto* dst = (uint8_t*)imgb->a[0];
-    std::memcpy(dst, src, width * height * 2);
-
-    auto* dst_uv = (uint8_t*)imgb->a[1];
-    int32_t offset1 = 0, offset2 = 0;
-    for (int32_t y = 0; y < height / 2; ++y) {
-        offset1 = (y * 2) * uvStride;
-        offset2 = (y * 2 + 1) * uvStride;
-        src = uvPlane + (y * uvStride);
-
-        std::memcpy(dst_uv + offset1, src, uvStride);
-        std::memcpy(dst_uv + offset2, src, uvStride);
-    }
-}
-
 void C2SoftApvEnc::ColorConvertP010ToYUV422P10le(const C2GraphicView* const input,
                                                  oapv_imgb_t* imgb) {
     uint32_t width = input->width();
diff --git a/media/codec2/components/apv/C2SoftApvEnc.h b/media/codec2/components/apv/C2SoftApvEnc.h
index 445b042..fc4ad7d 100644
--- a/media/codec2/components/apv/C2SoftApvEnc.h
+++ b/media/codec2/components/apv/C2SoftApvEnc.h
@@ -31,11 +31,11 @@
 #define APV_QP_MIN 1
 #define APV_QP_MAX 51
 
-struct C2SoftApvEnc : public SimpleC2Component {
+struct C2SoftApvEnc final : public SimpleC2Component {
     class IntfImpl;
 
     C2SoftApvEnc(const char* name, c2_node_id_t id, const std::shared_ptr<IntfImpl>& intfImpl);
-    ~C2SoftApvEnc();
+    virtual ~C2SoftApvEnc();
 
     // From SimpleC2Component
     c2_status_t onInit() override;
@@ -65,9 +65,7 @@
 
     void showEncoderParams(oapve_cdesc_t* cdsc);
 
-    void ColorConvertNv12ToP210(const C2GraphicView* const input, oapv_imgb_t* imgb);
     void ColorConvertP010ToYUV422P10le(const C2GraphicView* const input, oapv_imgb_t* imgb);
-    void ColorConvertP010ToP210(const C2GraphicView* const input, oapv_imgb_t* imgb);
 
     void createCsdData(const std::unique_ptr<C2Work>& work, oapv_bitb_t* bitb,
                        uint32_t encodedSize);
@@ -106,7 +104,6 @@
     oapve_t mEncoderId;
     oapvm_t mMetaId;
     uint8_t* mBitstreamBuf = nullptr;
-    std::vector<uint16_t> mSdrToHdrMapping;
     bool mReceivedFirstFrame = false;
     C2_DO_NOT_COPY(C2SoftApvEnc);
 };
diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp
index aec6523..a03f24f 100644
--- a/media/codec2/components/base/SimpleC2Component.cpp
+++ b/media/codec2/components/base/SimpleC2Component.cpp
@@ -463,6 +463,19 @@
     }
 }
 
+void convertP010ToP210(uint16_t *dstY, uint16_t *dstUV, const uint16_t *srcY, const uint16_t *srcUV,
+                       size_t srcUVStride, size_t dstUVStride, size_t width, size_t height) {
+    std::memcpy(dstY, srcY, width * height * sizeof(uint16_t));
+
+    int32_t offsetTop, offsetBot;
+    for (size_t y = 0; y < (height + 1) / 2; ++y) {
+        offsetTop = (y * 2) * dstUVStride;
+        offsetBot = (y * 2 + 1) * dstUVStride;
+        std::memcpy(dstUV + offsetTop, srcUV + (y * srcUVStride), srcUVStride * sizeof(uint16_t));
+        std::memcpy(dstUV + offsetBot, srcUV + (y * srcUVStride), srcUVStride * sizeof(uint16_t));
+    }
+}
+
 static const int16_t bt709Matrix_10bit[2][3][3] = {
     { { 218, 732, 74 }, { -117, -395, 512 }, { 512, -465, -47 } }, /* RANGE_FULL */
     { { 186, 627, 63 }, { -103, -345, 448 }, { 448, -407, -41 } }, /* RANGE_LIMITED */
@@ -517,6 +530,45 @@
     }
 }
 
+void convertRGBA1010102ToP210(uint16_t* dstY, uint16_t* dstUV, const uint32_t* srcRGBA,
+                              size_t srcRGBStride, size_t width, size_t height,
+                              C2Color::matrix_t colorMatrix, C2Color::range_t colorRange) {
+    uint16_t r, g, b;
+    int32_t i32Y, i32U, i32V;
+    uint16_t zeroLvl =  colorRange == C2Color::RANGE_FULL ? 0 : 64;
+    uint16_t maxLvlLuma =  colorRange == C2Color::RANGE_FULL ? 1023 : 940;
+    uint16_t maxLvlChroma =  colorRange == C2Color::RANGE_FULL ? 1023 : 960;
+    // set default range as limited
+    if (colorRange != C2Color::RANGE_FULL) {
+        colorRange = C2Color::RANGE_LIMITED;
+    }
+    const int16_t(*weights)[3] = (colorMatrix == C2Color::MATRIX_BT709)
+                                         ? bt709Matrix_10bit[colorRange - 1]
+                                         : bt2020Matrix_10bit[colorRange - 1];
+
+    for (size_t y = 0; y < height; ++y) {
+        for (size_t x = 0; x < width; ++x) {
+            b = (srcRGBA[x]  >> 20) & 0x3FF;
+            g = (srcRGBA[x]  >> 10) & 0x3FF;
+            r = srcRGBA[x] & 0x3FF;
+
+            i32Y = ((r * weights[0][0] + g * weights[0][1] + b * weights[0][2] + 512) >> 10) +
+                   zeroLvl;
+            dstY[x] = (CLIP3(zeroLvl, i32Y, maxLvlLuma) << 6) & 0xFFC0;
+            if (x % 2 == 0) {
+                i32U = ((r * weights[1][0] + g * weights[1][1] + b * weights[1][2] + 512) >> 10) +
+                       512;
+                i32V = ((r * weights[2][0] + g * weights[2][1] + b * weights[2][2] + 512) >> 10) +
+                       512;
+                dstUV[x] = (CLIP3(zeroLvl, i32U, maxLvlChroma) << 6) & 0xFFC0;
+                dstUV[x + 1] = (CLIP3(zeroLvl, i32V, maxLvlChroma) << 6) & 0xFFC0;
+            }
+        }
+        srcRGBA += srcRGBStride;
+        dstY += width;
+        // P210 is 4:2:2: the interleaved chroma plane has one row per luma row.
+        dstUV += width;
+    }
+}
+
 void convertPlanar16ToY410OrRGBA1010102(uint8_t* dst, const uint16_t* srcY, const uint16_t* srcU,
                                         const uint16_t* srcV, size_t srcYStride, size_t srcUStride,
                                         size_t srcVStride, size_t dstStride, size_t width,
@@ -631,6 +683,36 @@
                                    isMonochrome);
     }
 }
+
+void convertSemiPlanar8ToP210(uint16_t *dstY, uint16_t *dstUV,
+                              const uint8_t *srcY, const uint8_t *srcUV,
+                              size_t srcYStride, size_t srcUVStride,
+                              size_t dstYStride, size_t dstUVStride,
+                              uint32_t width, uint32_t height,
+                              CONV_FORMAT_T format) {
+    if (format != CONV_FORMAT_I420) {
+        ALOGE("No support for semi-planar8 to P210. format is %d", format);
+        return;
+    }
+
+    for (uint32_t y = 0; y < height; ++y) {
+        for (uint32_t x = 0; x < width; ++x) {
+            dstY[x] = ((uint16_t)((double)srcY[x] * 1023 / 255 + 0.5) << 6) & 0xFFC0;
+        }
+        dstY += dstYStride;
+        srcY += srcYStride;
+    }
+
+    for (uint32_t y = 0; y < height / 2; ++y) {
+        for (uint32_t x = 0; x < width; ++x) {
+            dstUV[x] = dstUV[dstUVStride + x] =
+                    ((uint16_t)((double)srcUV[x] * 1023 / 255 + 0.5) << 6) & 0xFFC0;
+        }
+        srcUV += srcUVStride;
+        dstUV += dstUVStride << 1;
+    }
+}
+
 std::unique_ptr<C2Work> SimpleC2Component::WorkQueue::pop_front() {
     std::unique_ptr<C2Work> work = std::move(mQueue.front().work);
     mQueue.pop_front();
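
convertP010ToP210() and convertSemiPlanar8ToP210() feed the APV encoder's 4:2:2 input path: the 4:2:0 chroma plane is row-doubled (P010 input) or widened from 8 to 10 bits (NV12 input). A small sketch of the packed-buffer call, mirroring the strides the encoder passes (buffers and dimensions are illustrative):

    #include <cstdint>
    #include <vector>
    #include "SimpleC2Component.h"  // declares convertP010ToP210()

    static void p010ToP210Example(uint32_t width, uint32_t height) {
        std::vector<uint16_t> srcY(width * height), srcUV(width * height / 2);  // P010 (4:2:0)
        std::vector<uint16_t> dstY(width * height), dstUV(width * height);      // P210 (4:2:2)
        convertP010ToP210(dstY.data(), dstUV.data(), srcY.data(), srcUV.data(),
                          /*srcUVStride=*/width, /*dstUVStride=*/width, width, height);
        // Each interleaved source chroma row is copied twice, turning the half-height
        // 4:2:0 chroma plane into the full-height 4:2:2 plane.
    }
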
diff --git a/media/codec2/components/base/include/SimpleC2Component.h b/media/codec2/components/base/include/SimpleC2Component.h
index b28c47e..4306e55 100644
--- a/media/codec2/components/base/include/SimpleC2Component.h
+++ b/media/codec2/components/base/include/SimpleC2Component.h
@@ -68,10 +68,19 @@
                                  size_t dstUStride, size_t dstVStride, size_t width,
                                  size_t height, bool isMonochrome = false);
 
+void convertP010ToP210(uint16_t *dstY, uint16_t *dstUV, const uint16_t *srcY,
+                       const uint16_t *srcUV, size_t srcUVStride, size_t dstUVStride,
+                       size_t width, size_t height);
+
 void convertRGBA1010102ToYUV420Planar16(uint16_t* dstY, uint16_t* dstU, uint16_t* dstV,
                                         const uint32_t* srcRGBA, size_t srcRGBStride, size_t width,
                                         size_t height, C2Color::matrix_t colorMatrix,
                                         C2Color::range_t colorRange);
+
+void convertRGBA1010102ToP210(uint16_t* dstY, uint16_t* dstUV, const uint32_t* srcRGBA,
+                              size_t srcRGBStride, size_t width, size_t height,
+                              C2Color::matrix_t colorMatrix, C2Color::range_t colorRange);
+
 void convertPlanar16ToY410OrRGBA1010102(uint8_t* dst, const uint16_t* srcY, const uint16_t* srcU,
                                         const uint16_t* srcV, size_t srcYStride, size_t srcUStride,
                                         size_t srcVStride, size_t dstStride, size_t width,
@@ -96,6 +105,12 @@
                           size_t srcUStride, size_t srcVStride, size_t dstYStride,
                           size_t dstUStride, size_t dstVStride, uint32_t width, uint32_t height,
                           bool isMonochrome, CONV_FORMAT_T format);
+void convertSemiPlanar8ToP210(uint16_t *dstY, uint16_t *dstUV,
+                              const uint8_t *srcY, const uint8_t *srcUV,
+                              size_t srcYStride, size_t srcUVStride,
+                              size_t dstYStride, size_t dstUVStride,
+                              uint32_t width, uint32_t height,
+                              CONV_FORMAT_T format);
 
 class SimpleC2Component
         : public C2Component, public std::enable_shared_from_this<SimpleC2Component> {
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDec.cpp b/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
index 4ec26d6..44a8dd1 100644
--- a/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
+++ b/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
@@ -139,8 +139,8 @@
         addParameter(DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
                              .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
                              .withFields({
-                                     C2F(mSize, width).inRange(2, 2048, 2),
-                                     C2F(mSize, height).inRange(2, 2048, 2),
+                                     C2F(mSize, width).inRange(2, 4096, 2),
+                                     C2F(mSize, height).inRange(2, 4096, 2),
                              })
                              .withSetter(MaxPictureSizeSetter, mSize)
                              .build());
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.cpp b/media/codec2/components/vpx/C2SoftVpxDec.cpp
index 318f093..83cbe47 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxDec.cpp
@@ -69,8 +69,8 @@
                 DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
                 .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
                 .withFields({
-                    C2F(mSize, width).inRange(2, 2048),
-                    C2F(mSize, height).inRange(2, 2048),
+                    C2F(mSize, width).inRange(2, 4096),
+                    C2F(mSize, height).inRange(2, 4096),
                 })
                 .withSetter(SizeSetter)
                 .build());
@@ -167,8 +167,8 @@
                 DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
                 .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
                 .withFields({
-                    C2F(mSize, width).inRange(2, 2048, 2),
-                    C2F(mSize, height).inRange(2, 2048, 2),
+                    C2F(mSize, width).inRange(2, 4096, 2),
+                    C2F(mSize, height).inRange(2, 4096, 2),
                 })
                 .withSetter(MaxPictureSizeSetter, mSize)
                 .build());
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index a943626..897a696 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -1907,7 +1907,7 @@
                         bottom = c2_min(bottom, height);
                         if (right > left && bottom > top) {
                             C2Rect rect(right - left, bottom - top);
-                            rect.at(left, top);
+                            rect = rect.at(left, top);
                             c2QpOffsetRects.push_back(C2QpOffsetRectStruct(rect, offset));
                         } else {
                             ALOGE("Rects configuration %s is not valid.", box);
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index 692f700..fd242a1 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -740,6 +740,7 @@
                     pixelFormatMap[HAL_PIXEL_FORMAT_YCBCR_P010]    = COLOR_FormatYUVP010;
                     pixelFormatMap[HAL_PIXEL_FORMAT_RGBA_1010102]  = COLOR_Format32bitABGR2101010;
                     pixelFormatMap[HAL_PIXEL_FORMAT_RGBA_FP16]     = COLOR_Format64bitABGRFloat;
+                    pixelFormatMap[AHARDWAREBUFFER_FORMAT_YCbCr_P210]    = COLOR_FormatYUVP210;
 
                     std::shared_ptr<C2StoreFlexiblePixelFormatDescriptorsInfo> pixelFormatInfo;
                     std::vector<std::unique_ptr<C2Param>> heapParams;
diff --git a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
index 7a33af4..aa87e97 100644
--- a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
@@ -64,6 +64,13 @@
     return kVendorApiLevel >= __ANDROID_API_T__;
 }
 
+static bool isP210Allowed() {
+    static const int32_t kVendorApiLevel =
+        base::GetIntProperty<int32_t>("ro.vendor.api_level", 0);
+
+    return kVendorApiLevel > __ANDROID_API_V__;
+}
+
 bool isHalPixelFormatSupported(AHardwareBuffer_Format format) {
     // HAL_PIXEL_FORMAT_YCBCR_P010 requirement was added in T VSR, although it could have been
     // supported prior to this.
@@ -76,6 +83,12 @@
         return false;
     }
 
+    // P210 is not available before Android B
+    if (format == (AHardwareBuffer_Format)AHARDWAREBUFFER_FORMAT_YCbCr_P210 &&
+            !isP210Allowed()) {
+        return false;
+    }
+
     // Default scenario --- the consumer is display or GPU
     const AHardwareBuffer_Desc consumableForDisplayOrGpu = {
             .width = 320,
diff --git a/media/codec2/vndk/platform/C2BqBuffer.cpp b/media/codec2/vndk/platform/C2BqBuffer.cpp
index 665f9fc..17dfe9c 100644
--- a/media/codec2/vndk/platform/C2BqBuffer.cpp
+++ b/media/codec2/vndk/platform/C2BqBuffer.cpp
@@ -997,7 +997,7 @@
         return -1;
     }
 
-    if (toUsage != graphicBuffer->getUsage()) {
+    if ((toUsage & graphicBuffer->getUsage()) != toUsage) {
         sp<GraphicBuffer> newBuffer = new GraphicBuffer(
             graphicBuffer->handle, GraphicBuffer::CLONE_HANDLE,
             graphicBuffer->width, graphicBuffer->height, graphicBuffer->format,
diff --git a/media/janitors/media_solutions_OWNERS b/media/janitors/media_solutions_OWNERS
index 95c2b97..004fa30 100644
--- a/media/janitors/media_solutions_OWNERS
+++ b/media/janitors/media_solutions_OWNERS
@@ -17,3 +17,8 @@
 
 # MediaRouter and native mirroring only:
 aquilescanta@google.com
+
+# Emergency rollbacks and fixes outside LON timezone
+jmtrivi@google.com # US-MTV
+lajos@google.com # US-MTV
+scottnien@google.com # TW-NTC
diff --git a/media/libaaudio/fuzzer/Android.bp b/media/libaaudio/fuzzer/Android.bp
index a1ed6a0..5b48401 100644
--- a/media/libaaudio/fuzzer/Android.bp
+++ b/media/libaaudio/fuzzer/Android.bp
@@ -60,10 +60,9 @@
         "libaaudio",
         "libaaudio_internal",
         "libaudioclient",
+        "libaudiofoundation",
         "libaudioutils",
         "libbase_ndk",
-        "libcgrouprc",
-        "libcgrouprc_format",
         "libcutils",
         "libjsoncpp",
         "liblog",
diff --git a/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp b/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp
index 1b06ea7..c3b43ab 100644
--- a/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp
+++ b/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp
@@ -17,6 +17,7 @@
 
 #include "aaudio/AAudio.h"
 #include "aaudio/AAudioTesting.h"
+#include "system/aaudio/AAudio.h"
 #include <fuzzer/FuzzedDataProvider.h>
 
 #include <functional>
@@ -183,6 +184,12 @@
           fdp.PickValueInArray({AAUDIO_UNSPECIFIED, fdp.ConsumeIntegral<int32_t>()});
   AAudioStreamBuilder_setFramesPerDataCallback(mAaudioBuilder, framesPerDataCallback);
 
+  const size_t tagsNumBytes = fdp.ConsumeIntegralInRange<size_t>(
+          0, AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE + 10);
+  AAudioStreamBuilder_setTags(mAaudioBuilder,
+                              (tagsNumBytes == 0 ? nullptr
+                                                 : fdp.ConsumeBytesAsString(tagsNumBytes).c_str()));
+
   aaudio_policy_t policy =
           fdp.PickValueInArray({fdp.PickValueInArray(kPolicies), fdp.ConsumeIntegral<int32_t>()});
   AAudio_setMMapPolicy(policy);
@@ -193,6 +200,7 @@
   int32_t maxFrames = 0;
   int32_t count = 0;
   aaudio_stream_state_t state = AAUDIO_STREAM_STATE_UNKNOWN;
+  char tags[AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE + 1];
 
   invokeAAudioSetAPIs(fdp);
 
@@ -312,6 +320,9 @@
                 (void)AAudioStream_getBufferSizeInFrames(mAaudioStream);
             },
             [&]() {
+                (void)AAudioStream_getTags(mAaudioStream, tags);
+            },
+            [&]() {
                 (void)AAudioStream_isMMapUsed(mAaudioStream);
             },
     });
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index e19d526..136edcc 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -1909,11 +1909,33 @@
  * Available since API level 26.
  *
  * @param stream reference provided by AAudioStreamBuilder_openStream()
- * @return actual device ID
+ * @return actual device id. If multiple device ids are in use, the id of the first device
+ *         picked by the audio policy engine is returned.
  */
 AAUDIO_API int32_t AAudioStream_getDeviceId(AAudioStream* _Nonnull stream) __INTRODUCED_IN(26);
 
 /**
+ * Available since API level 36.
+ *
+ * Call this function after AAudioStreamBuilder_openStream().
+ * This function will crash if stream is null.
+ * An array of size 16 should generally be large enough to fit all device identifiers.
+ *
+ * @param stream reference provided by AAudioStreamBuilder_openStream().
+ * @param ids reference to an array of ids.
+ * @param numIds size allocated to the array of ids.
+ *         The input should be the size of the ids array.
+ *         The output will be the actual number of device ids.
+ * @return {@link #AAUDIO_OK} or an error code.
+ *         If numIds is null, return {@link #AAUDIO_ERROR_ILLEGAL_ARGUMENT}.
+ *         If numIds is smaller than the number of device ids, return
+ *         {@link #AAUDIO_ERROR_OUT_OF_RANGE}. The value of numIds will still be updated.
+ *         Otherwise, if ids is null, return {@link #AAUDIO_ERROR_ILLEGAL_ARGUMENT}.
+ */
+AAUDIO_API aaudio_result_t AAudioStream_getDeviceIds(AAudioStream* _Nonnull stream,
+        int32_t* _Nonnull ids, int32_t* _Nonnull numIds) __INTRODUCED_IN(36);
+
+/**
  * Available since API level 26.
  *
  * @param stream reference provided by AAudioStreamBuilder_openStream()
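
AAudioStream_getDeviceIds() follows the query-then-fetch pattern spelled out in the comment above; a minimal sketch (the helper name is illustrative):

    #include <aaudio/AAudio.h>
    #include <cstdio>

    // stream must come from AAudioStreamBuilder_openStream().
    static void printDeviceIds(AAudioStream* stream) {
        int32_t ids[16];      // the header notes 16 is generally large enough
        int32_t numIds = 16;  // in: capacity of ids, out: actual number of device ids
        aaudio_result_t result = AAudioStream_getDeviceIds(stream, ids, &numIds);
        if (result == AAUDIO_ERROR_OUT_OF_RANGE) {
            // numIds now holds the real count; retry with a larger array.
            return;
        }
        if (result != AAUDIO_OK) return;
        for (int32_t i = 0; i < numIds; i++) {
            printf("device id %d: %d\n", i, ids[i]);  // ids[0] == AAudioStream_getDeviceId()
        }
    }
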
diff --git a/media/libaaudio/src/Android.bp b/media/libaaudio/src/Android.bp
index ebb7637..cccb096 100644
--- a/media/libaaudio/src/Android.bp
+++ b/media/libaaudio/src/Android.bp
@@ -103,6 +103,7 @@
         "framework-permission-aidl-cpp",
         "libaaudio_internal",
         "libaudioclient",
+        "libaudiofoundation",
         "libaudioutils",
         "libbinder",
         "libcutils",
@@ -166,6 +167,7 @@
         "framework-permission-aidl-cpp",
         "libaudioclient",
         "libaudioclient_aidl_conversion",
+        "libaudiofoundation",
         "libaudioutils",
         "libbinder",
         "libcutils",
diff --git a/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp b/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
index c53a897..37c1a98 100644
--- a/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
+++ b/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
@@ -34,7 +34,16 @@
 AAudioStreamConfiguration::AAudioStreamConfiguration(const StreamParameters& parcelable) {
     setChannelMask(parcelable.channelMask);
     setSampleRate(parcelable.sampleRate);
-    setDeviceId(parcelable.deviceId);
+    auto deviceIds = android::convertContainer<android::DeviceIdVector>(
+            parcelable.deviceIds, android::aidl2legacy_int32_t_audio_port_handle_t);
+    if (deviceIds.ok()) {
+        setDeviceIds(deviceIds.value());
+    } else {
+        ALOGE("deviceIds (%s) aidl2legacy conversion failed",
+              android::toString(parcelable.deviceIds).c_str());
+        android::DeviceIdVector emptyDeviceIds;
+        setDeviceIds(emptyDeviceIds);
+    }
     static_assert(sizeof(aaudio_sharing_mode_t) == sizeof(parcelable.sharingMode));
     setSharingMode(parcelable.sharingMode);
     auto convFormat = android::aidl2legacy_AudioFormatDescription_audio_format_t(
@@ -87,7 +96,15 @@
     StreamParameters result;
     result.channelMask = getChannelMask();
     result.sampleRate = getSampleRate();
-    result.deviceId = getDeviceId();
+    auto deviceIds = android::convertContainer<std::vector<int32_t>>(
+            getDeviceIds(), android::legacy2aidl_audio_port_handle_t_int32_t);
+    if (deviceIds.ok()) {
+        result.deviceIds = deviceIds.value();
+    } else {
+        ALOGE("deviceIds (%s) legacy2aidl conversion failed",
+              android::toString(getDeviceIds()).c_str());
+        result.deviceIds = {};
+    }
     static_assert(sizeof(aaudio_sharing_mode_t) == sizeof(result.sharingMode));
     result.sharingMode = getSharingMode();
     auto convAudioFormat = android::legacy2aidl_audio_format_t_AudioFormatDescription(getFormat());
diff --git a/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl b/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl
index a301da8..7d7abce 100644
--- a/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl
+++ b/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl
@@ -21,7 +21,7 @@
 parcelable StreamParameters {
     int                                       channelMask;  //          = AAUDIO_UNSPECIFIED;
     int                                       sampleRate;  //           = AAUDIO_UNSPECIFIED;
-    int                                       deviceId;  //             = AAUDIO_UNSPECIFIED;
+    int[]                                     deviceIds;  //            = null;
     int /* aaudio_sharing_mode_t */           sharingMode;  //          = AAUDIO_SHARING_MODE_SHARED;
     AudioFormatDescription                    audioFormat;  //          = AUDIO_FORMAT_DEFAULT;
     int /* aaudio_direction_t */              direction;  //            = AAUDIO_DIRECTION_OUTPUT;
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index 99b90e2..6bc7dc2 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -121,7 +121,7 @@
     request.setSharingModeMatchRequired(isSharingModeMatchRequired());
     request.setInService(isInService());
 
-    request.getConfiguration().setDeviceId(getDeviceId());
+    request.getConfiguration().setDeviceIds(getDeviceIds());
     request.getConfiguration().setSampleRate(getSampleRate());
     request.getConfiguration().setDirection(getDirection());
     request.getConfiguration().setSharingMode(getSharingMode());
@@ -180,7 +180,7 @@
         setChannelMask(configurationOutput.getChannelMask());
     }
 
-    setDeviceId(configurationOutput.getDeviceId());
+    setDeviceIds(configurationOutput.getDeviceIds());
     setSessionId(configurationOutput.getSessionId());
     setSharingMode(configurationOutput.getSharingMode());
 
diff --git a/media/libaaudio/src/core/AAudioAudio.cpp b/media/libaaudio/src/core/AAudioAudio.cpp
index fb87dd9..64152f0 100644
--- a/media/libaaudio/src/core/AAudioAudio.cpp
+++ b/media/libaaudio/src/core/AAudioAudio.cpp
@@ -95,7 +95,11 @@
                                                 int32_t deviceId)
 {
     AudioStreamBuilder *streamBuilder = convertAAudioBuilderToStreamBuilder(builder);
-    streamBuilder->setDeviceId(deviceId);
+    android::DeviceIdVector deviceIds;
+    if (deviceId != AAUDIO_UNSPECIFIED) {
+        deviceIds.push_back(deviceId);
+    }
+    streamBuilder->setDeviceIds(deviceIds);
 }
 
 AAUDIO_API void AAudioStreamBuilder_setPackageName(AAudioStreamBuilder* builder,
@@ -537,7 +541,33 @@
 AAUDIO_API int32_t AAudioStream_getDeviceId(AAudioStream* stream)
 {
     AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
-    return audioStream->getDeviceId();
+    auto deviceIds = audioStream->getDeviceIds();
+    if (deviceIds.empty()) {
+        return AAUDIO_UNSPECIFIED;
+    }
+    return deviceIds[0];
+}
+
+AAUDIO_API aaudio_result_t AAudioStream_getDeviceIds(AAudioStream* stream, int32_t* ids,
+                                                     int32_t* numIds)
+{
+    if (numIds == nullptr) {
+        return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+    }
+    AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
+    auto deviceIds = audioStream->getDeviceIds();
+    if (*numIds < static_cast<int32_t>(deviceIds.size())) {
+        *numIds = static_cast<int32_t>(deviceIds.size());
+        return AAUDIO_ERROR_OUT_OF_RANGE;
+    }
+    if (ids == nullptr) {
+        return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+    }
+    for (size_t i = 0; i < deviceIds.size(); i++) {
+        ids[i] = deviceIds[i];
+    }
+    *numIds = static_cast<int32_t>(deviceIds.size());
+    return AAUDIO_OK;
 }
 
 AAUDIO_API aaudio_sharing_mode_t AAudioStream_getSharingMode(AAudioStream* stream)
diff --git a/media/libaaudio/src/core/AAudioStreamParameters.cpp b/media/libaaudio/src/core/AAudioStreamParameters.cpp
index 056918a..f504fa9 100644
--- a/media/libaaudio/src/core/AAudioStreamParameters.cpp
+++ b/media/libaaudio/src/core/AAudioStreamParameters.cpp
@@ -27,7 +27,7 @@
 void AAudioStreamParameters::copyFrom(const AAudioStreamParameters &other) {
     mSamplesPerFrame      = other.mSamplesPerFrame;
     mSampleRate           = other.mSampleRate;
-    mDeviceId             = other.mDeviceId;
+    mDeviceIds            = other.mDeviceIds;
     mSessionId            = other.mSessionId;
     mSharingMode          = other.mSharingMode;
     mAudioFormat          = other.mAudioFormat;
@@ -74,9 +74,13 @@
         return AAUDIO_ERROR_OUT_OF_RANGE;
     }
 
-    if (mDeviceId < 0) {
-        ALOGD("deviceId out of range = %d", mDeviceId);
-        return AAUDIO_ERROR_OUT_OF_RANGE;
+    // TODO(b/379139078): Query AudioSystem::listAudioPorts
+    for (auto deviceId : mDeviceIds) {
+        if (deviceId < 0) {
+            ALOGE("deviceId out of range = %d, deviceIds = %s", deviceId,
+                      android::toString(mDeviceIds).c_str());
+            return AAUDIO_ERROR_OUT_OF_RANGE;
+        }
     }
 
     // All Session ID values are legal.
@@ -296,7 +300,7 @@
 }
 
 void AAudioStreamParameters::dump() const {
-    ALOGD("mDeviceId             = %6d", mDeviceId);
+    ALOGD("mDeviceIds            = %s",  android::toString(mDeviceIds).c_str());
     ALOGD("mSessionId            = %6d", mSessionId);
     ALOGD("mSampleRate           = %6d", mSampleRate);
     ALOGD("mSamplesPerFrame      = %6d", mSamplesPerFrame);
diff --git a/media/libaaudio/src/core/AAudioStreamParameters.h b/media/libaaudio/src/core/AAudioStreamParameters.h
index cad27a7..c4c0a4f 100644
--- a/media/libaaudio/src/core/AAudioStreamParameters.h
+++ b/media/libaaudio/src/core/AAudioStreamParameters.h
@@ -20,6 +20,7 @@
 #include <stdint.h>
 
 #include <aaudio/AAudio.h>
+#include <media/AudioContainers.h>
 #include <utility/AAudioUtilities.h>
 
 namespace aaudio {
@@ -29,12 +30,12 @@
     AAudioStreamParameters() = default;
     virtual ~AAudioStreamParameters() = default;
 
-    int32_t getDeviceId() const {
-        return mDeviceId;
+    android::DeviceIdVector getDeviceIds() const {
+        return mDeviceIds;
     }
 
-    void setDeviceId(int32_t deviceId) {
-        mDeviceId = deviceId;
+    void setDeviceIds(const android::DeviceIdVector& deviceIds) {
+        mDeviceIds = deviceIds;
     }
 
     int32_t getSampleRate() const {
@@ -225,7 +226,7 @@
 
     int32_t                         mSamplesPerFrame      = AAUDIO_UNSPECIFIED;
     int32_t                         mSampleRate           = AAUDIO_UNSPECIFIED;
-    int32_t                         mDeviceId             = AAUDIO_UNSPECIFIED;
+    android::DeviceIdVector         mDeviceIds;
     aaudio_sharing_mode_t           mSharingMode          = AAUDIO_SHARING_MODE_SHARED;
     audio_format_t                  mAudioFormat          = AUDIO_FORMAT_DEFAULT;
     aaudio_direction_t              mDirection            = AAUDIO_DIRECTION_OUTPUT;
diff --git a/media/libaaudio/src/core/AudioStream.cpp b/media/libaaudio/src/core/AudioStream.cpp
index a75a2a1..8e3bcf7 100644
--- a/media/libaaudio/src/core/AudioStream.cpp
+++ b/media/libaaudio/src/core/AudioStream.cpp
@@ -79,7 +79,7 @@
     mSamplesPerFrame = builder.getSamplesPerFrame();
     mChannelMask = builder.getChannelMask();
     mSampleRate = builder.getSampleRate();
-    mDeviceId = builder.getDeviceId();
+    mDeviceIds = builder.getDeviceIds();
     mFormat = builder.getFormat();
     mSharingMode = builder.getSharingMode();
     mSharingModeMatchRequired = builder.isSharingModeMatchRequired();
@@ -204,7 +204,7 @@
     aaudio_result_t result = requestStart_l();
     if (result == AAUDIO_OK) {
         // We only call this for logging in "dumpsys audio". So ignore return code.
-        (void) mPlayerBase->startWithStatus(getDeviceId());
+        (void) mPlayerBase->startWithStatus(getDeviceIds());
     }
     return result;
 }
diff --git a/media/libaaudio/src/core/AudioStream.h b/media/libaaudio/src/core/AudioStream.h
index 3271882..3354adf 100644
--- a/media/libaaudio/src/core/AudioStream.h
+++ b/media/libaaudio/src/core/AudioStream.h
@@ -27,6 +27,7 @@
 #include <utils/StrongPointer.h>
 
 #include <aaudio/AAudio.h>
+#include <media/AudioContainers.h>
 #include <media/AudioSystem.h>
 #include <media/PlayerBase.h>
 #include <media/VolumeShaper.h>
@@ -268,8 +269,8 @@
         mPerformanceMode = performanceMode;
     }
 
-    int32_t getDeviceId() const {
-        return mDeviceId;
+    android::DeviceIdVector getDeviceIds() const {
+        return mDeviceIds;
     }
 
     aaudio_sharing_mode_t getSharingMode() const {
@@ -411,7 +412,7 @@
 
     // Implement AudioDeviceCallback
     void onAudioDeviceUpdate(audio_io_handle_t audioIo,
-            audio_port_handle_t deviceId) override {};
+            const android::DeviceIdVector& deviceIds) override {};
 
     // ============== I/O ===========================
     // A Stream will only implement read() or write() depending on its direction.
@@ -632,8 +633,8 @@
     }
     void setDisconnected();
 
-    void setDeviceId(int32_t deviceId) {
-        mDeviceId = deviceId;
+    void setDeviceIds(const android::DeviceIdVector& deviceIds) {
+        mDeviceIds = deviceIds;
     }
 
     // This should not be called after the open() call.
@@ -774,7 +775,7 @@
     int32_t                     mSampleRate = AAUDIO_UNSPECIFIED;
     int32_t                     mDeviceSampleRate = AAUDIO_UNSPECIFIED;
     int32_t                     mHardwareSampleRate = AAUDIO_UNSPECIFIED;
-    int32_t                     mDeviceId = AAUDIO_UNSPECIFIED;
+    android::DeviceIdVector     mDeviceIds;
     aaudio_sharing_mode_t       mSharingMode = AAUDIO_SHARING_MODE_SHARED;
     bool                        mSharingModeMatchRequired = false; // must match sharing mode requested
     audio_format_t              mFormat = AUDIO_FORMAT_DEFAULT;
diff --git a/media/libaaudio/src/core/AudioStreamBuilder.cpp b/media/libaaudio/src/core/AudioStreamBuilder.cpp
index b0dc669..73bd69f 100644
--- a/media/libaaudio/src/core/AudioStreamBuilder.cpp
+++ b/media/libaaudio/src/core/AudioStreamBuilder.cpp
@@ -307,8 +307,8 @@
           getSampleRate(), getSamplesPerFrame(), getChannelMask(), getFormat(),
           AAudio_convertSharingModeToShortText(getSharingMode()),
           AAudio_convertDirectionToText(getDirection()));
-    ALOGI("device = %6d, sessionId = %d, perfMode = %d, callback: %s with frames = %d",
-          getDeviceId(),
+    ALOGI("devices = %s, sessionId = %d, perfMode = %d, callback: %s with frames = %d",
+          android::toString(getDeviceIds()).c_str(),
           getSessionId(),
           getPerformanceMode(),
           ((getDataCallbackProc() != nullptr) ? "ON" : "OFF"),
diff --git a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
index 255bd0f..dfb9a01 100644
--- a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
@@ -260,36 +260,41 @@
 }
 
 void AudioStreamLegacy::onAudioDeviceUpdate(audio_io_handle_t /* audioIo */,
-            audio_port_handle_t deviceId) {
-    // Check for an invalid deviceId. Why change to UNSPECIFIED?
-    if (deviceId == AAUDIO_UNSPECIFIED) {
-        ALOGE("%s(, deviceId = AAUDIO_UNSPECIFIED)! Why?", __func__);
+            const android::DeviceIdVector& deviceIds) {
+    // Check for empty deviceIds. Callbacks for duplicating threads return an empty device list.
+    if (deviceIds.empty()) {
+        ALOGW("%s() called with empty deviceIds", __func__);
         return;
     }
+    android::DeviceIdVector oldDeviceIds = getDeviceIds();
     // Device routing is a common source of errors and DISCONNECTS.
     // Please leave this log in place. If there is a bug then this might
     // get called after the stream has been deleted so log before we
     // touch the stream object.
-    ALOGD("%s(deviceId = %d)", __func__, (int)deviceId);
-    if (getDeviceId() != AAUDIO_UNSPECIFIED
-            && getDeviceId() != deviceId
+    ALOGD("%s() devices %s => %s",
+            __func__, android::toString(oldDeviceIds).c_str(),
+            android::toString(deviceIds).c_str());
+    if (!oldDeviceIds.empty()
+            && !android::areDeviceIdsEqual(oldDeviceIds, deviceIds)
             && !isDisconnected()
             ) {
         // Note that isDataCallbackActive() is affected by state so call it before DISCONNECTING.
         // If we have a data callback and the stream is active, then ask the data callback
         // to DISCONNECT and call the error callback.
         if (isDataCallbackActive()) {
-            ALOGD("%s() request DISCONNECT in data callback, device %d => %d",
-                  __func__, (int) getDeviceId(), (int) deviceId);
+            ALOGD("%s() request DISCONNECT in data callback, devices %s => %s",
+                    __func__, android::toString(oldDeviceIds).c_str(),
+                    android::toString(deviceIds).c_str());
             // If the stream is stopped before the data callback has a chance to handle the
             // request then the requestStop_l() and requestPause() methods will handle it after
             // the callback has stopped.
             mRequestDisconnect.request();
         } else {
-            ALOGD("%s() DISCONNECT the stream now, device %d => %d",
-                  __func__, (int) getDeviceId(), (int) deviceId);
+            ALOGD("%s() DISCONNECT the stream now, devices %s => %s",
+                    __func__, android::toString(oldDeviceIds).c_str(),
+                    android::toString(deviceIds).c_str());
             forceDisconnect();
         }
     }
-    setDeviceId(deviceId);
+    setDeviceIds(deviceIds);
 }
diff --git a/media/libaaudio/src/legacy/AudioStreamLegacy.h b/media/libaaudio/src/legacy/AudioStreamLegacy.h
index 53f6e06..a729161 100644
--- a/media/libaaudio/src/legacy/AudioStreamLegacy.h
+++ b/media/libaaudio/src/legacy/AudioStreamLegacy.h
@@ -95,7 +95,7 @@
                                      android::ExtendedTimestamp *extendedTimestamp);
 
     void onAudioDeviceUpdate(audio_io_handle_t audioIo,
-            audio_port_handle_t deviceId) override;
+            const android::DeviceIdVector& deviceIds) override;
 
     /*
      * Check to see whether a callback thread has requested a disconnect.
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.cpp b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
index fe4bf2c..1591f7d 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
@@ -112,9 +112,7 @@
     mCallbackBufferSize = builder.getFramesPerDataCallback();
 
     // Don't call mAudioRecord->setInputDevice() because it will be overwritten by set()!
-    audio_port_handle_t selectedDeviceId = (getDeviceId() == AAUDIO_UNSPECIFIED)
-                                           ? AUDIO_PORT_HANDLE_NONE
-                                           : getDeviceId();
+    audio_port_handle_t selectedDeviceId = getFirstDeviceId(getDeviceIds());
 
     const audio_content_type_t contentType =
             AAudioConvert_contentTypeToInternal(builder.getContentType());
@@ -198,7 +196,8 @@
                  AudioGlobal_convertPerformanceModeToText(builder.getPerformanceMode()))
             .set(AMEDIAMETRICS_PROP_SHARINGMODE,
                  AudioGlobal_convertSharingModeToText(builder.getSharingMode()))
-            .set(AMEDIAMETRICS_PROP_ENCODINGCLIENT, toString(requestedFormat).c_str()).record();
+            .set(AMEDIAMETRICS_PROP_ENCODINGCLIENT,
+                 android::toString(requestedFormat).c_str()).record();
 
     // Get the actual values from the AudioRecord.
     setChannelMask(AAudioConvert_androidToAAudioChannelMask(
@@ -275,7 +274,7 @@
              perfMode, actualPerformanceMode);
 
     setState(AAUDIO_STREAM_STATE_OPEN);
-    setDeviceId(mAudioRecord->getRoutedDeviceId());
+    setDeviceIds(mAudioRecord->getRoutedDeviceIds());
 
     aaudio_session_id_t actualSessionId =
             (requestedSessionId == AAUDIO_SESSION_ID_NONE)
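Note: the call sites above and below rely on helpers declared in media/AudioContainers.h (getFirstDeviceId(), areDeviceIdsEqual(), android::toString()), which is why libaudiofoundation is added to the AAudio dependencies. A rough sketch of the semantics this patch assumes from the first two helpers (not the actual libaudiofoundation implementation):

    #include <vector>
    #include <system/audio.h>

    namespace android {
    // Assumed alias; the real definition lives in media/AudioContainers.h.
    using DeviceIdVector = std::vector<audio_port_handle_t>;

    // An empty vector maps back to AUDIO_PORT_HANDLE_NONE, which is why the
    // explicit AAUDIO_UNSPECIFIED checks above could be dropped.
    inline audio_port_handle_t getFirstDeviceId(const DeviceIdVector& deviceIds) {
        return deviceIds.empty() ? AUDIO_PORT_HANDLE_NONE : deviceIds[0];
    }

    // Element-wise comparison; used to detect routing changes.
    inline bool areDeviceIdsEqual(const DeviceIdVector& lhs, const DeviceIdVector& rhs) {
        return lhs == rhs;
    }
    }  // namespace android
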
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index 16c0bcd..2e57f0d 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -132,9 +132,7 @@
           notificationFrames, (uint)frameCount);
 
     // Don't call mAudioTrack->setDeviceId() because it will be overwritten by set()!
-    audio_port_handle_t selectedDeviceId = (getDeviceId() == AAUDIO_UNSPECIFIED)
-                                           ? AUDIO_PORT_HANDLE_NONE
-                                           : getDeviceId();
+    audio_port_handle_t selectedDeviceId = getFirstDeviceId(getDeviceIds());
 
     const audio_content_type_t contentType =
             AAudioConvert_contentTypeToInternal(builder.getContentType());
@@ -197,7 +195,8 @@
                  AudioGlobal_convertPerformanceModeToText(builder.getPerformanceMode()))
             .set(AMEDIAMETRICS_PROP_SHARINGMODE,
                  AudioGlobal_convertSharingModeToText(builder.getSharingMode()))
-            .set(AMEDIAMETRICS_PROP_ENCODINGCLIENT, toString(getFormat()).c_str()).record();
+            .set(AMEDIAMETRICS_PROP_ENCODINGCLIENT,
+                 android::toString(getFormat()).c_str()).record();
 
     doSetVolume();
 
@@ -233,7 +232,7 @@
         mBlockAdapter = nullptr;
     }
 
-    setDeviceId(mAudioTrack->getRoutedDeviceId());
+    setDeviceIds(mAudioTrack->getRoutedDeviceIds());
 
     aaudio_session_id_t actualSessionId =
             (requestedSessionId == AAUDIO_SESSION_ID_NONE)
@@ -317,7 +316,7 @@
     if (mAudioTrack->channelCount() != getSamplesPerFrame()
           || mAudioTrack->format() != getFormat()
           || mAudioTrack->getSampleRate() != getSampleRate()
-          || mAudioTrack->getRoutedDeviceId() != getDeviceId()
+          || !areDeviceIdsEqual(mAudioTrack->getRoutedDeviceIds(), getDeviceIds())
           || getBufferCapacityFromDevice() != getBufferCapacity()
           || getFramesPerBurstFromDevice() != getFramesPerBurst()) {
         AudioStreamLegacy::onNewIAudioTrack();
diff --git a/media/libaaudio/src/libaaudio.map.txt b/media/libaaudio/src/libaaudio.map.txt
index 13c19a1..36d76aa 100644
--- a/media/libaaudio/src/libaaudio.map.txt
+++ b/media/libaaudio/src/libaaudio.map.txt
@@ -72,6 +72,7 @@
     AAudioStream_getHardwareSampleRate;   # introduced=UpsideDownCake
     AAudio_getPlatformMMapPolicy; # introduced=36
     AAudio_getPlatformMMapExclusivePolicy; #introduced=36
+    AAudioStream_getDeviceIds; # introduced=36
 
     AAudioStreamBuilder_setTags; # systemapi
     AAudioStream_getTags; # systemapi
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index 1e6be68..9a4b45d 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -731,7 +731,7 @@
         mSelectedDeviceId = deviceId;
         if (mStatus == NO_ERROR) {
             if (mActive) {
-                if (mSelectedDeviceId != mRoutedDeviceId) {
+                if (getFirstDeviceId(mRoutedDeviceIds) != mSelectedDeviceId) {
                     // stop capture so that audio policy manager does not reject the new instance
                     // start request as only one capture can be active at a time.
                     if (mAudioRecord != 0) {
@@ -758,7 +758,7 @@
 }
 
 // must be called with mLock held
-void AudioRecord::updateRoutedDeviceId_l()
+void AudioRecord::updateRoutedDeviceIds_l()
 {
     // if the record is inactive, do not update actual device as the input stream maybe routed
     // from a device not relevant to this client because of other active use cases.
@@ -766,17 +766,21 @@
         return;
     }
     if (mInput != AUDIO_IO_HANDLE_NONE) {
-        audio_port_handle_t deviceId = AudioSystem::getDeviceIdForIo(mInput);
-        if (deviceId != AUDIO_PORT_HANDLE_NONE) {
-            mRoutedDeviceId = deviceId;
+        DeviceIdVector deviceIds;
+        status_t result = AudioSystem::getDeviceIdsForIo(mInput, deviceIds);
+        if (result != OK) {
+            ALOGW("%s: getDeviceIdsForIo returned: %d", __func__, result);
+        }
+        if (!deviceIds.empty()) {
+            mRoutedDeviceIds = deviceIds;
         }
      }
 }
 
-audio_port_handle_t AudioRecord::getRoutedDeviceId() {
+DeviceIdVector AudioRecord::getRoutedDeviceIds() {
     AutoMutex lock(mLock);
-    updateRoutedDeviceId_l();
-    return mRoutedDeviceId;
+    updateRoutedDeviceIds_l();
+    return mRoutedDeviceIds;
 }
 
 status_t AudioRecord::dump(int fd, const Vector<String16>& args __unused) const
@@ -794,10 +798,11 @@
                   mFrameCount, mReqFrameCount);
     result.appendFormat("  notif. frame count(%u), req. notif. frame count(%u)\n",
              mNotificationFramesAct, mNotificationFramesReq);
-    result.appendFormat("  input(%d), latency(%u), selected device Id(%d), routed device Id(%d)\n",
-                        mInput, mLatency, mSelectedDeviceId, mRoutedDeviceId);
-    result.appendFormat("  mic direction(%d) mic field dimension(%f)",
-                        mSelectedMicDirection, mSelectedMicFieldDimension);
+    result.appendFormat("  input(%d), latency(%u), selected device Id(%d)\n",
+                        mInput, mLatency, mSelectedDeviceId);
+    result.appendFormat("  routed device Ids(%s), mic direction(%d) mic field dimension(%f)",
+                        toString(mRoutedDeviceIds).c_str(), mSelectedMicDirection,
+                        mSelectedMicFieldDimension);
     ::write(fd, result.c_str(), result.size());
     return NO_ERROR;
 }
@@ -940,7 +945,7 @@
         mAwaitBoost = true;
     }
     mFlags = output.flags;
-    mRoutedDeviceId = output.selectedDeviceId;
+    mRoutedDeviceIds = { output.selectedDeviceId };
     mSessionId = output.sessionId;
     mSampleRate = output.sampleRate;
     mServerConfig = output.serverConfig;
@@ -1063,7 +1068,8 @@
         .set(AMEDIAMETRICS_PROP_SOURCE, toString(mAttributes.source).c_str())
         .set(AMEDIAMETRICS_PROP_THREADID, (int32_t)output.inputId)
         .set(AMEDIAMETRICS_PROP_SELECTEDDEVICEID, (int32_t)mSelectedDeviceId)
-        .set(AMEDIAMETRICS_PROP_ROUTEDDEVICEID, (int32_t)mRoutedDeviceId)
+        .set(AMEDIAMETRICS_PROP_ROUTEDDEVICEID, (int32_t)(getFirstDeviceId(mRoutedDeviceIds)))
+        .set(AMEDIAMETRICS_PROP_ROUTEDDEVICEIDS, toString(mRoutedDeviceIds).c_str())
         .set(AMEDIAMETRICS_PROP_ENCODING, toString(mFormat).c_str())
         .set(AMEDIAMETRICS_PROP_CHANNELMASK, (int32_t)mChannelMask)
         .set(AMEDIAMETRICS_PROP_FRAMECOUNT, (int32_t)mFrameCount)
@@ -1656,7 +1662,7 @@
 }
 
 void AudioRecord::onAudioDeviceUpdate(audio_io_handle_t audioIo,
-                                 audio_port_handle_t deviceId)
+                                      const DeviceIdVector& deviceIds)
 {
     sp<AudioSystem::AudioDeviceCallback> callback;
     {
@@ -1668,11 +1674,11 @@
         // only update device if the record is active as route changes due to other use cases are
         // irrelevant for this client
         if (mActive) {
-            mRoutedDeviceId = deviceId;
+            mRoutedDeviceIds = deviceIds;
         }
     }
     if (callback.get() != nullptr) {
-        callback->onAudioDeviceUpdate(mInput, mRoutedDeviceId);
+        callback->onAudioDeviceUpdate(mInput, mRoutedDeviceIds);
     }
 }
 
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index f68b506..b8dadb4 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -95,7 +95,7 @@
 // ServiceSingleton to provide interaction with the service notifications and
 // binder death notifications.
 //
-// If the AF/AP service is unavailable for kServiceWaitMs from ServiceManager,
+// If the AF/AP service is unavailable for kServiceClientWaitMs from ServiceManager,
 // ServiceSingleton will return a nullptr service handle resulting in the same dead object error
 // as if the service died (which it did, otherwise we'd be returning the cached handle).
 //
@@ -127,11 +127,12 @@
 // Such an audioserver failure is considered benign as the ground truth is stored in
 // the Java AudioService and can be restored once audioserver has finished initialization.
 //
-// TODO(b/375691003) We use 10s as a conservative timeout value, and will tune closer to 3s.
+// TODO(b/375691003) We use 5s as a conservative timeout value, and will tune closer to 3s.
 // Too small a value (i.e. less than 1s would churn repeated calls to get the service).
-static constexpr int32_t kServiceWaitMs = 10'000;
+// The value can be tuned by the property audio.service.client_wait_ms.
+static constexpr int32_t kServiceClientWaitMs = 5'000;
 
-static constexpr const char kServiceWaitProperty[] = "audio.service.wait_ms";
+static constexpr const char kServiceWaitProperty[] = "audio.service.client_wait_ms";
 
 // AudioFlingerServiceTraits is a collection of methods that parameterize the
 // ServiceSingleton handler for IAudioFlinger
@@ -172,7 +173,7 @@
             }
             mediautils::initService<media::IAudioFlingerService, AudioFlingerServiceTraits>();
             mWaitMs = std::chrono::milliseconds(
-                property_get_int32(kServiceWaitProperty, kServiceWaitMs));
+                property_get_int32(kServiceWaitProperty, kServiceClientWaitMs));
             init = true;
         }
         if (mValid) return mService;
@@ -272,7 +273,8 @@
     static inline constinit std::mutex mMutex;
     static inline constinit sp<AudioSystem::AudioFlingerClient> mClient GUARDED_BY(mMutex);
     static inline constinit sp<IAudioFlinger> mService GUARDED_BY(mMutex);
-    static inline constinit std::chrono::milliseconds mWaitMs GUARDED_BY(mMutex) {kServiceWaitMs};
+    static inline constinit std::chrono::milliseconds mWaitMs
+            GUARDED_BY(mMutex) {kServiceClientWaitMs};
     static inline constinit bool mValid GUARDED_BY(mMutex) = false;
     static inline constinit std::atomic_bool mDisableThreadPoolStart = false;
 };
@@ -681,7 +683,7 @@
 
     if (ioDesc->getIoHandle() == AUDIO_IO_HANDLE_NONE) return Status::ok();
 
-    audio_port_handle_t deviceId = AUDIO_PORT_HANDLE_NONE;
+    DeviceIdVector deviceIds;
     std::vector<sp<AudioDeviceCallback>> callbacksToCall;
     {
         std::lock_guard _l(mMutex);
@@ -693,12 +695,12 @@
             case AUDIO_INPUT_OPENED:
             case AUDIO_INPUT_REGISTERED: {
                 if (sp<AudioIoDescriptor> oldDesc = getIoDescriptor_l(ioDesc->getIoHandle())) {
-                    deviceId = oldDesc->getDeviceId();
+                    deviceIds = oldDesc->getDeviceIds();
                 }
                 mIoDescriptors[ioDesc->getIoHandle()] = ioDesc;
 
-                if (ioDesc->getDeviceId() != AUDIO_PORT_HANDLE_NONE) {
-                    deviceId = ioDesc->getDeviceId();
+                if (!ioDesc->getDeviceIds().empty()) {
+                    deviceIds = ioDesc->getDeviceIds();
                     if (event == AUDIO_OUTPUT_OPENED || event == AUDIO_INPUT_OPENED) {
                         auto it = mAudioDeviceCallbacks.find(ioDesc->getIoHandle());
                         if (it != mAudioDeviceCallbacks.end()) {
@@ -739,11 +741,12 @@
                     break;
                 }
 
-                deviceId = oldDesc->getDeviceId();
+                deviceIds = oldDesc->getDeviceIds();
                 mIoDescriptors[ioDesc->getIoHandle()] = ioDesc;
 
-                if (deviceId != ioDesc->getDeviceId()) {
-                    deviceId = ioDesc->getDeviceId();
+                DeviceIdVector ioDescDeviceIds = ioDesc->getDeviceIds();
+                if (!areDeviceIdsEqual(deviceIds, ioDescDeviceIds)) {
+                    deviceIds = ioDescDeviceIds;
                     auto it = mAudioDeviceCallbacks.find(ioDesc->getIoHandle());
                     if (it != mAudioDeviceCallbacks.end()) {
                         callbacks = it->second;
@@ -771,7 +774,7 @@
                     auto it2 = cbks.find(ioDesc->getPortId());
                     if (it2 != cbks.end()) {
                         callbacks.emplace(ioDesc->getPortId(), it2->second);
-                        deviceId = oldDesc->getDeviceId();
+                        deviceIds = oldDesc->getDeviceIds();
                     }
                 }
             }
@@ -790,7 +793,7 @@
     // example getRoutedDevice that updates the device and tries to acquire mMutex.
     for (auto cb  : callbacksToCall) {
         // If callbacksToCall is not empty, it implies ioDesc->getIoHandle() and deviceId are valid
-        cb->onAudioDeviceUpdate(ioDesc->getIoHandle(), deviceId);
+        cb->onAudioDeviceUpdate(ioDesc->getIoHandle(), deviceIds);
     }
 
     return Status::ok();
@@ -1014,7 +1017,7 @@
             }
             mediautils::initService<IAudioPolicyService, AudioPolicyServiceTraits>();
             mWaitMs = std::chrono::milliseconds(
-                    property_get_int32(kServiceWaitProperty, kServiceWaitMs));
+                    property_get_int32(kServiceWaitProperty, kServiceClientWaitMs));
             init = true;
         }
         if (mValid) return mService;
@@ -1071,7 +1074,8 @@
     static inline constinit sp<AudioSystem::AudioPolicyServiceClient> mClient GUARDED_BY(mMutex);
     static inline constinit sp<IAudioPolicyService> mService GUARDED_BY(mMutex);
     static inline constinit bool mValid GUARDED_BY(mMutex) = false;
-    static inline constinit std::chrono::milliseconds mWaitMs GUARDED_BY(mMutex) {kServiceWaitMs};
+    static inline constinit std::chrono::milliseconds mWaitMs
+            GUARDED_BY(mMutex) {kServiceClientWaitMs};
     static inline constinit std::atomic_bool mDisableThreadPoolStart = false;
 };
 
@@ -1961,14 +1965,16 @@
     return afc->removeSupportedLatencyModesCallback(callback);
 }
 
-audio_port_handle_t AudioSystem::getDeviceIdForIo(audio_io_handle_t audioIo) {
+status_t AudioSystem::getDeviceIdsForIo(audio_io_handle_t audioIo, DeviceIdVector& deviceIds) {
     const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == nullptr) return AudioFlingerServiceTraits::getError();
     const sp<AudioIoDescriptor> desc = getIoDescriptor(audioIo);
     if (desc == 0) {
-        return AUDIO_PORT_HANDLE_NONE;
+        deviceIds.clear();
+    } else {
+        deviceIds = desc->getDeviceIds();
     }
-    return desc->getDeviceId();
+    return OK;
 }
 
 status_t AudioSystem::acquireSoundTriggerSession(audio_session_t* session,
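Note: AudioSystem::getDeviceIdsForIo() now reports the routed ids through an out-parameter and returns a status, so callers can tell "service unavailable" apart from "no devices known for this io handle". A minimal caller-side sketch, mirroring the updateRoutedDeviceIds_l() pattern used by AudioTrack and AudioRecord elsewhere in this patch (function name and io handle are illustrative):

    #define LOG_TAG "RoutedIdsSketch"
    #include <media/AudioContainers.h>
    #include <media/AudioSystem.h>
    #include <utils/Errors.h>
    #include <utils/Log.h>

    // Sketch: refresh a cached routed-device list, keeping the previous value
    // when the query fails or reports no devices.
    static void refreshRoutedDeviceIds(audio_io_handle_t io, android::DeviceIdVector& routedIds) {
        android::DeviceIdVector deviceIds;
        const status_t result = android::AudioSystem::getDeviceIdsForIo(io, deviceIds);
        if (result != OK) {
            ALOGW("%s: getDeviceIdsForIo returned %d", __func__, result);
            return;
        }
        if (!deviceIds.empty()) {
            routedIds = deviceIds;
        }
    }
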
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index a9409eb..3591fbf 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -1736,7 +1736,7 @@
                 // allow track invalidation when track is not playing to propagate
                 // the updated mSelectedDeviceId
                 if (isPlaying_l()) {
-                    if (mSelectedDeviceId != mRoutedDeviceId) {
+                    if (getFirstDeviceId(mRoutedDeviceIds) != mSelectedDeviceId) {
                         android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
                         mProxy->interrupt();
                     }
@@ -1759,7 +1759,7 @@
 }
 
 // must be called with mLock held
-void AudioTrack::updateRoutedDeviceId_l()
+void AudioTrack::updateRoutedDeviceIds_l()
 {
     // if the track is inactive, do not update actual device as the output stream maybe routed
     // to a device not relevant to this client because of other active use cases.
@@ -1767,17 +1767,21 @@
         return;
     }
     if (mOutput != AUDIO_IO_HANDLE_NONE) {
-        audio_port_handle_t deviceId = AudioSystem::getDeviceIdForIo(mOutput);
-        if (deviceId != AUDIO_PORT_HANDLE_NONE) {
-            mRoutedDeviceId = deviceId;
+        DeviceIdVector deviceIds;
+        status_t result = AudioSystem::getDeviceIdsForIo(mOutput, deviceIds);
+        if (result != OK) {
+            ALOGW("%s: getDeviceIdsForIo returned: %d", __func__, result);
+        }
+        if (!deviceIds.empty()) {
+            mRoutedDeviceIds = deviceIds;
         }
     }
 }
 
-audio_port_handle_t AudioTrack::getRoutedDeviceId() {
+DeviceIdVector AudioTrack::getRoutedDeviceIds() {
     AutoMutex lock(mLock);
-    updateRoutedDeviceId_l();
-    return mRoutedDeviceId;
+    updateRoutedDeviceIds_l();
+    return mRoutedDeviceIds;
 }
 
 status_t AudioTrack::attachAuxEffect(int effectId)
@@ -1937,7 +1941,7 @@
 
     mFrameCount = output.frameCount;
     mNotificationFramesAct = (uint32_t)output.notificationFrameCount;
-    mRoutedDeviceId = output.selectedDeviceId;
+    mRoutedDeviceIds = output.selectedDeviceIds;
     mSessionId = output.sessionId;
     mStreamType = output.streamType;
 
@@ -2106,7 +2110,8 @@
         .set(AMEDIAMETRICS_PROP_USAGE, toString(mAttributes.usage).c_str())
         .set(AMEDIAMETRICS_PROP_THREADID, (int32_t)output.outputId)
         .set(AMEDIAMETRICS_PROP_SELECTEDDEVICEID, (int32_t)mSelectedDeviceId)
-        .set(AMEDIAMETRICS_PROP_ROUTEDDEVICEID, (int32_t)mRoutedDeviceId)
+        .set(AMEDIAMETRICS_PROP_ROUTEDDEVICEID, (int32_t)(getFirstDeviceId(mRoutedDeviceIds)))
+        .set(AMEDIAMETRICS_PROP_ROUTEDDEVICEIDS, toString(mRoutedDeviceIds).c_str())
         .set(AMEDIAMETRICS_PROP_ENCODING, toString(mFormat).c_str())
         .set(AMEDIAMETRICS_PROP_CHANNELMASK, (int32_t)mChannelMask)
         .set(AMEDIAMETRICS_PROP_FRAMECOUNT, (int32_t)mFrameCount)
@@ -3555,8 +3560,8 @@
     result.appendFormat("  notif. frame count(%u), req. notif. frame count(%u),"
             " req. notif. per buff(%u)\n",
              mNotificationFramesAct, mNotificationFramesReq, mNotificationsPerBufferReq);
-    result.appendFormat("  latency (%d), selected device Id(%d), routed device Id(%d)\n",
-                        mLatency, mSelectedDeviceId, mRoutedDeviceId);
+    result.appendFormat("  latency (%d), selected device Id(%d), routed device Ids(%s)\n",
+                        mLatency, mSelectedDeviceId, toString(mRoutedDeviceIds).c_str());
     result.appendFormat("  output(%d) AF latency (%u) AF frame count(%zu) AF SampleRate(%u)\n",
                         mOutput, mAfLatency, mAfFrameCount, mAfSampleRate);
     ::write(fd, result.c_str(), result.size());
@@ -3623,7 +3628,7 @@
 
     // first time when the track is created we do not have a valid piid
     if (mPlayerIId != PLAYER_PIID_INVALID) {
-        mAudioManager->playerEvent(mPlayerIId, PLAYER_UPDATE_PORT_ID, mPortId);
+        mAudioManager->playerEvent(mPlayerIId, PLAYER_UPDATE_PORT_ID, {mPortId});
     }
 }
 
@@ -3672,7 +3677,7 @@
 
 
 void AudioTrack::onAudioDeviceUpdate(audio_io_handle_t audioIo,
-                                 audio_port_handle_t deviceId)
+                                     const DeviceIdVector& deviceIds)
 {
     sp<AudioSystem::AudioDeviceCallback> callback;
     {
@@ -3684,12 +3689,12 @@
         // only update device if the track is active as route changes due to other use cases are
         // irrelevant for this client
         if (mState == STATE_ACTIVE) {
-            mRoutedDeviceId = deviceId;
+            mRoutedDeviceIds = deviceIds;
         }
     }
 
     if (callback.get() != nullptr) {
-        callback->onAudioDeviceUpdate(mOutput, mRoutedDeviceId);
+        callback->onAudioDeviceUpdate(mOutput, mRoutedDeviceIds);
     }
 }
 
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index 168b47e..1523607 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -103,8 +103,8 @@
     aidl.flags = VALUE_OR_RETURN(legacy2aidl_audio_output_flags_t_int32_t_mask(flags));
     aidl.frameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(frameCount));
     aidl.notificationFrameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(notificationFrameCount));
-    aidl.selectedDeviceId = VALUE_OR_RETURN(
-            legacy2aidl_audio_port_handle_t_int32_t(selectedDeviceId));
+    aidl.selectedDeviceIds = VALUE_OR_RETURN(convertContainer<std::vector<int32_t>>(
+            selectedDeviceIds, legacy2aidl_audio_port_handle_t_int32_t));
     aidl.sessionId = VALUE_OR_RETURN(legacy2aidl_audio_session_t_int32_t(sessionId));
     aidl.sampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(sampleRate));
     aidl.streamType =  VALUE_OR_RETURN(
@@ -132,8 +132,8 @@
     legacy.frameCount = VALUE_OR_RETURN(convertIntegral<size_t>(aidl.frameCount));
     legacy.notificationFrameCount = VALUE_OR_RETURN(
             convertIntegral<size_t>(aidl.notificationFrameCount));
-    legacy.selectedDeviceId = VALUE_OR_RETURN(
-            aidl2legacy_int32_t_audio_port_handle_t(aidl.selectedDeviceId));
+    legacy.selectedDeviceIds = VALUE_OR_RETURN(convertContainer<DeviceIdVector>(
+            aidl.selectedDeviceIds, aidl2legacy_int32_t_audio_port_handle_t));
     legacy.sessionId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_session_t(aidl.sessionId));
     legacy.sampleRate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.sampleRate));
     legacy.streamType = VALUE_OR_RETURN(
diff --git a/media/libaudioclient/PlayerBase.cpp b/media/libaudioclient/PlayerBase.cpp
index 651255a..5999040 100644
--- a/media/libaudioclient/PlayerBase.cpp
+++ b/media/libaudioclient/PlayerBase.cpp
@@ -30,8 +30,7 @@
 PlayerBase::PlayerBase() : BnPlayer(),
         mPanMultiplierL(1.0f), mPanMultiplierR(1.0f),
         mVolumeMultiplierL(1.0f), mVolumeMultiplierR(1.0f),
-        mPIId(PLAYER_PIID_INVALID), mLastReportedEvent(PLAYER_STATE_UNKNOWN),
-        mLastReportedDeviceId(AUDIO_PORT_HANDLE_NONE)
+        mPIId(PLAYER_PIID_INVALID), mLastReportedEvent(PLAYER_STATE_UNKNOWN)
 {
     ALOGD("PlayerBase::PlayerBase()");
     // use checkService() to avoid blocking if audio service is not up yet
@@ -68,7 +67,7 @@
     }
 
     if (mPIId != PLAYER_PIID_INVALID && portId != AUDIO_PORT_HANDLE_NONE) {
-        mAudioManager->playerEvent(mPIId, android::PLAYER_UPDATE_PORT_ID, portId);
+        mAudioManager->playerEvent(mPIId, android::PLAYER_UPDATE_PORT_ID, { portId });
     }
 }
 
@@ -80,13 +79,13 @@
 }
 
 //------------------------------------------------------------------------------
-void PlayerBase::servicePlayerEvent(player_state_t event, audio_port_handle_t deviceId) {
+void PlayerBase::servicePlayerEvent(player_state_t event, const DeviceIdVector& deviceIds) {
     if (mAudioManager != 0) {
         bool changed = false;
         {
             Mutex::Autolock _l(mDeviceIdLock);
-            changed = mLastReportedDeviceId != deviceId;
-            mLastReportedDeviceId = deviceId;
+            changed = !areDeviceIdsEqual(deviceIds, mLastReportedDeviceIds);
+            mLastReportedDeviceIds = deviceIds;
         }
 
         {
@@ -99,7 +98,7 @@
             }
         }
         if (changed && (mPIId != PLAYER_PIID_INVALID)) {
-            mAudioManager->playerEvent(mPIId, event, deviceId);
+            mAudioManager->playerEvent(mPIId, event, deviceIds);
         }
     }
 }
@@ -112,18 +111,18 @@
 }
 
 //FIXME temporary method while some player state is outside of this class
-void PlayerBase::reportEvent(player_state_t event, audio_port_handle_t deviceId) {
-    servicePlayerEvent(event, deviceId);
+void PlayerBase::reportEvent(player_state_t event, const DeviceIdVector& deviceIds) {
+    servicePlayerEvent(event, deviceIds);
 }
 
-void PlayerBase::baseUpdateDeviceId(audio_port_handle_t deviceId) {
-    servicePlayerEvent(PLAYER_UPDATE_DEVICE_ID, deviceId);
+void PlayerBase::baseUpdateDeviceIds(const DeviceIdVector& deviceIds) {
+    servicePlayerEvent(PLAYER_UPDATE_DEVICE_ID, deviceIds);
 }
 
-status_t PlayerBase::startWithStatus(audio_port_handle_t deviceId) {
+status_t PlayerBase::startWithStatus(const DeviceIdVector& deviceIds) {
     status_t status = playerStart();
     if (status == NO_ERROR) {
-        servicePlayerEvent(PLAYER_STATE_STARTED, deviceId);
+        servicePlayerEvent(PLAYER_STATE_STARTED, deviceIds);
     } else {
         ALOGW("PlayerBase::start() error %d", status);
     }
@@ -133,7 +132,7 @@
 status_t PlayerBase::pauseWithStatus() {
     status_t status = playerPause();
     if (status == NO_ERROR) {
-        servicePlayerEvent(PLAYER_STATE_PAUSED, AUDIO_PORT_HANDLE_NONE);
+        servicePlayerEvent(PLAYER_STATE_PAUSED, {});
     } else {
         ALOGW("PlayerBase::pause() error %d", status);
     }
@@ -144,7 +143,7 @@
     status_t status = playerStop();
 
     if (status == NO_ERROR) {
-        servicePlayerEvent(PLAYER_STATE_STOPPED, AUDIO_PORT_HANDLE_NONE);
+        servicePlayerEvent(PLAYER_STATE_STOPPED, {});
     } else {
         ALOGW("PlayerBase::stop() error %d", status);
     }
@@ -155,12 +154,12 @@
 // Implementation of IPlayer
 binder::Status PlayerBase::start() {
     ALOGD("PlayerBase::start() from IPlayer");
-    audio_port_handle_t deviceId;
+    DeviceIdVector deviceIds;
     {
         Mutex::Autolock _l(mDeviceIdLock);
-        deviceId = mLastReportedDeviceId;
+        deviceIds = mLastReportedDeviceIds;
     }
-    (void)startWithStatus(deviceId);
+    (void)startWithStatus(deviceIds);
     return binder::Status::ok();
 }
 
diff --git a/media/libaudioclient/TrackPlayerBase.cpp b/media/libaudioclient/TrackPlayerBase.cpp
index bc38251..7928c65 100644
--- a/media/libaudioclient/TrackPlayerBase.cpp
+++ b/media/libaudioclient/TrackPlayerBase.cpp
@@ -60,8 +60,8 @@
 }
 
 void TrackPlayerBase::SelfAudioDeviceCallback::onAudioDeviceUpdate(audio_io_handle_t __unused,
-                                                                   audio_port_handle_t deviceId) {
-    mSelf.baseUpdateDeviceId(deviceId);
+        const DeviceIdVector& deviceIds) {
+    mSelf.baseUpdateDeviceIds(deviceIds);
 }
 
 void TrackPlayerBase::doDestroy() {
diff --git a/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl b/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl
index ab60461..0c9a947 100644
--- a/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl
+++ b/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl
@@ -33,8 +33,8 @@
     int flags;
     long frameCount;
     long notificationFrameCount;
-    /** Interpreted as audio_port_handle_t. */
-    int selectedDeviceId;
+    /** Interpreted as audio_port_handle_t[]. */
+    int[] selectedDeviceIds;
     int sessionId;
     int sampleRate;
     AudioStreamType streamType;
diff --git a/media/libaudioclient/aidl/fuzzer/Android.bp b/media/libaudioclient/aidl/fuzzer/Android.bp
index a215c0b..14e528f 100644
--- a/media/libaudioclient/aidl/fuzzer/Android.bp
+++ b/media/libaudioclient/aidl/fuzzer/Android.bp
@@ -24,8 +24,6 @@
         "android.hardware.audio.common@7.0-enums",
         "audiopermissioncontroller",
         "libaudiomockhal",
-        "libcgrouprc",
-        "libcgrouprc_format",
         "libfakeservicemanager",
         "libjsoncpp",
         "libmediametricsservice",
diff --git a/media/libaudioclient/fuzzer/Android.bp b/media/libaudioclient/fuzzer/Android.bp
index 8bca8df..65ada70 100644
--- a/media/libaudioclient/fuzzer/Android.bp
+++ b/media/libaudioclient/fuzzer/Android.bp
@@ -37,8 +37,6 @@
         "effect-aidl-cpp",
         "libaudioclient",
         "libbase",
-        "libcgrouprc",
-        "libcgrouprc_format",
         "libcutils",
         "libjsoncpp",
         "liblog",
diff --git a/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp b/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
index b0b7e03..ba5b3b1 100644
--- a/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
+++ b/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
@@ -396,7 +396,7 @@
         static_cast<audio_port_handle_t>(mFdp.ConsumeIntegral<int32_t>());
     record->setInputDevice(deviceId);
     record->getInputDevice();
-    record->getRoutedDeviceId();
+    record->getRoutedDeviceIds();
     record->getPortId();
 }
 
diff --git a/media/libaudioclient/include/media/AudioIoDescriptor.h b/media/libaudioclient/include/media/AudioIoDescriptor.h
index 405ec7d..961cc1c 100644
--- a/media/libaudioclient/include/media/AudioIoDescriptor.h
+++ b/media/libaudioclient/include/media/AudioIoDescriptor.h
@@ -69,12 +69,21 @@
     size_t getFrameCountHAL() const { return mFrameCountHAL; }
     uint32_t getLatency() const { return mLatency; }
     audio_port_handle_t getPortId() const { return mPortId; }
-    audio_port_handle_t getDeviceId() const {
-        if (mPatch.num_sources != 0 && mPatch.num_sinks != 0) {
-            // FIXME: the API only returns the first device in case of multiple device selection
-            return mIsInput ? mPatch.sources[0].id : mPatch.sinks[0].id;
+    std::vector<audio_port_handle_t> getDeviceIds() const {
+        std::vector<audio_port_handle_t> deviceIds;
+        if (mPatch.num_sources == 0 || mPatch.num_sinks == 0) {
+            return deviceIds;
         }
-        return AUDIO_PORT_HANDLE_NONE;
+        if (mIsInput) {
+            for (unsigned int i = 0; i < mPatch.num_sources; i++) {
+                deviceIds.push_back(mPatch.sources[i].id);
+            }
+        } else {
+            for (unsigned int i = 0; i < mPatch.num_sinks; i++) {
+                deviceIds.push_back(mPatch.sinks[i].id);
+            }
+        }
+        return deviceIds;
     }
     void setPatch(const audio_patch& patch) { mPatch = patch; }
 
@@ -88,7 +97,13 @@
                    (mIsInput ? audio_channel_in_mask_to_string(mChannelMask) :
                            audio_channel_out_mask_to_string(mChannelMask)))
            << ", frameCount " << mFrameCount << ", frameCountHAL " << mFrameCountHAL
-           << ", deviceId " << getDeviceId();
+           << ", deviceIds ";
+
+        std::vector<audio_port_handle_t> deviceIds = getDeviceIds();
+        for (auto deviceId : deviceIds) {
+            ss << deviceId << " ";
+        }
+
         return ss.str();
     }
 
diff --git a/media/libaudioclient/include/media/AudioRecord.h b/media/libaudioclient/include/media/AudioRecord.h
index 25d91d3..80a756e 100644
--- a/media/libaudioclient/include/media/AudioRecord.h
+++ b/media/libaudioclient/include/media/AudioRecord.h
@@ -495,19 +495,19 @@
      */
             audio_port_handle_t getInputDevice();
 
-     /* Returns the ID of the audio device actually used by the input to which this AudioRecord
+     /* Returns the IDs of the audio devices actually used by the input to which this AudioRecord
       * is attached.
-      * The device ID is relevant only if the AudioRecord is active.
-      * When the AudioRecord is inactive, the device ID returned can be either:
-      * - AUDIO_PORT_HANDLE_NONE if the AudioRecord is not attached to any output.
-      * - The device ID used before paused or stopped.
+      * The device IDs are relevant only if the AudioRecord is active.
+      * When the AudioRecord is inactive, the device IDs returned can be either:
+      * - An empty vector if the AudioRecord is not attached to any input.
+      * - The device IDs used before paused or stopped.
       * - The device ID selected by audio policy manager of setOutputDevice() if the AudioRecord
       * has not been started yet.
       *
       * Parameters:
       *  none.
       */
-     audio_port_handle_t getRoutedDeviceId();
+     DeviceIdVector getRoutedDeviceIds();
 
     /* Add an AudioDeviceCallback. The caller will be notified when the audio device
      * to which this AudioRecord is routed is updated.
@@ -534,7 +534,7 @@
 
             // AudioSystem::AudioDeviceCallback> virtuals
             virtual void onAudioDeviceUpdate(audio_io_handle_t audioIo,
-                                             audio_port_handle_t deviceId);
+                                             const DeviceIdVector& deviceIds);
 
 private:
     /* If nonContig is non-NULL, it is an output parameter that will be set to the number of
@@ -678,7 +678,7 @@
             // FIXME enum is faster than strcmp() for parameter 'from'
             status_t restoreRecord_l(const char *from);
 
-            void     updateRoutedDeviceId_l();
+            void     updateRoutedDeviceIds_l();
 
     sp<AudioRecordThread>   mAudioRecordThread;
     mutable Mutex           mLock;
@@ -810,7 +810,7 @@
     audio_port_handle_t     mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
     // Device actually selected by AudioPolicyManager: This may not match the app
     // selection depending on other activity and connected devices
-    audio_port_handle_t     mRoutedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    DeviceIdVector          mRoutedDeviceIds;
 
     wp<AudioSystem::AudioDeviceCallback> mDeviceCallback;
 
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index fbc7629..45ede3c 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -767,7 +767,7 @@
         virtual ~AudioDeviceCallback() {}
 
         virtual void onAudioDeviceUpdate(audio_io_handle_t audioIo,
-                                         audio_port_handle_t deviceId) = 0;
+                                         const DeviceIdVector& deviceIds) = 0;
     };
 
     static status_t addAudioDeviceCallback(const wp<AudioDeviceCallback>& callback,
@@ -793,7 +793,7 @@
     static status_t removeSupportedLatencyModesCallback(
             const sp<SupportedLatencyModesCallback>& callback);
 
-    static audio_port_handle_t getDeviceIdForIo(audio_io_handle_t audioIo);
+    static status_t getDeviceIdsForIo(audio_io_handle_t audioIo, DeviceIdVector& deviceIds);
 
     static status_t setVibratorInfos(const std::vector<media::AudioVibratorInfo>& vibratorInfos);
 
@@ -842,7 +842,8 @@
         status_t removeSupportedLatencyModesCallback(
                 const sp<SupportedLatencyModesCallback>& callback) EXCLUDES(mMutex);
 
-        audio_port_handle_t getDeviceIdForIo(audio_io_handle_t audioIo) EXCLUDES(mMutex);
+        status_t getDeviceIdsForIo(audio_io_handle_t audioIo, DeviceIdVector& deviceIds)
+                EXCLUDES(mMutex);
 
     private:
         mutable std::mutex mMutex;
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index de97863..330b5ee 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -835,18 +835,18 @@
      */
      audio_port_handle_t getOutputDevice();
 
-     /* Returns the ID of the audio device actually used by the output to which this AudioTrack is
+     /* Returns the IDs of the audio devices actually used by the output to which this AudioTrack is
       * attached.
       * When the AudioTrack is inactive, the device ID returned can be either:
-      * - AUDIO_PORT_HANDLE_NONE if the AudioTrack is not attached to any output.
-      * - The device ID used before paused or stopped.
+      * - An empty vector if the AudioTrack is not attached to any output.
+      * - The device IDs used before paused or stopped.
       * - The device ID selected by audio policy manager of setOutputDevice() if the AudioTrack
       * has not been started yet.
       *
       * Parameters:
       *  none.
       */
-     audio_port_handle_t getRoutedDeviceId();
+     DeviceIdVector getRoutedDeviceIds();
 
     /* Returns the unique session ID associated with this track.
      *
@@ -1089,7 +1089,7 @@
 
             // AudioSystem::AudioDeviceCallback> virtuals
             virtual void onAudioDeviceUpdate(audio_io_handle_t audioIo,
-                                             audio_port_handle_t deviceId);
+                                             const DeviceIdVector& deviceIds);
 
     /* Obtain the pending duration in milliseconds for playback of pure PCM
      * (mixable without embedded timing) data remaining in AudioTrack.
@@ -1258,7 +1258,7 @@
 
             void     restartIfDisabled();
 
-            void     updateRoutedDeviceId_l();
+            void     updateRoutedDeviceIds_l();
 
             /* Sets the Dual Mono mode presentation on the output device. */
             status_t setDualMonoMode_l(audio_dual_mono_mode_t mode);
@@ -1482,9 +1482,9 @@
     // Device requested by the application.
     audio_port_handle_t mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
 
-    // Device actually selected by AudioPolicyManager: This may not match the app
+    // Devices actually selected by AudioPolicyManager: This may not match the app
     // selection depending on other activity and connected devices.
-    audio_port_handle_t mRoutedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    DeviceIdVector             mRoutedDeviceIds;
 
     sp<media::VolumeHandler>       mVolumeHandler;
 
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index 21ecb09..8292eef 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -109,7 +109,7 @@
         audio_output_flags_t flags;
         size_t frameCount;
         size_t notificationFrameCount;
-        audio_port_handle_t selectedDeviceId;
+        DeviceIdVector selectedDeviceIds;
         audio_session_t sessionId;
 
         /* output */
diff --git a/media/libaudioclient/include/media/PlayerBase.h b/media/libaudioclient/include/media/PlayerBase.h
index 5475f76..5df1a6e 100644
--- a/media/libaudioclient/include/media/PlayerBase.h
+++ b/media/libaudioclient/include/media/PlayerBase.h
@@ -22,6 +22,7 @@
 #include <utils/Mutex.h>
 
 #include "android/media/BnPlayer.h"
+#include "media/AudioContainers.h"
 
 namespace android {
 
@@ -44,14 +45,14 @@
             const media::VolumeShaperConfiguration& configuration,
             const media::VolumeShaperOperation& operation) override;
 
-            status_t startWithStatus(audio_port_handle_t deviceId);
+            status_t startWithStatus(const DeviceIdVector& deviceIds);
             status_t pauseWithStatus();
             status_t stopWithStatus();
 
             //FIXME temporary method while some player state is outside of this class
-            void reportEvent(player_state_t event, audio_port_handle_t deviceId);
+            void reportEvent(player_state_t event, const DeviceIdVector& deviceIds);
 
-            void baseUpdateDeviceId(audio_port_handle_t deviceId);
+            void baseUpdateDeviceIds(const DeviceIdVector& deviceIds);
 
             /**
              * Updates the mapping in the AudioService between portId and piid
@@ -80,7 +81,7 @@
     audio_unique_id_t mPIId;
 private:
             // report events to AudioService
-            void servicePlayerEvent(player_state_t event, audio_port_handle_t deviceId);
+            void servicePlayerEvent(player_state_t event, const DeviceIdVector& deviceIds);
             void serviceReleasePlayer();
 
     // native interface to AudioService
@@ -91,7 +92,7 @@
     player_state_t mLastReportedEvent;
 
     Mutex mDeviceIdLock;
-    audio_port_handle_t mLastReportedDeviceId;
+    DeviceIdVector mLastReportedDeviceIds GUARDED_BY(mDeviceIdLock);
 };
 
 } // namespace android
diff --git a/media/libaudioclient/include/media/TrackPlayerBase.h b/media/libaudioclient/include/media/TrackPlayerBase.h
index 8df9ff8..575b14c 100644
--- a/media/libaudioclient/include/media/TrackPlayerBase.h
+++ b/media/libaudioclient/include/media/TrackPlayerBase.h
@@ -60,7 +60,7 @@
             public:
                 SelfAudioDeviceCallback(PlayerBase& self);
                 virtual void onAudioDeviceUpdate(audio_io_handle_t audioIo,
-                                                         audio_port_handle_t deviceId);
+                                                 const DeviceIdVector& deviceIds);
             private:
                 virtual ~SelfAudioDeviceCallback();
                 PlayerBase& mSelf;
diff --git a/media/libaudioclient/tests/Android.bp b/media/libaudioclient/tests/Android.bp
index ddf14a3..3941280 100644
--- a/media/libaudioclient/tests/Android.bp
+++ b/media/libaudioclient/tests/Android.bp
@@ -107,7 +107,6 @@
         "framework-permission-aidl-cpp",
         "libaudioutils",
         "libbase",
-        "libcgrouprc",
         "libdl",
         "libmedia",
         "libmedia_helper",
diff --git a/media/libaudioclient/tests/audio_test_utils.cpp b/media/libaudioclient/tests/audio_test_utils.cpp
index 1599839..7d13939 100644
--- a/media/libaudioclient/tests/audio_test_utils.cpp
+++ b/media/libaudioclient/tests/audio_test_utils.cpp
@@ -27,12 +27,12 @@
 #define MAX_WAIT_TIME_MS 5000
 
 void OnAudioDeviceUpdateNotifier::onAudioDeviceUpdate(audio_io_handle_t audioIo,
-                                                      audio_port_handle_t deviceId) {
-    ALOGI("%s: audioIo=%d deviceId=%d", __func__, audioIo, deviceId);
+                                                      const DeviceIdVector& deviceIds) {
+    ALOGI("%s: audioIo=%d deviceIds=%s", __func__, audioIo, toString(deviceIds).c_str());
     {
         std::lock_guard lock(mMutex);
         mAudioIo = audioIo;
-        mDeviceId = deviceId;
+        mDeviceIds = deviceIds;
     }
     mCondition.notify_all();
 }
@@ -41,20 +41,23 @@
     std::unique_lock lock(mMutex);
     android::base::ScopedLockAssertion lock_assertion(mMutex);
     if (mAudioIo == AUDIO_IO_HANDLE_NONE ||
-        (expDeviceId != AUDIO_PORT_HANDLE_NONE && expDeviceId != mDeviceId)) {
+        (expDeviceId != AUDIO_PORT_HANDLE_NONE &&
+         std::find(mDeviceIds.begin(), mDeviceIds.end(), expDeviceId) == mDeviceIds.end())) {
         mCondition.wait_for(lock, std::chrono::milliseconds(500));
         if (mAudioIo == AUDIO_IO_HANDLE_NONE ||
-            (expDeviceId != AUDIO_PORT_HANDLE_NONE && expDeviceId != mDeviceId)) {
+            (expDeviceId != AUDIO_PORT_HANDLE_NONE &&
+             std::find(mDeviceIds.begin(), mDeviceIds.end(), expDeviceId) == mDeviceIds.end())) {
             return TIMED_OUT;
         }
     }
     return OK;
 }
 
-std::pair<audio_io_handle_t, audio_port_handle_t>
-OnAudioDeviceUpdateNotifier::getLastPortAndDevice() const {
+std::pair<audio_io_handle_t, DeviceIdVector> OnAudioDeviceUpdateNotifier::getLastPortAndDevices()
+        const {
     std::lock_guard lock(mMutex);
-    return {mAudioIo, mDeviceId};
+    ALOGI("%s: audioIo=%d deviceIds=%s", __func__, mAudioIo, toString(mDeviceIds).c_str());
+    return {mAudioIo, mDeviceIds};
 }
 
 AudioPlayback::AudioPlayback(uint32_t sampleRate, audio_format_t format,
@@ -761,13 +764,15 @@
     return BAD_VALUE;
 }
 
-bool patchContainsOutputDevice(audio_port_handle_t deviceId, audio_patch patch) {
+// Returns true when the patch's output (sink) devices exactly match the given deviceIds.
+bool patchMatchesOutputDevices(const DeviceIdVector& deviceIds, audio_patch patch) {
+    DeviceIdVector patchDeviceIds;
     for (auto j = 0; j < patch.num_sinks; j++) {
-        if (patch.sinks[j].type == AUDIO_PORT_TYPE_DEVICE && patch.sinks[j].id == deviceId) {
-            return true;
+        if (patch.sinks[j].type == AUDIO_PORT_TYPE_DEVICE) {
+            patchDeviceIds.push_back(patch.sinks[j].id);
         }
     }
-    return false;
+    return areDeviceIdsEqual(deviceIds, patchDeviceIds);
 }
 
 bool patchContainsInputDevice(audio_port_handle_t deviceId, audio_patch patch) {
@@ -779,10 +784,10 @@
     return false;
 }
 
-bool checkPatchPlayback(audio_io_handle_t audioIo, audio_port_handle_t deviceId) {
+bool checkPatchPlayback(audio_io_handle_t audioIo, const DeviceIdVector& deviceIds) {
     struct audio_patch patch;
     if (getPatchForOutputMix(audioIo, patch) == OK) {
-        return patchContainsOutputDevice(deviceId, patch);
+        return patchMatchesOutputDevices(deviceIds, patch);
     }
     return false;
 }
diff --git a/media/libaudioclient/tests/audio_test_utils.h b/media/libaudioclient/tests/audio_test_utils.h
index 022ecf3..9ccc7da 100644
--- a/media/libaudioclient/tests/audio_test_utils.h
+++ b/media/libaudioclient/tests/audio_test_utils.h
@@ -52,9 +52,9 @@
                              audio_port_v7& port);
 status_t getPatchForOutputMix(audio_io_handle_t audioIo, audio_patch& patch);
 status_t getPatchForInputMix(audio_io_handle_t audioIo, audio_patch& patch);
-bool patchContainsOutputDevice(audio_port_handle_t deviceId, audio_patch patch);
+bool patchMatchesOutputDevices(const DeviceIdVector& deviceIds, audio_patch patch);
 bool patchContainsInputDevice(audio_port_handle_t deviceId, audio_patch patch);
-bool checkPatchPlayback(audio_io_handle_t audioIo, audio_port_handle_t deviceId);
+bool checkPatchPlayback(audio_io_handle_t audioIo, const DeviceIdVector& deviceIds);
 bool checkPatchCapture(audio_io_handle_t audioIo, audio_port_handle_t deviceId);
 std::string dumpPort(const audio_port_v7& port);
 std::string dumpPortConfig(const audio_port_config& port);
@@ -62,13 +62,13 @@
 
 class OnAudioDeviceUpdateNotifier : public AudioSystem::AudioDeviceCallback {
   public:
-    void onAudioDeviceUpdate(audio_io_handle_t audioIo, audio_port_handle_t deviceId) override;
+    void onAudioDeviceUpdate(audio_io_handle_t audioIo, const DeviceIdVector& deviceIds) override;
     status_t waitForAudioDeviceCb(audio_port_handle_t expDeviceId = AUDIO_PORT_HANDLE_NONE);
-    std::pair<audio_io_handle_t, audio_port_handle_t> getLastPortAndDevice() const;
+    std::pair<audio_io_handle_t, DeviceIdVector> getLastPortAndDevices() const;
 
   private:
     audio_io_handle_t mAudioIo GUARDED_BY(mMutex) = AUDIO_IO_HANDLE_NONE;
-    audio_port_handle_t mDeviceId GUARDED_BY(mMutex) = AUDIO_PORT_HANDLE_NONE;
+    DeviceIdVector mDeviceIds GUARDED_BY(mMutex);
     mutable std::mutex mMutex;
     std::condition_variable mCondition;
 };
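For reference, a self-contained sketch of the wait pattern the notifier now uses: the callback stores the full routed device vector, and the waiter checks membership with std::find instead of comparing a single id. The names and the AUDIO_PORT_HANDLE_NONE stand-in are assumptions for illustration only.

#include <algorithm>
#include <chrono>
#include <condition_variable>
#include <cstdint>
#include <mutex>
#include <vector>

using audio_port_handle_t = int32_t;             // assumption: stand-in for the system type
using DeviceIdVector = std::vector<audio_port_handle_t>;
constexpr audio_port_handle_t kNoneHandle = 0;   // assumption: stands in for AUDIO_PORT_HANDLE_NONE

class DeviceUpdateWaiter {
public:
    // Called from the audio callback thread with the full routed device list.
    void onUpdate(const DeviceIdVector& deviceIds) {
        {
            std::lock_guard<std::mutex> lock(mMutex);
            mDeviceIds = deviceIds;
        }
        mCondition.notify_all();
    }

    // Waits until the expected id appears in the reported vector (or until any
    // update arrives when no specific id is expected). Returns false on timeout.
    bool waitFor(audio_port_handle_t expected, std::chrono::milliseconds timeout) {
        std::unique_lock<std::mutex> lock(mMutex);
        return mCondition.wait_for(lock, timeout, [&] {
            if (mDeviceIds.empty()) return false;
            if (expected == kNoneHandle) return true;
            return std::find(mDeviceIds.begin(), mDeviceIds.end(), expected) != mDeviceIds.end();
        });
    }

private:
    std::mutex mMutex;
    std::condition_variable mCondition;
    DeviceIdVector mDeviceIds;
};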
diff --git a/media/libaudioclient/tests/audioeffect_analyser.cpp b/media/libaudioclient/tests/audioeffect_analyser.cpp
index 199fb8b..3df5fd8 100644
--- a/media/libaudioclient/tests/audioeffect_analyser.cpp
+++ b/media/libaudioclient/tests/audioeffect_analyser.cpp
@@ -119,7 +119,8 @@
     CHECK_OK(capture->start(), "start recording failed")
     CHECK_OK(capture->audioProcess(), "recording process failed")
     CHECK_OK(cbCapture->waitForAudioDeviceCb(), "audio device callback notification timed out");
-    if (port.id != capture->getAudioRecordHandle()->getRoutedDeviceId()) {
+    DeviceIdVector routedDeviceIds = capture->getAudioRecordHandle()->getRoutedDeviceIds();
+    if (port.id != routedDeviceIds[0]) {
         CHECK_OK(BAD_VALUE, "Capture NOT routed on expected port")
     }
     CHECK_OK(getPortByAttributes(AUDIO_PORT_ROLE_SINK, AUDIO_PORT_TYPE_DEVICE,
diff --git a/media/libaudioclient/tests/audiorecord_tests.cpp b/media/libaudioclient/tests/audiorecord_tests.cpp
index f2fee8b..550ce6c 100644
--- a/media/libaudioclient/tests/audiorecord_tests.cpp
+++ b/media/libaudioclient/tests/audiorecord_tests.cpp
@@ -123,12 +123,12 @@
     EXPECT_EQ(OK, mAC->getAudioRecordHandle()->addAudioDeviceCallback(cb));
     EXPECT_EQ(OK, mAC->start()) << "record creation failed";
     EXPECT_EQ(OK, cb->waitForAudioDeviceCb());
-    const auto [oldAudioIo, oldDeviceId] = cbOld->getLastPortAndDevice();
+    const auto [oldAudioIo, oldDeviceIds] = cbOld->getLastPortAndDevices();
     EXPECT_EQ(AUDIO_IO_HANDLE_NONE, oldAudioIo);
-    EXPECT_EQ(AUDIO_PORT_HANDLE_NONE, oldDeviceId);
-    const auto [audioIo, deviceId] = cb->getLastPortAndDevice();
+    EXPECT_TRUE(oldDeviceIds.empty());
+    const auto [audioIo, deviceIds] = cb->getLastPortAndDevices();
     EXPECT_NE(AUDIO_IO_HANDLE_NONE, audioIo);
-    EXPECT_NE(AUDIO_PORT_HANDLE_NONE, deviceId);
+    EXPECT_FALSE(deviceIds.empty());
     EXPECT_EQ(BAD_VALUE, mAC->getAudioRecordHandle()->removeAudioDeviceCallback(nullptr));
     EXPECT_EQ(INVALID_OPERATION, mAC->getAudioRecordHandle()->removeAudioDeviceCallback(cbOld));
     EXPECT_EQ(OK, mAC->getAudioRecordHandle()->removeAudioDeviceCallback(cb));
diff --git a/media/libaudioclient/tests/audiorouting_tests.cpp b/media/libaudioclient/tests/audiorouting_tests.cpp
index a3ab9d2..7957c10 100644
--- a/media/libaudioclient/tests/audiorouting_tests.cpp
+++ b/media/libaudioclient/tests/audiorouting_tests.cpp
@@ -64,8 +64,8 @@
         EXPECT_EQ(OK, ap->start()) << "audio track start failed";
         EXPECT_EQ(OK, ap->onProcess());
         EXPECT_EQ(OK, cb->waitForAudioDeviceCb());
-        const auto [audioIo, deviceId] = cb->getLastPortAndDevice();
-        EXPECT_TRUE(checkPatchPlayback(audioIo, deviceId));
+        const auto [audioIo, deviceIds] = cb->getLastPortAndDevices();
+        EXPECT_TRUE(checkPatchPlayback(audioIo, deviceIds));
         EXPECT_NE(0, ap->getAudioTrackHandle()->getFlags() & output_flags[i]);
         audio_patch patch;
         EXPECT_EQ(OK, getPatchForOutputMix(audioIo, patch));
@@ -127,8 +127,8 @@
     // capture should be routed to submix in port
     EXPECT_EQ(OK, capture->start()) << "start recording failed";
     EXPECT_EQ(OK, cbCapture->waitForAudioDeviceCb());
-    EXPECT_EQ(port.id, capture->getAudioRecordHandle()->getRoutedDeviceId())
-            << "Capture NOT routed on expected port";
+    DeviceIdVector routedDeviceIds = capture->getAudioRecordHandle()->getRoutedDeviceIds();
+    EXPECT_EQ(port.id, routedDeviceIds[0]) << "Capture NOT routed on expected port";
 
     // capture start should create submix out port
     status_t status = getPortByAttributes(AUDIO_PORT_ROLE_SINK, AUDIO_PORT_TYPE_DEVICE,
@@ -138,8 +138,8 @@
     // playback should be routed to submix out as long as capture is active
     EXPECT_EQ(OK, playback->start()) << "audio track start failed";
     EXPECT_EQ(OK, cbPlayback->waitForAudioDeviceCb());
-    EXPECT_EQ(port.id, playback->getAudioTrackHandle()->getRoutedDeviceId())
-            << "Playback NOT routed on expected port";
+    routedDeviceIds = playback->getAudioTrackHandle()->getRoutedDeviceIds();
+    EXPECT_EQ(port.id, routedDeviceIds[0]) << "Playback NOT routed on expected port";
 
     capture->stop();
     playback->stop();
@@ -235,13 +235,13 @@
     // launch
     EXPECT_EQ(OK, captureA->start()) << "start recording failed";
     EXPECT_EQ(OK, cbCaptureA->waitForAudioDeviceCb());
-    EXPECT_EQ(port.id, captureA->getAudioRecordHandle()->getRoutedDeviceId())
-            << "Capture NOT routed on expected port";
+    DeviceIdVector routedDeviceIds = captureA->getAudioRecordHandle()->getRoutedDeviceIds();
+    EXPECT_EQ(port.id, routedDeviceIds[0]) << "Capture NOT routed on expected port";
 
     EXPECT_EQ(OK, captureB->start()) << "start recording failed";
     EXPECT_EQ(OK, cbCaptureB->waitForAudioDeviceCb());
-    EXPECT_EQ(port_mix.id, captureB->getAudioRecordHandle()->getRoutedDeviceId())
-            << "Capture NOT routed on expected port";
+    routedDeviceIds = captureB->getAudioRecordHandle()->getRoutedDeviceIds();
+    EXPECT_EQ(port_mix.id, routedDeviceIds[0]) << "Capture NOT routed on expected port";
 
     // as record started, expect submix out ports to be connected
     status = getPortByAttributes(AUDIO_PORT_ROLE_SINK, AUDIO_PORT_TYPE_DEVICE,
@@ -255,8 +255,8 @@
     // check if playback routed to desired port
     EXPECT_EQ(OK, playback->start());
     EXPECT_EQ(OK, cbPlayback->waitForAudioDeviceCb());
-    EXPECT_EQ(port_mix.id, playback->getAudioTrackHandle()->getRoutedDeviceId())
-            << "Playback NOT routed on expected port";
+    routedDeviceIds = playback->getAudioTrackHandle()->getRoutedDeviceIds();
+    EXPECT_EQ(port_mix.id, routedDeviceIds[0]) << "Playback NOT routed on expected port";
 
     captureB->stop();
 
@@ -282,8 +282,8 @@
     playback->onProcess();
     // as captureA is active, it should re route to legacy submix
     EXPECT_EQ(OK, cbPlayback->waitForAudioDeviceCb(port.id));
-    EXPECT_EQ(port.id, playback->getAudioTrackHandle()->getRoutedDeviceId())
-            << "Playback NOT routed on expected port";
+    routedDeviceIds = playback->getAudioTrackHandle()->getRoutedDeviceIds();
+    EXPECT_EQ(port.id, routedDeviceIds[0]) << "Playback NOT routed on expected port";
 
     captureA->stop();
     playback->stop();
diff --git a/media/libaudioclient/tests/audiosystem_tests.cpp b/media/libaudioclient/tests/audiosystem_tests.cpp
index 742ca48..31cab78 100644
--- a/media/libaudioclient/tests/audiosystem_tests.cpp
+++ b/media/libaudioclient/tests/audiosystem_tests.cpp
@@ -108,7 +108,7 @@
 // UNIT TESTS
 TEST_F(AudioSystemTest, CheckServerSideValues) {
     ASSERT_NO_FATAL_FAILURE(createPlaybackSession());
-    const auto [pbAudioIo, _] = mCbPlayback->getLastPortAndDevice();
+    const auto [pbAudioIo, _] = mCbPlayback->getLastPortAndDevices();
     EXPECT_GT(mAF->sampleRate(pbAudioIo), 0);
     EXPECT_NE(mAF->format(pbAudioIo), AUDIO_FORMAT_INVALID);
     EXPECT_GT(mAF->frameCount(pbAudioIo), 0);
@@ -122,7 +122,7 @@
     EXPECT_LE(mAF->latency(pbAudioIo), mPlayback->getAudioTrackHandle()->latency());
 
     ASSERT_NO_FATAL_FAILURE(createRecordSession());
-    const auto [recAudioIo, __] = mCbRecord->getLastPortAndDevice();
+    const auto [recAudioIo, __] = mCbRecord->getLastPortAndDevices();
     EXPECT_GT(mAF->sampleRate(recAudioIo), 0);
     // EXPECT_NE(mAF->format(recAudioIo), AUDIO_FORMAT_INVALID);
     EXPECT_GT(mAF->frameCount(recAudioIo), 0);
diff --git a/media/libaudioclient/tests/audiotrack_tests.cpp b/media/libaudioclient/tests/audiotrack_tests.cpp
index cf7d926..d283c6c 100644
--- a/media/libaudioclient/tests/audiotrack_tests.cpp
+++ b/media/libaudioclient/tests/audiotrack_tests.cpp
@@ -157,20 +157,21 @@
     EXPECT_EQ(OK, ap->start()) << "audio track start failed";
     EXPECT_EQ(OK, ap->onProcess());
     EXPECT_EQ(OK, cb->waitForAudioDeviceCb());
-    const auto [oldAudioIo, oldDeviceId] = cbOld->getLastPortAndDevice();
+    const auto [oldAudioIo, oldDeviceIds] = cbOld->getLastPortAndDevices();
     EXPECT_EQ(AUDIO_IO_HANDLE_NONE, oldAudioIo);
-    EXPECT_EQ(AUDIO_PORT_HANDLE_NONE, oldDeviceId);
-    const auto [audioIo, deviceId] = cb->getLastPortAndDevice();
+    EXPECT_TRUE(oldDeviceIds.empty());
+    const auto [audioIo, deviceIds] = cb->getLastPortAndDevices();
     EXPECT_NE(AUDIO_IO_HANDLE_NONE, audioIo);
-    EXPECT_NE(AUDIO_PORT_HANDLE_NONE, deviceId);
+    EXPECT_FALSE(deviceIds.empty());
     EXPECT_EQ(audioIo, ap->getAudioTrackHandle()->getOutput());
-    EXPECT_EQ(deviceId, ap->getAudioTrackHandle()->getRoutedDeviceId());
+    DeviceIdVector routedDeviceIds = ap->getAudioTrackHandle()->getRoutedDeviceIds();
+    EXPECT_TRUE(areDeviceIdsEqual(routedDeviceIds, deviceIds));
     String8 keys;
     keys = ap->getAudioTrackHandle()->getParameters(keys);
     if (!keys.empty()) {
         std::cerr << "track parameters :: " << keys << std::endl;
     }
-    EXPECT_TRUE(checkPatchPlayback(audioIo, deviceId));
+    EXPECT_TRUE(checkPatchPlayback(audioIo, deviceIds));
     EXPECT_EQ(BAD_VALUE, ap->getAudioTrackHandle()->removeAudioDeviceCallback(nullptr));
     EXPECT_EQ(INVALID_OPERATION, ap->getAudioTrackHandle()->removeAudioDeviceCallback(cbOld));
     EXPECT_EQ(OK, ap->getAudioTrackHandle()->removeAudioDeviceCallback(cb));
diff --git a/media/libaudiofoundation/AudioContainers.cpp b/media/libaudiofoundation/AudioContainers.cpp
index 3e2066b..f3d295b 100644
--- a/media/libaudiofoundation/AudioContainers.cpp
+++ b/media/libaudiofoundation/AudioContainers.cpp
@@ -151,6 +151,12 @@
     return deviceIds[0];
 }
 
+bool areDeviceIdsEqual(const DeviceIdVector& first, const DeviceIdVector& second) {
+    const std::set<audio_port_handle_t> firstSet(first.begin(), first.end());
+    const std::set<audio_port_handle_t> secondSet(second.begin(), second.end());
+    return firstSet == secondSet;
+}
+
 AudioProfileAttributesMultimap createAudioProfilesAttrMap(audio_profile profiles[],
                                                           uint32_t first,
                                                           uint32_t last) {
diff --git a/media/libaudiofoundation/include/media/AudioContainers.h b/media/libaudiofoundation/include/media/AudioContainers.h
index 8d4665e..b6c0444 100644
--- a/media/libaudiofoundation/include/media/AudioContainers.h
+++ b/media/libaudiofoundation/include/media/AudioContainers.h
@@ -140,16 +140,21 @@
 }
 
 /**
- * Returns human readable string for a set of device ids.
+ * Returns a human-readable string for a vector of device ids.
  */
 std::string toString(const DeviceIdVector& deviceIds);
 
 /**
- * Returns the first device id of a set of device ids or AUDIO_PORT_HANDLE_NONE when its empty.
+ * Returns the first device id of a vector of device ids, or AUDIO_PORT_HANDLE_NONE when it is empty.
  */
 audio_port_handle_t getFirstDeviceId(const DeviceIdVector& deviceIds);
 
 /**
+ * Returns whether two device id vectors contain the same ids, ignoring order and duplicates.
+ */
+bool areDeviceIdsEqual(const DeviceIdVector& first, const DeviceIdVector& second);
+
+/**
  * Create audio profile attributes map by given audio profile array from the range of [first, last).
  *
  * @param profiles the array of audio profiles.
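A small standalone example of the comparison semantics of areDeviceIdsEqual as implemented above: because both vectors are converted to std::set, ordering and duplicate ids are ignored. The type alias is an assumption used to keep the sketch self-contained.

#include <cassert>
#include <cstdint>
#include <set>
#include <vector>

using audio_port_handle_t = int32_t;  // assumption: stand-in for the system typedef
using DeviceIdVector = std::vector<audio_port_handle_t>;

// Mirrors the set-based comparison above: ordering (and duplicates) do not matter.
bool areDeviceIdsEqualSketch(const DeviceIdVector& first, const DeviceIdVector& second) {
    return std::set<audio_port_handle_t>(first.begin(), first.end()) ==
           std::set<audio_port_handle_t>(second.begin(), second.end());
}

int main() {
    assert(areDeviceIdsEqualSketch({1, 2, 3}, {3, 2, 1}));   // order ignored
    assert(areDeviceIdsEqualSketch({1, 1, 2}, {2, 1}));      // duplicates collapse
    assert(!areDeviceIdsEqualSketch({1, 2}, {1, 3}));
    return 0;
}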
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index f5dec56..ddef852 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -206,6 +206,7 @@
     shared_libs: [
         "android.hardware.common-V2-ndk",
         "android.hardware.common.fmq-V1-ndk",
+        "com.android.media.audio-aconfig-cc",
         "libaudio_aidl_conversion_common_cpp",
         "libaudio_aidl_conversion_common_ndk",
         "libaudio_aidl_conversion_common_ndk_cpp",
diff --git a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
index 2753906..ac69b26 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
@@ -25,10 +25,12 @@
 #include <error/expected_utils.h>
 #include <aidl/android/media/audio/common/AudioStreamType.h>
 #include <android/binder_manager.h>
+#include <com_android_media_audio.h>
 #include <media/AidlConversionCppNdk.h>
 #include <media/AidlConversionEffect.h>
 #include <system/audio.h>
 #include <system/audio_aidl_utils.h>
+#include <system/audio_effects/effect_uuid.h>
 #include <utils/Log.h>
 
 #include "AidlUtils.h"
@@ -68,6 +70,7 @@
           std::vector<Descriptor> list;
           if (mFactory) {
               mFactory->queryEffects(std::nullopt, std::nullopt, std::nullopt, &list).isOk();
+              filterHalDescriptors(list);
           }
           return list;
       }()),
@@ -180,6 +183,11 @@
 
     AudioUuid aidlUuid =
             VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid(*uuid));
+    if (!com_android_media_audio_audio_eraser_effect() && isAudioEraser(aidlUuid)) {
+        ALOGE("%s Audio eraser effect not supported yet", __func__);
+        return BAD_VALUE;
+    }
+
     std::shared_ptr<IEffect> aidlEffect;
     // Use EffectProxy interface instead of IFactory to create
     const bool isProxy = isProxyEffect(aidlUuid);
@@ -367,6 +375,23 @@
     return 0;
 }
 
+
+bool EffectsFactoryHalAidl::isAudioEraser(const AudioUuid& uuid) {
+    return uuid == getEffectTypeUuidEraser();
+}
+
+void EffectsFactoryHalAidl::filterHalDescriptors(std::vector<Descriptor>& descs) {
+    if (!com_android_media_audio_audio_eraser_effect()) {
+        descs.erase(std::remove_if(descs.begin(), descs.end(),
+                                   [](const Descriptor& desc) {
+                                       return isAudioEraser(desc.common.id.type);
+                                   }),
+                    descs.end());
+    }
+
+    return;
+}
+
 } // namespace effect
 
 // When a shared library is built from a static library, even explicit
diff --git a/media/libaudiohal/impl/EffectsFactoryHalAidl.h b/media/libaudiohal/impl/EffectsFactoryHalAidl.h
index 3b8628c..a3cd165 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalAidl.h
+++ b/media/libaudiohal/impl/EffectsFactoryHalAidl.h
@@ -94,6 +94,11 @@
             std::vector<effect_descriptor_t>* descriptors);
 
     bool isProxyEffect(const aidl::android::media::audio::common::AudioUuid& uuid) const;
+
+    static bool isAudioEraser(const aidl::android::media::audio::common::AudioUuid& uuid);
+
+    // Filter out descriptors that are not supported by the framework.
+    static void filterHalDescriptors(std::vector<Descriptor>& descs);
 };
 
 } // namespace effect
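A hedged sketch of the flag-gated filtering pattern used by filterHalDescriptors: when the feature flag is off, matching descriptors are dropped with the erase/remove_if idiom. The descriptor struct and flag function below are simplified stand-ins, not the real AIDL types or aconfig API.

#include <algorithm>
#include <string>
#include <vector>

// Illustrative descriptor with just a type id; the real AIDL Descriptor is richer.
struct DescriptorSketch {
    std::string type;
};

// Stand-in for the aconfig flag query (assumption; the real check is
// com_android_media_audio_audio_eraser_effect()).
bool eraserEffectEnabled() { return false; }

// Drops descriptors the framework cannot expose yet, matching the
// erase/remove_if pattern used by filterHalDescriptors above.
void filterDescriptors(std::vector<DescriptorSketch>& descs) {
    if (eraserEffectEnabled()) return;
    descs.erase(std::remove_if(descs.begin(), descs.end(),
                               [](const DescriptorSketch& d) { return d.type == "eraser"; }),
                descs.end());
}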
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index 8a962c6..06dd27b 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -104,8 +104,8 @@
     ],
 
     shared_libs: [
-        "android.hidl.token@1.0-utils",
         "android.hardware.media.omx@1.0",
+        "android.hidl.token@1.0-utils",
         "libbinder",
         "libcutils",
         "libhidlbase",
@@ -116,8 +116,8 @@
     ],
 
     export_shared_lib_headers: [
-        "android.hidl.token@1.0-utils",
         "android.hardware.media.omx@1.0",
+        "android.hidl.token@1.0-utils",
         "libstagefright_foundation",
         "libui",
     ],
@@ -138,15 +138,15 @@
     ],
 
     cflags: [
+        "-Wall",
         "-Werror",
         "-Wno-error=deprecated-declarations",
-        "-Wall",
     ],
 
     sanitize: {
         misc_undefined: [
-            "unsigned-integer-overflow",
             "signed-integer-overflow",
+            "unsigned-integer-overflow",
         ],
         cfi: true,
     },
@@ -197,15 +197,15 @@
     ],
 
     cflags: [
+        "-Wall",
         "-Werror",
         "-Wno-error=deprecated-declarations",
-        "-Wall",
     ],
 
     sanitize: {
         misc_undefined: [
-            "unsigned-integer-overflow",
             "signed-integer-overflow",
+            "unsigned-integer-overflow",
         ],
         cfi: true,
     },
@@ -232,15 +232,15 @@
     ],
 
     cflags: [
+        "-Wall",
         "-Werror",
         "-Wno-error=deprecated-declarations",
-        "-Wall",
     ],
 
     sanitize: {
         misc_undefined: [
-            "unsigned-integer-overflow",
             "signed-integer-overflow",
+            "unsigned-integer-overflow",
         ],
         cfi: true,
     },
@@ -279,15 +279,15 @@
     ],
 
     cflags: [
+        "-Wall",
         "-Werror",
         "-Wno-error=deprecated-declarations",
-        "-Wall",
     ],
 
     sanitize: {
         misc_undefined: [
-            "unsigned-integer-overflow",
             "signed-integer-overflow",
+            "unsigned-integer-overflow",
         ],
         cfi: true,
     },
@@ -323,15 +323,15 @@
     ],
 
     cflags: [
+        "-Wall",
         "-Werror",
         "-Wno-error=deprecated-declarations",
-        "-Wall",
     ],
 
     sanitize: {
         misc_undefined: [
-            "unsigned-integer-overflow",
             "signed-integer-overflow",
+            "unsigned-integer-overflow",
         ],
         cfi: true,
     },
@@ -346,35 +346,35 @@
 
     srcs: [
         ":mediaextractorservice_aidl",
-        "IDataSource.cpp",
         "BufferingSettings.cpp",
-        "mediaplayer.cpp",
+        "CharacterEncodingDetector.cpp",
+        "IDataSource.cpp",
+        "IMediaDeathNotifier.cpp",
+        "IMediaExtractor.cpp",
         "IMediaHTTPConnection.cpp",
         "IMediaHTTPService.cpp",
-        "IMediaExtractor.cpp",
-        "IMediaPlayerService.cpp",
-        "IMediaPlayerClient.cpp",
-        "IMediaRecorderClient.cpp",
+        "IMediaMetadataRetriever.cpp",
         "IMediaPlayer.cpp",
+        "IMediaPlayerClient.cpp",
+        "IMediaPlayerService.cpp",
         "IMediaRecorder.cpp",
+        "IMediaRecorderClient.cpp",
         "IMediaSource.cpp",
         "IRemoteDisplay.cpp",
         "IRemoteDisplayClient.cpp",
         "IStreamSource.cpp",
-        "Metadata.cpp",
-        "mediarecorder.cpp",
-        "IMediaMetadataRetriever.cpp",
-        "mediametadataretriever.cpp",
-        "MediaScanner.cpp",
-        "MediaScannerClient.cpp",
-        "CharacterEncodingDetector.cpp",
-        "IMediaDeathNotifier.cpp",
         "MediaProfiles.cpp",
         "MediaResource.cpp",
         "MediaResourcePolicy.cpp",
-        "StringArray.cpp",
-        "NdkMediaFormatPriv.cpp",
+        "MediaScanner.cpp",
+        "MediaScannerClient.cpp",
+        "Metadata.cpp",
         "NdkMediaErrorPriv.cpp",
+        "NdkMediaFormatPriv.cpp",
+        "StringArray.cpp",
+        "mediametadataretriever.cpp",
+        "mediaplayer.cpp",
+        "mediarecorder.cpp",
     ],
 
     aidl: {
@@ -383,55 +383,57 @@
     },
 
     header_libs: [
+        "jni_headers",
         "libstagefright_headers",
         "media_ndk_headers",
-        "jni_headers",
     ],
 
     export_header_lib_headers: [
+        "jni_headers",
         "libstagefright_headers",
         "media_ndk_headers",
-        "jni_headers",
     ],
 
     shared_libs: [
         "android.hidl.token@1.0-utils",
         "audioclient-types-aidl-cpp",
         "av-types-aidl-cpp",
-        "liblog",
-        "libcutils",
-        "libutils",
         "libbinder",
         "libbinder_ndk",
         //"libsonivox",
+        "libcutils",
+        "liblog",
+        "libutils",
+        "framework-permission-aidl-cpp",
         "libandroidicu",
-        "libexpat",
-        "libcamera_client",
-        "libstagefright_foundation",
-        "libgui",
-        "libdl",
         "libaudioclient",
+        "libaudiofoundation",
+        "libcamera_client",
+        "libdl",
+        "libexpat",
+        "libgui",
         "libmedia_codeclist",
         "libmedia_omx",
-        "framework-permission-aidl-cpp",
+        "libstagefright_foundation",
     ],
 
     export_shared_lib_headers: [
         "libaudioclient",
+        "libaudiofoundation",
         "libbinder",
         //"libsonivox",
-        "libmedia_omx",
         "framework-permission-aidl-cpp",
+        "libmedia_omx",
     ],
 
     static_libs: [
-        "resourcemanager_aidl_interface-ndk",
         "framework-permission-aidl-cpp",
+        "resourcemanager_aidl_interface-ndk",
     ],
 
     export_static_lib_headers: [
-        "resourcemanager_aidl_interface-ndk",
         "framework-permission-aidl-cpp",
+        "resourcemanager_aidl_interface-ndk",
     ],
 
     export_include_dirs: [
@@ -439,17 +441,17 @@
     ],
 
     cflags: [
+        "-Wall",
         "-Werror",
         "-Wno-error=deprecated-declarations",
-        "-Wall",
     ],
 
     version_script: "exports.lds",
 
     sanitize: {
         misc_undefined: [
-            "unsigned-integer-overflow",
             "signed-integer-overflow",
+            "unsigned-integer-overflow",
         ],
         cfi: true,
     },
@@ -461,8 +463,8 @@
     host_supported: true,
 
     srcs: [
-        "NdkMediaFormatPriv.cpp",
         "NdkMediaErrorPriv.cpp",
+        "NdkMediaFormatPriv.cpp",
     ],
 
     header_libs: [
@@ -473,8 +475,8 @@
 
     cflags: [
         "-DEXPORT=__attribute__((visibility(\"default\")))",
-        "-Werror",
         "-Wall",
+        "-Werror",
     ],
 
     export_include_dirs: ["include"],
diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp
index c9f361e..4967dda 100644
--- a/media/libmedia/IMediaPlayer.cpp
+++ b/media/libmedia/IMediaPlayer.cpp
@@ -567,23 +567,24 @@
         return reply.readInt32();
     }
 
-    status_t getRoutedDeviceId(audio_port_handle_t* deviceId)
+    status_t getRoutedDeviceIds(DeviceIdVector& deviceIds)
     {
         Parcel data, reply;
         data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
+        deviceIds.clear();
 
-        status_t status = remote()->transact(GET_ROUTED_DEVICE_ID, data, &reply);
+        status_t status = remote()->transact(GET_ROUTED_DEVICE_IDS, data, &reply);
         if (status != OK) {
-            ALOGE("getRoutedDeviceid: binder call failed: %d", status);
-            *deviceId = AUDIO_PORT_HANDLE_NONE;
+            ALOGE("getRoutedDeviceIds: binder call failed: %d", status);
             return status;
         }
 
         status = reply.readInt32();
-        if (status != NO_ERROR) {
-            *deviceId = AUDIO_PORT_HANDLE_NONE;
-        } else {
-            *deviceId = reply.readInt32();
+        if (status == NO_ERROR) {
+            int size = reply.readInt32();
+            for (int i = 0; i < size; i++) {
+                deviceIds.push_back(reply.readInt32());
+            }
         }
         return status;
     }
@@ -983,13 +984,16 @@
             }
             return NO_ERROR;
         }
-        case GET_ROUTED_DEVICE_ID: {
+        case GET_ROUTED_DEVICE_IDS: {
             CHECK_INTERFACE(IMediaPlayer, data, reply);
-            audio_port_handle_t deviceId;
-            status_t ret = getRoutedDeviceId(&deviceId);
+            DeviceIdVector deviceIds;
+            status_t ret = getRoutedDeviceIds(deviceIds);
             reply->writeInt32(ret);
             if (ret == NO_ERROR) {
-                reply->writeInt32(deviceId);
+                reply->writeInt32(deviceIds.size());
+                for (auto deviceId : deviceIds) {
+                    reply->writeInt32(deviceId);
+                }
             }
             return NO_ERROR;
         } break;
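The parceling convention adopted here (and mirrored in IMediaRecorder below) is a length-prefixed list: the writer emits the element count and then each id, and the reader does the symmetric loop. A toy sketch of that layout, assuming a simplified parcel type rather than android::Parcel:

#include <cstddef>
#include <cstdint>
#include <vector>

using DeviceIdVector = std::vector<int32_t>;  // assumption: int32_t stands in for audio_port_handle_t

// Toy "parcel" used only to illustrate the wire layout; the real code uses android::Parcel.
struct ParcelSketch {
    std::vector<int32_t> ints;
    size_t readPos = 0;
    void writeInt32(int32_t v) { ints.push_back(v); }
    int32_t readInt32() { return ints[readPos++]; }
};

void writeDeviceIds(ParcelSketch& reply, const DeviceIdVector& deviceIds) {
    reply.writeInt32(static_cast<int32_t>(deviceIds.size()));  // count first
    for (auto id : deviceIds) {
        reply.writeInt32(id);                                  // then each id
    }
}

DeviceIdVector readDeviceIds(ParcelSketch& reply) {
    DeviceIdVector deviceIds;
    const int32_t size = reply.readInt32();
    for (int32_t i = 0; i < size; i++) {
        deviceIds.push_back(reply.readInt32());
    }
    return deviceIds;
}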
diff --git a/media/libmedia/IMediaRecorder.cpp b/media/libmedia/IMediaRecorder.cpp
index 755a147..1f04217 100644
--- a/media/libmedia/IMediaRecorder.cpp
+++ b/media/libmedia/IMediaRecorder.cpp
@@ -62,7 +62,7 @@
     RESUME,
     GET_METRICS,
     SET_INPUT_DEVICE,
-    GET_ROUTED_DEVICE_ID,
+    GET_ROUTED_DEVICE_IDS,
     ENABLE_AUDIO_DEVICE_CALLBACK,
     GET_ACTIVE_MICROPHONES,
     GET_PORT_ID,
@@ -392,24 +392,24 @@
         return reply.readInt32();
     }
 
-    audio_port_handle_t getRoutedDeviceId(audio_port_handle_t *deviceId)
+    status_t getRoutedDeviceIds(DeviceIdVector& deviceIds)
     {
-        ALOGV("getRoutedDeviceId");
         Parcel data, reply;
         data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor());
+        deviceIds.clear();
 
-        status_t status = remote()->transact(GET_ROUTED_DEVICE_ID, data, &reply);
+        status_t status = remote()->transact(GET_ROUTED_DEVICE_IDS, data, &reply);
         if (status != OK) {
-            ALOGE("getRoutedDeviceid binder call failed: %d", status);
-            *deviceId = AUDIO_PORT_HANDLE_NONE;
+            ALOGE("getRoutedDeviceIds: binder call failed: %d", status);
             return status;
         }
 
         status = reply.readInt32();
-        if (status != NO_ERROR) {
-            *deviceId = AUDIO_PORT_HANDLE_NONE;
-        } else {
-            *deviceId = reply.readInt32();
+        if (status == NO_ERROR) {
+            int size = reply.readInt32();
+            for (int i = 0; i < size; i++) {
+                deviceIds.push_back(reply.readInt32());
+            }
         }
         return status;
     }
@@ -730,14 +730,17 @@
             }
             return NO_ERROR;
         } break;
-        case GET_ROUTED_DEVICE_ID: {
-            ALOGV("GET_ROUTED_DEVICE_ID");
+        case GET_ROUTED_DEVICE_IDS: {
+            ALOGV("GET_ROUTED_DEVICE_IDS");
             CHECK_INTERFACE(IMediaRecorder, data, reply);
-            audio_port_handle_t deviceId;
-            status_t status = getRoutedDeviceId(&deviceId);
-            reply->writeInt32(status);
-            if (status == NO_ERROR) {
-                reply->writeInt32(deviceId);
+            DeviceIdVector deviceIds;
+            status_t ret = getRoutedDeviceIds(deviceIds);
+            reply->writeInt32(ret);
+            if (ret == NO_ERROR) {
+                reply->writeInt32(deviceIds.size());
+                for (auto deviceId : deviceIds) {
+                    reply->writeInt32(deviceId);
+                }
             }
             return NO_ERROR;
         } break;
diff --git a/media/libmedia/include/media/IMediaPlayer.h b/media/libmedia/include/media/IMediaPlayer.h
index 28684d1..4c6f32c 100644
--- a/media/libmedia/include/media/IMediaPlayer.h
+++ b/media/libmedia/include/media/IMediaPlayer.h
@@ -23,6 +23,7 @@
 #include <utils/KeyedVector.h>
 #include <system/audio.h>
 
+#include <media/AudioContainers.h>
 #include <media/AudioResamplerPublic.h>
 #include <media/stagefright/MediaSource.h>
 #include <media/VolumeShaper.h>
@@ -135,7 +136,7 @@
 
     // AudioRouting
     virtual status_t        setOutputDevice(audio_port_handle_t deviceId) = 0;
-    virtual status_t        getRoutedDeviceId(audio_port_handle_t *deviceId) = 0;
+    virtual status_t        getRoutedDeviceIds(DeviceIdVector& deviceIds) = 0;
     virtual status_t        enableAudioDeviceCallback(bool enabled) = 0;
 protected:
 
@@ -184,7 +185,7 @@
         RELEASE_DRM,
         // AudioRouting
         SET_OUTPUT_DEVICE,
-        GET_ROUTED_DEVICE_ID,
+        GET_ROUTED_DEVICE_IDS,
         ENABLE_AUDIO_DEVICE_CALLBACK,
     };
 };
diff --git a/media/libmedia/include/media/IMediaRecorder.h b/media/libmedia/include/media/IMediaRecorder.h
index 05da5c2..8411ca7 100644
--- a/media/libmedia/include/media/IMediaRecorder.h
+++ b/media/libmedia/include/media/IMediaRecorder.h
@@ -20,6 +20,7 @@
 
 #include <android/media/MicrophoneInfoFw.h>
 #include <binder/IInterface.h>
+#include <media/AudioContainers.h>
 #include <system/audio.h>
 #include <vector>
 
@@ -71,7 +72,7 @@
     virtual sp<IGraphicBufferProducer> querySurfaceMediaSource() = 0;
 
     virtual status_t setInputDevice(audio_port_handle_t deviceId) = 0;
-    virtual status_t getRoutedDeviceId(audio_port_handle_t *deviceId) = 0;
+    virtual status_t getRoutedDeviceIds(DeviceIdVector& deviceIds) = 0;
     virtual status_t enableAudioDeviceCallback(bool enabled) = 0;
     virtual status_t getActiveMicrophones(
                         std::vector<media::MicrophoneInfoFw>* activeMicrophones) = 0;
diff --git a/media/libmedia/include/media/MediaRecorderBase.h b/media/libmedia/include/media/MediaRecorderBase.h
index 82ec9c5..e3698e3 100644
--- a/media/libmedia/include/media/MediaRecorderBase.h
+++ b/media/libmedia/include/media/MediaRecorderBase.h
@@ -69,7 +69,7 @@
     virtual status_t setInputSurface(const sp<PersistentSurface>& surface) = 0;
     virtual sp<IGraphicBufferProducer> querySurfaceMediaSource() const = 0;
     virtual status_t setInputDevice(audio_port_handle_t deviceId) = 0;
-    virtual status_t getRoutedDeviceId(audio_port_handle_t* deviceId) = 0;
+    virtual status_t getRoutedDeviceIds(DeviceIdVector& deviceIds) = 0;
     virtual void setAudioDeviceCallback(const sp<AudioSystem::AudioDeviceCallback>& callback) = 0;
     virtual status_t enableAudioDeviceCallback(bool enabled) = 0;
     virtual status_t getActiveMicrophones(
diff --git a/media/libmedia/include/media/mediaplayer.h b/media/libmedia/include/media/mediaplayer.h
index 2f9b85e..7c612c3 100644
--- a/media/libmedia/include/media/mediaplayer.h
+++ b/media/libmedia/include/media/mediaplayer.h
@@ -281,7 +281,7 @@
             status_t        releaseDrm();
             // AudioRouting
             status_t        setOutputDevice(audio_port_handle_t deviceId);
-            audio_port_handle_t getRoutedDeviceId();
+            status_t        getRoutedDeviceIds(DeviceIdVector& deviceIds);
             status_t        enableAudioDeviceCallback(bool enabled);
 
 private:
diff --git a/media/libmedia/include/media/mediarecorder.h b/media/libmedia/include/media/mediarecorder.h
index 602f72e..1377d61 100644
--- a/media/libmedia/include/media/mediarecorder.h
+++ b/media/libmedia/include/media/mediarecorder.h
@@ -22,6 +22,7 @@
 #include <utils/threads.h>
 #include <utils/List.h>
 #include <utils/Errors.h>
+#include <media/AudioContainers.h>
 #include <media/IMediaRecorderClient.h>
 #include <media/IMediaDeathNotifier.h>
 #include <android/media/MicrophoneInfoFw.h>
@@ -266,7 +267,7 @@
     sp<IGraphicBufferProducer>     querySurfaceMediaSourceFromMediaServer();
     status_t    getMetrics(Parcel *reply);
     status_t    setInputDevice(audio_port_handle_t deviceId);
-    status_t    getRoutedDeviceId(audio_port_handle_t *deviceId);
+    status_t    getRoutedDeviceIds(DeviceIdVector& deviceIds);
     status_t    enableAudioDeviceCallback(bool enabled);
     status_t    getActiveMicrophones(std::vector<media::MicrophoneInfoFw>* activeMicrophones);
     status_t    setPreferredMicrophoneDirection(audio_microphone_direction_t direction);
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index b5c75b3..9d3fce7 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -1105,19 +1105,14 @@
     return mPlayer->setOutputDevice(deviceId);
 }
 
-audio_port_handle_t MediaPlayer::getRoutedDeviceId()
+status_t MediaPlayer::getRoutedDeviceIds(DeviceIdVector& deviceIds)
 {
     Mutex::Autolock _l(mLock);
     if (mPlayer == NULL) {
-        ALOGV("getRoutedDeviceId: player not init");
-        return AUDIO_PORT_HANDLE_NONE;
+        ALOGV("getRoutedDeviceIds: player not init");
+        return NO_INIT;
     }
-    audio_port_handle_t deviceId;
-    status_t status = mPlayer->getRoutedDeviceId(&deviceId);
-    if (status != NO_ERROR) {
-        return AUDIO_PORT_HANDLE_NONE;
-    }
-    return deviceId;
+    return mPlayer->getRoutedDeviceIds(deviceIds);
 }
 
 status_t MediaPlayer::enableAudioDeviceCallback(bool enabled)
diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp
index 48f5e4b..e676d5a 100644
--- a/media/libmedia/mediarecorder.cpp
+++ b/media/libmedia/mediarecorder.cpp
@@ -858,17 +858,17 @@
     return mMediaRecorder->setInputDevice(deviceId);
 }
 
-status_t MediaRecorder::getRoutedDeviceId(audio_port_handle_t* deviceId)
+status_t MediaRecorder::getRoutedDeviceIds(DeviceIdVector& deviceIds)
 {
-    ALOGV("getRoutedDeviceId");
+    ALOGV("getRoutedDeviceIds");
 
     if (mMediaRecorder == NULL) {
         ALOGE("media recorder is not initialized yet");
         return INVALID_OPERATION;
     }
-    status_t status = mMediaRecorder->getRoutedDeviceId(deviceId);
+    status_t status = mMediaRecorder->getRoutedDeviceIds(deviceIds);
     if (status != NO_ERROR) {
-        *deviceId = AUDIO_PORT_HANDLE_NONE;
+        deviceIds.clear();
     }
     return status;
 }
diff --git a/media/libmediametrics/include/MediaMetricsConstants.h b/media/libmediametrics/include/MediaMetricsConstants.h
index 98c3382..a7b2077 100644
--- a/media/libmediametrics/include/MediaMetricsConstants.h
+++ b/media/libmediametrics/include/MediaMetricsConstants.h
@@ -183,6 +183,7 @@
 #define AMEDIAMETRICS_PROP_PLAYBACK_SPEED "playback.speed" // double value (AudioTrack)
 #define AMEDIAMETRICS_PROP_PLAYERIID      "playerIId"      // int32 (-1 invalid/unset IID)
 #define AMEDIAMETRICS_PROP_ROUTEDDEVICEID "routedDeviceId" // int32
+#define AMEDIAMETRICS_PROP_ROUTEDDEVICEIDS "routedDeviceIds" // string value
 #define AMEDIAMETRICS_PROP_SAMPLERATE     "sampleRate"     // int32
 #define AMEDIAMETRICS_PROP_SAMPLERATECLIENT "sampleRateClient" // int32
 #define AMEDIAMETRICS_PROP_SAMPLERATEHARDWARE "sampleRateHardware" // int32
diff --git a/media/libmediaplayerservice/Android.bp b/media/libmediaplayerservice/Android.bp
index a10c509..1d493e2 100644
--- a/media/libmediaplayerservice/Android.bp
+++ b/media/libmediaplayerservice/Android.bp
@@ -46,13 +46,14 @@
         "av-types-aidl-cpp",
         "framework-permission-aidl-cpp",
         "libaconfig_storage_read_api_cc",
-        "libaudioclient_aidl_conversion",
-        "libbase",
-        "libbinder_ndk",
         "libactivitymanager_aidl",
         "libandroid_net",
         "libaudioclient",
+        "libaudioclient_aidl_conversion",
+        "libaudiofoundation",
+        "libbase",
         "libbinder",
+        "libbinder_ndk",
         "libcamera_client",
         "libcodec2_client",
         "libcrypto",
@@ -81,25 +82,25 @@
     ],
 
     header_libs: [
-        "media_plugin_headers",
         "libmediautils_headers",
         "libstagefright_rtsp_headers",
         "libstagefright_webm_headers",
+        "media_plugin_headers",
     ],
 
     static_libs: [
         "com.android.media.flags.editing-aconfig-cc",
+        "framework-permission-aidl-cpp",
         "libplayerservice_datasource",
         "libstagefright_nuplayer",
         "libstagefright_rtsp",
         "libstagefright_timedtext",
-        "framework-permission-aidl-cpp",
     ],
 
     cflags: [
+        "-Wall",
         "-Werror",
         "-Wno-error=deprecated-declarations",
-        "-Wall",
     ],
 
     sanitize: {
@@ -115,8 +116,8 @@
     ],
 
     export_shared_lib_headers: [
-        "libmedia",
         "framework-permission-aidl-cpp",
+        "libmedia",
     ],
 
     export_header_lib_headers: [
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index b267c08..0067344 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -856,10 +856,13 @@
 
 void MediaPlayerService::Client::AudioDeviceUpdatedNotifier::onAudioDeviceUpdate(
         audio_io_handle_t audioIo,
-        audio_port_handle_t deviceId) {
+        const DeviceIdVector& deviceIds) {
+    ALOGD("onAudioDeviceUpdate deviceIds: %s", toString(deviceIds).c_str());
     sp<MediaPlayerBase> listener = mListener.promote();
     if (listener != NULL) {
-        listener->sendEvent(MEDIA_AUDIO_ROUTING_CHANGED, audioIo, deviceId);
+        // Java should query the new device ids once it gets the event.
+        // TODO(b/378505346): Pass the deviceIds to Java to avoid race conditions.
+        listener->sendEvent(MEDIA_AUDIO_ROUTING_CHANGED, audioIo);
     } else {
         ALOGW("listener for process %d death is gone", MEDIA_AUDIO_ROUTING_CHANGED);
     }
@@ -1750,13 +1753,13 @@
     return NO_INIT;
 }
 
-status_t MediaPlayerService::Client::getRoutedDeviceId(audio_port_handle_t* deviceId)
+status_t MediaPlayerService::Client::getRoutedDeviceIds(DeviceIdVector& deviceIds)
 {
-    ALOGV("[%d] getRoutedDeviceId", mConnId);
+    ALOGV("[%d] getRoutedDeviceIds", mConnId);
     {
         Mutex::Autolock l(mLock);
         if (mAudioOutput.get() != nullptr) {
-            return mAudioOutput->getRoutedDeviceId(deviceId);
+            return mAudioOutput->getRoutedDeviceIds(deviceIds);
         }
     }
     return NO_INIT;
@@ -1830,7 +1833,6 @@
       mFlags(AUDIO_OUTPUT_FLAG_NONE),
       mVolumeHandler(new media::VolumeHandler()),
       mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
-      mRoutedDeviceId(AUDIO_PORT_HANDLE_NONE),
       mDeviceCallbackEnabled(false),
       mDeviceCallback(deviceCallback)
 {
@@ -2604,14 +2606,14 @@
     return NO_ERROR;
 }
 
-status_t MediaPlayerService::AudioOutput::getRoutedDeviceId(audio_port_handle_t* deviceId)
+status_t MediaPlayerService::AudioOutput::getRoutedDeviceIds(DeviceIdVector& deviceIds)
 {
-    ALOGV("getRoutedDeviceId");
+    ALOGV("getRoutedDeviceIds");
     Mutex::Autolock lock(mLock);
     if (mTrack != 0) {
-        mRoutedDeviceId = mTrack->getRoutedDeviceId();
+        mRoutedDeviceIds = mTrack->getRoutedDeviceIds();
     }
-    *deviceId = mRoutedDeviceId;
+    deviceIds = mRoutedDeviceIds;
     return NO_ERROR;
 }
 
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index 76b7bcf..497ef79 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -28,6 +28,7 @@
 #include <utils/Vector.h>
 
 #include <media/AidlConversion.h>
+#include <media/AudioContainers.h>
 #include <media/AudioResamplerPublic.h>
 #include <media/AudioSystem.h>
 #include <media/AudioTrack.h>
@@ -148,7 +149,7 @@
 
         // AudioRouting
         virtual status_t        setOutputDevice(audio_port_handle_t deviceId);
-        virtual status_t        getRoutedDeviceId(audio_port_handle_t* deviceId);
+        virtual status_t        getRoutedDeviceIds(DeviceIdVector& deviceIds);
         virtual status_t        enableAudioDeviceCallback(bool enabled);
 
     private:
@@ -181,7 +182,7 @@
         audio_output_flags_t    mFlags;
         sp<media::VolumeHandler>       mVolumeHandler;
         audio_port_handle_t     mSelectedDeviceId;
-        audio_port_handle_t     mRoutedDeviceId;
+        DeviceIdVector          mRoutedDeviceIds;
         bool                    mDeviceCallbackEnabled;
         wp<AudioSystem::AudioDeviceCallback>        mDeviceCallback;
         mutable Mutex           mLock;
@@ -401,7 +402,7 @@
         virtual status_t releaseDrm();
         // AudioRouting
         virtual status_t setOutputDevice(audio_port_handle_t deviceId);
-        virtual status_t getRoutedDeviceId(audio_port_handle_t* deviceId);
+        virtual status_t getRoutedDeviceIds(DeviceIdVector& deviceIds);
         virtual status_t enableAudioDeviceCallback(bool enabled);
 
     private:
@@ -414,7 +415,7 @@
             ~AudioDeviceUpdatedNotifier() {}
 
             virtual void onAudioDeviceUpdate(audio_io_handle_t audioIo,
-                                             audio_port_handle_t deviceId);
+                                             const DeviceIdVector& deviceIds);
 
         private:
             wp<MediaPlayerBase> mListener;
diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp
index ed3ec89..53f4e61 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.cpp
+++ b/media/libmediaplayerservice/MediaRecorderClient.cpp
@@ -409,10 +409,13 @@
 
 void MediaRecorderClient::AudioDeviceUpdatedNotifier::onAudioDeviceUpdate(
         audio_io_handle_t audioIo,
-        audio_port_handle_t deviceId) {
+        const DeviceIdVector& deviceIds) {
+    ALOGD("onAudioDeviceUpdate deviceIds: %s", toString(deviceIds).c_str());
     sp<IMediaRecorderClient> listener = mListener.promote();
     if (listener != NULL) {
-        listener->notify(MEDIA_RECORDER_AUDIO_ROUTING_CHANGED, audioIo, deviceId);
+        // Java should query the new device ids once it gets the event.
+        // TODO(b/378505346): Pass the deviceIds to Java to avoid race conditions.
+        listener->notify(MEDIA_RECORDER_AUDIO_ROUTING_CHANGED, audioIo, 0 /*ext2*/);
     } else {
         ALOGW("listener for process %d death is gone", MEDIA_RECORDER_AUDIO_ROUTING_CHANGED);
     }
@@ -550,11 +553,11 @@
     return NO_INIT;
 }
 
-status_t MediaRecorderClient::getRoutedDeviceId(audio_port_handle_t* deviceId) {
-    ALOGV("getRoutedDeviceId");
+status_t MediaRecorderClient::getRoutedDeviceIds(DeviceIdVector& deviceIds) {
+    ALOGV("getRoutedDeviceIds");
     Mutex::Autolock lock(mLock);
     if (mRecorder != NULL) {
-        return mRecorder->getRoutedDeviceId(deviceId);
+        return mRecorder->getRoutedDeviceIds(deviceIds);
     }
     return NO_INIT;
 }
diff --git a/media/libmediaplayerservice/MediaRecorderClient.h b/media/libmediaplayerservice/MediaRecorderClient.h
index dec0c99..3b9ab07 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.h
+++ b/media/libmediaplayerservice/MediaRecorderClient.h
@@ -41,7 +41,7 @@
         virtual ~AudioDeviceUpdatedNotifier();
         virtual void onAudioDeviceUpdate(
                 audio_io_handle_t audioIo,
-                audio_port_handle_t deviceId);
+                const DeviceIdVector& deviceIds);
     private:
         wp<IMediaRecorderClient> mListener;
     };
@@ -80,7 +80,7 @@
     virtual     status_t   setInputSurface(const sp<PersistentSurface>& surface);
     virtual     sp<IGraphicBufferProducer> querySurfaceMediaSource();
     virtual     status_t   setInputDevice(audio_port_handle_t deviceId);
-    virtual     status_t   getRoutedDeviceId(audio_port_handle_t* deviceId);
+    virtual     status_t   getRoutedDeviceIds(DeviceIdVector& deviceIds);
     virtual     status_t   enableAudioDeviceCallback(bool enabled);
     virtual     status_t   getActiveMicrophones(
                               std::vector<media::MicrophoneInfoFw>* activeMicrophones);
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 23e7a47..fa42da2 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -2595,11 +2595,11 @@
     return NO_ERROR;
 }
 
-status_t StagefrightRecorder::getRoutedDeviceId(audio_port_handle_t* deviceId) {
-    ALOGV("getRoutedDeviceId");
+status_t StagefrightRecorder::getRoutedDeviceIds(DeviceIdVector& deviceIds) {
+    ALOGV("getRoutedDeviceIds");
 
     if (mAudioSourceNode != 0) {
-        status_t status = mAudioSourceNode->getRoutedDeviceId(deviceId);
+        status_t status = mAudioSourceNode->getRoutedDeviceIds(deviceIds);
         return status;
     }
     return NO_INIT;
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index 0b6a5bb..4c5e62f 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -80,7 +80,7 @@
     // Querying a SurfaceMediaSourcer
     virtual sp<IGraphicBufferProducer> querySurfaceMediaSource() const;
     virtual status_t setInputDevice(audio_port_handle_t deviceId);
-    virtual status_t getRoutedDeviceId(audio_port_handle_t* deviceId);
+    virtual status_t getRoutedDeviceIds(DeviceIdVector& deviceIds);
     virtual void setAudioDeviceCallback(const sp<AudioSystem::AudioDeviceCallback>& callback);
     virtual status_t enableAudioDeviceCallback(bool enabled);
     virtual status_t getActiveMicrophones(std::vector<media::MicrophoneInfoFw>* activeMicrophones);
diff --git a/media/libmediaplayerservice/fuzzer/Android.bp b/media/libmediaplayerservice/fuzzer/Android.bp
index fcdaff9..a3285ee 100644
--- a/media/libmediaplayerservice/fuzzer/Android.bp
+++ b/media/libmediaplayerservice/fuzzer/Android.bp
@@ -33,6 +33,7 @@
         "liblog",
     ],
     shared_libs: [
+        "camera_platform_flags_c_lib",
         "framework-permission-aidl-cpp",
         "libbinder",
         "libbinder_ndk",
@@ -81,6 +82,7 @@
         "libactivitymanager_aidl",
         "libandroid_net",
         "libaudioflinger",
+        "libaudiofoundation",
         "libcamera_client",
         "libcodec2_client",
         "libcrypto",
@@ -159,6 +161,7 @@
         "libactivitymanager_aidl",
         "libandroid_net",
         "libaudioclient",
+        "libaudiofoundation",
         "libcamera_client",
         "libcodec2_client",
         "libcrypto",
diff --git a/media/libmediaplayerservice/fuzzer/mediaplayer_fuzzer.cpp b/media/libmediaplayerservice/fuzzer/mediaplayer_fuzzer.cpp
index 15265bf..a52d751 100644
--- a/media/libmediaplayerservice/fuzzer/mediaplayer_fuzzer.cpp
+++ b/media/libmediaplayerservice/fuzzer/mediaplayer_fuzzer.cpp
@@ -482,8 +482,8 @@
                     mMediaPlayer->setOutputDevice(deviceId);
                 },
                 [&]() {
-                    audio_port_handle_t deviceId;
-                    mMediaPlayer->getRoutedDeviceId(&deviceId);
+                    DeviceIdVector deviceIds;
+                    mMediaPlayer->getRoutedDeviceIds(deviceIds);
                 },
                 [&]() { mMediaPlayer->enableAudioDeviceCallback(mFdp.ConsumeBool()); },
                 [&]() {
diff --git a/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp b/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp
index 3339ae8..b95cae7 100644
--- a/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp
+++ b/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp
@@ -116,7 +116,7 @@
     virtual ~TestAudioDeviceCallback() = default;
 
     void onAudioDeviceUpdate(audio_io_handle_t /*audioIo*/,
-                             audio_port_handle_t /*deviceId*/) override{};
+                             const DeviceIdVector& /*deviceIds*/) override{};
 };
 
 class TestCamera : public ICamera {
@@ -185,8 +185,8 @@
     int32_t max;
     mStfRecorder->getMaxAmplitude(&max);
 
-    int32_t deviceId;
-    mStfRecorder->getRoutedDeviceId(&deviceId);
+    DeviceIdVector deviceIds;
+    mStfRecorder->getRoutedDeviceIds(deviceIds);
 
     vector<android::media::MicrophoneInfoFw> activeMicrophones{};
     mStfRecorder->getActiveMicrophones(&activeMicrophones);
diff --git a/media/libmediaplayerservice/include/MediaPlayerInterface.h b/media/libmediaplayerservice/include/MediaPlayerInterface.h
index 495cf00..9fe0e95 100644
--- a/media/libmediaplayerservice/include/MediaPlayerInterface.h
+++ b/media/libmediaplayerservice/include/MediaPlayerInterface.h
@@ -26,6 +26,7 @@
 #include <utils/RefBase.h>
 
 #include <media/mediaplayer.h>
+#include <media/AudioContainers.h>
 #include <media/AudioResamplerPublic.h>
 #include <media/AudioTimestamp.h>
 #include <media/AVSyncSettings.h>
@@ -185,7 +186,7 @@
 
         // AudioRouting
         virtual status_t    setOutputDevice(audio_port_handle_t deviceId) = 0;
-        virtual status_t    getRoutedDeviceId(audio_port_handle_t* deviceId) = 0;
+        virtual status_t    getRoutedDeviceIds(DeviceIdVector& deviceIds) = 0;
         virtual status_t    enableAudioDeviceCallback(bool enabled) = 0;
     };
 
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 92bf35d..b466f18 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -47,8 +47,8 @@
     ],
 
     cflags: [
-        "-Werror",
         "-Wall",
+        "-Werror",
     ],
 
     header_libs: [
@@ -57,6 +57,8 @@
     ],
 
     shared_libs: [
+        "android.hardware.cas.native@1.0",
+        "android.hardware.drm@1.0",
         "libaudioutils",
         "libgui",
         "libhidlallocatorutils",
@@ -66,15 +68,13 @@
         "libstagefright_foundation",
         "libui",
         "libutils",
-        "android.hardware.cas.native@1.0",
-        "android.hardware.drm@1.0",
     ],
 
     sanitize: {
         cfi: true,
         misc_undefined: [
-            "unsigned-integer-overflow",
             "signed-integer-overflow",
+            "unsigned-integer-overflow",
         ],
     },
 }
@@ -88,9 +88,9 @@
     min_sdk_version: "29",
 
     srcs: [
-        "Utils.cpp",
-        "MediaSource.cpp",
         "HevcUtils.cpp",
+        "MediaSource.cpp",
+        "Utils.cpp",
     ],
 
     shared_libs: [
@@ -114,17 +114,17 @@
     ],
 
     cflags: [
-        "-Wno-multichar",
+        "-Wall",
         "-Werror",
         "-Wno-error=deprecated-declarations",
-        "-Wall",
+        "-Wno-multichar",
     ],
 
     sanitize: {
         cfi: true,
         misc_undefined: [
-            "unsigned-integer-overflow",
             "signed-integer-overflow",
+            "unsigned-integer-overflow",
         ],
     },
 
@@ -150,12 +150,11 @@
     ],
 
     shared_libs: [
-        "libbase",
-        "libcutils",
         "libEGL",
         "libGLESv1_CM",
         "libGLESv2",
-        "libvulkan",
+        "libbase",
+        "libcutils",
         "libgui",
         "liblog",
         "libprocessgroup",
@@ -163,6 +162,7 @@
         "libsync",
         "libui",
         "libutils",
+        "libvulkan",
     ],
 
     static_libs: [
@@ -174,18 +174,18 @@
     ],
 
     cflags: [
-        "-Wno-multichar",
+        "-Wall",
         "-Werror",
         "-Wno-error=deprecated-declarations",
-        "-Wall",
+        "-Wno-multichar",
     ],
 
     sanitize: {
         // TODO: re-enabled cfi for this lib after b/139945549 fixed
         cfi: false,
         misc_undefined: [
-            "unsigned-integer-overflow",
             "signed-integer-overflow",
+            "unsigned-integer-overflow",
         ],
     },
 }
@@ -209,16 +209,16 @@
     ],
 
     cflags: [
-        "-Wno-multichar",
-        "-Werror",
         "-Wall",
+        "-Werror",
+        "-Wno-multichar",
     ],
 
     sanitize: {
         cfi: true,
         misc_undefined: [
-            "unsigned-integer-overflow",
             "signed-integer-overflow",
+            "unsigned-integer-overflow",
         ],
     },
 }
@@ -255,13 +255,13 @@
         "MediaCodecSource.cpp",
         "MediaExtractor.cpp",
         "MediaExtractorFactory.cpp",
+        "MediaMuxer.cpp",
         "MediaSource.cpp",
         "MediaSync.cpp",
         "MediaTrack.cpp",
-        "MediaMuxer.cpp",
         "NuMediaExtractor.cpp",
-        "OggWriter.cpp",
         "OMXClient.cpp",
+        "OggWriter.cpp",
         "OmxInfoBuilder.cpp",
         "RemoteMediaExtractor.cpp",
         "RemoteMediaSource.cpp",
@@ -270,13 +270,22 @@
         "SurfaceUtils.cpp",
         "ThrottledSource.cpp",
         "Utils.cpp",
-        "VideoFrameSchedulerBase.cpp",
         "VideoFrameScheduler.cpp",
+        "VideoFrameSchedulerBase.cpp",
         "VideoRenderQualityTracker.cpp",
     ],
 
     shared_libs: [
-        "libstagefright_framecapture_utils",
+        "aconfig_mediacodec_flags_c_lib",
+        "android.hardware.cas.native@1.0",
+        "android.hardware.drm@1.0",
+        "android.hardware.media.omx@1.0",
+        "android.hidl.allocator@1.0",
+        "framework-permission-aidl-cpp",
+        "libaconfig_storage_read_api_cc",
+        "libaudioclient",
+        "libaudioclient_aidl_conversion",
+        "libaudiofoundation",
         "libaudioutils",
         "libbase",
         "libbinder",
@@ -289,30 +298,24 @@
         "libdl",
         "libdl_android",
         "libgui",
+        "libhidlallocatorutils",
+        "libhidlbase",
+        "libhidlmemory",
         "liblog",
         "libmedia",
         "libmedia_codeclist",
+        "libmedia_helper",
         "libmedia_omx",
         "libmedia_omx_client",
-        "libaudioclient",
         "libmediametrics",
-        "libui",
-        "libutils",
-        "libmedia_helper",
         "libsfplugin_ccodec",
         "libsfplugin_ccodec_utils",
         "libstagefright_codecbase",
         "libstagefright_foundation",
+        "libstagefright_framecapture_utils",
         "libstagefright_omx_utils",
-        "libhidlallocatorutils",
-        "libhidlbase",
-        "libhidlmemory",
-        "android.hidl.allocator@1.0",
-        "android.hardware.cas.native@1.0",
-        "android.hardware.drm@1.0",
-        "android.hardware.media.omx@1.0",
-        "framework-permission-aidl-cpp",
-        "libaudioclient_aidl_conversion",
+        "libui",
+        "libutils",
         "packagemanager_aidl-cpp",
         "server_configurable_flags",
         "libaconfig_storage_read_api_cc",
@@ -323,32 +326,32 @@
     static_libs: [
         "android.media.codec-aconfig-cc",
         "com.android.media.flags.editing-aconfig-cc",
-        "libstagefright_esds",
-        "libstagefright_color_conversion",
-        "libyuv",
-        "libstagefright_webm",
-        "libstagefright_timedtext",
-        "libogg",
-        "libstagefright_id3",
         "framework-permission-aidl-cpp",
-        "libmediandk_format",
         "libmedia_ndkformatpriv",
+        "libmediandk_format",
+        "libogg",
+        "libstagefright_color_conversion",
+        "libstagefright_esds",
+        "libstagefright_id3",
+        "libstagefright_timedtext",
+        "libstagefright_webm",
+        "libyuv",
     ],
 
     header_libs: [
         "libmediadrm_headers",
+        "libmediaformatshaper_headers",
         "libnativeloader-headers",
         "libstagefright_xmlparser_headers",
         "media_ndk_headers",
-        "libmediaformatshaper_headers",
     ],
 
     export_shared_lib_headers: [
+        "android.hidl.allocator@1.0",
+        "framework-permission-aidl-cpp",
         "libgui",
         "libhidlmemory",
         "libmedia",
-        "android.hidl.allocator@1.0",
-        "framework-permission-aidl-cpp",
     ],
 
     export_include_dirs: [
@@ -356,10 +359,10 @@
     ],
 
     cflags: [
-        "-Wno-multichar",
+        "-Wall",
         "-Werror",
         "-Wno-error=deprecated-declarations",
-        "-Wall",
+        "-Wno-multichar",
     ],
 
     version_script: "exports.lds",
@@ -374,8 +377,8 @@
     sanitize: {
         cfi: true,
         misc_undefined: [
-            "unsigned-integer-overflow",
             "signed-integer-overflow",
+            "unsigned-integer-overflow",
         ],
     },
 }
diff --git a/media/libstagefright/AudioSource.cpp b/media/libstagefright/AudioSource.cpp
index 584dad6..f658d84 100644
--- a/media/libstagefright/AudioSource.cpp
+++ b/media/libstagefright/AudioSource.cpp
@@ -497,9 +497,9 @@
     return NO_INIT;
 }
 
-status_t AudioSource::getRoutedDeviceId(audio_port_handle_t* deviceId) {
+status_t AudioSource::getRoutedDeviceIds(DeviceIdVector& deviceIds) {
     if (mRecord != 0) {
-        *deviceId = mRecord->getRoutedDeviceId();
+        deviceIds = mRecord->getRoutedDeviceIds();
         return NO_ERROR;
     }
     return NO_INIT;
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 9abe037..efbd682 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -1192,6 +1192,19 @@
     return new PersistentSurface(bufferProducer, bufferSource);
 }
 
+//static
+status_t MediaCodec::getGloballyAvailableResources(std::vector<GlobalResourceInfo>& resources) {
+    resources.clear();
+    // Make sure codec availability feature is on.
+    if (!android::media::codec::codec_availability()) {
+        return ERROR_UNSUPPORTED;
+    }
+    // TODO: For now this is just an empty function.
+    // The actual implementation should use the component store to query the
+    // available resources from the HAL, and fill in resources with the same.
+    return ERROR_UNSUPPORTED;
+}
+
 // GenerateCodecId generates a 64bit Random ID for each codec that is created.
 // The Codec ID is generated as:
 //   - A process-unique random high 32bits
@@ -1298,7 +1311,12 @@
     CHECK_EQ(mState, UNINITIALIZED);
     mResourceManagerProxy->removeClient();
 
-    flushMediametrics();
+    flushMediametrics();  // this deletes mMetricsHandle
+    // don't keep the last metrics handle around
+    if (mLastMetricsHandle != 0) {
+        mediametrics_delete(mLastMetricsHandle);
+        mLastMetricsHandle = 0;
+    }
 
     // clean any saved metrics info we stored as part of configure()
     if (mConfigureMsg != nullptr) {
@@ -1309,7 +1327,7 @@
     }
 }
 
-// except for in constructor, called from the looper thread (and therefore mutexed)
+// except for in constructor, called from the looper thread (and therefore not mutexed)
 void MediaCodec::initMediametrics() {
     if (mMetricsHandle == 0) {
         mMetricsHandle = mediametrics_create(kCodecKeyName);
@@ -1335,6 +1353,7 @@
         mInputBufferCounter = 0;
     }
 
+    mSubsessionCount = 0;
     mLifetimeStartNs = systemTime(SYSTEM_TIME_MONOTONIC);
     resetMetricsFields();
 }
@@ -1346,6 +1365,17 @@
     mReliabilityContextMetrics = ReliabilityContextMetrics();
 }
 
+// always called from the looper thread (and therefore not mutexed)
+void MediaCodec::resetSubsessionMetricsFields() {
+    mBytesEncoded = 0;
+    mFramesEncoded = 0;
+    mFramesInput = 0;
+    mBytesInput = 0;
+    mEarliestEncodedPtsUs = INT64_MAX;
+    mLatestEncodedPtsUs = INT64_MIN;
+}
+
+// always called from the looper thread
 void MediaCodec::updateMediametrics() {
     if (mMetricsHandle == 0) {
         ALOGV("no metrics handle found");
@@ -1710,6 +1740,7 @@
     }
 }
 
+// except for in destructor, called from the looper thread
 void MediaCodec::flushMediametrics() {
     ALOGV("flushMediametrics");
 
@@ -1723,7 +1754,14 @@
         if (mMetricsToUpload && mediametrics_count(mMetricsHandle) > 0) {
             mediametrics_selfRecord(mMetricsHandle);
         }
-        mediametrics_delete(mMetricsHandle);
+        // keep previous metrics handle for subsequent getMetrics() calls.
+        // NOTE: There could be multiple error events, each flushing the metrics.
+        // We keep the last non-empty metrics handle so that the next getMetrics()
+        // call returns the latest metrics prior to the errors.
+        if (mLastMetricsHandle != 0) {
+            mediametrics_delete(mLastMetricsHandle);
+        }
+        mLastMetricsHandle = mMetricsHandle;
         mMetricsHandle = 0;
     }
     // we no longer have anything pending upload
@@ -1888,7 +1926,10 @@
         });
     }
 
-    if (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder)) {
+    // NOTE: these were erroneously restricted to video encoders, but we want them for all
+    // codecs.
+    if (android::media::codec::provider_->subsession_metrics()
+            || (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder))) {
         mBytesInput += buffer->size();
         mFramesInput++;
     }
@@ -1910,12 +1951,15 @@
     ++mInputBufferCounter;
 }
 
-// when we get a buffer back from the codec
+// when we get a buffer back from the codec, always called from the looper thread
 void MediaCodec::statsBufferReceived(int64_t presentationUs, const sp<MediaCodecBuffer> &buffer) {
 
     CHECK_NE(mState, UNINITIALIZED);
 
-    if (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder)) {
+    // NOTE: these were erroneously restricted to video encoders, but we want them for all
+    // codecs.
+    if (android::media::codec::provider_->subsession_metrics()
+            || (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder))) {
         int32_t flags = 0;
         (void) buffer->meta()->findInt32("flags", &flags);
 
@@ -2463,12 +2507,8 @@
             mediametrics_setInt32(nextMetricsHandle, kCodecCrypto, 1);
         }
     } else if (mFlags & kFlagIsSecure) {
-        if (android::media::codec::provider_->secure_codecs_require_crypto()) {
-            mErrorLog.log(LOG_TAG, "Crypto or descrambler must be given for secure codec");
-            return INVALID_OPERATION;
-        } else {
-            ALOGW("Crypto or descrambler should be given for secure codec");
-        }
+        // We'll catch this later when we process the buffers.
+        ALOGW("Crypto or descrambler should be given for secure codec");
     }
 
     if (mConfigureMsg != nullptr) {
@@ -2529,6 +2569,31 @@
     return err;
 }
 
+status_t MediaCodec::getRequiredResources(std::vector<InstanceResourceInfo>& resources) {
+    resources.clear();
+    // Make sure codec availability feature is on.
+    if (!android::media::codec::codec_availability()) {
+        return ERROR_UNSUPPORTED;
+    }
+    // Make sure that the codec was configured already.
+    if (mState != CONFIGURED && mState != STARTING && mState != STARTED &&
+        mState != FLUSHING && mState != FLUSHED) {
+        ALOGE("Codec wasn't configured yet!");
+        return INVALID_OPERATION;
+    }
+
+    if (!mRequiredResourceInfo.empty()) {
+        resources = mRequiredResourceInfo;
+        return OK;
+    }
+
+    // TODO: For now this is just an empty function.
+    // The actual implementation should use the component interface
+    // (for example, through mCodec->getRequiredDeviceResources) to query
+    // the required resources for this configuration, and fill in resources with the same.
+    return ERROR_UNSUPPORTED;
+}
+
 // Media Format Shaping support
 //
 
@@ -3617,6 +3682,10 @@
         updateMediametrics();
         results = mediametrics_dup(mMetricsHandle);
         updateEphemeralMediametrics(results);
+    } else if (mLastMetricsHandle != 0) {
+        // After error, mMetricsHandle is cleared, but we keep the last
+        // metrics around so that it can be queried by getMetrics().
+        results = mediametrics_dup(mLastMetricsHandle);
     } else {
         results = mediametrics_dup(mMetricsHandle);
     }
@@ -3886,6 +3955,7 @@
     return true;
 }
 
+// always called from the looper thread
 MediaCodec::DequeueOutputResult MediaCodec::handleDequeueOutputBuffer(
         const sp<AReplyToken> &replyID, bool newRequest) {
     if (!isExecuting()) {
@@ -3941,6 +4011,9 @@
 
         response->setInt32("flags", flags);
 
+        // NOTE: we must account the stats for an output buffer only after we
+        // already handled a potential output format change that could have
+        // started a new subsession.
         statsBufferReceived(timeUs, buffer);
 
         response->postReply(replyID);
@@ -5845,6 +5918,7 @@
     }
 }
 
+// always called from the looper thread
 void MediaCodec::handleOutputFormatChangeIfNeeded(const sp<MediaCodecBuffer> &buffer) {
     sp<AMessage> format = buffer->format();
     if (mOutputFormat == format) {
@@ -5928,6 +6002,24 @@
             }
         }
     }
+
+    // Update the width and the height.
+    int32_t left = 0, top = 0, right = 0, bottom = 0, width = 0, height = 0;
+    bool newSubsession = false;
+    if (android::media::codec::provider_->subsession_metrics()
+            && mOutputFormat->findInt32("width", &width)
+            && mOutputFormat->findInt32("height", &height)
+            && (width != mWidth || height != mHeight)) {
+        // consider a new subsession if the width or height changes.
+        newSubsession = true;
+    }
+    // TODO: properly detect new audio subsession
+
+    // Only consider a new subsession if we already have output (from a previous subsession).
+    if (newSubsession && mMetricsToUpload && mBytesEncoded > 0) {
+        handleStartingANewSubsession();
+    }
+
     if (mFlags & kFlagIsAsync) {
         onOutputFormatChanged();
     } else {
@@ -5935,8 +6027,6 @@
         postActivityNotificationIfPossible();
     }
 
-    // Update the width and the height.
-    int32_t left = 0, top = 0, right = 0, bottom = 0, width = 0, height = 0;
     bool resolutionChanged = false;
     if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
         mWidth = right - left + 1;
@@ -5963,6 +6053,35 @@
     updateHdrMetrics(false /* isConfig */);
 }
 
+// always called from the looper thread (and therefore not mutexed)
+void MediaCodec::handleStartingANewSubsession() {
+    // create a new metrics item for the subsession with the new resolution.
+    // TODO: properly account input counts for the previous and the new
+    // subsessions. We only find out that a new subsession started from the
+    // output format, but by that time we already accounted the input counts
+    // to the previous subsession.
+    flushMediametrics(); // this deletes mMetricsHandle, but stores it in mLastMetricsHandle
+
+    // hence mLastMetricsHandle has the metrics item for the previous subsession.
+    if ((mFlags & kFlagIsAsync) && mCallback != nullptr) {
+        sp<AMessage> msg = mCallback->dup();
+        msg->setInt32("callbackID", CB_METRICS_FLUSHED);
+        std::unique_ptr<mediametrics::Item> flushedMetrics(
+                mediametrics::Item::convert(mediametrics_dup(mLastMetricsHandle)));
+        msg->setObject("metrics", new WrapperObject<std::unique_ptr<mediametrics::Item>>(
+                std::move(flushedMetrics)));
+        msg->post();
+    }
+
+    // reuse/continue old metrics item for the new subsession.
+    mMetricsHandle = mediametrics_dup(mLastMetricsHandle);
+    mMetricsToUpload = true;
+    // TODO: configured width/height for the new subsession should be the
+    // previous width/height.
+    mSubsessionCount++;
+    resetSubsessionMetricsFields();
+}
+
 void MediaCodec::extractCSD(const sp<AMessage> &format) {
     mCSD.clear();
 
@@ -6213,6 +6332,12 @@
     CryptoPlugin::SubSample ss;
     CryptoPlugin::Pattern pattern;
 
+    if (android::media::codec::provider_->secure_codecs_require_crypto()
+            && (mFlags & kFlagIsSecure) && !hasCryptoOrDescrambler()) {
+        mErrorLog.log(LOG_TAG, "Crypto or descrambler must be given for secure codec");
+        return INVALID_OPERATION;
+    }
+
     if (msg->findSize("size", &size)) {
         if (hasCryptoOrDescrambler()) {
             ss.mNumBytesOfClearData = size;
@@ -6957,6 +7082,18 @@
     }
 }
 
+void MediaCodec::onRequiredResourcesChanged(
+        const std::vector<InstanceResourceInfo>& resourceInfo) {
+    mRequiredResourceInfo = resourceInfo;
+    // Make sure codec availability feature is on.
+    if (mCallback != nullptr && android::media::codec::codec_availability()) {
+        // Post the callback
+        sp<AMessage> msg = mCallback->dup();
+        msg->setInt32("callbackID", CB_REQUIRED_RESOURCES_CHANGED);
+        msg->post();
+    }
+}
+
 void MediaCodec::postActivityNotificationIfPossible() {
     if (mActivityNotify == NULL) {
         return;
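
The MediaCodec.cpp changes above post two new asynchronous notifications: CB_METRICS_FLUSHED (carrying the flushed subsession metrics at the "metrics" key) and CB_REQUIRED_RESOURCES_CHANGED. Below is a minimal, hedged sketch of how a client's callback handler might consume these messages; the helper name handleCodecCallbackMessage and the logging are illustrative and not part of this change.

    #define LOG_TAG "CodecCallbackSketch"
    #include <media/stagefright/MediaCodec.h>
    #include <media/stagefright/foundation/AMessage.h>
    #include <utils/Log.h>

    using namespace android;

    // Hypothetical helper: dispatch the callback AMessage that MediaCodec posts
    // when an async callback was installed via setCallback().
    static void handleCodecCallbackMessage(const sp<AMessage> &msg) {
        int32_t cbID = 0;
        if (!msg->findInt32("callbackID", &cbID)) {
            return;
        }
        switch (cbID) {
            case MediaCodec::CB_METRICS_FLUSHED: {
                // The flushed subsession metrics arrive as a wrapped
                // std::unique_ptr<mediametrics::Item> at the "metrics" key.
                sp<RefBase> metrics;
                if (msg->findObject("metrics", &metrics)) {
                    ALOGI("subsession metrics flushed");
                }
                break;
            }
            case MediaCodec::CB_REQUIRED_RESOURCES_CHANGED:
                // Resource requirements changed; a client could re-query them
                // via MediaCodec::getRequiredResources() here.
                ALOGI("required resources changed");
                break;
            default:
                break;
        }
    }
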
diff --git a/media/libstagefright/data/media_codecs_google_c2_video.xml b/media/libstagefright/data/media_codecs_google_c2_video.xml
index fc0a5e9..72a2551 100644
--- a/media/libstagefright/data/media_codecs_google_c2_video.xml
+++ b/media/libstagefright/data/media_codecs_google_c2_video.xml
@@ -95,7 +95,7 @@
             <Feature name="adaptive-playback" />
             <Attribute name="software-codec" />
         </MediaCodec>
-        <MediaCodec name="c2.android.apv.decoder" type="video/apv">
+        <MediaCodec name="c2.android.apv.decoder" type="video/apv" enabled="false" minsdk="36" variant="!slow-cpu">
             <Limit name="size" min="16x16" max="1920x1920"/>
             <Limit name="alignment" value="2x2"/>
             <Limit name="bitrate" range="1-240000000"/>
@@ -168,7 +168,7 @@
             <Limit name="bitrate" range="1-40000000" />
             <Feature name="bitrate-modes" value="VBR,CBR" />
         </MediaCodec>
-        <MediaCodec name="c2.android.apv.encoder" type="video/apv">
+        <MediaCodec name="c2.android.apv.encoder" type="video/apv" enabled="false" minsdk="36" variant="!slow-cpu">
             <Limit name="size" min="2x2" max="1920x1920" />
             <Limit name="alignment" value="2x2" />
             <Limit name="block-size" value="16x16" />
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index 61b7198..20c97dc 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -255,12 +255,13 @@
             <Feature name="adaptive-playback" />
             <Attribute name="software-codec" />
         </MediaCodec>
-        <MediaCodec name="c2.android.apv.decoder" type="video/apv">
+        <MediaCodec name="c2.android.apv.decoder" type="video/apv" enabled="false" minsdk="36" variant="!slow-cpu">
             <Limit name="size" min="16x16" max="1920x1920"/>
             <Limit name="alignment" value="2x2"/>
             <Limit name="bitrate" range="1-240000000"/>
             <Limit name="block-size" value="16x16" />
             <Limit name="block-count" range="1-32768" /> <!-- max 4096x2048 equivalent -->
+            <Feature name="adaptive-playback" />
             <Attribute name="software-codec"/>
         </MediaCodec>
     </Decoders>
@@ -417,7 +418,7 @@
             <Feature name="qp-bounds" />
             <Attribute name="software-codec" />
         </MediaCodec>
-        <MediaCodec name="c2.android.apv.encoder" type="video/apv" variant="!slow-cpu">
+        <MediaCodec name="c2.android.apv.encoder" type="video/apv" enabled="false" minsdk="36" variant="!slow-cpu">
             <Limit name="size" min="2x2" max="1920x1920" />
             <Limit name="alignment" value="2x2" />
             <Limit name="block-size" value="16x16" />
diff --git a/media/libstagefright/include/media/stagefright/AudioSource.h b/media/libstagefright/include/media/stagefright/AudioSource.h
index 65d5246..51f6ac4 100644
--- a/media/libstagefright/include/media/stagefright/AudioSource.h
+++ b/media/libstagefright/include/media/stagefright/AudioSource.h
@@ -78,7 +78,7 @@
     virtual void signalBufferReturned(MediaBufferBase *buffer);
 
     status_t setInputDevice(audio_port_handle_t deviceId);
-    status_t getRoutedDeviceId(audio_port_handle_t* deviceId);
+    status_t getRoutedDeviceIds(DeviceIdVector& deviceIds);
     status_t addAudioDeviceCallback(const sp<AudioSystem::AudioDeviceCallback>& callback);
     status_t removeAudioDeviceCallback(const sp<AudioSystem::AudioDeviceCallback>& callback);
 
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index 7169b1e..df1ebd7 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -123,6 +123,18 @@
         CB_RESOURCE_RECLAIMED = 5,
         CB_CRYPTO_ERROR = 6,
         CB_LARGE_FRAME_OUTPUT_AVAILABLE = 7,
+
+        /** Callback ID for when the metrics for this codec have been flushed
+         * due to the start of a new subsession. The associated AMessage will
+         * contain an sp<WrapperObject<std::unique_ptr<mediametrics::Item>>>
+         * object at the "metrics" key.
+         */
+        CB_METRICS_FLUSHED = 8,
+
+        /** Callback ID to notify a change in the resource requirements
+         * of the codec component.
+         */
+        CB_REQUIRED_RESOURCES_CHANGED = 9,
     };
 
     static const pid_t kNoPid = -1;
@@ -142,6 +154,73 @@
 
     static sp<PersistentSurface> CreatePersistentInputSurface();
 
+    /**
+     * Abstraction for the Global Codec resources.
+     * This encapsulates all the available codec resources on the device.
+     */
+    struct GlobalResourceInfo {
+        /**
+         * Name of the Resource type.
+         */
+        std::string mName;
+        /**
+         * Total count/capacity of resources of this type.
+         */
+        int mCapacity;
+        /**
+         * Available count of this resource type.
+         */
+        int mAvailable;
+
+        GlobalResourceInfo(const std::string& name, int capacity, int available) :
+                mName(name),
+                mCapacity(capacity),
+                mAvailable(available) {}
+
+        GlobalResourceInfo(const GlobalResourceInfo& info) :
+                mName(info.mName),
+                mCapacity(info.mCapacity),
+                mAvailable(info.mAvailable) {}
+    };
+
+    /**
+     * Abstraction for the resources associated with a codec instance.
+     * This encapsulates the required codec resources for a configured codec instance.
+     */
+    struct InstanceResourceInfo {
+        /**
+         * Name of the Resource type.
+         */
+        std::string mName;
+        /**
+         * Required resource count of this type.
+         */
+        int mStaticCount;
+        /**
+         * Per frame resource requirement of this resource type.
+         */
+        int mPerFrameCount;
+
+        InstanceResourceInfo(const std::string& name, int staticCount, int perFrameCount) :
+                mName(name),
+                mStaticCount(staticCount),
+                mPerFrameCount(perFrameCount) {}
+
+        InstanceResourceInfo(const InstanceResourceInfo& info) :
+                mName(info.mName),
+                mStaticCount(info.mStaticCount),
+                mPerFrameCount(info.mPerFrameCount) {}
+    };
+
+    /**
+     * Get a list of Globally available device codec resources.
+     *
+     * It will return INVALID_OPERATION if:
+     *  - the HAL does not implement the codec availability API
+     *  - the codec_availability feature flag isn't defined.
+     */
+    static status_t getGloballyAvailableResources(std::vector<GlobalResourceInfo>& resources);
+
     status_t configure(
             const sp<AMessage> &format,
             const sp<Surface> &nativeWindow,
@@ -155,6 +234,19 @@
             const sp<IDescrambler> &descrambler,
             uint32_t flags);
 
+    /**
+     * Get a list of required codec resources.
+     *
+     * This may only be called after configuring the codec.
+     *
+     * Calling this before the codec has been configured will result in
+     * an INVALID_OPERATION error code.
+     * It will also return INVALID_OPERATION if:
+     *  - the HAL does not implement the codec availability API
+     *  - the codec_availability feature flag isn't defined.
+     */
+    status_t getRequiredResources(std::vector<InstanceResourceInfo>& resources);
+
     status_t releaseCrypto();
 
     status_t setCallback(const sp<AMessage> &callback);
@@ -484,12 +576,21 @@
 
     Mutex mMetricsLock;
     mediametrics_handle_t mMetricsHandle = 0;
+    mediametrics_handle_t mLastMetricsHandle = 0; // only accessed from the looper or destructor
     bool mMetricsToUpload = false;
     nsecs_t mLifetimeStartNs = 0;
     void initMediametrics();
     void updateMediametrics();
     void flushMediametrics();
     void resetMetricsFields();
+
+    // Reset the metrics fields for a new subsession.
+    void resetSubsessionMetricsFields();
+
+    // Start a new subsession (for metrics). This includes flushing the current
+    // metrics, notifying the client and resetting the session fields.
+    void handleStartingANewSubsession();
+
     void updateEphemeralMediametrics(mediametrics_handle_t item);
     void updateLowLatency(const sp<AMessage> &msg);
     void updateCodecImportance(const sp<AMessage>& msg);
@@ -551,6 +652,7 @@
         int32_t setOutputSurfaceCount;
         int32_t resolutionChangeCount;
     } mReliabilityContextMetrics;
+    int32_t mSubsessionCount;
 
     // initial create parameters
     AString mInitName;
@@ -671,6 +773,7 @@
     void onCryptoError(const sp<AMessage> &msg);
     void onError(status_t err, int32_t actionCode, const char *detail = NULL);
     void onOutputFormatChanged();
+    void onRequiredResourcesChanged(const std::vector<InstanceResourceInfo>& resourceInfo);
 
     status_t onSetParameters(const sp<AMessage> &params);
 
@@ -770,6 +873,8 @@
     friend class MediaTestHelper;
 
     CodecErrorLog mErrorLog;
+    // Required resource info for this codec.
+    std::vector<InstanceResourceInfo> mRequiredResourceInfo;
 
     DISALLOW_EVIL_CONSTRUCTORS(MediaCodec);
 };
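
The header additions above expose the new resource-introspection entry points (getGloballyAvailableResources() and getRequiredResources()) together with the GlobalResourceInfo/InstanceResourceInfo structs. A minimal sketch of how they might be exercised follows; with the placeholder implementations in MediaCodec.cpp both calls currently return ERROR_UNSUPPORTED unless the codec_availability flag and HAL support are in place, and the helper name logCodecResources is hypothetical.

    #define LOG_TAG "CodecResourceSketch"
    #include <vector>
    #include <media/stagefright/MediaCodec.h>
    #include <utils/Log.h>

    using namespace android;

    // Hypothetical helper: dump the globally available codec resources and the
    // resources required by an already-configured codec instance.
    static void logCodecResources(const sp<MediaCodec> &codec) {
        std::vector<MediaCodec::GlobalResourceInfo> global;
        if (MediaCodec::getGloballyAvailableResources(global) == OK) {
            for (const auto &res : global) {
                ALOGI("resource %s: %d of %d available",
                      res.mName.c_str(), res.mAvailable, res.mCapacity);
            }
        }
        std::vector<MediaCodec::InstanceResourceInfo> required;
        if (codec->getRequiredResources(required) == OK) {
            for (const auto &res : required) {
                ALOGI("requires %s: static %d, per-frame %d",
                      res.mName.c_str(), res.mStaticCount, res.mPerFrameCount);
            }
        }
    }
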
diff --git a/media/libstagefright/webm/Android.bp b/media/libstagefright/webm/Android.bp
index 723131d..c3bd36e 100644
--- a/media/libstagefright/webm/Android.bp
+++ b/media/libstagefright/webm/Android.bp
@@ -11,8 +11,8 @@
     name: "libstagefright_webm",
 
     cflags: [
-        "-Werror",
         "-Wall",
+        "-Werror",
     ],
 
     sanitize: {
@@ -38,11 +38,12 @@
     export_include_dirs: ["include"],
 
     shared_libs: [
+        "framework-permission-aidl-cpp",
+        "libaudiofoundation",
         "libdatasource",
+        "liblog",
         "libstagefright_foundation",
         "libutils",
-        "liblog",
-        "framework-permission-aidl-cpp",
     ],
 
     header_libs: [
@@ -51,7 +52,6 @@
     ],
 }
 
-
 cc_library_headers {
     name: "libstagefright_webm_headers",
     export_include_dirs: ["include"],
diff --git a/media/module/extractors/mpeg2/Android.bp b/media/module/extractors/mpeg2/Android.bp
index aa59a0c..63dbcda 100644
--- a/media/module/extractors/mpeg2/Android.bp
+++ b/media/module/extractors/mpeg2/Android.bp
@@ -44,7 +44,6 @@
 
     shared_libs: [
         "libbase",
-        "libcgrouprc#29",
     ],
 
     header_libs: [
diff --git a/media/module/libapexcodecs/Android.bp b/media/module/libapexcodecs/Android.bp
new file mode 100644
index 0000000..790b749
--- /dev/null
+++ b/media/module/libapexcodecs/Android.bp
@@ -0,0 +1,71 @@
+//
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+package {
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_defaults {
+    name: "libapexcodecs-defaults",
+    header_libs: [
+        "libbase_headers",
+    ],
+
+    srcs: ["ApexCodecs.cpp"],
+
+    shared_libs: [
+        "libbase",
+        "libnativewindow",
+    ],
+
+    export_include_dirs: ["include"],
+
+    export_shared_lib_headers: [
+        "libbase",
+        "libnativewindow",
+    ],
+
+}
+
+cc_library {
+    name: "libapexcodecs-testing",
+    defaults: ["libapexcodecs-defaults"],
+
+    visibility: [
+        ":__subpackages__",
+    ],
+}
+
+cc_library {
+    name: "libapexcodecs",
+    defaults: ["libapexcodecs-defaults"],
+
+    visibility: [
+        "//frameworks/av/apex:__subpackages__",
+        "//frameworks/av/media/codec2/hal/client",
+    ],
+
+    min_sdk_version: "apex_inherit",
+    version_script: "libapexcodecs.map.txt",
+    stubs: {
+        symbol_file: "libapexcodecs.map.txt",
+        versions: ["36"],
+    },
+
+    apex_available: [
+        "com.android.media.swcodec",
+    ],
+}
diff --git a/media/module/libapexcodecs/ApexCodecs.cpp b/media/module/libapexcodecs/ApexCodecs.cpp
new file mode 100644
index 0000000..7101677
--- /dev/null
+++ b/media/module/libapexcodecs/ApexCodecs.cpp
@@ -0,0 +1,148 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <new>
+
+#include <android-base/no_destructor.h>
+#include <apex/ApexCodecs.h>
+
+// TODO: remove when we have real implementations
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wunused-parameter"
+
+struct ApexCodec_ComponentStore {
+    ApexCodec_ComponentStore() = default;
+};
+
+ApexCodec_ComponentStore *ApexCodec_GetComponentStore() {
+    ::android::base::NoDestructor<ApexCodec_ComponentStore> store;
+    return store.get();
+}
+
+ApexCodec_ComponentTraits *ApexCodec_Traits_get(
+        ApexCodec_ComponentStore *store, size_t index) {
+    return nullptr;
+}
+
+ApexCodec_Status ApexCodec_Component_create(
+        ApexCodec_ComponentStore *store, const char *name, ApexCodec_Component **comp) {
+    *comp = nullptr;
+    return APEXCODEC_STATUS_NOT_FOUND;
+}
+
+void ApexCodec_Component_destroy(ApexCodec_Component *comp) {}
+
+ApexCodec_Status ApexCodec_Component_start(ApexCodec_Component *comp) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_Component_flush(ApexCodec_Component *comp) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_Component_reset(ApexCodec_Component *comp) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Configurable *ApexCodec_Component_getConfigurable(
+        ApexCodec_Component *comp) {
+    return nullptr;
+}
+
+ApexCodec_Status ApexCodec_SupportedValues_getTypeAndValues(
+        ApexCodec_SupportedValues *supportedValues,
+        ApexCodec_SupportedValuesType *type,
+        ApexCodec_SupportedValuesNumberType *numberType,
+        ApexCodec_Value **values,
+        uint32_t *numValues) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+void ApexCodec_SupportedValues_release(ApexCodec_SupportedValues *values) {}
+
+ApexCodec_Status ApexCodec_SettingResults_getResultAtIndex(
+        ApexCodec_SettingResults *results,
+        size_t index,
+        ApexCodec_SettingResultFailure *failure,
+        ApexCodec_ParamFieldValues *field,
+        ApexCodec_ParamFieldValues **conflicts,
+        size_t *numConflicts) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+void ApexCodec_SettingResults_release(ApexCodec_SettingResults *results) {}
+
+ApexCodec_Status ApexCodec_Component_process(
+        ApexCodec_Component *comp,
+        const ApexCodec_Buffer *input,
+        ApexCodec_Buffer *output,
+        size_t *consumed,
+        size_t *produced) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_Configurable_config(
+        ApexCodec_Configurable *comp,
+        ApexCodec_LinearBuffer *config,
+        ApexCodec_SettingResults **results) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_Configurable_query(
+        ApexCodec_Configurable *comp,
+        uint32_t indices[],
+        size_t numIndices,
+        ApexCodec_LinearBuffer *config,
+        size_t *written) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_ParamDescriptors_getIndices(
+        ApexCodec_ParamDescriptors *descriptors,
+        uint32_t **indices,
+        size_t *numIndices) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_ParamDescriptors_getDescriptor(
+        ApexCodec_ParamDescriptors *descriptors,
+        uint32_t index,
+        ApexCodec_ParamAttribute *attr,
+        const char **name,
+        uint32_t **dependencies,
+        size_t *numDependencies) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_ParamDescriptors_release(
+        ApexCodec_ParamDescriptors *descriptors) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_Configurable_querySupportedParams(
+        ApexCodec_Configurable *comp,
+        ApexCodec_ParamDescriptors **descriptors) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+ApexCodec_Status ApexCodec_Configurable_querySupportedValues(
+        ApexCodec_Configurable *comp,
+        ApexCodec_SupportedValuesQuery *queries,
+        size_t numQueries) {
+    return APEXCODEC_STATUS_OMITTED;
+}
+
+#pragma clang diagnostic pop
\ No newline at end of file
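
The stubbed functions above back the new ApexCodec C API declared in the header below. A minimal sketch of the intended client-side discovery flow is shown here; with the placeholder implementation the traits loop ends immediately and component creation reports APEXCODEC_STATUS_NOT_FOUND, and the codec name used is only an example.

    #include <cstddef>
    #include <cstdio>
    #include <apex/ApexCodecs.h>

    // Enumerate the components exposed by the APEX codec store and try to
    // instantiate one by name (the name below is illustrative).
    static void listApexCodecs() {
        ApexCodec_ComponentStore *store = ApexCodec_GetComponentStore();
        for (size_t i = 0; ; ++i) {
            ApexCodec_ComponentTraits *traits = ApexCodec_Traits_get(store, i);
            if (traits == nullptr) {
                break;  // out of bounds: end of the traits list
            }
            printf("%s (%s)\n", traits->name, traits->mediaType);
        }
        ApexCodec_Component *comp = nullptr;
        if (ApexCodec_Component_create(store, "c2.android.avc.decoder", &comp) ==
                APEXCODEC_STATUS_OK) {
            ApexCodec_Component_destroy(comp);
        }
    }
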
diff --git a/media/module/libapexcodecs/include/apex/ApexCodecs.h b/media/module/libapexcodecs/include/apex/ApexCodecs.h
new file mode 100644
index 0000000..b9f2e83
--- /dev/null
+++ b/media/module/libapexcodecs/include/apex/ApexCodecs.h
@@ -0,0 +1,768 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <sys/cdefs.h>
+#include <errno.h>
+#include <stdint.h>
+
+#include <android/api-level.h>
+#include <android/hardware_buffer.h>
+#include <android/versioning.h>
+
+__BEGIN_DECLS
+
+/**
+ * An API to access and operate codecs implemented within an APEX module,
+ * used only by the OS when using the codecs within a client process
+ * (instead of via a HAL).
+ *
+ * NOTE: Many of the constants and types mirror the ones in the Codec 2.0 API.
+ */
+
+/**
+ * Error code for ApexCodec APIs.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_Status : int32_t {
+    APEXCODEC_STATUS_OK        = 0,
+
+    /* bad input */
+    APEXCODEC_STATUS_BAD_VALUE = EINVAL,
+    APEXCODEC_STATUS_BAD_INDEX = ENXIO,
+    APEXCODEC_STATUS_CANNOT_DO = ENOTSUP,
+
+    /* bad sequencing of events */
+    APEXCODEC_STATUS_DUPLICATE = EEXIST,
+    APEXCODEC_STATUS_NOT_FOUND = ENOENT,
+    APEXCODEC_STATUS_BAD_STATE = EPERM,
+    APEXCODEC_STATUS_BLOCKING  = EWOULDBLOCK,
+    APEXCODEC_STATUS_CANCELED  = EINTR,
+
+    /* bad environment */
+    APEXCODEC_STATUS_NO_MEMORY = ENOMEM,
+    APEXCODEC_STATUS_REFUSED   = EACCES,
+
+    APEXCODEC_STATUS_TIMED_OUT = ETIMEDOUT,
+
+    /* bad versioning */
+    APEXCODEC_STATUS_OMITTED   = ENOSYS,
+
+    /* unknown fatal */
+    APEXCODEC_STATUS_CORRUPTED = EFAULT,
+    APEXCODEC_STATUS_NO_INIT   = ENODEV,
+} ApexCodec_Status;
+
+/**
+ * Enum that represents the kind of component
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_Kind : uint32_t {
+    /**
+     * The component is of a kind that is not listed below.
+     */
+    APEXCODEC_KIND_OTHER = 0x0,
+    /**
+     * The component is a decoder, which decodes coded bitstream
+     * into raw buffers.
+     *
+     * Introduced in API 36.
+     */
+    APEXCODEC_KIND_DECODER = 0x1,
+    /**
+     * The component is an encoder, which encodes raw buffers
+     * into coded bitstream.
+     *
+     * Introduced in API 36.
+     */
+    APEXCODEC_KIND_ENCODER = 0x2,
+} ApexCodec_Kind;
+
+typedef enum ApexCodec_Domain : uint32_t {
+    /**
+     * A component domain that is not listed below.
+     *
+     * Introduced in API 36.
+     */
+    APEXCODEC_DOMAIN_OTHER = 0x0,
+    /**
+     * A component domain that operates on video.
+     *
+     * Introduced in API 36.
+     */
+    APEXCODEC_DOMAIN_VIDEO = 0x1,
+    /**
+     * A component domain that operates on audio.
+     *
+     * Introduced in API 36.
+     */
+    APEXCODEC_DOMAIN_AUDIO = 0x2,
+    /**
+     * A component domain that operates on image.
+     *
+     * Introduced in API 36.
+     */
+    APEXCODEC_DOMAIN_IMAGE = 0x3,
+} ApexCodec_Domain;
+
+/**
+ * Handle for component traits such as name, media type, kind (decoder/encoder),
+ * domain (audio/video/image), etc.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_ComponentTraits {
+    /**
+     * The name of the component.
+     */
+    const char *name;
+    /**
+     * The supported media type of the component.
+     */
+    const char *mediaType;
+    /**
+     * The kind of the component.
+     */
+    ApexCodec_Kind kind;
+    /**
+     * The domain on which the component operates.
+     */
+    ApexCodec_Domain domain;
+} ApexCodec_ComponentTraits;
+
+/**
+ * An opaque struct that represents a component store.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_ComponentStore ApexCodec_ComponentStore;
+
+/**
+ * Get the component store object. This function never fails.
+ *
+ * \return component store object.
+ */
+ApexCodec_ComponentStore *ApexCodec_GetComponentStore()
+        __INTRODUCED_IN(36);
+
+/**
+ * Get the traits object of a component at the given index. ApexCodec_Traits_*
+ * functions are used to extract information from the traits object.
+ *
+ * Returns nullptr if index is out of bounds. The returned object is owned by
+ * ApexCodec_ComponentStore object and the client should not delete it.
+ *
+ * The client can iterate through the traits objects by calling this function
+ * with an index incrementing from 0 until it gets a nullptr.
+ *
+ * \param index index of the traits object to query
+ * \return traits object at the index, or nullptr if the index is out of bounds.
+ */
+ApexCodec_ComponentTraits *ApexCodec_Traits_get(
+        ApexCodec_ComponentStore *store, size_t index) __INTRODUCED_IN(36);
+
+/**
+ * An opaque struct that represents a codec.
+ */
+typedef struct ApexCodec_Component ApexCodec_Component;
+
+/**
+ * Create a component by the name.
+ *
+ * \param store the component store
+ * \param name the name of the component
+ * \param comp out-param to be filled with the component; must not be null
+ * \return  APEXCODEC_STATUS_OK         if successful
+ *          APEXCODEC_STATUS_NOT_FOUND  if the name is not found
+ */
+ApexCodec_Status ApexCodec_Component_create(
+        ApexCodec_ComponentStore *store, const char *name, ApexCodec_Component **comp)
+        __INTRODUCED_IN(36);
+
+/**
+ * Destroy the component by the handle. It is invalid to call component methods on the handle
+ * after calling this method. It is a no-op to call this method with |comp| == nullptr.
+ *
+ * \param comp the handle for the component
+ */
+void ApexCodec_Component_destroy(ApexCodec_Component *comp) __INTRODUCED_IN(36);
+
+/**
+ * Start the component. The component is ready to process buffers after this call.
+ *
+ * \param comp the handle for the component
+ */
+ApexCodec_Status ApexCodec_Component_start(
+        ApexCodec_Component *comp) __INTRODUCED_IN(36);
+
+/**
+ * Flush the component's internal states. This operation preserves the existing configurations.
+ *
+ * \param comp the handle for the component
+ */
+ApexCodec_Status ApexCodec_Component_flush(
+        ApexCodec_Component *comp) __INTRODUCED_IN(36);
+
+/**
+ * Resets the component to the initial state, right after creation. Note that the configuration
+ * will also revert to the initial state, so any required configurations should be set again
+ * before using the component.
+ *
+ * \param comp the handle for the component
+ */
+ApexCodec_Status ApexCodec_Component_reset(
+        ApexCodec_Component *comp) __INTRODUCED_IN(36);
+
+/**
+ * An opaque struct that represents a configurable part of the component.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_Configurable ApexCodec_Configurable;
+
+/**
+ * Return the configurable object for the given ApexCodec_Component.
+ * The returned object has the same lifecycle as |comp|.
+ *
+ * \param comp the handle for the component
+ * \return the configurable object handle
+ */
+ApexCodec_Configurable *ApexCodec_Component_getConfigurable(
+        ApexCodec_Component *comp) __INTRODUCED_IN(36);
+
+/**
+ * Enum that represents the flags for ApexCodec_Buffer.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_BufferFlags : uint32_t {
+    APEXCODEC_FLAG_DROP_FRAME    = (1 << 0),
+    APEXCODEC_FLAG_END_OF_STREAM = (1 << 1),
+    APEXCODEC_FLAG_DISCARD_FRAME = (1 << 2),
+    APEXCODEC_FLAG_INCOMPLETE    = (1 << 3),
+    APEXCODEC_FLAG_CORRECTED     = (1 << 4),
+    APEXCODEC_FLAG_CORRUPT       = (1 << 5),
+    APEXCODEC_FLAG_CODEC_CONFIG  = (1u << 31),
+} ApexCodec_BufferFlags;
+
+/**
+ * Enum that represents the type of buffer.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_BufferType : uint32_t {
+    APEXCODEC_BUFFER_TYPE_INVALID,
+    APEXCODEC_BUFFER_TYPE_LINEAR,
+    APEXCODEC_BUFFER_TYPE_LINEAR_CHUNKS,
+    APEXCODEC_BUFFER_TYPE_GRAPHIC,
+    APEXCODEC_BUFFER_TYPE_GRAPHIC_CHUNKS,
+} ApexCodec_BufferType;
+
+/**
+ * Struct that represents the memory for ApexCodec_Buffer.
+ *
+ * All memory regions have the simple 1D representation.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_LinearBuffer {
+    /**
+     * A pointer to the start of the buffer. This is not aligned.
+     */
+    uint8_t *data;
+    /**
+     * Size of the buffer. The memory region between |data| (inclusive) and
+     * |data + size| (exclusive) is assumed to be valid for read/write.
+     */
+    size_t size;
+} ApexCodec_LinearBuffer;
+
+/**
+ * Struct that represents a buffer for ApexCodec_Component.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_Buffer {
+    /**
+     * Flags associated with the buffer.
+     */
+    ApexCodec_BufferFlags flags;
+    /**
+     * For input buffers the client assigns a unique sequential index to each buffer. For output
+     * buffers it is the same as the associated input buffer's frame index.
+     */
+    uint64_t frameIndex;
+    /**
+     * A timestamp associated with the buffer in microseconds.
+     */
+    uint64_t timestampUs;
+    /**
+     * The type of the buffer. The component may reject a request to process a buffer with the wrong
+     * type. For example, a video decoder will reject an input buffer with type BUFFER_TYPE_GRAPHIC,
+     * or an output buffer with type BUFFER_TYPE_LINEAR.
+     */
+    ApexCodec_BufferType type;
+    /**
+     * The actual memory for the buffer.
+     */
+    union {
+        ApexCodec_LinearBuffer linear;
+        AHardwareBuffer *graphic;
+    } memory;
+    /**
+     * Config updates associated with the buffer. For input buffers these are sent to the component
+     * at the specific input frame. For output buffers these are config updates as a result of
+     * processing the buffer.
+     */
+    ApexCodec_LinearBuffer configUpdates;
+} ApexCodec_Buffer;
+
+/**
+ * Enum that represents the query type for the supported values.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_SupportedValuesQueryType : uint32_t {
+    /** Query all possible supported values regardless of current configuration */
+    APEXCODEC_SUPPORTED_VALUES_QUERY_POSSIBLE,
+    /** Query supported values at current configuration */
+    APEXCODEC_SUPPORTED_VALUES_QUERY_CURRENT,
+} ApexCodec_SupportedValuesQueryType;
+
+/**
+ * Enum that represents the type of the supported values.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_SupportedValuesType : uint32_t {
+    /** The supported values are empty. */
+    APEXCODEC_SUPPORTED_VALUES_EMPTY,
+    /**
+     * The supported values are represented by a range defined with {min, max, step, num, den}.
+     *
+     * If step is 0 and num and denom are both 1, the supported values are any value for which
+     * min <= value <= max.
+     *
+     * Otherwise, the range represents a geometric/arithmetic/multiply-accumulate series, where
+     * successive supported values can be derived from previous values (starting at min), using the
+     * following formula:
+     *  v[0] = min
+     *  v[i] = v[i-1] * num / denom + step for i >= 1, while min < v[i] <= max.
+     */
+    APEXCODEC_SUPPORTED_VALUES_RANGE,
+    /** The supported values are represented by a list of values. */
+    APEXCODEC_SUPPORTED_VALUES_VALUES,
+    /** The supported values are represented by a list of flags. */
+    APEXCODEC_SUPPORTED_VALUES_FLAGS,
+} ApexCodec_SupportedValuesType;
+
+/**
+ * Enum that represents numeric types of the supported values.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_SupportedValuesNumberType : uint32_t {
+    APEXCODEC_SUPPORTED_VALUES_TYPE_NONE   = 0,
+    APEXCODEC_SUPPORTED_VALUES_TYPE_INT32  = 1,
+    APEXCODEC_SUPPORTED_VALUES_TYPE_UINT32 = 2,
+    // RESERVED                            = 3,
+    APEXCODEC_SUPPORTED_VALUES_TYPE_INT64  = 4,
+    APEXCODEC_SUPPORTED_VALUES_TYPE_UINT64 = 5,
+    // RESERVED                            = 6,
+    APEXCODEC_SUPPORTED_VALUES_TYPE_FLOAT  = 7,
+} ApexCodec_SupportedValuesNumberType;
+
+/**
+ * Union of primitive types.
+ *
+ * Introduced in API 36.
+ */
+typedef union {
+    int32_t i32;
+    uint32_t u32;
+    int64_t i64;
+    uint64_t u64;
+    float f;
+} ApexCodec_Value;
+
+/**
+ * An opaque struct that represents the supported values of a parameter.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_SupportedValues ApexCodec_SupportedValues;
+
+/**
+ * Extract information from ApexCodec_SupportedValues object.
+ *
+ * \param [in] supportedValues the supported values object
+ * \param [out] type        pointer to be filled with the type of the supported values
+ * \param [out] numberType  pointer to be filled with the numeric type of the supported values
+ * \param [out] values      pointer to be filled with the array of the actual supported values.
+ *                          if type == APEXCODEC_SUPPORTED_VALUES_EMPTY: nullptr
+ *                          if type == APEXCODEC_SUPPORTED_VALUES_RANGE: {min, max, step, num, den}
+ *                          if type == APEXCODEC_SUPPORTED_VALUES_VALUES/_FLAGS:
+ *                              the array of supported values/flags
+ *                          the array is owned by the |supportedValues| object and the client
+ *                          should not free it.
+ * \param [out] numValues   pointer to be filled with the number of values.
+ *                          if type == APEXCODEC_SUPPORTED_VALUES_EMPTY: 0
+ *                          if type == APEXCODEC_SUPPORTED_VALUES_RANGE: 5
+ *                          if type == APEXCODEC_SUPPORTED_VALUES_VALUES/_FLAGS: varies
+ */
+ApexCodec_Status ApexCodec_SupportedValues_getTypeAndValues(
+        ApexCodec_SupportedValues *supportedValues,
+        ApexCodec_SupportedValuesType *type,
+        ApexCodec_SupportedValuesNumberType *numberType,
+        ApexCodec_Value **values,
+        uint32_t *numValues) __INTRODUCED_IN(36);
+
+/**
+ * Release the supported values object.
+ *
+ * \param values the supported values object
+ */
+void ApexCodec_SupportedValues_release(
+        ApexCodec_SupportedValues *values) __INTRODUCED_IN(36);
+
+/**
+ * Struct that represents the result of ApexCodec_Configurable_config.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_SettingResults ApexCodec_SettingResults;
+
+/**
+ * Enum that represents the failure code of ApexCodec_SettingResults.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_SettingResultFailure : uint32_t {
+    /** parameter type is not supported */
+    APEXCODEC_SETTING_RESULT_BAD_TYPE,
+    /** parameter is not supported on the specific port */
+    APEXCODEC_SETTING_RESULT_BAD_PORT,
+    /** parameter is not supported on the specific stream */
+    APEXCODEC_SETTING_RESULT_BAD_INDEX,
+    /** parameter is read-only */
+    APEXCODEC_SETTING_RESULT_READ_ONLY,
+    /** parameter mismatches input data */
+    APEXCODEC_SETTING_RESULT_MISMATCH,
+    /** strict parameter does not accept value for the field at all */
+    APEXCODEC_SETTING_RESULT_BAD_VALUE,
+    /** strict parameter field value conflicts with another setting */
+    APEXCODEC_SETTING_RESULT_CONFLICT,
+    /** strict parameter field is out of range due to other settings */
+    APEXCODEC_SETTING_RESULT_UNSUPPORTED,
+    /**
+     * field does not accept the requested parameter value at all. It has been corrected to
+     * the closest supported value. This failure mode is provided to give guidance as to what
+     * are the currently supported values for this field (which may be a subset of the at-all-
+     * potential values)
+     */
+    APEXCODEC_SETTING_RESULT_INFO_BAD_VALUE,
+    /**
+     * requested parameter value is in conflict with an/other setting(s)
+     * and has been corrected to the closest supported value. This failure
+     * mode is given to provide guidance as to what are the currently supported values as well
+     * as to optionally provide suggestion to the client as to how to enable the requested
+     * parameter value.
+     */
+    APEXCODEC_SETTING_RESULT_INFO_CONFLICT,
+} ApexCodec_SettingResultFailure;
+
+/**
+ * Struct that represents a field and its supported values of a parameter.
+ *
+ * The offset and size of the field are where the field is located in the blob representation of
+ * the parameter, as used in the ApexCodec_Configurable_query() and ApexCodec_Configurable_config(),
+ * for example.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_ParamFieldValues {
+    /** index of the param */
+    uint32_t index;
+    /** offset of the param field */
+    uint32_t offset;
+    /** size of the param field */
+    uint32_t size;
+    /** currently supported values of the param field */
+    ApexCodec_SupportedValues *values;
+} ApexCodec_ParamFieldValues;
+
+/**
+ * Extract the result of ApexCodec_Configurable_config.
+ * The client can iterate through the results with index starting from 0 until this function returns
+ * APEXCODEC_STATUS_NOT_FOUND.
+ *
+ * \param [in]  results the results object
+ * \param [in]  index   the index of the result to extract, starts from 0.
+ * \param [out] failure pointer to be filled with the failure code
+ * \param [out] field   pointer to be filled with the field that failed.
+ *                      |field->values| is owned by the |results| object and the client should not
+ *                      free it.
+ * \param [out] conflicts   pointer to be filled with the array of conflicts.
+ *                          nullptr if |numConflicts| is 0.
+ *                          the array and its contents are owned by the |results| object and the client
+ *                          should not free it.
+ * \param [out] numConflicts pointer to be filled with the number of conflicts
+ *                          may be 0 if there are no conflicts
+ * \return APEXCODEC_STATUS_OK         if successful
+ * \return APEXCODEC_STATUS_NOT_FOUND  if index is out of range
+ */
+ApexCodec_Status ApexCodec_SettingResults_getResultAtIndex(
+        ApexCodec_SettingResults *results,
+        size_t index,
+        ApexCodec_SettingResultFailure *failure,
+        ApexCodec_ParamFieldValues *field,
+        ApexCodec_ParamFieldValues **conflicts,
+        size_t *numConflicts) __INTRODUCED_IN(36);
+
+/**
+ * Release the setting result object.
+ *
+ * \param results the setting results object
+ */
+void ApexCodec_SettingResults_release(
+        ApexCodec_SettingResults *results) __INTRODUCED_IN(36);
+
+/**
+ * Process one frame from |input|, and produce one frame to |output| if possible.
+ * When successfully filled, |output->memory.linear| has the size adjusted to the produced
+ * output size, in case of linear buffers. |input->configUpdates| is applied with the input
+ * buffer; |output->configUpdates| contains config updates as a result of processing the frame.
+ *
+ * \param comp      the component to process the buffers
+ * \param input     the input buffer; when nullptr, the component should fill |output| if there are
+ *                  any pending output buffers.
+ * \param output    the output buffer, should not be nullptr.
+ * \param consumed  the number of consumed bytes from the input buffer
+ *                  set to 0 if no input buffer has been consumed, including when |input| is nullptr.
+ *                  for graphic buffers, any non-zero value means that the input buffer is consumed.
+ * \param produced  the number of bytes produced on the output buffer
+ *                  set to 0 if no output buffer has been produced.
+ *                  for graphic buffers, any non-zero value means that the output buffer is filled.
+ * \return APEXCODEC_STATUS_OK         if successful
+ * \return APEXCODEC_STATUS_NO_MEMORY  if the output buffer is not suitable to hold the output frame
+ *                                     the client should retry with a new output buffer;
+ *                                     configUpdates should have the information to update
+ *                                     the buffer size.
+ * \return APEXCODEC_STATUS_BAD_VALUE  if the parameters are bad
+ * \return APEXCODEC_STATUS_BAD_STATE  if the component is not in the right state
+ *                                     to process the frame
+ * \return APEXCODEC_STATUS_CORRUPTED  if unexpected error has occurred
+ */
+ApexCodec_Status ApexCodec_Component_process(
+        ApexCodec_Component *comp,
+        const ApexCodec_Buffer *input,
+        ApexCodec_Buffer *output,
+        size_t *consumed,
+        size_t *produced) __INTRODUCED_IN(36);
+
+/**
+ * Configure the component with the given config.
+ *
+ * Configurations are Codec 2.0 configs in binary blobs,
+ * concatenated if there are multiple configs.
+ *
+ * frameworks/av/media/codec2/core/include/C2Param.h contains more details about the configuration
+ * blob layout.
+ *
+ * The component may correct the configured parameters to the closest supported values, and could
+ * fail in case there are no values that the component can auto-correct to. |result| contains the
+ * information about the failures. See ApexCodec_SettingResultFailure and ApexCodec_SettingResults
+ * for more details.
+ *
+ * \param [in]    comp   the handle for the component
+ * \param [inout] config the config blob; after the call, the config blob is updated to the actual
+ *                       config by the component.
+ * \param [out]   results the results of the configuration.
+ *                       the client should call ApexCodec_SettingResults_getResultAtIndex()
+ *                       to extract the result. The results object is owned by the client and should
+ *                       be released with ApexCodec_SettingResults_release().
+ *                       |results| may be nullptr if empty.
+ * \return APEXCODEC_STATUS_OK         if successful
+ * \return APEXCODEC_STATUS_BAD_VALUE  if the config is invalid
+ * \return APEXCODEC_STATUS_BAD_STATE  if the component is not in the right state to be configured
+ * \return APEXCODEC_STATUS_CORRUPTED  if unexpected error has occurred
+ */
+ApexCodec_Status ApexCodec_Configurable_config(
+        ApexCodec_Configurable *comp,
+        ApexCodec_LinearBuffer *config,
+        ApexCodec_SettingResults **results) __INTRODUCED_IN(36);
+
+/**
+ * Query the component for the given indices.
+ *
+ * Parameter indices are defined in frameworks/av/media/codec2/core/include/C2Config.h.
+ *
+ * \param [in] comp         the handle for the component
+ * \param [in] indices      the array of indices to query
+ * \param [in] numIndices   the size of the indices array
+ * \param [inout] config    the output buffer for the config blob, allocated by the client.
+ *                          if the |config->size| was insufficient, it is set to the required size
+ *                          and |config->data| remains unchanged.
+ * \param [out] written     the number of bytes written to |config|.
+ * \return APEXCODEC_STATUS_OK          if successful
+ * \return APEXCODEC_STATUS_NO_MEMORY   if |config->size| is too small; |config->size| is updated to the
+ *                                      requested buffer size.
+ * \return APEXCODEC_STATUS_BAD_VALUE   if the parameters are bad. e.g. |indices|, |config|,
+ *                                      |config->data| or |written| is nullptr.
+ */
+ApexCodec_Status ApexCodec_Configurable_query(
+        ApexCodec_Configurable *comp,
+        uint32_t indices[],
+        size_t numIndices,
+        ApexCodec_LinearBuffer *config,
+        size_t *written) __INTRODUCED_IN(36);
+
+/**
+ * An opaque struct that represents a set of parameter descriptors.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_ParamDescriptors ApexCodec_ParamDescriptors;
+
+/**
+ * Enum that represents the attributes of a parameter.
+ *
+ * Introduced in API 36.
+ */
+typedef enum ApexCodec_ParamAttribute : uint32_t {
+    /** parameter is required to be specified */
+    APEXCODEC_PARAM_IS_REQUIRED   = 1u << 0,
+    /** parameter retains its value */
+    APEXCODEC_PARAM_IS_PERSISTENT = 1u << 1,
+    /** parameter is strict */
+    APEXCODEC_PARAM_IS_STRICT     = 1u << 2,
+    /** parameter is read-only */
+    APEXCODEC_PARAM_IS_READ_ONLY  = 1u << 3,
+    /** parameter shall not be visible to clients */
+    APEXCODEC_PARAM_IS_HIDDEN     = 1u << 4,
+    /** parameter shall not be used by framework (other than testing) */
+    APEXCODEC_PARAM_IS_INTERNAL   = 1u << 5,
+    /** parameter is publicly const (hence read-only) */
+    APEXCODEC_PARAM_IS_CONST      = 1u << 6 | APEXCODEC_PARAM_IS_READ_ONLY,
+} ApexCodec_ParamAttribute;
+
+/**
+ * Get the parameter indices of the param descriptors.
+ *
+ * \param [in] descriptors the param descriptors object
+ * \param [out] indices the pointer to be filled with the array of the indices;
+ *                      the array is owned by |descriptors| and should not be freed by the client.
+ * \param [out] numIndices the size of the indices array
+ * \return APEXCODEC_STATUS_OK          if successful
+ * \return APEXCODEC_STATUS_BAD_VALUE   if the parameters are bad, e.g. |descriptors|, |indices| or
+ *                                      |numIndices| is nullptr.
+ */
+ApexCodec_Status ApexCodec_ParamDescriptors_getIndices(
+        ApexCodec_ParamDescriptors *descriptors,
+        uint32_t **indices,
+        size_t *numIndices) __INTRODUCED_IN(36);
+
+/**
+ * Get the descriptor of the param.
+ *
+ * \param [in] descriptors the param descriptors object
+ * \param [in] index the index of the param
+ * \param [out] attr the attribute of the param
+ * \param [out] name    the pointer to be filled with the name of the param;
+ *                      the string is owned by |descriptors| and should not be freed by the client.
+ * \param [out] dependencies the pointer to be filled with an array of the parameter indices
+ *                        that the parameter with |index| depends on.
+ *                        May be nullptr if empty.
+ *                        The array is owned by |descriptors| and should not be freed by the client.
+ * \param [out] numDependencies the number of dependencies
+ * \return APEXCODEC_STATUS_OK          if successful
+ * \return APEXCODEC_STATUS_BAD_VALUE   if the parameters are bad, e.g. |descriptors|, |attr|, |name|,
+ *                                      |dependencies| or |numDependencies| is nullptr.
+ * \return APEXCODEC_STATUS_BAD_INDEX   if the index is not included in the param descriptors.
+ */
+ApexCodec_Status ApexCodec_ParamDescriptors_getDescriptor(
+        ApexCodec_ParamDescriptors *descriptors,
+        uint32_t index,
+        ApexCodec_ParamAttribute *attr,
+        const char **name,
+        uint32_t **dependencies,
+        size_t *numDependencies) __INTRODUCED_IN(36);
+
+/**
+ * Release the param descriptors object.
+ *
+ * \param descriptors the param descriptors object
+ */
+ApexCodec_Status ApexCodec_ParamDescriptors_release(
+        ApexCodec_ParamDescriptors *descriptors) __INTRODUCED_IN(36);
+
+/**
+ * Query the component for the supported parameters.
+ *
+ * \param comp the handle for the component
+ * \param descriptors   the pointer to be filled with the param descriptors object;
+ *                      the object should be released with ApexCodec_ParamDescriptors_release().
+ * \return APEXCODEC_STATUS_OK          if successful
+ * \return APEXCODEC_STATUS_BAD_VALUE   if the parameters are bad, e.g. |descriptors| is nullptr.
+ */
+ApexCodec_Status ApexCodec_Configurable_querySupportedParams(
+        ApexCodec_Configurable *comp,
+        ApexCodec_ParamDescriptors **descriptors) __INTRODUCED_IN(36);
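A hedged sketch (not part of this change) of how the descriptor APIs above compose: enumerate the indices, read each descriptor, and release only the descriptors object, since the returned arrays and strings remain owned by it. `dumpSupportedParams` and the printf formatting are illustrative.

```c
#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

#include <apex/ApexCodecs.h>

// Enumerate the supported parameters of a configurable and print each name
// together with a couple of its attribute bits.
void dumpSupportedParams(ApexCodec_Configurable *comp) {
    ApexCodec_ParamDescriptors *descriptors = NULL;
    if (ApexCodec_Configurable_querySupportedParams(comp, &descriptors) != APEXCODEC_STATUS_OK) {
        return;
    }
    uint32_t *indices = NULL;
    size_t numIndices = 0;
    if (ApexCodec_ParamDescriptors_getIndices(descriptors, &indices, &numIndices)
            == APEXCODEC_STATUS_OK) {
        for (size_t i = 0; i < numIndices; ++i) {
            ApexCodec_ParamAttribute attr;
            const char *name = NULL;
            uint32_t *dependencies = NULL;
            size_t numDependencies = 0;
            if (ApexCodec_ParamDescriptors_getDescriptor(
                        descriptors, indices[i], &attr, &name,
                        &dependencies, &numDependencies) == APEXCODEC_STATUS_OK) {
                printf("param 0x%08" PRIx32 ": %s%s%s\n", indices[i], name,
                       (attr & APEXCODEC_PARAM_IS_REQUIRED) ? " [required]" : "",
                       (attr & APEXCODEC_PARAM_IS_READ_ONLY) ? " [read-only]" : "");
            }
        }
    }
    // |indices|, |name| and |dependencies| are owned by |descriptors|;
    // releasing the descriptors object is the only cleanup needed.
    ApexCodec_ParamDescriptors_release(descriptors);
}
```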
+
+/**
+ * Struct that represents the query for the supported values of a parameter.
+ *
+ * The offset of the field can be found in the layout of the parameter blob.
+ *
+ * Introduced in API 36.
+ */
+typedef struct ApexCodec_SupportedValuesQuery {
+    /* in-params */
+
+    /** index of the param */
+    uint32_t index;
+    /** offset to the param field */
+    size_t offset;
+    /** query type */
+    ApexCodec_SupportedValuesQueryType type;
+
+    /* out-params */
+
+    /** status of the query */
+    ApexCodec_Status status;
+
+    /** supported values. must be released with ApexCodec_SupportedValues_release(). */
+    ApexCodec_SupportedValues *values;
+} ApexCodec_SupportedValuesQuery;
+
+/**
+ * Query the component for the supported values of the given indices.
+ *
+ * \param comp the handle for the component
+ * \param queries the array of queries
+ * \param numQueries the size of the queries array
+ * \return APEXCODEC_STATUS_OK          if successful
+ * \return APEXCODEC_STATUS_CORRUPTED   if an unexpected error has occurred
+ */
+ApexCodec_Status ApexCodec_Configurable_querySupportedValues(
+        ApexCodec_Configurable *comp,
+        ApexCodec_SupportedValuesQuery *queries,
+        size_t numQueries) __INTRODUCED_IN(36);
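A minimal sketch (not part of this change) of a single-field supported-values query. `paramIndex` and `fieldOffset` are caller-supplied placeholders; the exact arguments of ApexCodec_SupportedValues_getTypeAndValues() are not shown in this excerpt, so only the release step is spelled out.

```c
#include <stddef.h>
#include <stdint.h>

#include <apex/ApexCodecs.h>

// Query the currently supported values of one parameter field.
// |paramIndex| and |fieldOffset| come from the parameter blob layout.
void queryFieldValues(ApexCodec_Configurable *comp, uint32_t paramIndex, size_t fieldOffset) {
    ApexCodec_SupportedValuesQuery query = {
        .index  = paramIndex,
        .offset = fieldOffset,
        .type   = APEXCODEC_SUPPORTED_VALUES_QUERY_CURRENT,
        // out-params (status, values) are zero-initialized
    };
    ApexCodec_Status status =
            ApexCodec_Configurable_querySupportedValues(comp, &query, /*numQueries=*/1);
    if (status == APEXCODEC_STATUS_OK && query.status == APEXCODEC_STATUS_OK
            && query.values != NULL) {
        // Inspect query.values with ApexCodec_SupportedValues_getTypeAndValues(), then release.
        ApexCodec_SupportedValues_release(query.values);
    }
}
```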
+
+__END_DECLS
\ No newline at end of file
diff --git a/media/module/libapexcodecs/libapexcodecs.map.txt b/media/module/libapexcodecs/libapexcodecs.map.txt
new file mode 100644
index 0000000..672cf89
--- /dev/null
+++ b/media/module/libapexcodecs/libapexcodecs.map.txt
@@ -0,0 +1,26 @@
+LIBAPEXCODECS_36 { # introduced=36
+  global:
+    ApexCodec_Component_create; # apex
+    ApexCodec_Component_destroy; # apex
+    ApexCodec_Component_flush; # apex
+    ApexCodec_Component_getConfigurable; # apex
+    ApexCodec_Component_process; # apex
+    ApexCodec_Component_start; # apex
+    ApexCodec_Component_reset; # apex
+    ApexCodec_Configurable_config; # apex
+    ApexCodec_Configurable_query; # apex
+    ApexCodec_Configurable_querySupportedParams; # apex
+    ApexCodec_Configurable_querySupportedValues; # apex
+    ApexCodec_GetComponentStore; # apex
+    ApexCodec_ParamDescriptors_getDescriptor; # apex
+    ApexCodec_ParamDescriptors_getIndices; # apex
+    ApexCodec_ParamDescriptors_release; # apex
+    ApexCodec_SettingResults_getResultAtIndex; # apex
+    ApexCodec_SettingResults_release; # apex
+    ApexCodec_SupportedValues_getTypeAndValues; # apex
+    ApexCodec_SupportedValues_release; # apex
+    ApexCodec_Traits_get; # apex
+
+  local:
+    *;
+};
\ No newline at end of file
diff --git a/media/module/libapexcodecs/tests/Android.bp b/media/module/libapexcodecs/tests/Android.bp
new file mode 100644
index 0000000..162d12c
--- /dev/null
+++ b/media/module/libapexcodecs/tests/Android.bp
@@ -0,0 +1,30 @@
+//
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+package {
+    default_team: "trendy_team_android_media_codec_framework",
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_test {
+    name: "libapexcodecs_tests",
+    shared_libs: [
+        "libapexcodecs-testing",
+        "libcodec2",
+    ],
+
+    srcs: ["ApexCodecsTest.cpp"],
+}
diff --git a/media/module/libapexcodecs/tests/ApexCodecsTest.cpp b/media/module/libapexcodecs/tests/ApexCodecsTest.cpp
new file mode 100644
index 0000000..cd5ebba
--- /dev/null
+++ b/media/module/libapexcodecs/tests/ApexCodecsTest.cpp
@@ -0,0 +1,100 @@
+#include <C2.h>
+#include <C2Component.h>
+
+#include <apex/ApexCodecs.h>
+
+// static_asserts to check that the ApexCodec enum values match their Codec2 counterparts
+static_assert((uint32_t)APEXCODEC_STATUS_OK        == (uint32_t)C2_OK);
+static_assert((uint32_t)APEXCODEC_STATUS_BAD_VALUE == (uint32_t)C2_BAD_VALUE);
+static_assert((uint32_t)APEXCODEC_STATUS_BAD_INDEX == (uint32_t)C2_BAD_INDEX);
+static_assert((uint32_t)APEXCODEC_STATUS_CANNOT_DO == (uint32_t)C2_CANNOT_DO);
+static_assert((uint32_t)APEXCODEC_STATUS_DUPLICATE == (uint32_t)C2_DUPLICATE);
+static_assert((uint32_t)APEXCODEC_STATUS_NOT_FOUND == (uint32_t)C2_NOT_FOUND);
+static_assert((uint32_t)APEXCODEC_STATUS_BAD_STATE == (uint32_t)C2_BAD_STATE);
+static_assert((uint32_t)APEXCODEC_STATUS_BLOCKING  == (uint32_t)C2_BLOCKING);
+static_assert((uint32_t)APEXCODEC_STATUS_CANCELED  == (uint32_t)C2_CANCELED);
+static_assert((uint32_t)APEXCODEC_STATUS_NO_MEMORY == (uint32_t)C2_NO_MEMORY);
+static_assert((uint32_t)APEXCODEC_STATUS_REFUSED   == (uint32_t)C2_REFUSED);
+static_assert((uint32_t)APEXCODEC_STATUS_TIMED_OUT == (uint32_t)C2_TIMED_OUT);
+static_assert((uint32_t)APEXCODEC_STATUS_OMITTED   == (uint32_t)C2_OMITTED);
+static_assert((uint32_t)APEXCODEC_STATUS_CORRUPTED == (uint32_t)C2_CORRUPTED);
+static_assert((uint32_t)APEXCODEC_STATUS_NO_INIT   == (uint32_t)C2_NO_INIT);
+
+static_assert((uint32_t)APEXCODEC_KIND_OTHER   == (uint32_t)C2Component::KIND_OTHER);
+static_assert((uint32_t)APEXCODEC_KIND_DECODER == (uint32_t)C2Component::KIND_DECODER);
+static_assert((uint32_t)APEXCODEC_KIND_ENCODER == (uint32_t)C2Component::KIND_ENCODER);
+
+static_assert((uint32_t)APEXCODEC_DOMAIN_OTHER == (uint32_t)C2Component::DOMAIN_OTHER);
+static_assert((uint32_t)APEXCODEC_DOMAIN_VIDEO == (uint32_t)C2Component::DOMAIN_VIDEO);
+static_assert((uint32_t)APEXCODEC_DOMAIN_AUDIO == (uint32_t)C2Component::DOMAIN_AUDIO);
+static_assert((uint32_t)APEXCODEC_DOMAIN_IMAGE == (uint32_t)C2Component::DOMAIN_IMAGE);
+
+static_assert((uint32_t)APEXCODEC_FLAG_DROP_FRAME    == (uint32_t)C2FrameData::FLAG_DROP_FRAME);
+static_assert((uint32_t)APEXCODEC_FLAG_END_OF_STREAM == (uint32_t)C2FrameData::FLAG_END_OF_STREAM);
+static_assert((uint32_t)APEXCODEC_FLAG_DISCARD_FRAME == (uint32_t)C2FrameData::FLAG_DISCARD_FRAME);
+static_assert((uint32_t)APEXCODEC_FLAG_INCOMPLETE    == (uint32_t)C2FrameData::FLAG_INCOMPLETE);
+static_assert((uint32_t)APEXCODEC_FLAG_CORRECTED     == (uint32_t)C2FrameData::FLAG_CORRECTED);
+static_assert((uint32_t)APEXCODEC_FLAG_CORRUPT       == (uint32_t)C2FrameData::FLAG_CORRUPT);
+static_assert((uint32_t)APEXCODEC_FLAG_CODEC_CONFIG  == (uint32_t)C2FrameData::FLAG_CODEC_CONFIG);
+
+static_assert((uint32_t)APEXCODEC_BUFFER_TYPE_INVALID        ==
+              (uint32_t)C2BufferData::INVALID);
+static_assert((uint32_t)APEXCODEC_BUFFER_TYPE_LINEAR         ==
+              (uint32_t)C2BufferData::LINEAR);
+static_assert((uint32_t)APEXCODEC_BUFFER_TYPE_LINEAR_CHUNKS  ==
+              (uint32_t)C2BufferData::LINEAR_CHUNKS);
+static_assert((uint32_t)APEXCODEC_BUFFER_TYPE_GRAPHIC        ==
+              (uint32_t)C2BufferData::GRAPHIC);
+static_assert((uint32_t)APEXCODEC_BUFFER_TYPE_GRAPHIC_CHUNKS ==
+              (uint32_t)C2BufferData::GRAPHIC_CHUNKS);
+
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_QUERY_CURRENT  ==
+              (uint32_t)C2FieldSupportedValuesQuery::CURRENT);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_QUERY_POSSIBLE ==
+              (uint32_t)C2FieldSupportedValuesQuery::POSSIBLE);
+
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_EMPTY  ==
+              (uint32_t)C2FieldSupportedValues::EMPTY);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_RANGE  ==
+              (uint32_t)C2FieldSupportedValues::RANGE);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_VALUES ==
+              (uint32_t)C2FieldSupportedValues::VALUES);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_FLAGS  ==
+              (uint32_t)C2FieldSupportedValues::FLAGS);
+
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_TYPE_NONE   == (uint32_t)C2Value::NO_INIT);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_TYPE_INT32  == (uint32_t)C2Value::INT32);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_TYPE_UINT32 == (uint32_t)C2Value::UINT32);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_TYPE_INT64  == (uint32_t)C2Value::INT64);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_TYPE_UINT64 == (uint32_t)C2Value::UINT64);
+static_assert((uint32_t)APEXCODEC_SUPPORTED_VALUES_TYPE_FLOAT  == (uint32_t)C2Value::FLOAT);
+
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_BAD_TYPE       ==
+              (uint32_t)C2SettingResult::BAD_TYPE);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_BAD_PORT       ==
+              (uint32_t)C2SettingResult::BAD_PORT);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_BAD_INDEX      ==
+              (uint32_t)C2SettingResult::BAD_INDEX);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_READ_ONLY      ==
+              (uint32_t)C2SettingResult::READ_ONLY);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_MISMATCH       ==
+              (uint32_t)C2SettingResult::MISMATCH);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_BAD_VALUE      ==
+              (uint32_t)C2SettingResult::BAD_VALUE);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_CONFLICT       ==
+              (uint32_t)C2SettingResult::CONFLICT);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_UNSUPPORTED    ==
+              (uint32_t)C2SettingResult::UNSUPPORTED);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_INFO_BAD_VALUE ==
+              (uint32_t)C2SettingResult::INFO_BAD_VALUE);
+static_assert((uint32_t)APEXCODEC_SETTING_RESULT_INFO_CONFLICT  ==
+              (uint32_t)C2SettingResult::INFO_CONFLICT);
+
+static_assert((uint32_t)APEXCODEC_PARAM_IS_REQUIRED   == (uint32_t)C2ParamDescriptor::IS_REQUIRED);
+static_assert((uint32_t)APEXCODEC_PARAM_IS_PERSISTENT ==
+              (uint32_t)C2ParamDescriptor::IS_PERSISTENT);
+static_assert((uint32_t)APEXCODEC_PARAM_IS_STRICT     == (uint32_t)C2ParamDescriptor::IS_STRICT);
+static_assert((uint32_t)APEXCODEC_PARAM_IS_READ_ONLY  == (uint32_t)C2ParamDescriptor::IS_READ_ONLY);
+static_assert((uint32_t)APEXCODEC_PARAM_IS_HIDDEN     == (uint32_t)C2ParamDescriptor::IS_HIDDEN);
+static_assert((uint32_t)APEXCODEC_PARAM_IS_INTERNAL   == (uint32_t)C2ParamDescriptor::IS_INTERNAL);
+static_assert((uint32_t)APEXCODEC_PARAM_IS_CONST      == (uint32_t)C2ParamDescriptor::IS_CONST);
\ No newline at end of file
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 2322780..b2edaf7 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -1190,7 +1190,7 @@
                                             adjAttributionSource, &input.config, input.flags,
                                             &selectedDeviceIds, &portId, &secondaryOutputs,
                                             &isSpatialized, &isBitPerfect, &volume, &muted);
-    output.selectedDeviceId = getFirstDeviceId(selectedDeviceIds);
+    output.selectedDeviceIds = selectedDeviceIds;
 
     if (lStatus != NO_ERROR || output.outputId == AUDIO_IO_HANDLE_NONE) {
         ALOGE("createTrack() getOutputForAttr() return error %d or invalid output handle", lStatus);
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index e42b39e..200175b 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -10921,7 +10921,7 @@
 
     // For mmap streams, once the routing has changed, they will be disconnected. It should be
     // okay to notify the client earlier before the new patch creation.
-    if (mDeviceIds != deviceIds) {
+    if (!areDeviceIdsEqual(deviceIds, mDeviceIds)) {
         if (const sp<MmapStreamCallback> callback = mCallback.promote()) {
             // The aaudioservice handle the routing changed event asynchronously. In that case,
             // it is safe to hold the lock here.
@@ -10945,7 +10945,7 @@
         *handle = AUDIO_PATCH_HANDLE_NONE;
     }
 
-    if (numDevices == 0 || mDeviceIds != deviceIds) {
+    if (numDevices == 0 || (!areDeviceIdsEqual(deviceIds, mDeviceIds))) {
         if (isOutput()) {
             sendIoConfigEvent_l(AUDIO_OUTPUT_CONFIG_CHANGED);
             mOutDeviceTypeAddrs = sinkDeviceTypeAddrs;
@@ -11110,8 +11110,7 @@
             if (const sp<MmapStreamCallback> callback = mCallback.promote()) {
                 // The aaudioservice handle the routing changed event asynchronously. In that case,
                 // it is safe to hold the lock here.
-                DeviceIdVector emptyDeviceIdVector;
-                callback->onRoutingChanged(emptyDeviceIdVector);
+                callback->onRoutingChanged({});
             } else if (mNoCallbackWarningCount < kMaxNoCallbackWarnings) {
                 ALOGW("Could not notify MMAP stream tear down: no onRoutingChanged callback!");
                 mNoCallbackWarningCount++;
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 867561a..5fbe48c 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -1327,7 +1327,8 @@
 
         // states to reset position info for pcm tracks
         if (audio_is_linear_pcm(mFormat)
-                && (state == IDLE || state == STOPPED || state == FLUSHED)) {
+                && (state == IDLE || state == STOPPED || state == FLUSHED
+                        || state == PAUSED)) {
             mFrameMap.reset();
 
             if (!isFastTrack()) {
diff --git a/services/audiopolicy/OWNERS b/services/audiopolicy/OWNERS
index 50ceadf..4a65069 100644
--- a/services/audiopolicy/OWNERS
+++ b/services/audiopolicy/OWNERS
@@ -1,4 +1,5 @@
 # Bug component: 48436
+atneya@google.com
 elaurent@google.com
 jiabin@google.com
 jmtrivi@google.com
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index d22bf49..0ebf8d1 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -2350,7 +2350,7 @@
     sp<SwAudioOutputDescriptor> outputDesc = mOutputs.getOutputForClient(portId);
     if (outputDesc == 0) {
         ALOGW("startOutput() no output for client %d", portId);
-        return BAD_VALUE;
+        return DEAD_OBJECT;
     }
     sp<TrackClientDescriptor> client = outputDesc->getClient(portId);
 
@@ -2727,7 +2727,7 @@
     sp<SwAudioOutputDescriptor> outputDesc = mOutputs.getOutputForClient(portId);
     if (outputDesc == 0) {
         ALOGW("stopOutput() no output for client %d", portId);
-        return BAD_VALUE;
+        return DEAD_OBJECT;
     }
     sp<TrackClientDescriptor> client = outputDesc->getClient(portId);
 
@@ -3430,7 +3430,7 @@
     sp<AudioInputDescriptor> inputDesc = mInputs.getInputForClient(portId);
     if (inputDesc == 0) {
         ALOGW("%s no input for client %d", __FUNCTION__, portId);
-        return BAD_VALUE;
+        return DEAD_OBJECT;
     }
     audio_io_handle_t input = inputDesc->mIoHandle;
     sp<RecordClientDescriptor> client = inputDesc->getClient(portId);
@@ -3573,26 +3573,19 @@
     ALOGI("%s: deviceType 0x%X, enabled %d, streamToDriveAbs %d", __func__, deviceType, enabled,
           streamToDriveAbs);
 
-    audio_attributes_t attributesToDriveAbs = mEngine->getAttributesForStreamType(streamToDriveAbs);
-    if (enabled) {
-        if (attributesToDriveAbs == AUDIO_ATTRIBUTES_INITIALIZER) {
-            ALOGW("%s: no attributes for stream %s, bailing out", __func__,
-                  toString(streamToDriveAbs).c_str());
-            return BAD_VALUE;
-        }
-
-        mAbsoluteVolumeDrivingStreams[deviceType] = attributesToDriveAbs;
-    } else {
+    if (!enabled) {
         mAbsoluteVolumeDrivingStreams.erase(deviceType);
+        return NO_ERROR;
     }
 
-    // apply the stream volumes regarding the new absolute mode to all the outputs
-    for (size_t i = 0; i < mOutputs.size(); i++) {
-        sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
-        ALOGV("%s: apply stream volumes for portId %d", __func__, desc->getId());
-        applyStreamVolumes(desc, {deviceType}, static_cast<int>(desc->latency()) * 2);
+    audio_attributes_t attributesToDriveAbs = mEngine->getAttributesForStreamType(streamToDriveAbs);
+    if (attributesToDriveAbs == AUDIO_ATTRIBUTES_INITIALIZER) {
+        ALOGW("%s: no attributes for stream %s, bailing out", __func__,
+              toString(streamToDriveAbs).c_str());
+        return BAD_VALUE;
     }
 
+    mAbsoluteVolumeDrivingStreams[deviceType] = attributesToDriveAbs;
     return NO_ERROR;
 }
 
@@ -8350,7 +8343,9 @@
             VolumeSource vsToDriveAbs = toVolumeSource(groupToDriveAbs);
             if (vsToDriveAbs == volumeSource) {
                 // attenuation is applied by the abs volume controller
-                return (index != 0) ? volumeDbMax : volumeDb;
+                // do not mute LE broadcast to allow the secondary device to continue playing
+                return (index != 0 || volumeDevice == AUDIO_DEVICE_OUT_BLE_BROADCAST) ? volumeDbMax
+                                                                                      : volumeDb;
             } else {
                 IVolumeCurves &curvesAbs = getVolumeCurves(vsToDriveAbs);
                 int indexAbs = curvesAbs.getVolumeIndex({volumeDevice});
@@ -8765,6 +8760,7 @@
     case AUDIO_USAGE_SAFETY:
     case AUDIO_USAGE_VEHICLE_STATUS:
     case AUDIO_USAGE_ANNOUNCEMENT:
+    case AUDIO_USAGE_SPEAKER_CLEANUP:
         break;
     default:
         return false;
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index c37540c..f8f91fc 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -753,7 +753,7 @@
 TEST_P(AudioPolicyManagerTestMsd, GetDirectProfilesForAttributesWithMsd) {
     const audio_attributes_t attr = {
         AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-        AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+        AUDIO_SOURCE_INVALID, AUDIO_FLAG_NONE, ""};
 
     // count expected direct profiles for the default device
     int countDirectProfilesPrimary = 0;
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 3f2a617..b9c8206 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -101,8 +101,8 @@
         "android.frameworks.cameraservice.device@2.0",
         "android.frameworks.cameraservice.device@2.1",
         "android.frameworks.cameraservice.common-V1-ndk",
-        "android.frameworks.cameraservice.service-V2-ndk",
-        "android.frameworks.cameraservice.device-V2-ndk",
+        "android.frameworks.cameraservice.service-V3-ndk",
+        "android.frameworks.cameraservice.device-V3-ndk",
         "android.hardware.camera.common-V1-ndk",
         "android.hardware.camera.device-V3-ndk",
         "android.hardware.camera.metadata-V3-ndk",
@@ -179,6 +179,7 @@
         "device3/aidl/AidlCamera3Device.cpp",
         "device3/aidl/AidlCamera3OutputUtils.cpp",
         "device3/aidl/AidlCamera3OfflineSession.cpp",
+        "device3/aidl/AidlCamera3SharedDevice.cpp",
         "gui/RingBufferConsumer.cpp",
         "hidl/AidlCameraDeviceCallbacks.cpp",
         "hidl/AidlCameraServiceListener.cpp",
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index f59ad18..df94478 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -564,7 +564,7 @@
         updateStatus(StatusInternal::NOT_PRESENT, cameraId);
         mVirtualDeviceCameraIdMapper.removeCamera(cameraId);
 
-        sp<BasicClient> clientToDisconnectOnline, clientToDisconnectOffline;
+        std::vector<sp<BasicClient>> clientsToDisconnectOnline, clientsToDisconnectOffline;
         {
             // Don't do this in updateStatus to avoid deadlock over mServiceLock
             Mutex::Autolock lock(mServiceLock);
@@ -574,12 +574,12 @@
 
             // Remove online as well as offline client from the list of active clients,
             // if they are present
-            clientToDisconnectOnline = removeClientLocked(cameraId);
-            clientToDisconnectOffline = removeClientLocked(kOfflineDevice + cameraId);
+            clientsToDisconnectOnline = removeClientsLocked(cameraId);
+            clientsToDisconnectOffline = removeClientsLocked(kOfflineDevice + cameraId);
         }
 
-        disconnectClient(cameraId, clientToDisconnectOnline);
-        disconnectClient(kOfflineDevice + cameraId, clientToDisconnectOffline);
+        disconnectClients(cameraId, clientsToDisconnectOnline);
+        disconnectClients(kOfflineDevice + cameraId, clientsToDisconnectOffline);
 
         removeStates(cameraId);
     } else {
@@ -653,6 +653,13 @@
     }
 }
 
+void CameraService::disconnectClients(const std::string& id,
+        std::vector<sp<BasicClient>> clientsToDisconnect) {
+    for (auto& client : clientsToDisconnect) {
+        disconnectClient(id, client);
+    }
+}
+
 void CameraService::disconnectClient(const std::string& id, sp<BasicClient> clientToDisconnect) {
     if (clientToDisconnect.get() != nullptr) {
         ALOGI("%s: Client for camera ID %s evicted due to device status change from HAL",
@@ -1484,8 +1491,8 @@
         const std::string& cameraId, int api1CameraId, int facing, int sensorOrientation,
         int servicePid, std::pair<int, IPCTransport> deviceVersionAndTransport,
         apiLevel effectiveApiLevel, bool overrideForPerfClass, int rotationOverride,
-        bool forceSlowJpegMode, const std::string& originalCameraId,
-        /*out*/ sp<BasicClient>* client) {
+        bool forceSlowJpegMode, const std::string& originalCameraId, bool sharedMode,
+        /*out*/sp<BasicClient>* client) {
     // For HIDL devices
     if (deviceVersionAndTransport.second == IPCTransport::HIDL) {
         // Create CameraClient based on device version reported by the HAL.
@@ -1520,7 +1527,7 @@
                                     cameraService->mAttributionAndPermissionUtils,
                                     clientAttribution, callingPid, cameraId, api1CameraId, facing,
                                     sensorOrientation, servicePid, overrideForPerfClass,
-                                    rotationOverride, forceSlowJpegMode);
+                                    rotationOverride, forceSlowJpegMode, /*sharedMode*/false);
         ALOGI("%s: Camera1 API (legacy), rotationOverride %d, forceSlowJpegMode %d",
                 __FUNCTION__, rotationOverride, forceSlowJpegMode);
     } else { // Camera2 API route
@@ -1530,7 +1537,7 @@
                 cameraService, tmp, cameraService->mCameraServiceProxyWrapper,
                 cameraService->mAttributionAndPermissionUtils, clientAttribution, callingPid,
                 systemNativeClient, cameraId, facing, sensorOrientation, servicePid,
-                overrideForPerfClass, rotationOverride, originalCameraId);
+                overrideForPerfClass, rotationOverride, originalCameraId, sharedMode);
         ALOGI("%s: Camera2 API, rotationOverride %d", __FUNCTION__, rotationOverride);
     }
     return Status::ok();
@@ -1630,7 +1637,8 @@
                   /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
                   /*rotationOverride*/
                   hardware::ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT,
-                  /*forceSlowJpegMode*/ false, cameraIdStr, /*isNonSystemNdk*/ false, /*out*/ tmp))
+                  /*forceSlowJpegMode*/ false, cameraIdStr, /*isNonSystemNdk*/ false,
+                  /*sharedMode*/false, /*out*/ tmp))
                  .isOk()) {
         ALOGE("%s: Error initializing shim metadata: %s", __FUNCTION__, ret.toString8().c_str());
     }
@@ -1700,11 +1708,12 @@
 }
 
 Status CameraService::validateConnectLocked(const std::string& cameraId,
-                                            const AttributionSourceState& clientAttribution) const {
+                                            const AttributionSourceState& clientAttribution,
+                                            bool sharedMode) const {
 #ifdef __BRILLO__
     UNUSED(clientAttribution);
 #else
-    Status allowed = validateClientPermissionsLocked(cameraId, clientAttribution);
+    Status allowed = validateClientPermissionsLocked(cameraId, clientAttribution, sharedMode);
     if (!allowed.isOk()) {
         return allowed;
     }
@@ -1742,7 +1751,8 @@
 }
 
 Status CameraService::validateClientPermissionsLocked(
-        const std::string& cameraId, const AttributionSourceState& clientAttribution) const {
+        const std::string& cameraId, const AttributionSourceState& clientAttribution,
+        bool sharedMode) const {
     int callingPid = getCallingPid();
     int callingUid = getCallingUid();
 
@@ -1763,6 +1773,14 @@
                 "found while trying to query device kind", cameraId.c_str());
     }
 
+    if (flags::camera_multi_client() && sharedMode
+            && (deviceKind != SystemCameraKind::SYSTEM_ONLY_CAMERA)) {
+        ALOGE("%s: camera id %s is not system camera. Device sharing only supported for"
+                " system cameras.", __FUNCTION__, cameraId.c_str());
+        return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "camera device sharing not supported for "
+                "camera ID \"%s\"", cameraId.c_str());
+    }
+
     // Get the device id that owns this camera.
     auto [deviceId, _] = mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(cameraId);
     AttributionSourceState clientAttributionWithDeviceId = clientAttribution;
@@ -1886,6 +1904,28 @@
                 __FUNCTION__);
     }
 
+    if (flags::camera_multi_client()) {
+        sp<BasicClient> clientSp = clientDescriptor->getValue();
+        auto primaryClient = mActiveClientManager.getPrimaryClient(desc->getKey());
+        if (primaryClient == nullptr) {
+            // There is no primary client yet. Assign this first client as
+            // primary
+            clientSp->setPrimaryClient(true);
+        } else {
+            // There is already primary client. If the incoming client has a
+            // higher priority than the existing primary, then assign incoming
+            // client as primary and change the existing client to secondary.
+            // Otherwise incoming client is secondary client.
+            if (clientDescriptor->getPriority() <= primaryClient->getPriority()) {
+                clientSp->setPrimaryClient(true);
+                primaryClient->getValue()->setPrimaryClient(false);
+                primaryClient->getValue()->notifyClientSharedAccessPriorityChanged(false);
+            } else {
+                clientSp->setPrimaryClient(false);
+            }
+        }
+    }
+
     // And register a death notification for the client callback. Do
     // this last to avoid Binder policy where a nested Binder
     // transaction might be pre-empted to service the client death
@@ -1900,6 +1940,7 @@
 status_t CameraService::handleEvictionsLocked(const std::string& cameraId, int clientPid,
         apiLevel effectiveApiLevel, const sp<IBinder>& remoteCallback,
         const std::string& packageName, int oomScoreOffset, bool systemNativeClient,
+        bool sharedMode,
         /*out*/
         sp<BasicClient>* client,
         std::shared_ptr<resource_policy::ClientDescriptor<std::string, sp<BasicClient>>>* partial) {
@@ -1951,7 +1992,8 @@
             clientDescriptor = CameraClientManager::makeClientDescriptor(cameraId,
                     sp<BasicClient>{nullptr}, static_cast<int32_t>(state->getCost()),
                     state->getConflicting(), resource_policy::NATIVE_ADJ, clientPid,
-                    ActivityManager::PROCESS_STATE_BOUND_TOP, oomScoreOffset, systemNativeClient);
+                    ActivityManager::PROCESS_STATE_BOUND_TOP, oomScoreOffset, systemNativeClient,
+                    sharedMode);
         } else {
             // Get current active client PIDs
             std::vector<int> ownerPids(mActiveClientManager.getAllOwners());
@@ -1987,7 +2029,7 @@
             clientDescriptor = CameraClientManager::makeClientDescriptor(cameraId,
                     sp<BasicClient>{nullptr}, static_cast<int32_t>(state->getCost()),
                     state->getConflicting(), actualScore, clientPid, actualState,
-                    oomScoreOffset, systemNativeClient);
+                    oomScoreOffset, systemNativeClient, sharedMode);
         }
 
         resource_policy::ClientPriority clientPriority = clientDescriptor->getPriority();
@@ -2160,7 +2202,7 @@
             cameraClient, cameraIdStr, api1CameraId, resolvedClientAttribution,
             /*systemNativeClient*/ false, API_1,
             /*shimUpdateOnly*/ false, /*oomScoreOffset*/ 0, targetSdkVersion, rotationOverride,
-            forceSlowJpegMode, cameraIdStr, isNonSystemNdk, /*out*/ client);
+            forceSlowJpegMode, cameraIdStr, isNonSystemNdk, /*sharedMode*/false, /*out*/ client);
 
     if (!ret.isOk()) {
         logRejected(cameraIdStr, getCallingPid(),
@@ -2244,8 +2286,7 @@
         const std::string& unresolvedCameraId,
         int oomScoreOffset, int targetSdkVersion,
         int rotationOverride, const AttributionSourceState& clientAttribution, int32_t devicePolicy,
-        /*out*/
-        sp<hardware::camera2::ICameraDeviceUser>* device) {
+        bool sharedMode, /*out*/sp<hardware::camera2::ICameraDeviceUser>* device) {
     ATRACE_CALL();
     RunThreadWithRealtimePriority priorityBump;
     Status ret = Status::ok();
@@ -2275,7 +2316,7 @@
 
     bool isNonSystemNdk = clientPackageNameMaybe.size() == 0;
 
-    if (!flags::use_context_attribution_source()) {
+    if (!flags::data_delivery_permission_checks()) {
         resolvedClientAttribution.pid = USE_CALLING_PID;
     }
 
@@ -2325,7 +2366,8 @@
     ret = connectHelper<hardware::camera2::ICameraDeviceCallbacks, CameraDeviceClient>(
             cameraCb, cameraId, /*api1CameraId*/ -1, resolvedClientAttribution, systemNativeClient,
             API_2, /*shimUpdateOnly*/ false, oomScoreOffset, targetSdkVersion, rotationOverride,
-            /*forceSlowJpegMode*/ false, unresolvedCameraId, isNonSystemNdk, /*out*/ client);
+            /*forceSlowJpegMode*/ false, unresolvedCameraId, isNonSystemNdk, sharedMode,
+            /*out*/ client);
 
     if (!ret.isOk()) {
         logRejected(cameraId, clientPid, clientPackageName, toStdString(ret.toString8()));
@@ -2405,7 +2447,7 @@
                                     bool shimUpdateOnly, int oomScoreOffset, int targetSdkVersion,
                                     int rotationOverride, bool forceSlowJpegMode,
                                     const std::string& originalCameraId, bool isNonSystemNdk,
-                                    /*out*/ sp<CLIENT>& device) {
+                                    bool sharedMode, /*out*/ sp<CLIENT>& device) {
     binder::Status ret = binder::Status::ok();
 
     nsecs_t openTimeNs = systemTime();
@@ -2432,7 +2474,7 @@
         }
 
         // Enforce client permissions and do basic validity checks
-        if (!(ret = validateConnectLocked(cameraId, clientAttribution)).isOk()) {
+        if (!(ret = validateConnectLocked(cameraId, clientAttribution, sharedMode)).isOk()) {
             return ret;
         }
 
@@ -2453,7 +2495,7 @@
                      cameraId, clientAttribution.pid, effectiveApiLevel,
                      IInterface::asBinder(cameraCb),
                      clientAttribution.packageName.value_or(kUnknownPackageName), oomScoreOffset,
-                     systemNativeClient, /*out*/ &clientTmp,
+                     systemNativeClient, sharedMode, /*out*/ &clientTmp,
                      /*out*/ &partial)) != NO_ERROR) {
             switch (err) {
                 case -ENODEV:
@@ -2504,7 +2546,7 @@
                                systemNativeClient, cameraId, api1CameraId, facing, orientation,
                                getpid(), deviceVersionAndTransport, effectiveApiLevel,
                                overrideForPerfClass, rotationOverride, forceSlowJpegMode,
-                               originalCameraId,
+                               originalCameraId, sharedMode,
                                /*out*/ &tmp))
                      .isOk()) {
             return ret;
@@ -2753,7 +2795,7 @@
                 /*conflictingKeys*/ std::set<std::string>(), onlinePriority.getScore(),
                 onlineClientDesc->getOwnerId(), onlinePriority.getState(),
                 // native clients don't have offline processing support.
-                /*ommScoreOffset*/ 0, /*systemNativeClient*/false);
+                /*ommScoreOffset*/ 0, /*systemNativeClient*/false, /*sharedMode*/false);
         if (offlineClientDesc == nullptr) {
             ALOGE("%s: Offline client descriptor was NULL", __FUNCTION__);
             return BAD_VALUE;
@@ -3686,6 +3728,25 @@
     updateAudioRestrictionLocked();
 }
 
+bool CameraService::isOnlyClient(const BasicClient* client) {
+    Mutex::Autolock lock(mServiceLock);
+    bool ret = true;
+    if (!flags::camera_multi_client()) {
+        return ret;
+    }
+    if (client != nullptr) {
+        std::string camId = client->mCameraIdStr;
+        for (const auto& i : mActiveClientManager.getAll()) {
+            auto clientSp = i->getValue();
+            auto curCamId = i->getKey();
+            if (!curCamId.compare(camId) && clientSp.get() != client) {
+                return false;
+            }
+        }
+    }
+    return ret;
+}
+
 bool CameraService::evictClientIdByRemote(const wp<IBinder>& remote) {
     bool ret = false;
     {
@@ -3746,20 +3807,20 @@
     return state;
 }
 
-sp<CameraService::BasicClient> CameraService::removeClientLocked(const std::string& cameraId) {
+std::vector<sp<CameraService::BasicClient>> CameraService::removeClientsLocked(
+        const std::string& cameraId) {
     // Remove from active clients list
-    auto clientDescriptorPtr = mActiveClientManager.remove(cameraId);
-    if (clientDescriptorPtr == nullptr) {
-        ALOGW("%s: Could not evict client, no client for camera ID %s", __FUNCTION__,
-                cameraId.c_str());
-        return sp<BasicClient>{nullptr};
+    std::vector<sp<CameraService::BasicClient>> clients;
+    std::vector<CameraService::DescriptorPtr> clientDescriptors;
+    clientDescriptors =  mActiveClientManager.removeAll(cameraId);
+    for (const auto& clientDescriptorPtr : clientDescriptors) {
+        sp<BasicClient> client = clientDescriptorPtr->getValue();
+        if (client.get() != nullptr) {
+            cacheClientTagDumpIfNeeded(clientDescriptorPtr->getKey(), client.get());
+        }
+        clients.push_back(client);
     }
-
-    sp<BasicClient> client = clientDescriptorPtr->getValue();
-    if (client.get() != nullptr) {
-        cacheClientTagDumpIfNeeded(clientDescriptorPtr->getKey(), client.get());
-    }
-    return client;
+    return clients;
 }
 
 void CameraService::doUserSwitch(const std::vector<int32_t>& newUserIds) {
@@ -4043,11 +4104,11 @@
         std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
         const AttributionSourceState& clientAttribution, int callingPid, bool systemNativeClient,
         const std::string& cameraIdStr, int api1CameraId, int cameraFacing, int sensorOrientation,
-        int servicePid, int rotationOverride)
+        int servicePid, int rotationOverride, bool sharedMode)
     : CameraService::BasicClient(cameraService, IInterface::asBinder(cameraClient),
                                  attributionAndPermissionUtils, clientAttribution, callingPid,
                                  systemNativeClient, cameraIdStr, cameraFacing, sensorOrientation,
-                                 servicePid, rotationOverride),
+                                 servicePid, rotationOverride, sharedMode),
       mCameraId(api1CameraId) {
     LOG1("Client::Client E (pid %d, id %d)", callingPid, mCameraId);
 
@@ -4075,7 +4136,7 @@
         std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
         const AttributionSourceState& clientAttribution, int callingPid, bool nativeClient,
         const std::string& cameraIdStr, int cameraFacing, int sensorOrientation, int servicePid,
-        int rotationOverride)
+        int rotationOverride, bool sharedMode)
     : AttributionAndPermissionUtilsEncapsulator(attributionAndPermissionUtils),
       mDestructionStarted(false),
       mCameraIdStr(cameraIdStr),
@@ -4087,7 +4148,7 @@
       mServicePid(servicePid),
       mDisconnected(false),
       mUidIsTrusted(false),
-      mRotationOverride(rotationOverride),
+      mRotationOverride(rotationOverride), mSharedMode(sharedMode),
       mAudioRestriction(hardware::camera2::ICameraDeviceUser::AUDIO_RESTRICTION_NONE),
       mRemoteBinder(remoteCallback),
       mCameraOpen(false),
@@ -4111,7 +4172,7 @@
         mAppOpsManager = std::make_unique<AppOpsManager>();
     }
 
-    mUidIsTrusted = isTrustedCallingUid(getClientUid());
+    mUidIsTrusted = isTrustedCallingUid(mClientAttribution.uid);
 }
 
 CameraService::BasicClient::~BasicClient() {
@@ -4128,8 +4189,15 @@
 
     sCameraService->removeByClient(this);
     sCameraService->logDisconnected(mCameraIdStr, mCallingPid, getPackageName());
-    sCameraService->mCameraProviderManager->removeRef(CameraProviderManager::DeviceMode::CAMERA,
-            mCameraIdStr);
+    if (!flags::camera_multi_client() || !mSharedMode || (mSharedMode &&
+            sCameraService->isOnlyClient(this))) {
+        // Remove the HAL reference for the camera in either of the following scenarios :
+        // 1) Camera was opened in non-shared mode.
+        // 2) Camera was opened in shared mode and this is the last client using
+        //    the camera which is being disconnected
+        sCameraService->mCameraProviderManager->removeRef(CameraProviderManager::DeviceMode::CAMERA,
+                mCameraIdStr);
+    }
 
     sp<IBinder> remote = getRemote();
     if (remote != nullptr) {
@@ -4137,8 +4205,11 @@
     }
 
     notifyCameraClosing();
-    // Notify flashlight that a camera device is closed.
-    sCameraService->mFlashlight->deviceClosed(mCameraIdStr);
+    if (!flags::camera_multi_client() || !mSharedMode || (mSharedMode &&
+            sCameraService->isOnlyClient(this))) {
+        // Notify flashlight that a camera device is closed.
+        sCameraService->mFlashlight->deviceClosed(mCameraIdStr);
+    }
     ALOGI("%s: Disconnected client for camera %s for PID %d", __FUNCTION__, mCameraIdStr.c_str(),
             mCallingPid);
 
@@ -4281,7 +4352,7 @@
     ATRACE_CALL();
 
     // Don't start watching until we're streaming when using permissionChecker for data delivery
-    if (!flags::check_full_attribution_source_chain()) {
+    if (!flags::data_delivery_permission_checks()) {
         ALOGD("%s: Start camera ops, package name = %s, client UID = %d", __FUNCTION__,
               getPackageName().c_str(), getClientUid());
 
@@ -4303,7 +4374,7 @@
             }
         }
     } else {
-        // TODO: Remove when removing the check_full_attribution_source_chain flag
+        // TODO: Remove when removing the data_delivery_permission_checks flag
         ALOGD("%s: Bypassing checkOp for uid %d", __FUNCTION__, getClientUid());
     }
 
@@ -4315,7 +4386,8 @@
     sCameraService->mUidPolicy->registerMonitorUid(getClientUid(), /*openCamera*/ true);
 
     // Notify listeners of camera open/close status
-    sCameraService->updateOpenCloseStatus(mCameraIdStr, true /*open*/, getPackageName());
+    sCameraService->updateOpenCloseStatus(mCameraIdStr, true /*open*/, getPackageName(),
+            mSharedMode);
 
     return OK;
 }
@@ -4337,7 +4409,7 @@
           getPackageName().c_str(), getClientUid());
 
     if (mAppOpsManager != nullptr) {
-        if (flags::check_full_attribution_source_chain()) {
+        if (flags::data_delivery_permission_checks()) {
             ALOGD("%s: Start data delivery for uid %d", __FUNCTION__, getClientUid());
 
             const PermissionChecker::PermissionResult result =
@@ -4381,7 +4453,7 @@
 
     // noteAppOp is only used for when camera mute is not supported, in order
     // to trigger the sensor privacy "Unblock" dialog
-    if (flags::check_full_attribution_source_chain()) {
+    if (flags::data_delivery_permission_checks()) {
         // Ignore the result, since we're only triggering the dialog
         ALOGD("%s: Check data delivery permissions for uid %d", __FUNCTION__, getClientUid());
         hasPermissionsForCameraForDataDelivery(std::string(), mClientAttribution);
@@ -4413,7 +4485,7 @@
     }
 
     if (mAppOpsManager != nullptr) {
-        if (flags::check_full_attribution_source_chain()) {
+        if (flags::data_delivery_permission_checks()) {
             ALOGD("%s: finishDataDelivery for uid %d", __FUNCTION__, getClientUid());
             finishDataDelivery(mClientAttribution);
 
@@ -4453,12 +4525,15 @@
                 StatusInternal::ENUMERATING, StatusInternal::NOT_PRESENT};
 
         // Transition to PRESENT if the camera is not in either of the rejected states
-        sCameraService->updateStatus(StatusInternal::PRESENT,
-                mCameraIdStr, rejected);
+        if (!flags::camera_multi_client() || !mSharedMode || (mSharedMode
+                && sCameraService->isOnlyClient(this))) {
+            sCameraService->updateStatus(StatusInternal::PRESENT,
+                    mCameraIdStr, rejected);
+        }
     }
 
     // When using the data delivery permission checks, the open state does not involve AppOps
-    if (!flags::check_full_attribution_source_chain()) {
+    if (!flags::data_delivery_permission_checks()) {
         // Always stop watching, even if no camera op is active
         if (mOpsCallback != nullptr && mAppOpsManager != nullptr) {
             mAppOpsManager->stopWatchingMode(mOpsCallback);
@@ -4469,11 +4544,23 @@
     sCameraService->mUidPolicy->unregisterMonitorUid(getClientUid(), /*closeCamera*/ true);
 
     // Notify listeners of camera open/close status
-    sCameraService->updateOpenCloseStatus(mCameraIdStr, false /*open*/, getPackageName());
+    sCameraService->updateOpenCloseStatus(mCameraIdStr, false /*open*/, getPackageName(),
+            mSharedMode);
 
     return OK;
 }
 
+int32_t CameraService::getUidProcessState(int32_t uid) {
+    const auto& activityManager = getActivityManager();
+    int32_t procState = ActivityManager::PROCESS_STATE_NONEXISTENT;
+    if (activityManager != nullptr) {
+        procState = activityManager->getUidProcessState(uid, toString16(kServiceName));
+    } else {
+        ALOGE("%s: getActivityManager returned nullptr.", __FUNCTION__);
+    }
+    return procState;
+}
+
 void CameraService::BasicClient::opChanged(int32_t op, const String16&) {
     ATRACE_CALL();
     if (mAppOpsManager == nullptr) {
@@ -4486,7 +4573,7 @@
     }
 
     PermissionChecker::PermissionResult res;
-    if (flags::check_full_attribution_source_chain()) {
+    if (flags::data_delivery_permission_checks()) {
         int32_t appOpMode = AppOpsManager::MODE_ALLOWED;
         std::for_each(AttrSourceItr{mClientAttribution}, AttrSourceItr::end(),
                 [&](const auto& attr) {
@@ -4522,7 +4609,7 @@
         // Uid may be active, but not visible to the user (e.g. PROCESS_STATE_FOREGROUND_SERVICE).
         // If not visible, but still active, then we want to block instead of muting the camera.
         int32_t procState = ActivityManager::PROCESS_STATE_NONEXISTENT;
-        if (flags::check_full_attribution_source_chain()) {
+        if (flags::data_delivery_permission_checks()) {
             // Use the proc state of the last uid in the chain (ultimately receiving the data)
             // when determining whether to mute or block
             int32_t uid = -1;
@@ -4530,12 +4617,9 @@
                       [&](const auto& attr) {
                           uid = static_cast<uid_t>(attr.uid);
                       });
-            const auto& activityManager = getActivityManager();
-            if (activityManager != nullptr) {
-                procState = activityManager->getUidProcessState(uid, toString16(kServiceName));
-            } else {
-                ALOGD("%s: getActivityManager returned nullptr.", __FUNCTION__);
-            }
+            procState = getUidProcessState(uid);
+        } else if (flags::query_process_state()) {
+            procState = getUidProcessState(getClientUid());
         } else {
             procState = sCameraService->mUidPolicy->getProcState(getClientUid());
         }
@@ -4580,6 +4664,35 @@
     disconnect();
 }
 
+status_t CameraService::BasicClient::isPrimaryClient(bool* isPrimary) {
+    ATRACE_CALL();
+    if (!flags::camera_multi_client()) {
+        return INVALID_OPERATION;
+    }
+
+    if (!mSharedMode) {
+        ALOGW("%s: Invalid operation when camera is not opened in shared mode", __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+    *isPrimary = mIsPrimaryClient;
+    return OK;
+}
+
+status_t CameraService::BasicClient::setPrimaryClient(bool isPrimary) {
+    ATRACE_CALL();
+
+    if (!flags::camera_multi_client()) {
+        return INVALID_OPERATION;
+    }
+
+    if (!mSharedMode) {
+        ALOGW("%s:Invalid operation when camera is not opened in shared mode", __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+    mIsPrimaryClient = isPrimary;
+    return OK;
+}
+
 // ----------------------------------------------------------------------------
 
 void CameraService::Client::notifyError(int32_t errorCode,
@@ -5130,12 +5243,27 @@
 
 void CameraService::CameraState::setClientPackage(const std::string& clientPackage) {
     Mutex::Autolock lock(mStatusLock);
-    mClientPackage = clientPackage;
+    mClientPackages.clear();
+    mClientPackages.insert(clientPackage);
 }
 
 std::string CameraService::CameraState::getClientPackage() const {
     Mutex::Autolock lock(mStatusLock);
-    return mClientPackage;
+    if (!mClientPackages.empty()) {
+        std::set<std::string>::iterator it = mClientPackages.begin();
+        return *it;
+    }
+    return std::string();
+}
+
+void CameraService::CameraState::addClientPackage(const std::string& clientPackage) {
+    Mutex::Autolock lock(mStatusLock);
+    mClientPackages.insert(clientPackage);
+}
+
+void CameraService::CameraState::removeClientPackage(const std::string& clientPackage) {
+    Mutex::Autolock lock(mStatusLock);
+    mClientPackages.erase(clientPackage);
 }
 
 // ----------------------------------------------------------------------------
@@ -5183,6 +5311,39 @@
     return descriptor->getValue();
 }
 
+void CameraService::CameraClientManager::remove(const CameraService::DescriptorPtr& value) {
+    ClientManager::remove(value);
+    if (!flags::camera_multi_client()) {
+        return;
+    }
+    auto clientToRemove = value->getValue();
+    if ((clientToRemove.get() != nullptr) && clientToRemove->mSharedMode) {
+      bool primaryClient = false;
+      status_t ret = clientToRemove->isPrimaryClient(&primaryClient);
+      if ((ret == OK) && primaryClient) {
+            // Primary client is being removed. Find the next higher priority
+            // client to become primary client.
+            auto clientDescriptor = get(value->getKey());
+            if (clientDescriptor == nullptr) {
+                ALOGV("CameraService::CameraClientManager::no other clients are using same camera");
+                return;
+            }
+            resource_policy::ClientPriority highestPriority = clientDescriptor->getPriority();
+            sp<BasicClient> highestPriorityClient = clientDescriptor->getValue();
+            if (highestPriorityClient.get() != nullptr) {
+                for (auto& i : getAll()) {
+                    if ((i->getKey() == value->getKey()) && (i->getPriority() < highestPriority)) {
+                        highestPriority = i->getPriority();
+                        highestPriorityClient = i->getValue();
+                    }
+                }
+                highestPriorityClient->setPrimaryClient(true);
+                highestPriorityClient->notifyClientSharedAccessPriorityChanged(true);
+            }
+       }
+    }
+}
+
 std::string CameraService::CameraClientManager::toString() const {
     auto all = getAll();
     std::ostringstream ret;
@@ -5228,14 +5389,14 @@
 CameraService::DescriptorPtr CameraService::CameraClientManager::makeClientDescriptor(
         const std::string& key, const sp<BasicClient>& value, int32_t cost,
         const std::set<std::string>& conflictingKeys, int32_t score, int32_t ownerId,
-        int32_t state, int32_t oomScoreOffset, bool systemNativeClient) {
+        int32_t state, int32_t oomScoreOffset, bool systemNativeClient, bool sharedMode) {
 
     int32_t score_adj = systemNativeClient ? kSystemNativeClientScore : score;
     int32_t state_adj = systemNativeClient ? kSystemNativeClientState : state;
 
     return std::make_shared<resource_policy::ClientDescriptor<std::string, sp<BasicClient>>>(
             key, value, cost, conflictingKeys, score_adj, ownerId, state_adj,
-            systemNativeClient, oomScoreOffset);
+            systemNativeClient, oomScoreOffset, sharedMode);
 }
 
 CameraService::DescriptorPtr CameraService::CameraClientManager::makeClientDescriptor(
@@ -5244,7 +5405,7 @@
     return makeClientDescriptor(partial->getKey(), value, partial->getCost(),
             partial->getConflicting(), partial->getPriority().getScore(),
             partial->getOwnerId(), partial->getPriority().getState(), oomScoreOffset,
-            systemNativeClient);
+            systemNativeClient, partial->getSharedMode());
 }
 
 // ----------------------------------------------------------------------------
@@ -5776,7 +5937,7 @@
 }
 
 void CameraService::updateOpenCloseStatus(const std::string& cameraId, bool open,
-        const std::string& clientPackageName) {
+        const std::string& clientPackageName, bool sharedMode) {
     auto state = getCameraState(cameraId);
     if (state == nullptr) {
         ALOGW("%s: Could not update the status for %s, no such device exists", __FUNCTION__,
@@ -5784,9 +5945,17 @@
         return;
     }
     if (open) {
-        state->setClientPackage(clientPackageName);
+        if (flags::camera_multi_client() && sharedMode) {
+            state->addClientPackage(clientPackageName);
+        } else {
+            state->setClientPackage(clientPackageName);
+        }
     } else {
-        state->setClientPackage(std::string());
+        if (flags::camera_multi_client() && sharedMode) {
+            state->removeClientPackage(clientPackageName);
+        } else {
+            state->setClientPackage(std::string());
+        }
     }
 
     // Get the device id and app-visible camera id for the given HAL-visible camera id.
@@ -5805,7 +5974,10 @@
             ret = it->getListener()->onCameraOpened(mappedCameraId, clientPackageName,
                     deviceId);
         } else {
-            ret = it->getListener()->onCameraClosed(mappedCameraId, deviceId);
+            if (!flags::camera_multi_client() || !sharedMode || (sharedMode &&
+                    mActiveClientManager.getCameraClient(cameraId) == nullptr)) {
+                ret = it->getListener()->onCameraClosed(mappedCameraId, deviceId);
+            }
         }
 
         it->handleBinderStatus(ret,
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 4c93ae1..9c75ede 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -181,7 +181,7 @@
             const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
             const std::string& cameraId, int scoreOffset, int targetSdkVersion,
             int rotationOverride, const AttributionSourceState& clientAttribution,
-            int32_t devicePolicy,
+            int32_t devicePolicy, bool sharedMode,
             /*out*/
             sp<hardware::camera2::ICameraDeviceUser>* device);
 
@@ -384,6 +384,8 @@
         virtual void notifyError(int32_t errorCode,
                 const CaptureResultExtras& resultExtras) = 0;
 
+        virtual void notifyClientSharedAccessPriorityChanged(bool primaryClient) = 0;
+
         // Get the UID of the application client using this
         virtual uid_t getClientUid() const;
 
@@ -453,12 +455,16 @@
         virtual status_t injectSessionParams(
                 const hardware::camera2::impl::CameraMetadataNative& sessionParams) = 0;
 
+        status_t isPrimaryClient(/*out*/bool* isPrimary);
+
+        status_t setPrimaryClient(bool isPrimary);
+
     protected:
         BasicClient(const sp<CameraService>& cameraService, const sp<IBinder>& remoteCallback,
                     std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
                     const AttributionSourceState& clientAttribution, int callingPid,
                     bool nativeClient, const std::string& cameraIdStr, int cameraFacing,
-                    int sensorOrientation, int servicePid, int rotationOverride);
+                    int sensorOrientation, int servicePid, int rotationOverride, bool sharedMode);
 
         virtual ~BasicClient();
 
@@ -480,6 +486,8 @@
         bool                            mDisconnected;
         bool                            mUidIsTrusted;
         int                             mRotationOverride;
+        bool                            mSharedMode;
+        bool                            mIsPrimaryClient;
 
         mutable Mutex                   mAudioRestrictionLock;
         int32_t                         mAudioRestriction;
@@ -562,7 +570,8 @@
                std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
                const AttributionSourceState& clientAttribution, int callingPid,
                bool systemNativeClient, const std::string& cameraIdStr, int api1CameraId,
-               int cameraFacing, int sensorOrientation, int servicePid, int rotationOverride);
+               int cameraFacing, int sensorOrientation, int servicePid, int rotationOverride,
+               bool sharedMode);
         ~Client();
 
         // return our camera client
@@ -622,6 +631,8 @@
         CameraClientManager();
         virtual ~CameraClientManager();
 
+        virtual void remove(const DescriptorPtr& value) override;
+
         /**
          * Return a strong pointer to the active BasicClient for this camera ID, or an empty
          * if none exists.
@@ -639,7 +650,8 @@
         static DescriptorPtr makeClientDescriptor(const std::string& key,
                 const sp<BasicClient>& value, int32_t cost,
                 const std::set<std::string>& conflictingKeys, int32_t score,
-                int32_t ownerId, int32_t state, int oomScoreOffset, bool systemNativeClient);
+                int32_t ownerId, int32_t state, int oomScoreOffset, bool systemNativeClient,
+                bool sharedMode);
 
         /**
          * Make a ClientDescriptor object wrapping the given BasicClient strong pointer with
@@ -654,6 +666,15 @@
     int32_t updateAudioRestriction();
     int32_t updateAudioRestrictionLocked();
 
+    /**
+     * Returns true if the given client is the only client in the active clients list for a given
+     * camera.
+     *
+     * This method acquires mServiceLock.
+     */
+    bool isOnlyClient(const BasicClient* client);
+
+
 private:
 
     // TODO: b/263304156 update this to make use of a death callback for more
@@ -670,6 +691,8 @@
         return activityManager;
     }
 
+    static int32_t getUidProcessState(int32_t uid);
+
     /**
      * Typesafe version of device status, containing both the HAL-layer and the service interface-
      * layer values.
@@ -771,6 +794,10 @@
         void setClientPackage(const std::string& clientPackage);
         std::string getClientPackage() const;
 
+        void addClientPackage(const std::string& clientPackage);
+        void removeClientPackage(const std::string& clientPackage);
+        std::set<std::string> getClientPackages() const;
+
         /**
          * Return the unavailable physical ids for this device.
          *
@@ -783,7 +810,7 @@
         const int mCost;
         std::set<std::string> mConflicting;
         std::set<std::string> mUnavailablePhysicalIds;
-        std::string mClientPackage;
+        std::set<std::string> mClientPackages;
         mutable Mutex mStatusLock;
         CameraParameters mShimParams;
         const SystemCameraKind mSystemCameraKind;
@@ -913,9 +940,11 @@
 
     // Check if we can connect, before we acquire the service lock.
     binder::Status validateConnectLocked(const std::string& cameraId,
-                                         const AttributionSourceState& clientAttribution) const;
+                                         const AttributionSourceState& clientAttribution,
+                                         bool sharedMode) const;
     binder::Status validateClientPermissionsLocked(
-            const std::string& cameraId, const AttributionSourceState& clientAttribution) const;
+            const std::string& cameraId, const AttributionSourceState& clientAttribution,
+            bool sharedMode) const;
 
     void logConnectionAttempt(int clientPid, const std::string& clientPackageName,
         const std::string& cameraId, apiLevel effectiveApiLevel) const;
@@ -927,7 +956,7 @@
     // Only call with with mServiceLock held.
     status_t handleEvictionsLocked(const std::string& cameraId, int clientPid,
         apiLevel effectiveApiLevel, const sp<IBinder>& remoteCallback,
-        const std::string& packageName, int scoreOffset, bool systemNativeClient,
+        const std::string& packageName, int scoreOffset, bool systemNativeClient, bool sharedMode,
         /*out*/
         sp<BasicClient>* client,
         std::shared_ptr<resource_policy::ClientDescriptor<std::string, sp<BasicClient>>>* partial);
@@ -964,7 +993,7 @@
                                  bool shimUpdateOnly, int scoreOffset, int targetSdkVersion,
                                  int rotationOverride, bool forceSlowJpegMode,
                                  const std::string& originalCameraId, bool isNonSystemNdk,
-                                 /*out*/ sp<CLIENT>& device);
+                                 bool sharedMode, /*out*/ sp<CLIENT>& device);
 
     // Lock guarding camera service state
     Mutex               mServiceLock;
@@ -1072,12 +1101,12 @@
     std::string cameraIdIntToStrLocked(int cameraIdInt, int32_t deviceId, int32_t devicePolicy);
 
     /**
-     * Remove a single client corresponding to the given camera id from the list of active clients.
+     * Remove all the clients corresponding to the given camera id from the list of active clients.
-     * If none exists, return an empty strongpointer.
+     * If none exists, return an empty vector.
      *
      * This method must be called with mServiceLock held.
      */
-    sp<CameraService::BasicClient> removeClientLocked(const std::string& cameraId);
+    std::vector<sp<CameraService::BasicClient>> removeClientsLocked(const std::string& cameraId);
 
     /**
      * Handle a notification that the current device user has changed.
@@ -1285,7 +1314,7 @@
-     * This method acqiures mStatusListenerLock.
+     * This method acquires mStatusListenerLock.
      */
     void updateOpenCloseStatus(const std::string& cameraId, bool open,
-            const std::string& packageName);
+            const std::string& packageName, bool sharedMode);
 
     // flashlight control
     sp<CameraFlashlight> mFlashlight;
@@ -1460,7 +1489,7 @@
                                      std::pair<int, IPCTransport> deviceVersionAndIPCTransport,
                                      apiLevel effectiveApiLevel, bool overrideForPerfClass,
                                      int rotationOverride, bool forceSlowJpegMode,
-                                     const std::string& originalCameraId,
+                                     const std::string& originalCameraId, bool sharedMode,
                                      /*out*/ sp<BasicClient>* client);
 
     static std::string toString(std::set<userid_t> intSet);
@@ -1476,6 +1505,9 @@
 
     void disconnectClient(const std::string& id, sp<BasicClient> clientToDisconnect);
 
+    void disconnectClients(const std::string& id,
+            std::vector<sp<BasicClient>> clientsToDisconnect);
+
     // Regular online and offline devices must not be in conflict at camera service layer.
     // Use separate keys for offline devices.
     static const std::string kOfflineDevice;
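
Editor's note: CameraState now tracks a set of client packages instead of a single string, so several shared-mode clients can hold the same camera at once. A minimal sketch of the accessors declared above, assuming they reuse the existing mStatusLock to guard mClientPackages (the actual implementation may differ):

void CameraService::CameraState::addClientPackage(const std::string& clientPackage) {
    Mutex::Autolock lock(mStatusLock);   // assumed guard
    mClientPackages.insert(clientPackage);
}

void CameraService::CameraState::removeClientPackage(const std::string& clientPackage) {
    Mutex::Autolock lock(mStatusLock);
    mClientPackages.erase(clientPackage);
}

std::set<std::string> CameraService::CameraState::getClientPackages() const {
    Mutex::Autolock lock(mStatusLock);
    return mClientPackages;              // return a copy taken under the lock
}
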
diff --git a/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.cpp b/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.cpp
index e648a36..70647b4 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.cpp
+++ b/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.cpp
@@ -79,6 +79,15 @@
     return binder::Status::ok();
 }
 
+binder::Status AidlCameraDeviceCallbacks::onClientSharedAccessPriorityChanged(bool primaryClient) {
+    if (!flags::camera_multi_client()) {
+        return binder::Status::ok();
+    }
+    auto ret = mBase->onClientSharedAccessPriorityChanged(primaryClient);
+    LOG_STATUS_ERROR_IF_NOT_OK(ret, "onClientSharedAccessPriorityChanged")
+    return binder::Status::ok();
+}
+
 binder::Status AidlCameraDeviceCallbacks::onDeviceIdle() {
     auto ret = mBase->onDeviceIdle();
     LOG_STATUS_ERROR_IF_NOT_OK(ret, "onDeviceIdle")
diff --git a/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.h b/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.h
index 5cff5b3..07bf7d8 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.h
+++ b/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.h
@@ -75,6 +75,8 @@
 
     binder::Status onRequestQueueEmpty() override;
 
+    binder::Status onClientSharedAccessPriorityChanged(bool primaryClient) override;
+
     status_t linkToDeath(const sp<DeathRecipient>& recipient, void* cookie,
                          uint32_t flags) override;
     status_t unlinkToDeath(const wp<DeathRecipient>& recipient, void* cookie, uint32_t flags,
diff --git a/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp
index 9e6a925..fc987b2 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp
+++ b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp
@@ -192,6 +192,16 @@
     return fromUStatus(ret);
 }
 
+ndk::ScopedAStatus AidlCameraDeviceUser::isPrimaryClient(bool* _aidl_return) {
+    bool isPrimary;
+    UStatus ret = mDeviceRemote->isPrimaryClient(&isPrimary);
+    if (!ret.isOk()) {
+        ALOGE("%s: Failed to get isPrimaryClient: %s", __FUNCTION__, ret.toString8().c_str());
+    }
+    *_aidl_return = isPrimary;
+    return fromUStatus(ret);
+}
+
 ndk::ScopedAStatus AidlCameraDeviceUser::flush(int64_t* _aidl_return) {
     UStatus ret = mDeviceRemote->flush(_aidl_return);
     return fromUStatus(ret);
@@ -278,4 +288,4 @@
     return true;
 }
 
-} // namespace android::frameworks::cameraservice::device::implementation
\ No newline at end of file
+} // namespace android::frameworks::cameraservice::device::implementation
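
Editor's note: a hedged sketch of how a stable-AIDL client might consume the new isPrimaryClient() entry point above; the proxy variable name is an assumption used only for illustration.

bool primary = false;
ndk::ScopedAStatus status = deviceUser->isPrimaryClient(&primary);
if (status.isOk() && !primary) {
    // A secondary shared-mode client attaches to the already-configured session
    // and should not expect to reconfigure it.
}
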
diff --git a/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.h b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.h
index 8014951..8fa33f7 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.h
+++ b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.h
@@ -97,6 +97,8 @@
         return mCaptureResultMetadataQueue;
     }
 
+    ndk::ScopedAStatus isPrimaryClient(bool* _aidl_return) override;
+
   private:
     bool initDevice();
 
diff --git a/services/camera/libcameraservice/aidl/AidlCameraService.cpp b/services/camera/libcameraservice/aidl/AidlCameraService.cpp
index 7f674bd..a2c431e 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraService.cpp
+++ b/services/camera/libcameraservice/aidl/AidlCameraService.cpp
@@ -30,6 +30,9 @@
 #include <hidl/HidlTransportSupport.h>
 #include <utils/AttributionAndPermissionUtils.h>
 #include <utils/Utils.h>
+#include <com_android_internal_camera_flags.h>
+
+namespace flags = com::android::internal::camera::flags;
 
 namespace android::frameworks::cameraservice::service::implementation {
 
@@ -131,10 +134,28 @@
 
     return ScopedAStatus::ok();
 }
+
 ndk::ScopedAStatus AidlCameraService::connectDevice(
         const std::shared_ptr<SICameraDeviceCallback>& in_callback,
         const std::string& in_cameraId,
         std::shared_ptr<SICameraDeviceUser>* _aidl_return) {
+    return connectDeviceImpl(in_callback, in_cameraId, /*sharedMode*/false, _aidl_return);
+}
+
+ndk::ScopedAStatus AidlCameraService::connectDeviceV2(
+        const std::shared_ptr<SICameraDeviceCallback>& in_callback,
+        const std::string& in_cameraId, bool sharedMode,
+        std::shared_ptr<SICameraDeviceUser>* _aidl_return) {
+    if (!flags::camera_multi_client()) {
+        return fromSStatus(SStatus::INVALID_OPERATION);
+    }
+    return connectDeviceImpl(in_callback, in_cameraId, sharedMode, _aidl_return);
+}
+
+ndk::ScopedAStatus AidlCameraService::connectDeviceImpl(
+        const std::shared_ptr<SICameraDeviceCallback>& in_callback,
+        const std::string& in_cameraId, bool sharedMode,
+        std::shared_ptr<SICameraDeviceUser>* _aidl_return) {
     // Here, we first get NDK ICameraDeviceUser from mCameraService, then save
     // that interface in the newly created AidlCameraDeviceUser impl class.
     if (mCameraService == nullptr) {
@@ -164,6 +185,7 @@
             ROTATION_OVERRIDE_NONE,
             clientAttribution,
             /* devicePolicy= */ 0,
+            sharedMode,
             &unstableDevice);
     if (!serviceRet.isOk()) {
         ALOGE("%s: Unable to connect to camera device: %s", __FUNCTION__,
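
Editor's note: connectDevice() and the new connectDeviceV2() both funnel into connectDeviceImpl(); only the V2 entry point lets callers request shared access, and it rejects the request when the camera_multi_client flag is off. A hedged caller-side sketch (the service proxy and callback names are assumptions):

std::shared_ptr<SICameraDeviceUser> deviceUser;
ndk::ScopedAStatus status = aidlCameraService->connectDeviceV2(
        callback, /*in_cameraId*/ "0", /*sharedMode*/ true, &deviceUser);
if (!status.isOk()) {
    // INVALID_OPERATION is returned when camera_multi_client is disabled.
}
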
diff --git a/services/camera/libcameraservice/aidl/AidlCameraService.h b/services/camera/libcameraservice/aidl/AidlCameraService.h
index 4c67ac7..80e965d 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraService.h
+++ b/services/camera/libcameraservice/aidl/AidlCameraService.h
@@ -60,6 +60,9 @@
     ndk::ScopedAStatus removeListener(
             const std::shared_ptr<SICameraServiceListener>& in_listener) override;
 
+    ndk::ScopedAStatus connectDeviceV2(const std::shared_ptr<SICameraDeviceCallback>& in_callback,
+                                     const std::string& in_cameraId, bool sharedMode,
+                                     std::shared_ptr<SICameraDeviceUser>* _aidl_return);
   private:
     void addToListenerCacheLocked(std::shared_ptr<SICameraServiceListener> stableCsListener,
                                   sp<hardware::ICameraServiceListener> csListener);
@@ -70,6 +73,9 @@
     SStatus addListenerInternal(const std::shared_ptr<SICameraServiceListener>& listener,
                                 std::vector<hardware::CameraStatus>* cameraStatusAndIds);
 
+    ndk::ScopedAStatus connectDeviceImpl(const std::shared_ptr<SICameraDeviceCallback>& in_callback,
+            const std::string& in_cameraId, bool sharedMode,
+            std::shared_ptr<SICameraDeviceUser>* _aidl_return);
 
     ::android::CameraService* mCameraService;
 
diff --git a/services/camera/libcameraservice/aidl/AidlCameraServiceListener.h b/services/camera/libcameraservice/aidl/AidlCameraServiceListener.h
index a7c32e3..c0dc688 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraServiceListener.h
+++ b/services/camera/libcameraservice/aidl/AidlCameraServiceListener.h
@@ -75,6 +75,11 @@
                          uint32_t flags) override;
     status_t unlinkToDeath(const wp<DeathRecipient>& recipient, void* cookie, uint32_t flags,
                            wp<DeathRecipient>* outRecipient) override;
+    binder::Status onCameraOpenedInSharedMode(const std::string& /*cameraId*/,
+            const std::string& /*clientPackageId*/, int32_t /*deviceId*/, bool /*primaryClient*/) {
+        // empty implementation
+        return binder::Status::ok();
+    }
 
   private:
     std::shared_ptr<SICameraServiceListener> mBase;
@@ -86,4 +91,4 @@
 
 } // android
 
-#endif // FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERASERVICELISTENER_H_
\ No newline at end of file
+#endif // FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERASERVICELISTENER_H_
diff --git a/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h b/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
index 24473f7..43ddac6 100644
--- a/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
+++ b/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
@@ -96,12 +96,15 @@
       {36, {
           ANDROID_COLOR_CORRECTION_AVAILABLE_MODES,
           ANDROID_COLOR_CORRECTION_COLOR_TEMPERATURE_RANGE,
+          ANDROID_CONTROL_AE_AVAILABLE_PRIORITY_MODES,
           ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_MIN_FRAME_DURATIONS,
           ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
           ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STALL_DURATIONS,
           ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STALL_DURATIONS_MAXIMUM_RESOLUTION,
           ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS,
           ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_SHARED_SESSION_COLOR_SPACE,
+          ANDROID_SHARED_SESSION_OUTPUT_CONFIGURATIONS,
         } },
 };
 
@@ -138,6 +141,8 @@
       {36, {
           ANDROID_COLOR_CORRECTION_COLOR_TEMPERATURE,
           ANDROID_COLOR_CORRECTION_COLOR_TINT,
+          ANDROID_CONTROL_AE_PRIORITY_MODE,
+          ANDROID_CONTROL_ZOOM_METHOD,
           ANDROID_EXTENSION_NIGHT_MODE_INDICATOR,
         }  },
 };
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 45b7c3b..1d29462 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -28,6 +28,7 @@
 #include <camera/CameraUtils.h>
 #include <camera/StringUtils.h>
 #include <com_android_internal_camera_flags.h>
+#include <com_android_window_flags.h>
 #include <cutils/properties.h>
 #include <gui/Surface.h>
 #include <gui/view/Surface.h>
@@ -53,6 +54,7 @@
 using namespace camera2;
 
 namespace flags = com::android::internal::camera::flags;
+namespace wm_flags = com::android::window::flags;
 
 // Interface used by CameraService
 
@@ -63,12 +65,12 @@
         const AttributionSourceState& clientAttribution, int callingPid,
         const std::string& cameraDeviceId, int api1CameraId, int cameraFacing,
         int sensorOrientation, int servicePid, bool overrideForPerfClass, int rotationOverride,
-        bool forceSlowJpegMode)
+        bool forceSlowJpegMode, bool sharedMode)
     : Camera2ClientBase(cameraService, cameraClient, cameraServiceProxyWrapper,
                         attributionAndPermissionUtils, clientAttribution, callingPid,
                         false /*systemNativeClient - since no ndk for api1*/, cameraDeviceId,
                         api1CameraId, cameraFacing, sensorOrientation, servicePid,
-                        overrideForPerfClass, rotationOverride,
+                        overrideForPerfClass, rotationOverride, sharedMode,
                         /*legacyClient*/ true),
       mParameters(api1CameraId, cameraFacing),
       mLatestRequestIds(kMaxRequestIds),
@@ -134,8 +136,13 @@
     // The 'mRotateAndCropMode' value only accounts for the necessary adjustment
     // when the display rotates. The sensor orientation still needs to be calculated
     // and applied similar to the Camera2 path.
+    using hardware::BnCameraService::ROTATION_OVERRIDE_ROTATION_ONLY;
+    bool enableTransformInverseDisplay = true;
+    if (wm_flags::enable_camera_compat_for_desktop_windowing()) {
+        enableTransformInverseDisplay = (mRotationOverride != ROTATION_OVERRIDE_ROTATION_ONLY);
+    }
     CameraUtils::getRotationTransform(staticInfo, OutputConfiguration::MIRROR_MODE_AUTO,
-            &mRotateAndCropPreviewTransform);
+            enableTransformInverseDisplay, &mRotateAndCropPreviewTransform);
 
     mStreamingProcessor = new StreamingProcessor(this);
 
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index 345494b..51d8d54 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -107,7 +107,7 @@
                   const AttributionSourceState& clientAttribution, int callingPid,
                   const std::string& cameraDeviceId, int api1CameraId, int cameraFacing,
                   int sensorOrientation, int servicePid, bool overrideForPerfClass,
-                  int rotationOverride, bool forceSlowJpegMode);
+                  int rotationOverride, bool forceSlowJpegMode, bool sharedMode);
 
     virtual ~Camera2Client();
 
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 17a6dc3..8c30d54 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -63,10 +63,11 @@
         std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
         const AttributionSourceState& clientAttribution, int callingPid, bool systemNativeClient,
         const std::string& cameraId, [[maybe_unused]] int api1CameraId, int cameraFacing,
-        int sensorOrientation, int servicePid, int rotationOverride)
+        int sensorOrientation, int servicePid, int rotationOverride, bool sharedMode)
     : BasicClient(cameraService, IInterface::asBinder(remoteCallback),
                   attributionAndPermissionUtils, clientAttribution, callingPid, systemNativeClient,
-                  cameraId, cameraFacing, sensorOrientation, servicePid, rotationOverride),
+                  cameraId, cameraFacing, sensorOrientation, servicePid, rotationOverride,
+                  sharedMode),
       mRemoteCallback(remoteCallback) {}
 
 // Interface used by CameraService
@@ -78,11 +79,13 @@
         std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
         const AttributionSourceState& clientAttribution, int callingPid, bool systemNativeClient,
         const std::string& cameraId, int cameraFacing, int sensorOrientation, int servicePid,
-        bool overrideForPerfClass, int rotationOverride, const std::string& originalCameraId)
+        bool overrideForPerfClass, int rotationOverride, const std::string& originalCameraId,
+        bool sharedMode)
     : Camera2ClientBase(cameraService, remoteCallback, cameraServiceProxyWrapper,
                         attributionAndPermissionUtils, clientAttribution, callingPid,
                         systemNativeClient, cameraId, /*API1 camera ID*/ -1, cameraFacing,
-                        sensorOrientation, servicePid, overrideForPerfClass, rotationOverride),
+                        sensorOrientation, servicePid, overrideForPerfClass, rotationOverride,
+                        sharedMode),
       mInputStream(),
       mStreamingRequestId(REQUEST_ID_NONE),
       mRequestIdCounter(0),
@@ -610,9 +613,20 @@
 }
 
 binder::Status CameraDeviceClient::beginConfigure() {
-    // TODO: Implement this.
     ATRACE_CALL();
-    ALOGV("%s: Not implemented yet.", __FUNCTION__);
+    if (!flags::camera_multi_client()) {
+        return binder::Status::ok();
+    }
+    if (!mDevice.get()) {
+        return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
+    }
+    status_t res = mDevice->beginConfigure();
+    if (res != OK) {
+        std::string msg = fmt::sprintf("Camera %s: Error beginning stream configuration: %s (%d)",
+                mCameraIdStr.c_str(), strerror(-res), res);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
+    }
     return binder::Status::ok();
 }
 
@@ -645,6 +659,12 @@
         return res;
     }
 
+    if (flags::camera_multi_client() && mSharedMode) {
+        // For a shared camera session, the streams were already configured
+        // earlier, so there is no need to configure them again here.
+        return res;
+    }
+
     status_t err = mDevice->configureStreams(sessionParams, operatingMode);
     if (err == BAD_VALUE) {
         std::string msg = fmt::sprintf("Camera %s: Unsupported set of inputs/outputs provided",
@@ -768,6 +788,7 @@
 
     bool isInput = false;
     std::vector<sp<IBinder>> surfaces;
+    std::vector<size_t> removedSurfaceIds;
     ssize_t dIndex = NAME_NOT_FOUND;
     ssize_t compositeIndex  = NAME_NOT_FOUND;
 
@@ -778,6 +799,9 @@
         for (size_t i = 0; i < mStreamMap.size(); ++i) {
             if (streamId == mStreamMap.valueAt(i).streamId()) {
                 surfaces.push_back(mStreamMap.keyAt(i));
+                if (flags::camera_multi_client() && mSharedMode) {
+                    removedSurfaceIds.push_back(mStreamMap.valueAt(i).surfaceId());
+                }
             }
         }
 
@@ -805,8 +829,14 @@
         }
     }
 
-    // Also returns BAD_VALUE if stream ID was not valid
-    status_t err = mDevice->deleteStream(streamId);
+
+    status_t err;
+    if (flags::camera_multi_client() && mSharedMode) {
+        err = mDevice->removeSharedSurfaces(streamId, removedSurfaceIds);
+    } else {
+        // Also returns BAD_VALUE if stream ID was not valid
+        err = mDevice->deleteStream(streamId);
+    }
 
     if (err != OK) {
         std::string msg = fmt::sprintf("Camera %s: Unexpected error %s (%d) when deleting stream "
@@ -900,6 +930,7 @@
 
     std::vector<SurfaceHolder> surfaces;
     std::vector<sp<IBinder>> binders;
+    std::vector<OutputStreamInfo> streamInfos;
     status_t err;
 
     // Create stream for deferred surface case.
@@ -939,52 +970,57 @@
 
         binders.push_back(IInterface::asBinder(bufferProducer));
         surfaces.push_back({surface, mirrorMode});
+        if (flags::camera_multi_client() && mSharedMode) {
+            streamInfos.push_back(streamInfo);
+        }
     }
 
-    // If mOverrideForPerfClass is true, do not fail createStream() for small
-    // JPEG sizes because existing createSurfaceFromGbp() logic will find the
-    // closest possible supported size.
-
     int streamId = camera3::CAMERA3_STREAM_ID_INVALID;
     std::vector<int> surfaceIds;
-    bool isDepthCompositeStream =
-            camera3::DepthCompositeStream::isDepthCompositeStream(surfaces[0].mSurface);
-    bool isHeicCompositeStream = camera3::HeicCompositeStream::isHeicCompositeStream(
-            surfaces[0].mSurface);
-    bool isJpegRCompositeStream =
-        camera3::JpegRCompositeStream::isJpegRCompositeStream(surfaces[0].mSurface) &&
-        !mDevice->isCompositeJpegRDisabled();
-    if (isDepthCompositeStream || isHeicCompositeStream || isJpegRCompositeStream) {
-        sp<CompositeStream> compositeStream;
-        if (isDepthCompositeStream) {
-            compositeStream = new camera3::DepthCompositeStream(mDevice, getRemoteCallback());
-        } else if (isHeicCompositeStream) {
-            compositeStream = new camera3::HeicCompositeStream(mDevice, getRemoteCallback());
-        } else {
-            compositeStream = new camera3::JpegRCompositeStream(mDevice, getRemoteCallback());
+    if (flags::camera_multi_client() && mSharedMode) {
+        err = mDevice->getSharedStreamId(outputConfiguration, &streamId);
+        if (err == OK) {
+            err = mDevice->addSharedSurfaces(streamId, streamInfos, surfaces, &surfaceIds);
         }
-
-        err = compositeStream->createStream(surfaces, deferredConsumer, streamInfo.width,
+    } else {
+        bool isDepthCompositeStream =
+                camera3::DepthCompositeStream::isDepthCompositeStream(surfaces[0].mSurface);
+        bool isHeicCompositeStream = camera3::HeicCompositeStream::isHeicCompositeStream(
+                surfaces[0].mSurface);
+        bool isJpegRCompositeStream =
+            camera3::JpegRCompositeStream::isJpegRCompositeStream(surfaces[0].mSurface) &&
+            !mDevice->isCompositeJpegRDisabled();
+        if (isDepthCompositeStream || isHeicCompositeStream || isJpegRCompositeStream) {
+            sp<CompositeStream> compositeStream;
+            if (isDepthCompositeStream) {
+                compositeStream = new camera3::DepthCompositeStream(mDevice, getRemoteCallback());
+            } else if (isHeicCompositeStream) {
+                compositeStream = new camera3::HeicCompositeStream(mDevice, getRemoteCallback());
+            } else {
+                compositeStream = new camera3::JpegRCompositeStream(mDevice, getRemoteCallback());
+            }
+            err = compositeStream->createStream(surfaces, deferredConsumer, streamInfo.width,
                 streamInfo.height, streamInfo.format,
                 static_cast<camera_stream_rotation_t>(outputConfiguration.getRotation()),
                 &streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
                 outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution,
                 streamInfo.colorSpace, streamInfo.dynamicRangeProfile, streamInfo.streamUseCase,
                 useReadoutTimestamp);
-        if (err == OK) {
-            Mutex::Autolock l(mCompositeLock);
-            mCompositeStreamMap.add(
-                    IInterface::asBinder(surfaces[0].mSurface->getIGraphicBufferProducer()),
-                    compositeStream);
+            if (err == OK) {
+                Mutex::Autolock l(mCompositeLock);
+                mCompositeStreamMap.add(
+                        IInterface::asBinder(surfaces[0].mSurface->getIGraphicBufferProducer()),
+                        compositeStream);
+            }
+        } else {
+            err = mDevice->createStream(surfaces, deferredConsumer, streamInfo.width,
+                    streamInfo.height, streamInfo.format, streamInfo.dataSpace,
+                    static_cast<camera_stream_rotation_t>(outputConfiguration.getRotation()),
+                    &streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
+                    outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution,
+                    /*consumerUsage*/0, streamInfo.dynamicRangeProfile, streamInfo.streamUseCase,
+                    streamInfo.timestampBase, streamInfo.colorSpace, useReadoutTimestamp);
         }
-    } else {
-        err = mDevice->createStream(surfaces, deferredConsumer, streamInfo.width,
-                streamInfo.height, streamInfo.format, streamInfo.dataSpace,
-                static_cast<camera_stream_rotation_t>(outputConfiguration.getRotation()),
-                &streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
-                outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution,
-                /*consumerUsage*/0, streamInfo.dynamicRangeProfile, streamInfo.streamUseCase,
-                streamInfo.timestampBase, streamInfo.colorSpace, useReadoutTimestamp);
     }
 
     if (err != OK) {
@@ -1743,6 +1779,20 @@
     return binder::Status::ok();
 }
 
+binder::Status CameraDeviceClient::isPrimaryClient(/*out*/bool* isPrimary) {
+    ATRACE_CALL();
+    binder::Status res = binder::Status::ok();
+    if (!flags::camera_multi_client()) {
+        return res;
+    }
+    if (!(res = checkPidStatus(__FUNCTION__)).isOk()) return res;
+    if (isPrimary != nullptr) {
+        status_t ret = BasicClient::isPrimaryClient(isPrimary);
+        return binder::Status::fromStatusT(ret);
+    }
+    return res;
+}
+
 status_t CameraDeviceClient::setCameraServiceWatchdog(bool enabled) {
     return mDevice->setCameraServiceWatchdog(enabled);
 }
@@ -1875,7 +1925,7 @@
         offlineClient = new CameraOfflineSessionClient(
                 sCameraService, offlineSession, offlineCompositeStreamMap, cameraCb,
                 mAttributionAndPermissionUtils, mClientAttribution, mCallingPid, mCameraIdStr,
-                mCameraFacing, mOrientation, mServicePid);
+                mCameraFacing, mOrientation, mServicePid, /*sharedMode*/false);
         ret = sCameraService->addOfflineClient(mCameraIdStr, offlineClient);
     }
 
@@ -2078,46 +2128,59 @@
     }
 }
 
+void CameraDeviceClient::notifyClientSharedAccessPriorityChanged(bool primaryClient) {
+    // Thread safe. Don't bother locking.
+    if (!flags::camera_multi_client()) {
+        return;
+    }
+    sp<hardware::camera2::ICameraDeviceCallbacks> remoteCb = getRemoteCallback();
+    if (remoteCb != 0) {
+        remoteCb->onClientSharedAccessPriorityChanged(primaryClient);
+    }
+}
+
 void CameraDeviceClient::detachDevice() {
     if (mDevice == 0) return;
 
     nsecs_t startTime = systemTime();
-    ALOGV("Camera %s: Stopping processors", mCameraIdStr.c_str());
+    if (!flags::camera_multi_client() || sCameraService->isOnlyClient(this)) {
+        ALOGV("Camera %s: Stopping processors", mCameraIdStr.c_str());
 
-    if (mFrameProcessor.get() != nullptr) {
-        mFrameProcessor->removeListener(
-                camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MIN_ID,
-                camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MAX_ID, /*listener*/this);
-        mFrameProcessor->requestExit();
-        ALOGV("Camera %s: Waiting for threads", mCameraIdStr.c_str());
-        mFrameProcessor->join();
-        ALOGV("Camera %s: Disconnecting device", mCameraIdStr.c_str());
-    }
-
-    // WORKAROUND: HAL refuses to disconnect while there's streams in flight
-    {
-        int64_t lastFrameNumber;
-        status_t code;
-        if ((code = mDevice->flush(&lastFrameNumber)) != OK) {
-            ALOGE("%s: flush failed with code 0x%x", __FUNCTION__, code);
+        if (mFrameProcessor.get() != nullptr) {
+            mFrameProcessor->removeListener(
+                    camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MIN_ID,
+                    camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MAX_ID, /*listener*/this);
+            mFrameProcessor->requestExit();
+            ALOGV("Camera %s: Waiting for threads", mCameraIdStr.c_str());
+            mFrameProcessor->join();
+            ALOGV("Camera %s: Disconnecting device", mCameraIdStr.c_str());
         }
 
-        if ((code = mDevice->waitUntilDrained()) != OK) {
-            ALOGE("%s: waitUntilDrained failed with code 0x%x", __FUNCTION__,
-                  code);
-        }
-    }
+        // WORKAROUND: HAL refuses to disconnect while there's streams in flight
+        {
+            int64_t lastFrameNumber;
+            status_t code;
+            if ((code = mDevice->flush(&lastFrameNumber)) != OK) {
+                ALOGE("%s: flush failed with code 0x%x", __FUNCTION__, code);
+            }
 
-    {
-        Mutex::Autolock l(mCompositeLock);
-        for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
-            auto ret = mCompositeStreamMap.valueAt(i)->deleteInternalStreams();
-            if (ret != OK) {
-                ALOGE("%s: Failed removing composite stream  %s (%d)", __FUNCTION__,
-                        strerror(-ret), ret);
+            if ((code = mDevice->waitUntilDrained()) != OK) {
+                ALOGE("%s: waitUntilDrained failed with code 0x%x", __FUNCTION__,
+                        code);
             }
         }
-        mCompositeStreamMap.clear();
+
+        {
+            Mutex::Autolock l(mCompositeLock);
+            for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
+                auto ret = mCompositeStreamMap.valueAt(i)->deleteInternalStreams();
+                if (ret != OK) {
+                    ALOGE("%s: Failed removing composite stream  %s (%d)", __FUNCTION__,
+                            strerror(-ret), ret);
+                }
+            }
+            mCompositeStreamMap.clear();
+        }
     }
 
     bool hasDeviceError = mDevice->hasDeviceError();
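
Editor's note: CameraDeviceClient::isPrimaryClient() above forwards to the BasicClient accessor declared in CameraService.h. A minimal sketch of that accessor pair, assuming it simply reflects the new mIsPrimaryClient member (the real implementation may additionally gate on the camera_multi_client flag):

status_t CameraService::BasicClient::isPrimaryClient(/*out*/ bool* isPrimary) {
    if (isPrimary == nullptr) return BAD_VALUE;
    *isPrimary = mIsPrimaryClient;
    return OK;
}

status_t CameraService::BasicClient::setPrimaryClient(bool isPrimary) {
    mIsPrimaryClient = isPrimary;
    return OK;
}
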
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 0858633..a8cf451 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -54,7 +54,8 @@
             std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
             const AttributionSourceState& clientAttribution, int callingPid,
             bool systemNativeClient, const std::string& cameraId, int api1CameraId,
-            int cameraFacing, int sensorOrientation, int servicePid, int rotationOverride);
+            int cameraFacing, int sensorOrientation, int servicePid, int rotationOverride,
+            bool sharedMode);
 
     sp<hardware::camera2::ICameraDeviceCallbacks> mRemoteCallback;
 };
@@ -168,6 +169,8 @@
             /*out*/
             sp<hardware::camera2::ICameraOfflineSession>* session) override;
 
+    virtual binder::Status isPrimaryClient(/*out*/bool* isPrimary) override;
+
     /**
      * Interface used by CameraService
      */
@@ -179,7 +182,7 @@
                        const AttributionSourceState& clientAttribution, int callingPid,
                        bool clientPackageOverride, const std::string& cameraId, int cameraFacing,
                        int sensorOrientation, int servicePid, bool overrideForPerfClass,
-                       int rotationOverride, const std::string& originalCameraId);
+                       int rotationOverride, const std::string& originalCameraId, bool sharedMode);
     virtual ~CameraDeviceClient();
 
     virtual status_t      initialize(sp<CameraProviderManager> manager,
@@ -222,6 +225,7 @@
     virtual void notifyPrepared(int streamId);
     virtual void notifyRequestQueueEmpty();
     virtual void notifyRepeatingRequestError(long lastFrameNumber);
+    virtual void notifyClientSharedAccessPriorityChanged(bool primaryClient);
 
     void setImageDumpMask(int mask) { if (mDevice != nullptr) mDevice->setImageDumpMask(mask); }
     /**
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
index e783cbc..71fd3ba 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
@@ -308,6 +308,9 @@
     }
 }
 
+void CameraOfflineSessionClient::notifyClientSharedAccessPriorityChanged(bool /*primaryClient*/) {
+}
+
 void CameraOfflineSessionClient::notifyShutter(const CaptureResultExtras& resultExtras,
         nsecs_t timestamp) {
 
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
index 574ff9a..78a3055 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
@@ -49,13 +49,14 @@
             const sp<ICameraDeviceCallbacks>& remoteCallback,
             std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
             const AttributionSourceState& clientAttribution, int callingPid,
-            const std::string& cameraIdStr, int cameraFacing, int sensorOrientation, int servicePid)
+            const std::string& cameraIdStr, int cameraFacing, int sensorOrientation, int servicePid,
+            bool sharedMode)
         : CameraService::BasicClient(cameraService, IInterface::asBinder(remoteCallback),
                                      attributionAndPermissionUtils,
                                      // (v)ndk doesn't have offline session support
                                      clientAttribution, callingPid, /*overridePackageName*/ false,
                                      cameraIdStr, cameraFacing, sensorOrientation, servicePid,
-                                     hardware::ICameraService::ROTATION_OVERRIDE_NONE),
+                                     hardware::ICameraService::ROTATION_OVERRIDE_NONE, sharedMode),
         mRemoteCallback(remoteCallback),
         mOfflineSession(session),
         mCompositeStreamMap(offlineCompositeStreamMap) {}
@@ -119,6 +120,7 @@
     void notifyRepeatingRequestError(long lastFrameNumber) override;
     status_t injectCamera(const std::string& injectedCamId,
             sp<CameraProviderManager> manager) override;
+    void notifyClientSharedAccessPriorityChanged(bool primaryClient) override;
     status_t stopInjection() override;
     status_t injectSessionParams(
         const hardware::camera2::impl::CameraMetadataNative& sessionParams) override;
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index f6b1e80..03abf71 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -31,6 +31,7 @@
 #include <camera/CameraSessionStats.h>
 #include <camera/StringUtils.h>
 #include <com_android_window_flags.h>
+#include <com_android_internal_camera_flags.h>
 
 #include "common/Camera2ClientBase.h"
 
@@ -39,12 +40,14 @@
 #include "device3/Camera3Device.h"
 #include "device3/aidl/AidlCamera3Device.h"
 #include "device3/hidl/HidlCamera3Device.h"
+#include "device3/aidl/AidlCamera3SharedDevice.h"
 
 namespace android {
 
 using namespace camera2;
 
 namespace wm_flags = com::android::window::flags;
+namespace flags = com::android::internal::camera::flags;
 
 // Interface used by CameraService
 
@@ -55,10 +58,11 @@
         std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
         const AttributionSourceState& clientAttribution, int callingPid, bool systemNativeClient,
         const std::string& cameraId, int api1CameraId, int cameraFacing, int sensorOrientation,
-        int servicePid, bool overrideForPerfClass, int rotationOverride, bool legacyClient)
+        int servicePid, bool overrideForPerfClass, int rotationOverride, bool sharedMode,
+        bool legacyClient)
     : TClientBase(cameraService, remoteCallback, attributionAndPermissionUtils, clientAttribution,
                   callingPid, systemNativeClient, cameraId, api1CameraId, cameraFacing,
-                  sensorOrientation, servicePid, rotationOverride),
+                  sensorOrientation, servicePid, rotationOverride, sharedMode),
       mSharedCameraCallbacks(remoteCallback),
       mCameraServiceProxyWrapper(cameraServiceProxyWrapper),
       mDeviceActive(false),
@@ -114,12 +118,19 @@
                             TClientBase::mRotationOverride, mLegacyClient);
             break;
         case IPCTransport::AIDL:
-            mDevice =
+            if (flags::camera_multi_client() && TClientBase::mSharedMode) {
+                mDevice = AidlCamera3SharedDevice::getInstance(mCameraServiceProxyWrapper,
+                            TClientBase::mAttributionAndPermissionUtils,
+                            TClientBase::mCameraIdStr, mOverrideForPerfClass,
+                            TClientBase::mRotationOverride, mLegacyClient);
+            } else {
+                mDevice =
                     new AidlCamera3Device(mCameraServiceProxyWrapper,
                             TClientBase::mAttributionAndPermissionUtils,
                             TClientBase::mCameraIdStr, mOverrideForPerfClass,
                             TClientBase::mRotationOverride, mLegacyClient);
-             break;
+            }
+            break;
         default:
             ALOGE("%s Invalid transport for camera id %s", __FUNCTION__,
                     TClientBase::mCameraIdStr.c_str());
@@ -161,9 +172,10 @@
 Camera2ClientBase<TClientBase>::~Camera2ClientBase() {
     ATRACE_CALL();
 
-    TClientBase::mDestructionStarted = true;
-
-    disconnect();
+    if (!flags::camera_multi_client() || !TClientBase::mDisconnected) {
+        TClientBase::mDestructionStarted = true;
+        disconnect();
+    }
 
     ALOGI("%s: Client object's dtor for Camera Id %s completed. Client was: %s (PID %d, UID %u)",
           __FUNCTION__, TClientBase::mCameraIdStr.c_str(), TClientBase::getPackageName().c_str(),
@@ -250,7 +262,10 @@
 template <typename TClientBase>
 binder::Status Camera2ClientBase<TClientBase>::disconnect() {
 
-    return disconnectImpl();
+    if (!flags::camera_multi_client() || !TClientBase::mDisconnected) {
+        return disconnectImpl();
+    }
+    return binder::Status::ok();
 }
 
 template <typename TClientBase>
@@ -288,7 +303,11 @@
 template <typename TClientBase>
 void Camera2ClientBase<TClientBase>::detachDevice() {
     if (mDevice == 0) return;
-    mDevice->disconnect();
+    if (flags::camera_multi_client() && TClientBase::mSharedMode) {
+        mDevice->disconnectClient(TClientBase::getClientUid());
+    } else {
+        mDevice->disconnect();
+    }
 
     ALOGV("Camera %s: Detach complete", TClientBase::mCameraIdStr.c_str());
 }
@@ -331,6 +350,12 @@
 }
 
 template <typename TClientBase>
+void Camera2ClientBase<TClientBase>::notifyClientSharedAccessPriorityChanged(bool primaryClient) {
+    ALOGV("%s Camera %s access priorities changed for client %d primaryClient=%d", __FUNCTION__,
+            TClientBase::mCameraIdStr.c_str(), TClientBase::getClientUid(), primaryClient);
+}
+
+template <typename TClientBase>
 void Camera2ClientBase<TClientBase>::notifyPhysicalCameraChange(const std::string &physicalId) {
     using android::hardware::ICameraService;
     // We're only interested in this notification if rotationOverride is turned on.
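
Editor's note: in shared mode, Camera2ClientBase no longer tears the device down directly; it passes its UID to disconnectClient() and leaves the decision to the shared device. A speculative sketch of what AidlCamera3SharedDevice::disconnectClient() could look like, given only the contract visible in this change (class members and lock names are assumptions):

status_t AidlCamera3SharedDevice::disconnectClient(int clientUid) {
    Mutex::Autolock l(mSharedClientLock);   // assumed lock
    mAttachedClientUids.erase(clientUid);   // assumed per-client bookkeeping
    if (mAttachedClientUids.empty()) {
        return disconnect();                // last client: release the HAL session
    }
    return OK;                              // other shared clients keep the device open
}
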
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index e231f1f..cb30199 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -56,7 +56,8 @@
                       const AttributionSourceState& clientAttribution, int callingPid,
                       bool systemNativeClient, const std::string& cameraId, int api1CameraId,
                       int cameraFacing, int sensorOrientation, int servicePid,
-                      bool overrideForPerfClass, int rotationOverride, bool legacyClient = false);
+                      bool overrideForPerfClass, int rotationOverride, bool sharedMode,
+                      bool legacyClient = false);
     virtual ~Camera2ClientBase();
 
     virtual status_t      initialize(sp<CameraProviderManager> manager,
@@ -88,6 +89,7 @@
     virtual void          notifyPrepared(int streamId);
     virtual void          notifyRequestQueueEmpty();
     virtual void          notifyRepeatingRequestError(long lastFrameNumber);
+    virtual void          notifyClientSharedAccessPriorityChanged(bool primaryClient) override;
 
     void                  notifyIdleWithUserTag(int64_t requestCount, int64_t resultErrorCount,
                                      bool deviceError,
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index e17d700..cfedf0c 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -51,6 +51,7 @@
 typedef enum camera_stream_configuration_mode {
     CAMERA_STREAM_CONFIGURATION_NORMAL_MODE = 0,
     CAMERA_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE = 1,
+    CAMERA_STREAM_CONFIGURATION_SHARED_MODE = 2,
     CAMERA_VENDOR_STREAM_CONFIGURATION_MODE_START = 0x8000
 } camera_stream_configuration_mode_t;
 
@@ -97,6 +98,7 @@
     virtual status_t initialize(sp<CameraProviderManager> manager,
             const std::string& monitorTags) = 0;
     virtual status_t disconnect() = 0;
+    virtual status_t disconnectClient(int) { return OK; }
 
     virtual status_t dump(int fd, const Vector<String16> &args) = 0;
     virtual status_t startWatchingTags(const std::string &tags) = 0;
@@ -290,6 +292,33 @@
      */
     virtual status_t deleteStream(int id) = 0;
 
+
+    /**
+     * This function is responsible for configuring camera streams at the start of a session.
+     * In shared session mode, where multiple clients may access the camera, camera service
+     * applies a predetermined shared session configuration. If the camera is opened in non-shared
+     * mode, this function is a no-op.
+     */
+    virtual status_t beginConfigure() = 0;
+
+    /**
+     * In shared session mode, this function retrieves the stream ID associated with a specific
+     * output configuration.
+     */
+    virtual status_t getSharedStreamId(const OutputConfiguration &config, int *streamId) = 0;
+
+    /**
+     * In shared session mode, this function adds surfaces to an existing shared stream ID.
+     */
+    virtual status_t addSharedSurfaces(int streamId,
+            const std::vector<android::camera3::OutputStreamInfo> &outputInfo,
+            const std::vector<SurfaceHolder>& surfaces, std::vector<int> *surfaceIds = nullptr) = 0;
+
+    /**
+     * In shared session mode, this function removes surfaces from an existing shared stream ID.
+     */
+    virtual status_t removeSharedSurfaces(int streamId, const std::vector<size_t> &surfaceIds) = 0;
+
     /**
      * Take the currently-defined set of streams and configure the HAL to use
-     * them. This is a long-running operation (may be several hundered ms).
+     * them. This is a long-running operation (may be several hundred ms).
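
Editor's note: taken together, the new virtuals above define the shared-session attach flow. A hedged sketch of how a caller inside the service might use them; the helper name and error handling are illustrative only.

status_t attachToSharedStream(const sp<CameraDeviceBase>& device,
        const OutputConfiguration& config,
        const std::vector<android::camera3::OutputStreamInfo>& infos,
        const std::vector<SurfaceHolder>& surfaces) {
    status_t res = device->beginConfigure();             // no-op unless opened in shared mode
    if (res != OK) return res;

    int streamId = -1;
    res = device->getSharedStreamId(config, &streamId);  // map the config to the shared stream
    if (res != OK) return res;

    std::vector<int> surfaceIds;
    return device->addSharedSurfaces(streamId, infos, surfaces, &surfaceIds);
}
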
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 2d58652..a8d7480 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -63,6 +63,7 @@
 using namespace camera3::SessionConfigurationUtils;
 using std::literals::chrono_literals::operator""s;
 using hardware::camera2::utils::CameraIdAndSessionConfiguration;
+using hardware::camera2::params::OutputConfiguration;
 
 namespace flags = com::android::internal::camera::flags;
 namespace vd_flags = android::companion::virtualdevice::flags;
@@ -1311,11 +1312,15 @@
         int32_t height = std::get<1>(it);
         int32_t gainmapWidth = std::get<0>(it) / HeicCompositeStream::kGainmapScale;
         int32_t gainmapHeight = std::get<1>(it) / HeicCompositeStream::kGainmapScale;
-        if (camera3::HeicCompositeStream::isSizeSupportedByHeifEncoder(width, height,
-                &useHeic, &useGrid, &stall, nullptr /*hevcName*/, kFrameworkHeicAllowSWCodecs) &&
-               camera3::HeicCompositeStream::isSizeSupportedByHeifEncoder(gainmapWidth,
-                   gainmapHeight, &useHeic, &useGrid, &stall, nullptr /*hevcName*/,
-                   kFrameworkHeicAllowSWCodecs) ) {
+        // Support gainmap sizes that are sufficiently aligned so CPU specific copy
+        // optimizations can be utilized without side effects.
+        if (((gainmapWidth % 64) == 0) && ((gainmapHeight % 2) == 0) &&
+                camera3::HeicCompositeStream::isSizeSupportedByHeifEncoder(width, height,
+                    &useHeic, &useGrid, &stall, nullptr /*hevcName*/,
+                    kFrameworkHeicAllowSWCodecs) &&
+                camera3::HeicCompositeStream::isSizeSupportedByHeifEncoder(gainmapWidth,
+                    gainmapHeight, &useHeic, &useGrid, &stall, nullptr /*hevcName*/,
+                    kFrameworkHeicAllowSWCodecs)) {
             int32_t entry[4] = {HAL_PIXEL_FORMAT_BLOB, static_cast<int32_t> (std::get<0>(it)),
                     static_cast<int32_t> (std::get<1>(it)),
                     ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS_OUTPUT };
@@ -1894,6 +1899,36 @@
     return res;
 }
 
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addAePriorityModeTags() {
+    status_t res = OK;
+    auto& c = mCameraCharacteristics;
+
+    auto entry = c.find(ANDROID_CONTROL_AE_AVAILABLE_PRIORITY_MODES);
+    if (entry.count != 0) {
+        return res;
+    }
+
+    std::vector<int32_t> supportedChTags;
+    auto chTags = c.find(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
+    if (chTags.count == 0) {
+        ALOGE("%s: No supported camera characteristics keys!", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    std::vector<uint8_t> aePriorityAvailableModes = {
+            ANDROID_CONTROL_AE_PRIORITY_MODE_OFF };
+    supportedChTags.reserve(chTags.count + 1);
+    supportedChTags.insert(supportedChTags.end(), chTags.data.i32,
+            chTags.data.i32 + chTags.count);
+    supportedChTags.push_back(ANDROID_CONTROL_AE_AVAILABLE_PRIORITY_MODES);
+    c.update(ANDROID_CONTROL_AE_AVAILABLE_PRIORITY_MODES,
+            aePriorityAvailableModes.data(), aePriorityAvailableModes.size());
+    c.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, supportedChTags.data(),
+             supportedChTags.size());
+
+    return res;
+}
+
 status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addPreCorrectionActiveArraySize() {
     status_t res = OK;
     auto& c = mCameraCharacteristics;
@@ -2024,18 +2059,98 @@
 
     int versionCode = ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION_UPSIDE_DOWN_CAKE;
     IPCTransport ipcTransport = parentProvider->getIPCTransport();
-    int deviceVersion = HARDWARE_DEVICE_API_VERSION(mVersion.get_major(), mVersion.get_minor());
-    if (ipcTransport == IPCTransport::AIDL
-            && deviceVersion >= CAMERA_DEVICE_API_VERSION_1_3) {
-        versionCode = ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION_VANILLA_ICE_CREAM;
+    auto& c = mCameraCharacteristics;
+    status_t res = OK;
+    if (ipcTransport != IPCTransport::AIDL) {
+        res = c.update(ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION, &versionCode, 1);
+        mSessionConfigQueryVersion = versionCode;
+        return res;
     }
 
-    auto& c = mCameraCharacteristics;
-    status_t res = c.update(ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION, &versionCode, 1);
+    int deviceVersion = HARDWARE_DEVICE_API_VERSION(mVersion.get_major(), mVersion.get_minor());
+    if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_3) {
+        versionCode = ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION_VANILLA_ICE_CREAM;
+    } else if (deviceVersion >= CAMERA_DEVICE_API_VERSION_1_4) {
+        if (flags::feature_combination_baklava()) {
+            versionCode = ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION_BAKLAVA;
+        } else {
+            versionCode = ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION_VANILLA_ICE_CREAM;
+        }
+    }
+    res = c.update(ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION, &versionCode, 1);
     mSessionConfigQueryVersion = versionCode;
     return res;
 }
 
+bool CameraProviderManager::ProviderInfo::DeviceInfo3::isAutomotiveDevice() {
+    // Checks the property ro.hardware.type and returns true if it is
+    // automotive.
+    char value[PROPERTY_VALUE_MAX] = {0};
+    property_get("ro.hardware.type", value, "");
+    return strncmp(value, "automotive", PROPERTY_VALUE_MAX) == 0;
+}
+
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addSharedSessionConfigurationTags() {
+    status_t res = OK;
+    if (flags::camera_multi_client()) {
+        const int32_t sharedColorSpaceTag = ANDROID_SHARED_SESSION_COLOR_SPACE;
+        const int32_t sharedOutputConfigurationsTag = ANDROID_SHARED_SESSION_OUTPUT_CONFIGURATIONS;
+        auto& c = mCameraCharacteristics;
+        uint8_t colorSpace = 0;
+
+        res = c.update(sharedColorSpaceTag, &colorSpace, 1);
+
+        // TODO: b/372321187 The shared session configuration is hardcoded here. Update the
+        // code to read these values from XML instead.
+        std::vector<int64_t> sharedOutputConfigEntries;
+        int64_t surfaceType1 =  OutputConfiguration::SURFACE_TYPE_IMAGE_READER;
+        int64_t width = 1280;
+        int64_t height = 800;
+        int64_t format1 = HAL_PIXEL_FORMAT_RGBA_8888;
+        int64_t mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO;
+        int64_t timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT;
+        int64_t usage1 = 3;
+        int64_t dataspace = 0;
+        int64_t useReadoutTimestamp = 0;
+        int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
+        int64_t physicalCamIdLen = 0;
+
+        // Stream 1 configuration hardcoded
+        sharedOutputConfigEntries.push_back(surfaceType1);
+        sharedOutputConfigEntries.push_back(width);
+        sharedOutputConfigEntries.push_back(height);
+        sharedOutputConfigEntries.push_back(format1);
+        sharedOutputConfigEntries.push_back(mirrorMode);
+        sharedOutputConfigEntries.push_back(useReadoutTimestamp);
+        sharedOutputConfigEntries.push_back(timestampBase);
+        sharedOutputConfigEntries.push_back(dataspace);
+        sharedOutputConfigEntries.push_back(usage1);
+        sharedOutputConfigEntries.push_back(streamUseCase);
+        sharedOutputConfigEntries.push_back(physicalCamIdLen);
+
+        // Stream 2 configuration hardcoded
+        int64_t surfaceType2 =  OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW;
+        int64_t format2 = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+        int64_t usage2 = 0;
+
+        sharedOutputConfigEntries.push_back(surfaceType2);
+        sharedOutputConfigEntries.push_back(width);
+        sharedOutputConfigEntries.push_back(height);
+        sharedOutputConfigEntries.push_back(format2);
+        sharedOutputConfigEntries.push_back(mirrorMode);
+        sharedOutputConfigEntries.push_back(useReadoutTimestamp);
+        sharedOutputConfigEntries.push_back(timestampBase);
+        sharedOutputConfigEntries.push_back(dataspace);
+        sharedOutputConfigEntries.push_back(usage2);
+        sharedOutputConfigEntries.push_back(streamUseCase);
+        sharedOutputConfigEntries.push_back(physicalCamIdLen);
+
+        res = c.update(sharedOutputConfigurationsTag, sharedOutputConfigEntries.data(),
+                sharedOutputConfigEntries.size());
+    }
+    return res;
+}
+
 status_t CameraProviderManager::ProviderInfo::DeviceInfo3::removeAvailableKeys(
         CameraMetadata& c, const std::vector<uint32_t>& keys, uint32_t keyTag) {
     status_t res = OK;
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index e629218..11985f5 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -88,6 +88,7 @@
 #define CAMERA_DEVICE_API_VERSION_1_0 HARDWARE_DEVICE_API_VERSION(1, 0)
 #define CAMERA_DEVICE_API_VERSION_1_2 HARDWARE_DEVICE_API_VERSION(1, 2)
 #define CAMERA_DEVICE_API_VERSION_1_3 HARDWARE_DEVICE_API_VERSION(1, 3)
+#define CAMERA_DEVICE_API_VERSION_1_4 HARDWARE_DEVICE_API_VERSION(1, 4)
 #define CAMERA_DEVICE_API_VERSION_3_0 HARDWARE_DEVICE_API_VERSION(3, 0)
 #define CAMERA_DEVICE_API_VERSION_3_1 HARDWARE_DEVICE_API_VERSION(3, 1)
 #define CAMERA_DEVICE_API_VERSION_3_2 HARDWARE_DEVICE_API_VERSION(3, 2)
@@ -779,7 +780,10 @@
             status_t addPreCorrectionActiveArraySize();
             status_t addReadoutTimestampTag(bool readoutTimestampSupported = true);
             status_t addColorCorrectionAvailableModesTag(CameraMetadata& ch);
+            status_t addAePriorityModeTags();
             status_t addSessionConfigQueryVersionTag();
+            status_t addSharedSessionConfigurationTags();
+            bool isAutomotiveDevice();
 
             static void getSupportedSizes(const CameraMetadata& ch, uint32_t tag,
                     android_pixel_format_t format,
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
index 3d6a23f..88998c6 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
@@ -30,6 +30,7 @@
 
 #include "device3/DistortionMapper.h"
 #include "device3/ZoomRatioMapper.h"
+#include <utils/AttributionAndPermissionUtils.h>
 #include <utils/SessionConfigurationUtils.h>
 #include <utils/Trace.h>
 
@@ -202,7 +203,7 @@
 void AidlProviderInfo::binderDied(void *cookie) {
     AidlProviderInfo *provider = reinterpret_cast<AidlProviderInfo *>(cookie);
     ALOGI("Camera provider '%s' has died; removing it", provider->mProviderInstance.c_str());
-    provider->mManager->removeProvider(provider->mProviderInstance);
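+    // Pass a copy: removeProvider() may destroy this ProviderInfo, which would otherwise leave
+    // the argument referencing freed memory.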
+    provider->mManager->removeProvider(std::string(provider->mProviderInstance));
 }
 
 status_t AidlProviderInfo::setUpVendorTags() {
@@ -320,7 +321,7 @@
     if (link != STATUS_OK) {
         ALOGW("%s: Unable to link to provider '%s' death notifications",
                 __FUNCTION__, mProviderName.c_str());
-        mManager->removeProvider(mProviderInstance);
+        mManager->removeProvider(std::string(mProviderInstance));
         return nullptr;
     }
 
@@ -618,6 +619,14 @@
         }
     }
 
+    if (flags::ae_priority()) {
+        res = addAePriorityModeTags();
+        if (OK != res) {
+            ALOGE("%s: Unable to add CONTROL_AE_AVAILABLE_PRIORITY_MODES tag: %s (%d)",
+                    __FUNCTION__, strerror(-res), res);
+        }
+    }
+
     camera_metadata_entry flashAvailable =
             mCameraCharacteristics.find(ANDROID_FLASH_INFO_AVAILABLE);
     if (flashAvailable.count == 1 &&
@@ -724,6 +733,10 @@
                 {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, ANDROID_CONTROL_AE_TARGET_FPS_RANGE});
     }
 
+    if (flags::camera_multi_client() && isAutomotiveDevice()) {
+        addSharedSessionConfigurationTags();
+    }
+
     if (!kEnableLazyHal) {
         // Save HAL reference indefinitely
         mSavedInterface = interface;
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
index edaee6e..27ae766 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
@@ -391,7 +391,7 @@
                   __FUNCTION__,
                   mProviderName.c_str(),
                   linked.description().c_str());
-              mManager->removeProvider(mProviderInstance);
+              mManager->removeProvider(std::string(mProviderInstance));
               return nullptr;
             } else if (!linked) {
               ALOGW("%s: Unable to link to provider '%s' death notifications",
@@ -451,7 +451,7 @@
         ALOGW("%s: Unexpected serviceDied cookie %" PRIu64 ", expected %" PRIu32,
                 __FUNCTION__, cookie, mId);
     }
-    mManager->removeProvider(mProviderInstance);
+    mManager->removeProvider(std::string(mProviderInstance));
 }
 
 std::unique_ptr<CameraProviderManager::ProviderInfo::DeviceInfo>
@@ -683,6 +683,14 @@
         }
     }
 
+    if (flags::ae_priority()) {
+        res = addAePriorityModeTags();
+        if (OK != res) {
+            ALOGE("%s: Unable to add CONTROL_AE_AVAILABLE_PRIORITY_MODES tag: %s (%d)",
+                    __FUNCTION__, strerror(-res), res);
+        }
+    }
+
     camera_metadata_entry flashAvailable =
             mCameraCharacteristics.find(ANDROID_FLASH_INFO_AVAILABLE);
     if (flashAvailable.count == 1 &&
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 5295442..9e89a19 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -58,6 +58,7 @@
 #include <android/hardware/camera/device/3.7/ICameraInjectionSession.h>
 #include <android/hardware/camera2/ICameraDeviceUser.h>
 #include <com_android_internal_camera_flags.h>
+#include <com_android_window_flags.h>
 
 #include "CameraService.h"
 #include "aidl/android/hardware/graphics/common/Dataspace.h"
@@ -83,6 +84,8 @@
 using namespace android::hardware::cameraservice::utils::conversion::aidl;
 
 namespace flags = com::android::internal::camera::flags;
+namespace wm_flags = com::android::window::flags;
+
 namespace android {
 
 Camera3Device::Camera3Device(std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
@@ -2885,7 +2888,7 @@
         bool hasAppCallback, nsecs_t minExpectedDuration, nsecs_t maxExpectedDuration,
         bool isFixedFps, const std::set<std::set<std::string>>& physicalCameraIds,
         bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto, bool autoframingAuto,
-        const std::set<std::string>& cameraIdsWithZoom,
+        const std::set<std::string>& cameraIdsWithZoom, bool useZoomRatio,
         const SurfaceMap& outputSurfaces, nsecs_t requestTimeNs) {
     ATRACE_CALL();
     std::lock_guard<std::mutex> l(mInFlightLock);
@@ -2894,7 +2897,7 @@
     res = mInFlightMap.add(frameNumber, InFlightRequest(numBuffers, resultExtras, hasInput,
             hasAppCallback, minExpectedDuration, maxExpectedDuration, isFixedFps, physicalCameraIds,
             isStillCapture, isZslCapture, rotateAndCropAuto, autoframingAuto, cameraIdsWithZoom,
-            requestTimeNs, outputSurfaces));
+            requestTimeNs, useZoomRatio, outputSurfaces));
     if (res < 0) return res;
 
     if (mInFlightMap.size() == 1) {
@@ -3793,16 +3796,19 @@
     return submitRequestSuccess;
 }
 
-status_t Camera3Device::removeFwkOnlyRegionKeys(CameraMetadata *request) {
-    static const std::array<uint32_t, 4> kFwkOnlyRegionKeys = {ANDROID_CONTROL_AF_REGIONS_SET,
-        ANDROID_CONTROL_AE_REGIONS_SET, ANDROID_CONTROL_AWB_REGIONS_SET,
-        ANDROID_SCALER_CROP_REGION_SET};
+status_t Camera3Device::removeFwkOnlyKeys(CameraMetadata *request) {
+    static const std::array<uint32_t, 5> kFwkOnlyKeys = {
+            ANDROID_CONTROL_AF_REGIONS_SET,
+            ANDROID_CONTROL_AE_REGIONS_SET,
+            ANDROID_CONTROL_AWB_REGIONS_SET,
+            ANDROID_SCALER_CROP_REGION_SET,
+            ANDROID_CONTROL_ZOOM_METHOD};
     if (request == nullptr) {
         ALOGE("%s request metadata nullptr", __FUNCTION__);
         return BAD_VALUE;
     }
     status_t res = OK;
-    for (const auto &key : kFwkOnlyRegionKeys) {
+    for (const auto &key : kFwkOnlyKeys) {
         if (request->exists(key)) {
             res = request->erase(key);
             if (res != OK) {
@@ -3881,7 +3887,7 @@
                             it != captureRequest->mSettingsList.end(); it++) {
                         if (parent->mUHRCropAndMeteringRegionMappers.find(it->cameraId) ==
                                 parent->mUHRCropAndMeteringRegionMappers.end()) {
-                            if (removeFwkOnlyRegionKeys(&(it->metadata)) != OK) {
+                            if (removeFwkOnlyKeys(&(it->metadata)) != OK) {
                                 SET_ERR("RequestThread: Unable to remove fwk-only keys from request"
                                         "%d: %s (%d)", halRequest->frame_number, strerror(-res),
                                         res);
@@ -3901,7 +3907,7 @@
                                 return INVALID_OPERATION;
                             }
                             captureRequest->mUHRCropAndMeteringRegionsUpdated = true;
-                            if (removeFwkOnlyRegionKeys(&(it->metadata)) != OK) {
+                            if (removeFwkOnlyKeys(&(it->metadata)) != OK) {
                                 SET_ERR("RequestThread: Unable to remove fwk-only keys from request"
                                         "%d: %s (%d)", halRequest->frame_number, strerror(-res),
                                         res);
@@ -4182,6 +4188,7 @@
         }
         bool isStillCapture = false;
         bool isZslCapture = false;
+        bool useZoomRatio = false;
         const camera_metadata_t* settings = halRequest->settings;
         bool shouldUnlockSettings = false;
         if (settings == nullptr) {
@@ -4201,6 +4208,14 @@
             if ((e.count > 0) && (e.data.u8[0] == ANDROID_CONTROL_ENABLE_ZSL_TRUE)) {
                 isZslCapture = true;
             }
+
+            if (flags::zoom_method()) {
+                e = camera_metadata_ro_entry_t();
+                find_camera_metadata_ro_entry(settings, ANDROID_CONTROL_ZOOM_METHOD, &e);
+                if ((e.count > 0) && (e.data.u8[0] == ANDROID_CONTROL_ZOOM_METHOD_ZOOM_RATIO)) {
+                    useZoomRatio = true;
+                }
+            }
         }
         bool passSurfaceMap =
                 mUseHalBufManager || containsHalBufferManagedStream;
@@ -4214,7 +4229,7 @@
                 expectedDurationInfo.isFixedFps,
                 requestedPhysicalCameras, isStillCapture, isZslCapture,
                 captureRequest->mRotateAndCropAuto, captureRequest->mAutoframingAuto,
-                mPrevCameraIdsWithZoom,
+                mPrevCameraIdsWithZoom, useZoomRatio,
                 passSurfaceMap ? uniqueSurfaceIdMap :
                                       SurfaceMap{}, captureRequest->mRequestTimeNs);
         ALOGVV("%s: registered in flight requestId = %" PRId32 ", frameNumber = %" PRId64
@@ -5811,7 +5826,13 @@
 status_t Camera3Device::deriveAndSetTransformLocked(
         Camera3OutputStreamInterface& stream, int mirrorMode, int surfaceId) {
     int transform = -1;
-    int res = CameraUtils::getRotationTransform(mDeviceInfo, mirrorMode, &transform);
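+    // Under desktop windowing camera compat, ROTATION_OVERRIDE_ROTATION_ONLY indicates that the
+    // inverse-display part of the transform should not be applied.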
+    bool enableTransformInverseDisplay = true;
+    using hardware::ICameraService::ROTATION_OVERRIDE_ROTATION_ONLY;
+    if (wm_flags::enable_camera_compat_for_desktop_windowing()) {
+        enableTransformInverseDisplay = (mRotationOverride != ROTATION_OVERRIDE_ROTATION_ONLY);
+    }
+    int res = CameraUtils::getRotationTransform(mDeviceInfo, mirrorMode,
+            enableTransformInverseDisplay, &transform);
     if (res != OK) {
         return res;
     }
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 397ec5c..5d3c010 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -195,6 +195,19 @@
 
     status_t deleteStream(int id) override;
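+    // Shared-session hooks: the base Camera3Device does not support shared streams, so the
+    // overrides below default to OK / INVALID_OPERATION; AidlCamera3SharedDevice provides the
+    // real implementations.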
 
+    virtual status_t beginConfigure() override {return OK;};
+
+    virtual status_t getSharedStreamId(const OutputConfiguration& /*config*/,
+            int* /*streamId*/) override {return INVALID_OPERATION;};
+
+    virtual status_t addSharedSurfaces(int /*streamId*/,
+            const std::vector<android::camera3::OutputStreamInfo>& /*outputInfo*/,
+            const std::vector<SurfaceHolder>& /*surfaces*/,
+            std::vector<int>* /*surfaceIds*/) override {return INVALID_OPERATION;};
+
+    virtual status_t removeSharedSurfaces(int /*streamId*/,
+            const std::vector<size_t>& /*surfaceIds*/) override {return INVALID_OPERATION;};
+
     status_t configureStreams(const CameraMetadata& sessionParams,
             int operatingMode =
             camera_stream_configuration_mode_t::CAMERA_STREAM_CONFIGURATION_NORMAL_MODE) override;
@@ -367,7 +380,7 @@
 
   protected:
     status_t disconnectImpl();
-    static status_t removeFwkOnlyRegionKeys(CameraMetadata *request);
+    static status_t removeFwkOnlyKeys(CameraMetadata *request);
 
     float getMaxPreviewFps(sp<camera3::Camera3OutputStreamInterface> stream);
 
@@ -1275,8 +1288,8 @@
             bool callback, nsecs_t minExpectedDuration, nsecs_t maxExpectedDuration,
             bool isFixedFps, const std::set<std::set<std::string>>& physicalCameraIds,
             bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto, bool autoframingAuto,
-            const std::set<std::string>& cameraIdsWithZoom, const SurfaceMap& outputSurfaces,
-            nsecs_t requestTimeNs);
+            const std::set<std::string>& cameraIdsWithZoom, bool useZoomRatio,
+            const SurfaceMap& outputSurfaces, nsecs_t requestTimeNs);
 
     /**
      * Tracking for idle detection
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index 62226e1..78f1698 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -304,7 +304,7 @@
         CameraMetadata &collectedPartialResult,
         uint32_t frameNumber,
         bool reprocess, bool zslStillCapture, bool rotateAndCropAuto,
-        const std::set<std::string>& cameraIdsWithZoom,
+        const std::set<std::string>& cameraIdsWithZoom, bool useZoomRatio,
         const std::vector<PhysicalCaptureResultInfo>& physicalMetadatas) {
     ATRACE_CALL();
     if (pendingMetadata.isEmpty())
@@ -385,8 +385,9 @@
     // Fix up result metadata to account for zoom ratio availabilities between
     // HAL and app.
     bool zoomRatioIs1 = cameraIdsWithZoom.find(states.cameraId) == cameraIdsWithZoom.end();
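+    // The app is treated as using zoom ratio when it either set a non-identity ZOOM_RATIO in
+    // the request or explicitly selected CONTROL_ZOOM_METHOD_ZOOM_RATIO.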
+    bool appUsesZoomRatio = !zoomRatioIs1 || useZoomRatio;
     res = states.zoomRatioMappers[states.cameraId].updateCaptureResult(
-            &captureResult.mMetadata, zoomRatioIs1);
+            &captureResult.mMetadata, appUsesZoomRatio);
     if (res != OK) {
         SET_ERR("Failed to update capture result zoom ratio metadata for frame %d: %s (%d)",
                 frameNumber, strerror(-res), res);
@@ -452,9 +453,10 @@
             }
         }
 
-        zoomRatioIs1 = cameraIdsWithZoom.find(cameraId) == cameraIdsWithZoom.end();
+        // Note: Physical camera continues to use SCALER_CROP_REGION to reflect
+        // zoom levels.
         res = states.zoomRatioMappers[cameraId].updateCaptureResult(
-                &physicalMetadata.mPhysicalCameraMetadata, zoomRatioIs1);
+                &physicalMetadata.mPhysicalCameraMetadata, /*appUsesZoomRatio*/ false);
         if (res != OK) {
             SET_ERR("Failed to update camera %s's physical zoom ratio metadata for "
                     "frame %d: %s(%d)", cameraId.c_str(), frameNumber, strerror(-res), res);
@@ -685,7 +687,8 @@
                         if (orientation.count > 0) {
                             int32_t transform;
                             ret = CameraUtils::getRotationTransform(deviceInfo->second,
-                                    OutputConfiguration::MIRROR_MODE_AUTO, &transform);
+                                    OutputConfiguration::MIRROR_MODE_AUTO,
+                                            /*transformInverseDisplay*/true, &transform);
                             if (ret == OK) {
                                 // It is possible for camera providers to return the capture
                                 // results after the processed frames. In such scenario, we will
@@ -828,7 +831,7 @@
                 sendCaptureResult(states, metadata, request.resultExtras,
                     collectedPartialResult, frameNumber,
                     hasInputBufferInRequest, request.zslCapture && request.stillCapture,
-                    request.rotateAndCropAuto, cameraIdsWithZoom,
+                    request.rotateAndCropAuto, cameraIdsWithZoom, request.useZoomRatio,
                     request.physicalMetadatas);
             }
         }
@@ -1097,7 +1100,8 @@
                     r.pendingMetadata, r.resultExtras,
                     r.collectedPartialResult, msg.frame_number,
                     r.hasInputBuffer, r.zslCapture && r.stillCapture,
-                    r.rotateAndCropAuto, cameraIdsWithZoom, r.physicalMetadatas);
+                    r.rotateAndCropAuto, cameraIdsWithZoom, r.useZoomRatio,
+                    r.physicalMetadatas);
             }
             collectAndRemovePendingOutputBuffers(
                     states.useHalBufManager, states.halBufManagedStreamIds,
diff --git a/services/camera/libcameraservice/device3/InFlightRequest.h b/services/camera/libcameraservice/device3/InFlightRequest.h
index 3626f20..62980c5 100644
--- a/services/camera/libcameraservice/device3/InFlightRequest.h
+++ b/services/camera/libcameraservice/device3/InFlightRequest.h
@@ -198,6 +198,9 @@
     // Current output transformation
     int32_t transform;
 
+    // Whether the app explicitly uses ZOOM_RATIO
+    bool useZoomRatio;
+
     static const nsecs_t kDefaultMinExpectedDuration = 33333333; // 33 ms
     static const nsecs_t kDefaultMaxExpectedDuration = 100000000; // 100 ms
 
@@ -220,14 +223,15 @@
             rotateAndCropAuto(false),
             autoframingAuto(false),
             requestTimeNs(0),
-            transform(-1) {
+            transform(-1),
+            useZoomRatio(false) {
     }
 
     InFlightRequest(int numBuffers, CaptureResultExtras extras, bool hasInput,
             bool hasAppCallback, nsecs_t minDuration, nsecs_t maxDuration, bool fixedFps,
             const std::set<std::set<std::string>>& physicalCameraIdSet, bool isStillCapture,
             bool isZslCapture, bool rotateAndCropAuto, bool autoframingAuto,
-            const std::set<std::string>& idsWithZoom, nsecs_t requestNs,
+            const std::set<std::string>& idsWithZoom, nsecs_t requestNs, bool useZoomRatio,
             const SurfaceMap& outSurfaces = SurfaceMap{}) :
             shutterTimestamp(0),
             sensorTimestamp(0),
@@ -250,7 +254,8 @@
             cameraIdsWithZoom(idsWithZoom),
             requestTimeNs(requestNs),
             outputSurfaces(outSurfaces),
-            transform(-1) {
+            transform(-1),
+            useZoomRatio(useZoomRatio) {
     }
 };
 
diff --git a/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
index 2016284..5260ad3 100644
--- a/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
+++ b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
@@ -19,6 +19,8 @@
 
 #include <algorithm>
 
+#include <com_android_internal_camera_flags.h>
+
 #include "device3/ZoomRatioMapper.h"
 #include "utils/SessionConfigurationUtilsHost.h"
 
@@ -42,13 +44,25 @@
 }
 
 status_t ZoomRatioMapper::initZoomRatioInTemplate(CameraMetadata *request) {
+    status_t res = OK;
+
+    if (flags::zoom_method()) {
+        uint8_t zoomMethod = ANDROID_CONTROL_ZOOM_METHOD_AUTO;
+        res = request->update(ANDROID_CONTROL_ZOOM_METHOD, &zoomMethod, 1);
+        if (res != OK) {
+            ALOGE("%s: Failed to update CONTROL_ZOOM_METHOD key: %s (%d)",
+                    __FUNCTION__, strerror(-res), res);
+            return res;
+        }
+    }
+
     camera_metadata_entry_t entry;
     entry = request->find(ANDROID_CONTROL_ZOOM_RATIO);
     float defaultZoomRatio = 1.0f;
     if (entry.count == 0) {
-        return request->update(ANDROID_CONTROL_ZOOM_RATIO, &defaultZoomRatio, 1);
+        res = request->update(ANDROID_CONTROL_ZOOM_RATIO, &defaultZoomRatio, 1);
     }
-    return OK;
+    return res;
 }
 
 status_t ZoomRatioMapper::overrideZoomRatioTags(
@@ -57,40 +71,69 @@
         return BAD_VALUE;
     }
 
+    bool halSupportZoomRatio = false;
     camera_metadata_entry_t entry;
     entry = deviceInfo->find(ANDROID_CONTROL_ZOOM_RATIO_RANGE);
     if (entry.count != 2 && entry.count != 0) return BAD_VALUE;
-
     // Hal has zoom ratio support
     if (entry.count == 2) {
-        *supportNativeZoomRatio = true;
-        return OK;
+        halSupportZoomRatio = true;
     }
 
-    // Hal has no zoom ratio support
-    *supportNativeZoomRatio = false;
-
-    entry = deviceInfo->find(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
-    if (entry.count != 1) {
-        ALOGI("%s: Camera device doesn't support SCALER_AVAILABLE_MAX_DIGITAL_ZOOM key!",
-                __FUNCTION__);
-        return OK;
-    }
-
-    float zoomRange[] = {1.0f, entry.data.f[0]};
-    status_t res = deviceInfo->update(ANDROID_CONTROL_ZOOM_RATIO_RANGE, zoomRange, 2);
-    if (res != OK) {
-        ALOGE("%s: Failed to update CONTROL_ZOOM_RATIO_RANGE key: %s (%d)",
-                __FUNCTION__, strerror(-res), res);
-        return res;
-    }
-
+    // Add ZOOM_METHOD request and result keys
     std::vector<int32_t> requestKeys;
     entry = deviceInfo->find(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS);
     if (entry.count > 0) {
         requestKeys.insert(requestKeys.end(), entry.data.i32, entry.data.i32 + entry.count);
     }
-    requestKeys.push_back(ANDROID_CONTROL_ZOOM_RATIO);
+    if (flags::zoom_method()) {
+        requestKeys.push_back(ANDROID_CONTROL_ZOOM_METHOD);
+    }
+    std::vector<int32_t> resultKeys;
+    entry = deviceInfo->find(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS);
+    if (entry.count > 0) {
+        resultKeys.insert(resultKeys.end(), entry.data.i32, entry.data.i32 + entry.count);
+    }
+    if (flags::zoom_method()) {
+        resultKeys.push_back(ANDROID_CONTROL_ZOOM_METHOD);
+    }
+
+    // Add additional keys if the HAL doesn't support ZOOM_RATIO
+    status_t res = OK;
+    if (!halSupportZoomRatio) {
+        entry = deviceInfo->find(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
+        if (entry.count != 1) {
+            ALOGI("%s: Camera device doesn't support SCALER_AVAILABLE_MAX_DIGITAL_ZOOM key!",
+                    __FUNCTION__);
+            return OK;
+        }
+        float zoomRange[] = {1.0f, entry.data.f[0]};
+        res = deviceInfo->update(ANDROID_CONTROL_ZOOM_RATIO_RANGE, zoomRange, 2);
+        if (res != OK) {
+            ALOGE("%s: Failed to update CONTROL_ZOOM_RATIO_RANGE key: %s (%d)",
+                    __FUNCTION__, strerror(-res), res);
+            return res;
+        }
+
+        requestKeys.push_back(ANDROID_CONTROL_ZOOM_RATIO);
+        resultKeys.push_back(ANDROID_CONTROL_ZOOM_RATIO);
+
+        std::vector<int32_t> charKeys;
+        entry = deviceInfo->find(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
+        if (entry.count > 0) {
+            charKeys.insert(charKeys.end(), entry.data.i32, entry.data.i32 + entry.count);
+        }
+        charKeys.push_back(ANDROID_CONTROL_ZOOM_RATIO_RANGE);
+        res = deviceInfo->update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
+                charKeys.data(), charKeys.size());
+        if (res != OK) {
+            ALOGE("%s: Failed to update REQUEST_AVAILABLE_CHARACTERISTICS_KEYS: %s (%d)",
+                    __FUNCTION__, strerror(-res), res);
+            return res;
+        }
+    }
+
+    // Update available request and result keys
     res = deviceInfo->update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
             requestKeys.data(), requestKeys.size());
     if (res != OK) {
@@ -98,13 +141,6 @@
                 __FUNCTION__, strerror(-res), res);
         return res;
     }
-
-    std::vector<int32_t> resultKeys;
-    entry = deviceInfo->find(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS);
-    if (entry.count > 0) {
-        resultKeys.insert(resultKeys.end(), entry.data.i32, entry.data.i32 + entry.count);
-    }
-    resultKeys.push_back(ANDROID_CONTROL_ZOOM_RATIO);
     res = deviceInfo->update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
             resultKeys.data(), resultKeys.size());
     if (res != OK) {
@@ -113,20 +149,7 @@
         return res;
     }
 
-    std::vector<int32_t> charKeys;
-    entry = deviceInfo->find(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
-    if (entry.count > 0) {
-        charKeys.insert(charKeys.end(), entry.data.i32, entry.data.i32 + entry.count);
-    }
-    charKeys.push_back(ANDROID_CONTROL_ZOOM_RATIO_RANGE);
-    res = deviceInfo->update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
-            charKeys.data(), charKeys.size());
-    if (res != OK) {
-        ALOGE("%s: Failed to update REQUEST_AVAILABLE_CHARACTERISTICS_KEYS: %s (%d)",
-                __FUNCTION__, strerror(-res), res);
-        return res;
-    }
-
+    *supportNativeZoomRatio = halSupportZoomRatio;
     return OK;
 }
 
@@ -223,7 +246,6 @@
     if (!mIsValid) return INVALID_OPERATION;
 
     status_t res = OK;
-    bool zoomRatioIs1 = true;
     camera_metadata_entry_t entry;
     int arrayHeight, arrayWidth = 0;
     res = getArrayDimensionsToBeUsed(request, &arrayWidth, &arrayHeight);
@@ -231,9 +253,14 @@
         return res;
     }
     entry = request->find(ANDROID_CONTROL_ZOOM_RATIO);
-    if (entry.count == 1 && entry.data.f[0] != 1.0f) {
-        zoomRatioIs1 = false;
-
+    bool zoomRatioIs1 = (entry.count == 0 || entry.data.f[0] == 1.0f);
+    bool useZoomRatio = !zoomRatioIs1;
+    if (flags::zoom_method()) {
+        entry = request->find(ANDROID_CONTROL_ZOOM_METHOD);
+        useZoomRatio |= (entry.count == 1
+                        && entry.data.u8[0] == ANDROID_CONTROL_ZOOM_METHOD_ZOOM_RATIO);
+    }
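+    // The request is treated as zoom-ratio based if the app either set a non-identity
+    // ZOOM_RATIO or explicitly selected CONTROL_ZOOM_METHOD_ZOOM_RATIO.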
+    if (useZoomRatio) {
         // If cropRegion is windowboxing, override it with activeArray
         camera_metadata_entry_t cropRegionEntry = request->find(ANDROID_SCALER_CROP_REGION);
         if (cropRegionEntry.count == 4) {
@@ -248,9 +275,9 @@
         }
     }
 
-    if (mHalSupportsZoomRatio && zoomRatioIs1) {
+    if (mHalSupportsZoomRatio && !useZoomRatio) {
         res = separateZoomFromCropLocked(request, false/*isResult*/, arrayWidth, arrayHeight);
-    } else if (!mHalSupportsZoomRatio && !zoomRatioIs1) {
+    } else if (!mHalSupportsZoomRatio && useZoomRatio) {
         res = combineZoomAndCropLocked(request, false/*isResult*/, arrayWidth, arrayHeight);
     }
 
@@ -263,7 +290,7 @@
     return res;
 }
 
-status_t ZoomRatioMapper::updateCaptureResult(CameraMetadata* result, bool requestedZoomRatioIs1) {
+status_t ZoomRatioMapper::updateCaptureResult(CameraMetadata* result, bool useZoomRatio) {
     if (!mIsValid) return INVALID_OPERATION;
 
     status_t res = OK;
@@ -273,9 +300,9 @@
     if (res != OK) {
         return res;
     }
-    if (mHalSupportsZoomRatio && requestedZoomRatioIs1) {
+    if (mHalSupportsZoomRatio && !useZoomRatio) {
         res = combineZoomAndCropLocked(result, true/*isResult*/, arrayWidth, arrayHeight);
-    } else if (!mHalSupportsZoomRatio && !requestedZoomRatioIs1) {
+    } else if (!mHalSupportsZoomRatio && useZoomRatio) {
         res = separateZoomFromCropLocked(result, true/*isResult*/, arrayWidth, arrayHeight);
     } else {
         camera_metadata_entry_t entry = result->find(ANDROID_CONTROL_ZOOM_RATIO);
diff --git a/services/camera/libcameraservice/device3/ZoomRatioMapper.h b/services/camera/libcameraservice/device3/ZoomRatioMapper.h
index 1aa8e78..0ac2e09 100644
--- a/services/camera/libcameraservice/device3/ZoomRatioMapper.h
+++ b/services/camera/libcameraservice/device3/ZoomRatioMapper.h
@@ -64,7 +64,7 @@
     /**
      * Update capture result to handle both cropRegion and zoomRatio.
      */
-    status_t updateCaptureResult(CameraMetadata *request, bool requestedZoomRatioIs1);
+    status_t updateCaptureResult(CameraMetadata *result, bool useZoomRatio);
 
   public: // Visible for testing. Do not use concurrently.
     void scaleCoordinates(int32_t* coordPairs, int coordCount,
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
index abc3f9c..474dfc7 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
@@ -72,7 +72,8 @@
     virtual status_t switchToOffline(const std::vector<int32_t>& /*streamsToKeep*/,
             /*out*/ sp<CameraOfflineSessionBase>* /*session*/) override;
 
-    status_t initialize(sp<CameraProviderManager> manager, const std::string& monitorTags) override;
+    virtual status_t initialize(sp<CameraProviderManager> manager, const std::string& monitorTags)
+            override;
 
     class AidlHalInterface : public Camera3Device::HalInterface {
      public:
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3SharedDevice.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3SharedDevice.cpp
new file mode 100644
index 0000000..5bd8d8c
--- /dev/null
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3SharedDevice.cpp
@@ -0,0 +1,306 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#define LOG_TAG "AidlCamera3-SharedDevice"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+//#define LOG_NNDEBUG 0  // Per-frame verbose logging
+
+#ifdef LOG_NNDEBUG
+#define ALOGVV(...) ALOGV(__VA_ARGS__)
+#else
+#define ALOGVV(...) ((void)0)
+#endif
+
+// Convenience macro for transient errors
+#define CLOGE(fmt, ...) ALOGE("Camera %s: %s: " fmt, mId.c_str(), __FUNCTION__, \
+            ##__VA_ARGS__)
+
+#define CLOGW(fmt, ...) ALOGW("Camera %s: %s: " fmt, mId.c_str(), __FUNCTION__, \
+            ##__VA_ARGS__)
+
+// Convenience macros for transitioning to the error state
+#define SET_ERR(fmt, ...) setErrorState(   \
+    "%s: " fmt, __FUNCTION__,              \
+    ##__VA_ARGS__)
+#define SET_ERR_L(fmt, ...) setErrorStateLocked( \
+    "%s: " fmt, __FUNCTION__,                    \
+    ##__VA_ARGS__)
+#define DECODE_VALUE(decoder, type, var) \
+  do { \
+    if (decoder.get##type(var) != OK) { \
+      return NOT_ENOUGH_DATA; \
+    } \
+  } while (0)
+
+#include <utils/Log.h>
+#include <utils/Trace.h>
+#include <cstring>
+#include "../../common/aidl/AidlProviderInfo.h"
+#include "utils/SessionConfigurationUtils.h"
+#include "AidlCamera3SharedDevice.h"
+
+using namespace android::camera3;
+using namespace android::camera3::SessionConfigurationUtils;
+
+namespace android {
+
+// The metadata key android.info.availableSharedOutputConfigurations holds a list of shared
+// output configurations. Each configuration consists of at least 11 int64 entries, followed
+// by the characters of the physical camera id if one is present.
+// See android.info.availableSharedOutputConfigurations for details.
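+// The per-configuration entry order is: surfaceType, width, height, format, mirrorMode,
+// useReadoutTimestamp, timestampBase, dataspace, usage, streamUseCase and physicalCameraIdLen.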
+static const int SHARED_OUTPUT_CONFIG_NUM_OF_ENTRIES = 11;
+std::map<std::string, sp<AidlCamera3SharedDevice>> AidlCamera3SharedDevice::sSharedDevices;
+std::map<std::string, std::unordered_set<int>> AidlCamera3SharedDevice::sClientsUid;
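+// One AidlCamera3SharedDevice instance is shared by all clients of a given camera id; the
+// device is created when the first client connects, and each client's calling UID is tracked
+// in sClientsUid.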
+sp<AidlCamera3SharedDevice> AidlCamera3SharedDevice::getInstance(
+        std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+        std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
+        const std::string& id, bool overrideForPerfClass, int rotationOverride,
+        bool legacyClient) {
+    if (sClientsUid[id].empty()) {
+        AidlCamera3SharedDevice* sharedDevice = new AidlCamera3SharedDevice(
+                cameraServiceProxyWrapper, attributionAndPermissionUtils, id, overrideForPerfClass,
+                rotationOverride, legacyClient);
+        sSharedDevices[id] = sharedDevice;
+    }
+    if (attributionAndPermissionUtils != nullptr) {
+        sClientsUid[id].insert(attributionAndPermissionUtils->getCallingUid());
+    }
+    return sSharedDevices[id];
+}
+
+status_t AidlCamera3SharedDevice::initialize(sp<CameraProviderManager> manager,
+        const std::string& monitorTags) {
+    ATRACE_CALL();
+    status_t res = OK;
+
+    if (mStatus == STATUS_UNINITIALIZED) {
+        res = AidlCamera3Device::initialize(manager, monitorTags);
+        if (res == OK) {
+            mSharedOutputConfigurations = getSharedOutputConfiguration();
+        }
+    }
+    return res;
+}
+
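+// Removes the client UID from the shared device's client list and fully disconnects the
+// underlying device only once the last client has gone away.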
+status_t AidlCamera3SharedDevice::disconnectClient(int clientUid) {
+    if (sClientsUid[mId].erase(clientUid) == 0) {
+        ALOGW("%s: Camera %s: Client %d is not connected to shared device", __FUNCTION__,
+                mId.c_str(), clientUid);
+    }
+    if (sClientsUid[mId].empty()) {
+        return Camera3Device::disconnect();
+    }
+    return OK;
+}
+
+std::vector<OutputConfiguration> AidlCamera3SharedDevice::getSharedOutputConfiguration() {
+    std::vector<OutputConfiguration> sharedConfigs;
+    uint8_t colorspace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
+    camera_metadata_entry sharedSessionColorSpace = mDeviceInfo.find(
+            ANDROID_SHARED_SESSION_COLOR_SPACE);
+    if (sharedSessionColorSpace.count > 0) {
+        colorspace = *sharedSessionColorSpace.data.u8;
+    }
+    camera_metadata_entry sharedSessionConfigs = mDeviceInfo.find(
+            ANDROID_SHARED_SESSION_OUTPUT_CONFIGURATIONS);
+    if (sharedSessionConfigs.count > 0) {
+        int numOfEntries = sharedSessionConfigs.count;
+        int i = 0;
+        uint8_t physicalCameraIdLen;
+        int surfaceType, width, height, format, mirrorMode, timestampBase, dataspace;
+        int64_t usage, streamUseCase;
+        bool isReadOutTimestampEnabled;
+        while (numOfEntries >= SHARED_OUTPUT_CONFIG_NUM_OF_ENTRIES) {
+            surfaceType = (int)sharedSessionConfigs.data.i64[i];
+            width = (int)sharedSessionConfigs.data.i64[i+1];
+            height = (int)sharedSessionConfigs.data.i64[i+2];
+            format = (int)sharedSessionConfigs.data.i64[i+3];
+            mirrorMode = (int)sharedSessionConfigs.data.i64[i+4];
+            isReadOutTimestampEnabled = (sharedSessionConfigs.data.i64[i+5] != 0);
+            timestampBase = (int)sharedSessionConfigs.data.i64[i+6];
+            dataspace = (int)sharedSessionConfigs.data.i64[i+7];
+            usage = sharedSessionConfigs.data.i64[i+8];
+            streamUseCase = sharedSessionConfigs.data.i64[i+9];
+            physicalCameraIdLen = sharedSessionConfigs.data.i64[i+10];
+            numOfEntries -= SHARED_OUTPUT_CONFIG_NUM_OF_ENTRIES;
+            i += SHARED_OUTPUT_CONFIG_NUM_OF_ENTRIES;
+            if (numOfEntries < physicalCameraIdLen) {
+                ALOGE("%s: Camera %s: Number of remaining data (%d entries) in shared configuration"
+                        " is less than physical camera id length %d. Malformed metadata"
+                        " android.info.availableSharedOutputConfigurations.", __FUNCTION__,
+                        mId.c_str(), numOfEntries, physicalCameraIdLen);
+                break;
+            }
+            std::string physicalCameraId;
+            int64_t asciiValue;
+            for (int j = 0; j < physicalCameraIdLen; j++) {
+                asciiValue = sharedSessionConfigs.data.i64[i+j];
+                if (asciiValue == 0) { // Check for null terminator
+                    break;
+                }
+                physicalCameraId += static_cast<char>(asciiValue);
+            }
+            sharedConfigs.emplace_back(surfaceType, width, height, format, colorspace, mirrorMode,
+                    isReadOutTimestampEnabled, timestampBase, dataspace, usage, streamUseCase,
+                    physicalCameraId);
+            i += physicalCameraIdLen;
+            numOfEntries -= physicalCameraIdLen;
+        }
+        if (numOfEntries != 0) {
+            ALOGE("%s: Camera %s: there are still %d entries left in shared output configuration."
+                    " Malformed metadata android.info.availableSharedOutputConfigurations.",
+                    __FUNCTION__, mId.c_str(), numOfEntries);
+        }
+    }
+    return sharedConfigs;
+}
+
+status_t AidlCamera3SharedDevice::beginConfigure() {
+    status_t res;
+    int i = 0;
+
+    if (mStatus != STATUS_UNCONFIGURED) {
+        return OK;
+    }
+
+    // Size the per-stream bookkeeping vectors before they are indexed in the loop below.
+    mSharedSurfaces.resize(mSharedOutputConfigurations.size());
+    mSharedSurfaceIds.resize(mSharedOutputConfigurations.size());
+    mSharedStreams.resize(mSharedOutputConfigurations.size());
+
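+    // Create one Camera3SharedOutputStream (backed by an internal BufferQueue producer/consumer
+    // pair) for every pre-defined shared output configuration, then configure all of them in
+    // CAMERA_STREAM_CONFIGURATION_SHARED_MODE.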
+    for (const auto& config : mSharedOutputConfigurations) {
+        std::vector<SurfaceHolder> consumers;
+        android_dataspace dataSpace = HAL_DATASPACE_UNKNOWN;
+        if (config.getColorSpace()
+                != ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED
+                && config.getFormat() != HAL_PIXEL_FORMAT_BLOB) {
+            if (!dataSpaceFromColorSpace(&dataSpace, config.getColorSpace())) {
+                std::string msg = fmt::sprintf("Camera %s: color space %d not supported, "
+                    " failed to convert to data space", mId.c_str(), config.getColorSpace());
+                ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+                return INVALID_OPERATION;
+            }
+        }
+        std::unordered_set<int32_t> overriddenSensorPixelModes;
+        if (checkAndOverrideSensorPixelModesUsed(config.getSensorPixelModesUsed(),
+                config.getFormat(), config.getWidth(), config.getHeight(),
+                mDeviceInfo, &overriddenSensorPixelModes) != OK) {
+            std::string msg = fmt::sprintf("Camera %s: sensor pixel modes for stream with "
+                        "format %#x are not valid",mId.c_str(), config.getFormat());
+            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+            return INVALID_OPERATION;
+        }
+        sp<IGraphicBufferProducer> producer;
+        sp<IGraphicBufferConsumer> consumer;
+        BufferQueue::createBufferQueue(&producer, &consumer);
+        mSharedSurfaces[i] = new Surface(producer);
+        consumers.push_back({mSharedSurfaces[i], config.getMirrorMode()});
+        mSharedStreams[i] = new Camera3SharedOutputStream(mNextStreamId, consumers,
+                config.getWidth(),config.getHeight(), config.getFormat(), config.getUsage(),
+                dataSpace, static_cast<camera_stream_rotation_t>(config.getRotation()),
+                mTimestampOffset, config.getPhysicalCameraId(), overriddenSensorPixelModes,
+                getTransportType(), config.getSurfaceSetID(), mUseHalBufManager,
+                config.getDynamicRangeProfile(), config.getStreamUseCase(),
+                mDeviceTimeBaseIsRealtime, config.getTimestampBase(),
+                config.getColorSpace(), config.useReadoutTimestamp());
+        int id = mSharedStreams[i]->getSurfaceId(consumers[0].mSurface);
+        if (id < 0) {
+            SET_ERR_L("Invalid surface id");
+            return BAD_VALUE;
+        }
+        mSharedSurfaceIds[i] = id;
+        mSharedStreams[i]->setStatusTracker(mStatusTracker);
+        mSharedStreams[i]->setBufferManager(mBufferManager);
+        mSharedStreams[i]->setImageDumpMask(mImageDumpMask);
+        res = mOutputStreams.add(mNextStreamId, mSharedStreams[i]);
+        if (res < 0) {
+            SET_ERR_L("Can't add new stream to set: %s (%d)", strerror(-res), res);
+            return res;
+        }
+        mSessionStatsBuilder.addStream(mNextStreamId);
+        mConfiguredOutputs.add(mNextStreamId++, config);
+        i++;
+    }
+    CameraMetadata sessionParams;
+    res = configureStreams(sessionParams, CAMERA_STREAM_CONFIGURATION_SHARED_MODE);
+    if (res != OK) {
+        std::string msg = fmt::sprintf("Camera %s: Error configuring streams: %s (%d)",
+                mId.c_str(), strerror(-res), res);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return res;
+    }
+    return OK;
+}
+
+status_t AidlCamera3SharedDevice::getSharedStreamId(const OutputConfiguration &config,
+        int *streamId) {
+    if (streamId == nullptr) {
+        return BAD_VALUE;
+    }
+    for (size_t i = 0; i < mConfiguredOutputs.size(); i++) {
+        OutputConfiguration sharedConfig = mConfiguredOutputs.valueAt(i);
+        if (config.sharedConfigEqual(sharedConfig)) {
+            *streamId = mConfiguredOutputs.keyAt(i);
+            return OK;
+        }
+    }
+    return INVALID_OPERATION;
+}
+
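+// Attaches additional surfaces to an already configured shared stream via updateStream() and
+// reports the surface ids that were assigned to them.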
+status_t AidlCamera3SharedDevice::addSharedSurfaces(int streamId,
+        const std::vector<android::camera3::OutputStreamInfo> &outputInfo,
+        const std::vector<SurfaceHolder> &surfaces,  std::vector<int> *surfaceIds) {
+    KeyedVector<sp<Surface>, size_t> outputMap;
+    std::vector<size_t> removedSurfaceIds;
+    status_t res;
+    sp<Camera3OutputStreamInterface> stream = mOutputStreams.get(streamId);
+    if (stream == nullptr) {
+        CLOGE("Stream %d is unknown", streamId);
+        return BAD_VALUE;
+    }
+
+    res = updateStream(streamId, surfaces, outputInfo, removedSurfaceIds, &outputMap);
+    if (res != OK) {
+        CLOGE("Stream %d failed to update stream (error %d %s) ",
+              streamId, res, strerror(-res));
+        return res;
+    }
+
+    for (size_t i = 0; i < outputMap.size(); i++) {
+        if (surfaceIds != nullptr) {
+            surfaceIds->push_back(outputMap.valueAt(i));
+        }
+    }
+    return OK;
+}
+
+status_t AidlCamera3SharedDevice::removeSharedSurfaces(int streamId,
+        const std::vector<size_t> &removedSurfaceIds) {
+    KeyedVector<sp<Surface>, size_t> outputMap;
+    std::vector<SurfaceHolder> surfaces;
+    std::vector<OutputStreamInfo> outputInfo;
+    status_t res;
+    sp<Camera3OutputStreamInterface> stream = mOutputStreams.get(streamId);
+    if (stream == nullptr) {
+        CLOGE("Stream %d is unknown", streamId);
+        return BAD_VALUE;
+    }
+
+    res = updateStream(streamId, surfaces, outputInfo, removedSurfaceIds, &outputMap);
+    if (res != OK) {
+        CLOGE("Stream %d failed to update stream (error %d %s) ",
+              streamId, res, strerror(-res));
+        return res;
+    }
+    return OK;
+}
+} // namespace android
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3SharedDevice.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3SharedDevice.h
new file mode 100644
index 0000000..b2ee2d6
--- /dev/null
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3SharedDevice.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_AIDLCAMERA3SHAREDDEVICE_H
+#define ANDROID_SERVERS_AIDLCAMERA3SHAREDDEVICE_H
+
+#include <camera/camera2/OutputConfiguration.h>
+#include "../Camera3SharedOutputStream.h"
+#include "AidlCamera3Device.h"
+namespace android {
+
+/**
+ * Shared CameraDevice for AIDL HAL devices.
+ */
+using ::android::camera3::Camera3SharedOutputStream;
+class AidlCamera3SharedDevice :
+        public AidlCamera3Device {
+  public:
+    static sp<AidlCamera3SharedDevice> getInstance(
+            std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+            std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
+            const std::string& id, bool overrideForPerfClass, int rotationOverride,
+            bool legacyClient = false);
+    status_t initialize(sp<CameraProviderManager> manager,
+            const std::string& monitorTags) override;
+    status_t disconnectClient(int clientUid) override;
+    status_t beginConfigure() override;
+    status_t getSharedStreamId(const OutputConfiguration &config, int *streamId) override;
+    status_t addSharedSurfaces(int streamId,
+            const std::vector<android::camera3::OutputStreamInfo> &outputInfo,
+            const std::vector<SurfaceHolder>& surfaces,
+            std::vector<int> *surfaceIds = nullptr) override;
+    status_t removeSharedSurfaces(int streamId,
+            const std::vector<size_t> &surfaceIds) override;
+  private:
+    static std::map<std::string, sp<AidlCamera3SharedDevice>> sSharedDevices;
+    static std::map<std::string, std::unordered_set<int>> sClientsUid;
+    AidlCamera3SharedDevice(
+            std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+            std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
+            const std::string& id, bool overrideForPerfClass, int rotationOverride,
+            bool legacyClient)
+        : AidlCamera3Device(cameraServiceProxyWrapper, attributionAndPermissionUtils, id,
+                  overrideForPerfClass, rotationOverride, legacyClient) {}
+    std::vector<OutputConfiguration> getSharedOutputConfiguration();
+    std::vector<OutputConfiguration> mSharedOutputConfigurations;
+    std::vector<int> mSharedSurfaceIds;
+    std::vector<sp<Surface>> mSharedSurfaces;
+    std::vector<sp<Camera3SharedOutputStream>> mSharedStreams;
+    KeyedVector<int32_t, OutputConfiguration> mConfiguredOutputs;
+}; // class AidlCamera3SharedDevice
+}; // namespace android
+#endif
diff --git a/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.h b/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.h
index 152002b..98a0dbb 100644
--- a/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.h
+++ b/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.h
@@ -86,6 +86,10 @@
         mCaptureResultMetadataQueue = metadataQueue;
     }
 
+    virtual binder::Status onClientSharedAccessPriorityChanged(bool /*primaryClient*/) {
+        return binder::Status::ok();
+    }
+
  private:
     // Wrapper struct so that parameters to onResultReceived callback may be
     // sent through an AMessage.
diff --git a/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h b/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h
index 78fca4e..b31ccc6 100644
--- a/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h
+++ b/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h
@@ -72,6 +72,10 @@
         // empty implementation
         return binder::Status::ok();
     }
+    virtual binder::Status onCameraOpenedInSharedMode(const std::string& /*cameraId*/,
+            const std::string& /*clientPackageId*/, int32_t /*deviceId*/, bool /*primaryClient*/) {
+        return binder::Status::ok();
+    }
 };
 
 } // implementation
diff --git a/services/camera/libcameraservice/hidl/HidlCameraService.cpp b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
index 59e892f..9d140f2 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraService.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
@@ -135,7 +135,7 @@
     binder::Status serviceRet = mAidlICameraService->connectDevice(
             callbacks, cameraId, 0/*oomScoreOffset*/,
             /*targetSdkVersion*/__ANDROID_API_FUTURE__, ROTATION_OVERRIDE_NONE,
-            clientAttribution, /*devicePolicy*/0, /*out*/&deviceRemote);
+            clientAttribution, /*devicePolicy*/0, /*sharedMode*/false, /*out*/&deviceRemote);
     HStatus status = HStatus::NO_ERROR;
     if (!serviceRet.isOk()) {
         ALOGE("%s: Unable to connect to camera device", __FUNCTION__);
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
index 86e2c70..6c98837 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
@@ -749,6 +749,13 @@
         // No op
         return binder::Status::ok();
     }
+
+    virtual binder::Status onCameraOpenedInSharedMode(const std::string& /*cameraId*/,
+            const std::string& /*clientPackageName*/, int32_t /*deviceId*/,
+            bool /*isPrimaryClient*/) {
+        // No op
+        return binder::Status::ok();
+    }
 };
 
 class TestCameraDeviceCallbacks : public hardware::camera2::BnCameraDeviceCallbacks {
@@ -789,6 +796,11 @@
     virtual binder::Status onRequestQueueEmpty() {
         return binder::Status::ok();
     }
+
+    virtual binder::Status onClientSharedAccessPriorityChanged(bool /*isPrimaryClient*/) {
+        return binder::Status::ok();
+    }
+
 };
 
 class Camera2Fuzzer {
@@ -817,7 +829,7 @@
         mCameraService->connectDevice(callbacks, s.cameraId,
                 0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
                 ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT,
-                clientAttribution, /*devicePolicy*/0, &device);
+                clientAttribution, /*devicePolicy*/0, /*sharedMode*/false, &device);
         if (device == nullptr) {
             continue;
         }
diff --git a/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
index 50aeaca..ff58c4a 100644
--- a/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
@@ -77,6 +77,13 @@
         // No op
         return binder::Status::ok();
     }
+
+    virtual binder::Status onCameraOpenedInSharedMode(const std::string& /*cameraId*/,
+            const std::string& /*clientPackageName*/, int32_t /*deviceId*/,
+            bool /*isPrimaryClient*/) {
+        // No op
+        return binder::Status::ok();
+    }
 };
 
 // Empty device callback.
@@ -118,6 +125,10 @@
     virtual binder::Status onRequestQueueEmpty() {
         return binder::Status::ok();
     }
+
+    virtual binder::Status onClientSharedAccessPriorityChanged(bool /*isPrimaryClient*/) {
+        return binder::Status::ok();
+    }
 };
 
 // Override isCameraDisabled from the CameraServiceProxy with a flag.
@@ -242,7 +253,7 @@
                 sCameraService->connectDevice(callbacks, s.cameraId,
                 0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
                 hardware::ICameraService::ROTATION_OVERRIDE_NONE,
-                clientAttribution, /*devicePolicy*/0, &device);
+                clientAttribution, /*devicePolicy*/0, /*sharedMode*/false, &device);
         AutoDisconnectDevice autoDisconnect(device);
         ASSERT_TRUE(!status.isOk()) << "connectDevice returned OK status";
         ASSERT_EQ(status.serviceSpecificErrorCode(), hardware::ICameraService::ERROR_DISABLED)
@@ -257,7 +268,7 @@
                 sCameraService->connectDevice(callbacks, s.cameraId,
                 0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
                 hardware::ICameraService::ROTATION_OVERRIDE_NONE,
-                clientAttribution, /*devicePolicy*/0, &device);
+                clientAttribution, /*devicePolicy*/0, /*sharedMode*/false, &device);
         AutoDisconnectDevice autoDisconnect(device);
         ASSERT_TRUE(status.isOk());
     }
@@ -281,7 +292,7 @@
                 sCameraService->connectDevice(callbacks, s.cameraId,
                 0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
                 hardware::ICameraService::ROTATION_OVERRIDE_NONE,
-                clientAttribution, /*devicePolicy*/0, &deviceA);
+                clientAttribution, /*devicePolicy*/0, /*sharedMode*/false, &deviceA);
         AutoDisconnectDevice autoDisconnectA(deviceA);
         ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
                 " service specific error code " << status.serviceSpecificErrorCode();
@@ -289,7 +300,7 @@
                 sCameraService->connectDevice(callbacks, s.cameraId,
                 0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
                 hardware::ICameraService::ROTATION_OVERRIDE_NONE,
-                clientAttribution, /*devicePolicy*/0, &deviceB);
+                clientAttribution, /*devicePolicy*/0, /*sharedMode*/false, &deviceB);
         AutoDisconnectDevice autoDisconnectB(deviceB);
         ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
                 " service specific error code " << status.serviceSpecificErrorCode();
@@ -315,7 +326,7 @@
                 sCameraService->connectDevice(callbacks, s.cameraId,
                 0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
                 hardware::ICameraService::ROTATION_OVERRIDE_NONE,
-                clientAttribution, /*devicePolicy*/0, &deviceA);
+                clientAttribution, /*devicePolicy*/0, /*sharedMode*/false, &deviceA);
         AutoDisconnectDevice autoDisconnectA(deviceA);
         ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
                 " service specific error code " << status.serviceSpecificErrorCode();
@@ -323,7 +334,7 @@
                 sCameraService->connectDevice(callbacks, s.cameraId,
                 1/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
                 hardware::ICameraService::ROTATION_OVERRIDE_NONE,
-                clientAttribution, /*devicePolicy*/0, &deviceB);
+                clientAttribution, /*devicePolicy*/0, /*sharedMode*/false, &deviceB);
         AutoDisconnectDevice autoDisconnectB(deviceB);
         ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
                 " service specific error code " << status.serviceSpecificErrorCode();
diff --git a/services/camera/libcameraservice/tests/ZoomRatioTest.cpp b/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
index badd47a..a531e10 100644
--- a/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
+++ b/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
@@ -296,7 +296,7 @@
     }
 
     metadata.update(ANDROID_SCALER_CROP_REGION, test2xCropRegion[index], 4);
-    res = mapper.updateCaptureResult(&metadata, true/*requestedZoomRatioIs1*/);
+    res = mapper.updateCaptureResult(&metadata, false/*useZoomRatio*/);
     ASSERT_EQ(res, OK);
     entry = metadata.find(ANDROID_SCALER_CROP_REGION);
     ASSERT_EQ(entry.count, 4U);
@@ -340,7 +340,7 @@
     entry = metadata.find(ANDROID_CONTROL_ZOOM_RATIO);
     EXPECT_NEAR(entry.data.f[0], 2.0f, kMaxAllowedRatioError);
 
-    res = mapper.updateCaptureResult(&metadata, true/*requestedZoomRatioIs1*/);
+    res = mapper.updateCaptureResult(&metadata, false/*useZoomRatio*/);
     ASSERT_EQ(res, OK);
     entry = metadata.find(ANDROID_CONTROL_ZOOM_RATIO);
     EXPECT_NEAR(entry.data.f[0], 1.0f, kMaxAllowedRatioError);
@@ -364,7 +364,7 @@
     entry = metadata.find(ANDROID_CONTROL_ZOOM_RATIO);
     EXPECT_NEAR(entry.data.f[0], 1.0f, kMaxAllowedRatioError);
 
-    res = mapper.updateCaptureResult(&metadata, true/*requestedZoomRatioIs1*/);
+    res = mapper.updateCaptureResult(&metadata, false/*useZoomRatio*/);
     ASSERT_EQ(res, OK);
     entry = metadata.find(ANDROID_SCALER_CROP_REGION);
     ASSERT_EQ(entry.count, 4U);
@@ -452,7 +452,7 @@
     entry = metadata.find(ANDROID_CONTROL_ZOOM_RATIO);
     ASSERT_EQ(entry.data.f[0], zoomRatio);
 
-    res = mapper.updateCaptureResult(&metadata, false/*requestedZoomRatioIs1*/);
+    res = mapper.updateCaptureResult(&metadata, true/*useZoomRatio*/);
     ASSERT_EQ(res, OK);
     entry = metadata.find(ANDROID_SCALER_CROP_REGION);
     ASSERT_EQ(entry.count, 4U);
diff --git a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp
index 4b63704..80af140 100644
--- a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp
+++ b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp
@@ -138,7 +138,7 @@
         int32_t attributedOpCode, bool forDataDelivery, bool startDataDelivery,
         bool checkAutomotive) {
     AttributionSourceState clientAttribution = attributionSource;
-    if (!flags::check_full_attribution_source_chain() && !clientAttribution.next.empty()) {
+    if (!flags::data_delivery_permission_checks() && !clientAttribution.next.empty()) {
         clientAttribution.next.clear();
     }
 
@@ -408,7 +408,7 @@
         clientUid = callingUid;
     } else {
         validUid = isTrustedCallingUid(callingUid);
-        if (flags::use_context_attribution_source()) {
+        if (flags::data_delivery_permission_checks()) {
             validUid = validUid || (clientUid == callingUid);
         }
     }
@@ -426,7 +426,7 @@
         clientPid = callingPid;
     } else {
         validPid = isTrustedCallingUid(callingUid);
-        if (flags::use_context_attribution_source()) {
+        if (flags::data_delivery_permission_checks()) {
             validPid = validPid || (clientPid == callingPid);
         }
     }
diff --git a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
index 8b2804d..1c5d6da 100644
--- a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
+++ b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
@@ -92,8 +92,8 @@
     virtual void restoreCallingIdentity(int64_t token);
 
     /**
-     * If flag::use_context_attribution_source() is enabled, check the calling attribution source
-     * and resolve its package name, or fill in the pid/uid/package name if necessary.
+     * If flags::data_delivery_permission_checks() is enabled, check the calling attribution
+     * source and resolve its package name, or fill in the pid/uid/package name if necessary.
      *
      * @param resolvedAttributionSource The resolved attribution source.
      * @param methodName The name of the method calling this function (for logging only).
diff --git a/services/camera/libcameraservice/utils/ClientManager.h b/services/camera/libcameraservice/utils/ClientManager.h
index 074c84d..b2b8685 100644
--- a/services/camera/libcameraservice/utils/ClientManager.h
+++ b/services/camera/libcameraservice/utils/ClientManager.h
@@ -20,6 +20,7 @@
 #include <utils/Condition.h>
 #include <utils/Mutex.h>
 #include <utils/Timers.h>
+#include <utils/Log.h>
 
 #include <algorithm>
 #include <utility>
@@ -27,6 +28,9 @@
 #include <set>
 #include <map>
 #include <memory>
+#include <com_android_internal_camera_flags.h>
+
+namespace flags = com::android::internal::camera::flags;
 
 namespace android {
 namespace resource_policy {
@@ -142,10 +146,10 @@
 public:
     ClientDescriptor(const KEY& key, const VALUE& value, int32_t cost,
             const std::set<KEY>& conflictingKeys, int32_t score, int32_t ownerId, int32_t state,
-            bool isVendorClient, int32_t oomScoreOffset);
+            bool isVendorClient, int32_t oomScoreOffset, bool sharedMode = false);
     ClientDescriptor(KEY&& key, VALUE&& value, int32_t cost, std::set<KEY>&& conflictingKeys,
             int32_t score, int32_t ownerId, int32_t state, bool isVendorClient,
-            int32_t oomScoreOffset);
+            int32_t oomScoreOffset, bool sharedMode = false);
 
     ~ClientDescriptor();
 
@@ -189,6 +193,11 @@
      */
     void setPriority(const ClientPriority& priority);
 
+    /**
+     * Returns true when the camera is opened in shared mode.
+     */
+    bool getSharedMode() const;
+
     // This class is ordered by key
     template<class K, class V>
     friend bool operator < (const ClientDescriptor<K, V>& a, const ClientDescriptor<K, V>& b);
@@ -200,6 +209,7 @@
     std::set<KEY> mConflicting;
     ClientPriority mPriority;
     int32_t mOwnerId;
+    bool mSharedMode;
 }; // class ClientDescriptor
 
 template<class K, class V>
@@ -210,18 +220,19 @@
 template<class KEY, class VALUE>
 ClientDescriptor<KEY, VALUE>::ClientDescriptor(const KEY& key, const VALUE& value, int32_t cost,
         const std::set<KEY>& conflictingKeys, int32_t score, int32_t ownerId, int32_t state,
-        bool isVendorClient, int32_t scoreOffset) :
+        bool isVendorClient, int32_t scoreOffset, bool sharedMode) :
         mKey{key}, mValue{value}, mCost{cost}, mConflicting{conflictingKeys},
         mPriority(score, state, isVendorClient, scoreOffset),
-        mOwnerId{ownerId} {}
+        mOwnerId{ownerId}, mSharedMode{sharedMode} {}
 
 template<class KEY, class VALUE>
 ClientDescriptor<KEY, VALUE>::ClientDescriptor(KEY&& key, VALUE&& value, int32_t cost,
         std::set<KEY>&& conflictingKeys, int32_t score, int32_t ownerId, int32_t state,
-        bool isVendorClient, int32_t scoreOffset) :
+        bool isVendorClient, int32_t scoreOffset, bool sharedMode) :
         mKey{std::forward<KEY>(key)}, mValue{std::forward<VALUE>(value)}, mCost{cost},
         mConflicting{std::forward<std::set<KEY>>(conflictingKeys)},
-        mPriority(score, state, isVendorClient, scoreOffset), mOwnerId{ownerId} {}
+        mPriority(score, state, isVendorClient, scoreOffset), mOwnerId{ownerId},
+        mSharedMode{sharedMode} {}
 
 template<class KEY, class VALUE>
 ClientDescriptor<KEY, VALUE>::~ClientDescriptor() {}
@@ -253,7 +264,14 @@
 
 template<class KEY, class VALUE>
 bool ClientDescriptor<KEY, VALUE>::isConflicting(const KEY& key) const {
-    if (key == mKey) return true;
+    if (flags::camera_multi_client()) {
+        // In shared mode, there can be more than one client using the camera.
+        // Hence, having more than one client with the same key is not considered
+        // conflicting.
+        if (!mSharedMode && key == mKey) return true;
+    } else {
+        if (key == mKey) return true;
+    }
     for (const auto& x : mConflicting) {
         if (key == x) return true;
     }
@@ -266,6 +284,11 @@
 }
 
 template<class KEY, class VALUE>
+bool ClientDescriptor<KEY, VALUE>::getSharedMode() const {
+    return mSharedMode;
+}
+
+template<class KEY, class VALUE>
 void ClientDescriptor<KEY, VALUE>::setPriority(const ClientPriority& priority) {
     // We don't use the usual copy constructor here since we want to remember
     // whether a client is a vendor client or not. This could have been wiped
@@ -349,14 +372,19 @@
     void removeAll();
 
     /**
-     * Remove and return the ClientDescriptor with a given key.
+     * Remove and return all ClientDescriptors with a given key.
+     */
+    std::vector<std::shared_ptr<ClientDescriptor<KEY, VALUE>>> removeAll(const KEY& key);
+
+    /**
+     * Remove and return the ClientDescriptor with a given key.
      */
     std::shared_ptr<ClientDescriptor<KEY, VALUE>> remove(const KEY& key);
 
     /**
      * Remove the given ClientDescriptor.
      */
-    void remove(const std::shared_ptr<ClientDescriptor<KEY, VALUE>>& value);
+    virtual void remove(const std::shared_ptr<ClientDescriptor<KEY, VALUE>>& value);
 
     /**
      * Return a vector of the ClientDescriptors that would be evicted by adding the given
@@ -395,6 +423,8 @@
      */
     std::shared_ptr<ClientDescriptor<KEY, VALUE>> get(const KEY& key) const;
 
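+    /**
+     * Returns the descriptor of the primary client when the camera identified by
+     * key is opened in shared mode, or nullptr if there is no such client.
+     */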
+    std::shared_ptr<ClientDescriptor<KEY, VALUE>> getPrimaryClient(const KEY& key) const;
+
     /**
      * Block until the given client is no longer in the active clients list, or the timeout
      * occurred.
@@ -495,6 +525,8 @@
     int32_t cost = client->getCost();
     ClientPriority priority = client->getPriority();
     int32_t owner = client->getOwnerId();
+    bool sharedMode = client->getSharedMode();
+
 
     int64_t totalCost = getCurrentCostLocked() + cost;
 
@@ -520,9 +552,15 @@
         int32_t curCost = i->getCost();
         ClientPriority curPriority = i->getPriority();
         int32_t curOwner = i->getOwnerId();
-
-        bool conflicting = (curKey == key || i->isConflicting(key) ||
-                client->isConflicting(curKey));
+        bool curSharedMode = i->getSharedMode();
+        bool conflicting;
+        if (flags::camera_multi_client()) {
+            conflicting = (((!sharedMode || !curSharedMode) && curKey == key)
+                    || i->isConflicting(key) || client->isConflicting(curKey));
+        } else {
+            conflicting = (curKey == key || i->isConflicting(key) ||
+                    client->isConflicting(curKey));
+        }
 
         if (!returnIncompatibleClients) {
             // Find evicted clients
@@ -669,6 +707,25 @@
 }
 
 template<class KEY, class VALUE, class LISTENER>
+std::shared_ptr<ClientDescriptor<KEY, VALUE>> ClientManager<KEY, VALUE, LISTENER>::getPrimaryClient(
+        const KEY& key) const {
+    Mutex::Autolock lock(mLock);
+    if (flags::camera_multi_client()) {
+        for (const auto& i : mClients) {
+            bool sharedMode = i->getSharedMode();
+            bool primaryClient;
+            status_t ret = i->getValue()->isPrimaryClient(&primaryClient);
+            if (ret == OK) {
+                if ((i->getKey() == key) && sharedMode && primaryClient) {
+                    return i;
+                }
+            }
+        }
+    }
+    return std::shared_ptr<ClientDescriptor<KEY, VALUE>>(nullptr);
+}
+
+template<class KEY, class VALUE, class LISTENER>
 void ClientManager<KEY, VALUE, LISTENER>::removeAll() {
     Mutex::Autolock lock(mLock);
     if (mListener != nullptr) {
@@ -681,6 +738,27 @@
 }
 
 template<class KEY, class VALUE, class LISTENER>
+std::vector<std::shared_ptr<ClientDescriptor<KEY, VALUE>>>
+        ClientManager<KEY, VALUE, LISTENER>::removeAll(const KEY& key) {
+    Mutex::Autolock lock(mLock);
+    std::vector<std::shared_ptr<ClientDescriptor<KEY, VALUE>>> clients;
+    if (flags::camera_multi_client()) {
+        for (auto it = mClients.begin(); it != mClients.end();) {
+            if ((*it)->getKey() == key) {
+                // Notify and collect before erasing; erase() advances the iterator.
+                if (mListener != nullptr) mListener->onClientRemoved(**it);
+                clients.push_back(*it);
+                it = mClients.erase(it);
+            } else {
+                ++it;
+            }
+        }
+        mRemovedCondition.broadcast();
+    }
+    return clients;
+}
+
+template<class KEY, class VALUE, class LISTENER>
 std::shared_ptr<ClientDescriptor<KEY, VALUE>> ClientManager<KEY, VALUE, LISTENER>::remove(
     const KEY& key) {
     Mutex::Autolock lock(mLock);
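The net effect of the sharedMode plumbing above is that two descriptors for the same camera id only conflict (and evict each other) when at least one of them opened the camera exclusively. A minimal standalone sketch of that rule, using simplified stand-ins rather than the real templates; the actual check also consults the per-client conflicting-key sets and the camera_multi_client flag:

    #include <string>

    // Simplified stand-in for the conflict rule added above; not the real classes.
    struct FakeClient {
        std::string key;   // camera id
        bool sharedMode;   // true when opened for shared access
    };

    // Same key conflicts only if either side opened the camera exclusively.
    static bool conflicts(const FakeClient& a, const FakeClient& b) {
        return (!a.sharedMode || !b.sharedMode) && a.key == b.key;
    }

    int main() {
        FakeClient sharedA{"0", /*sharedMode=*/true};
        FakeClient sharedB{"0", /*sharedMode=*/true};
        FakeClient exclusive{"0", /*sharedMode=*/false};
        // Both shared clients can stay open; an exclusive open still conflicts.
        return (conflicts(sharedA, sharedB) || !conflicts(sharedA, exclusive)) ? 1 : 0;
    }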
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index 08f6314..ee4df4e 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -41,6 +41,7 @@
 
 namespace android {
 namespace camera3 {
+namespace flags = com::android::internal::camera::flags;
 
 void StreamConfiguration::getStreamConfigurations(
         const CameraMetadata &staticInfo, int configuration,
@@ -1252,6 +1253,14 @@
         request->update(ANDROID_CONTROL_AUTOFRAMING, &kDefaultAutoframingMode, 1);
     }
 
+    if (flags::ae_priority()) {
+        // Fill in CONTROL_AE_PRIORITY_MODE if not available
+        if (!request->exists(ANDROID_CONTROL_AE_PRIORITY_MODE)) {
+            static const uint8_t kDefaultAePriorityMode = ANDROID_CONTROL_AE_PRIORITY_MODE_OFF;
+            request->update(ANDROID_CONTROL_AE_PRIORITY_MODE, &kDefaultAePriorityMode, 1);
+        }
+    }
+
     return OK;
 }
 
diff --git a/services/mediametrics/Android.bp b/services/mediametrics/Android.bp
index c90488f..d29aa80 100644
--- a/services/mediametrics/Android.bp
+++ b/services/mediametrics/Android.bp
@@ -159,7 +159,7 @@
     },
 
     shared_libs: [
-        "mediametricsservice-aidl-cpp",
+        "libaudioutils",
         "libbase", // android logging
         "libbinder",
         "libcutils",
@@ -174,6 +174,7 @@
         "libstatspull",
         "libstatssocket",
         "libutils",
+        "mediametricsservice-aidl-cpp",
         "packagemanager_aidl-cpp",
     ],
 
diff --git a/services/mediametrics/AudioPowerUsage.cpp b/services/mediametrics/AudioPowerUsage.cpp
index 201d740..095832c 100644
--- a/services/mediametrics/AudioPowerUsage.cpp
+++ b/services/mediametrics/AudioPowerUsage.cpp
@@ -25,6 +25,7 @@
 #include <sstream>
 #include <string>
 #include <audio_utils/clock.h>
+#include <audio_utils/StringUtils.h>
 #include <cutils/properties.h>
 #include <stats_media_metrics.h>
 #include <sys/timerfd.h>
@@ -131,7 +132,7 @@
 
 int32_t AudioPowerUsage::deviceFromStringPairs(const std::string& device_strings) {
     int32_t deviceMask = 0;
-    const auto devaddrvec = stringutils::getDeviceAddressPairs(device_strings);
+    const auto devaddrvec = audio_utils::stringutils::getDeviceAddressPairs(device_strings);
     for (const auto &[device, addr] : devaddrvec) {
         int32_t combo_device = 0;
         deviceFromString(device, combo_device);
diff --git a/services/mediametrics/AudioTypes.cpp b/services/mediametrics/AudioTypes.cpp
index 353ae12..0eeff2b 100644
--- a/services/mediametrics/AudioTypes.cpp
+++ b/services/mediametrics/AudioTypes.cpp
@@ -17,6 +17,7 @@
 #include "AudioTypes.h"
 #include "MediaMetricsConstants.h"
 #include "StringUtils.h"
+#include <audio_utils/StringUtils.h>
 #include <media/TypeConverter.h> // requires libmedia_helper to get the Audio code.
 #include <stats_media_metrics.h>            // statsd
 
@@ -349,7 +350,7 @@
 template <typename Traits>
 int32_t int32FromFlags(const std::string &flags)
 {
-    const auto result = stringutils::split(flags, "|");
+    const auto result = audio_utils::stringutils::split(flags, "|");
     int32_t intFlags = 0;
     for (const auto& flag : result) {
         typename Traits::Type value;
@@ -364,7 +365,7 @@
 template <typename Traits>
 std::string stringFromFlags(const std::string &flags, size_t len)
 {
-    const auto result = stringutils::split(flags, "|");
+    const auto result = audio_utils::stringutils::split(flags, "|");
     std::string sFlags;
     for (const auto& flag : result) {
         typename Traits::Type value;
@@ -383,7 +384,7 @@
 {
     if (str.empty()) return {};
 
-    const auto result = stringutils::split(str, "|");
+    const auto result = audio_utils::stringutils::split(str, "|");
     std::stringstream ss;
     for (const auto &s : result) {
         if (map.count(s) > 0) {
@@ -399,7 +400,7 @@
 {
     if (str.empty()) return {};
 
-    const auto result = stringutils::split(str, "|");
+    const auto result = audio_utils::stringutils::split(str, "|");
     typename M::mapped_type value{};
     for (const auto &s : result) {
         auto it = map.find(s);
@@ -416,7 +417,7 @@
 
     if (str.empty()) return v;
 
-    const auto result = stringutils::split(str, "|");
+    const auto result = audio_utils::stringutils::split(str, "|");
     for (const auto &s : result) {
         auto it = map.find(s);
         if (it == map.end()) continue;
@@ -429,7 +430,7 @@
 {
     std::vector<int64_t> v;
 
-    const auto result = stringutils::split(s, "|");
+    const auto result = audio_utils::stringutils::split(s, "|");
     for (const auto &mask : result) {
         // 0 if undetected or if actually 0.
         int64_t int64Mask = strtoll(mask.c_str(), nullptr, 0);
diff --git a/services/mediametrics/StringUtils.cpp b/services/mediametrics/StringUtils.cpp
index 3b2db85..c4111ae 100644
--- a/services/mediametrics/StringUtils.cpp
+++ b/services/mediametrics/StringUtils.cpp
@@ -19,105 +19,12 @@
 #include <utils/Log.h>
 
 #include "StringUtils.h"
-
+#include "AudioTypes.h"
+#include <audio_utils/StringUtils.h>
 #include <charconv>
 
-#include "AudioTypes.h"
-
 namespace android::mediametrics::stringutils {
 
-std::string tokenizer(std::string::const_iterator& it,
-        const std::string::const_iterator& end, const char *reserved)
-{
-    // consume leading white space
-    for (; it != end && std::isspace(*it); ++it);
-    if (it == end) return {};
-
-    auto start = it;
-    // parse until we hit a reserved keyword or space
-    if (strchr(reserved, *it)) return {start, ++it};
-    for (;;) {
-        ++it;
-        if (it == end || std::isspace(*it) || strchr(reserved, *it)) return {start, it};
-    }
-}
-
-std::vector<std::string> split(const std::string& flags, const char *delim)
-{
-    std::vector<std::string> result;
-    for (auto it = flags.begin(); ; ) {
-        auto flag = tokenizer(it, flags.end(), delim);
-        if (flag.empty() || !std::isalnum(flag[0])) return result;
-        result.emplace_back(std::move(flag));
-
-        // look for the delimeter and discard
-        auto token = tokenizer(it, flags.end(), delim);
-        if (token.size() != 1 || strchr(delim, token[0]) == nullptr) return result;
-    }
-}
-
-bool parseVector(const std::string &str, std::vector<int32_t> *vector) {
-    std::vector<int32_t> values;
-    const char *p = str.c_str();
-    const char *last = p + str.size();
-    while (p != last) {
-        if (*p == ',' || *p == '{' || *p == '}') {
-            p++;
-        }
-        int32_t value = -1;
-        auto [ptr, error] = std::from_chars(p, last, value);
-        if (error == std::errc::invalid_argument || error == std::errc::result_out_of_range) {
-            return false;
-        }
-        p = ptr;
-        values.push_back(value);
-    }
-    *vector = std::move(values);
-    return true;
-}
-
-std::vector<std::pair<std::string, std::string>> getDeviceAddressPairs(const std::string& devices)
-{
-    std::vector<std::pair<std::string, std::string>> result;
-
-    // Currently, the device format is
-    //
-    // devices = device_addr OR device_addr|devices
-    // device_addr = device OR (device, addr)
-    //
-    // EXAMPLE:
-    // device1|(device2, addr2)|...
-
-    static constexpr char delim[] = "()|,";
-    for (auto it = devices.begin(); ; ) {
-        std::string address;
-        std::string device = tokenizer(it, devices.end(), delim);
-        if (device.empty()) return result;
-        if (device == "(") {  // it is a pair otherwise we consider it a device
-            device = tokenizer(it, devices.end(), delim); // get actual device
-            auto token = tokenizer(it, devices.end(), delim);
-            if (token != ",") return result;  // malformed, must have a comma
-
-            // special handling here for empty addresses
-            address = tokenizer(it, devices.end(), delim);
-            if (address.empty()) return result;
-            if (address == ")") {  // no address, just the ")"
-                address.clear();
-            } else {
-                token = tokenizer(it, devices.end(), delim);
-                if (token != ")") return result;
-            }
-        }
-        // misaligned token, device must start alphanumeric.
-        if (!std::isalnum(device[0])) return result;
-
-        result.emplace_back(std::move(device), std::move(address));
-
-        auto token = tokenizer(it, devices.end(), delim);
-        if (token != "|") return result;  // this includes end of string detection
-    }
-}
-
 size_t replace(std::string &str, const char *targetChars, const char replaceChar)
 {
     size_t replaced = 0;
@@ -134,7 +41,7 @@
 std::pair<std::string /* external statsd */, std::string /* internal */>
 parseDevicePairs(const std::string& devicePairs) {
     std::pair<std::string, std::string> result{};
-    const auto devaddrvec = stringutils::getDeviceAddressPairs(devicePairs);
+    const auto devaddrvec = audio_utils::stringutils::getDeviceAddressPairs(devicePairs);
     for (const auto& [device, addr] : devaddrvec) { // addr ignored for now.
         if (!result.second.empty()) {
             result.second.append("|"); // delimit devices with '|'.
diff --git a/services/mediametrics/fuzzer/Android.bp b/services/mediametrics/fuzzer/Android.bp
index 99703e3..efea252 100644
--- a/services/mediametrics/fuzzer/Android.bp
+++ b/services/mediametrics/fuzzer/Android.bp
@@ -36,6 +36,7 @@
     ],
 
     shared_libs: [
+        "libaudioutils",
         "libbase",
         "libbinder",
         "libcutils",
diff --git a/services/mediametrics/include/mediametricsservice/StringUtils.h b/services/mediametrics/include/mediametricsservice/StringUtils.h
index ed2cf2e..3e1cafc 100644
--- a/services/mediametrics/include/mediametricsservice/StringUtils.h
+++ b/services/mediametrics/include/mediametricsservice/StringUtils.h
@@ -61,30 +61,6 @@
 }
 
 /**
- * Return string tokens from iterator, separated by spaces and reserved chars.
- */
-std::string tokenizer(std::string::const_iterator& it,
-        const std::string::const_iterator& end, const char *reserved);
-
-/**
- * Splits flags string based on delimeters (or, whitespace which is removed).
- */
-std::vector<std::string> split(const std::string& flags, const char *delim);
-
-/**
- * Parses a vector of integers using ',' '{' and '}' as delimeters. Leaves
- * vector unmodified if the parsing fails.
- */
-bool parseVector(const std::string &str, std::vector<int32_t> *vector);
-
-/**
- * Parse the devices string and return a vector of device address pairs.
- *
- * A failure to parse returns early with the contents that were able to be parsed.
- */
-std::vector<std::pair<std::string, std::string>> getDeviceAddressPairs(const std::string &devices);
-
-/**
  * Replaces targetChars with replaceChar in string, returns number of chars replaced.
  */
 size_t replace(std::string &str, const char *targetChars, const char replaceChar);
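For callers migrating with this change, the string helpers keep the signatures removed from StringUtils.h here and simply move under audio_utils::stringutils (header <audio_utils/StringUtils.h>, linked via libaudioutils), as the updated call sites suggest. A small usage sketch mirroring those call sites:

    #include <audio_utils/StringUtils.h>
    #include <cstdint>
    #include <vector>

    void stringUtilsExample() {
        // Split a '|'-delimited flag string, as int32FromFlags() does above.
        const auto flags = audio_utils::stringutils::split("FLAG_A|FLAG_B", "|");

        // Parse "{1,2,3}"-style integer vectors, as statsd_codec.cpp does above;
        // returns false and leaves 'values' untouched on malformed input.
        std::vector<int32_t> values;
        (void)audio_utils::stringutils::parseVector("{4,300,0}", &values);

        // Expand "(device, address)|device" strings into pairs, as
        // AudioPowerUsage.cpp and parseDevicePairs() do above.
        const auto pairs =
                audio_utils::stringutils::getDeviceAddressPairs("(DEVICE1, A)|DEVICE2");
        (void)flags; (void)pairs;
    }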
diff --git a/services/mediametrics/statsd_codec.cpp b/services/mediametrics/statsd_codec.cpp
index 844f204..2f7c4f9 100644
--- a/services/mediametrics/statsd_codec.cpp
+++ b/services/mediametrics/statsd_codec.cpp
@@ -33,6 +33,7 @@
 #include <stats_media_metrics.h>
 #include <stats_event.h>
 
+#include <audio_utils/StringUtils.h>
 #include <frameworks/proto_logging/stats/message/mediametrics_message.pb.h>
 #include <mediametricsservice/cleaner.h>
 #include <mediametricsservice/iface_statsd.h>
@@ -171,7 +172,7 @@
 }
 
 static void parseVector(const std::string &str, std::vector<int32_t> *vector) {
-    if (!mediametrics::stringutils::parseVector(str, vector)) {
+    if (!audio_utils::stringutils::parseVector(str, vector)) {
         ALOGE("failed to parse integer vector from '%s'", str.c_str());
     }
 }
diff --git a/services/mediametrics/tests/mediametrics_tests.cpp b/services/mediametrics/tests/mediametrics_tests.cpp
index f3933a7..383ed6a 100644
--- a/services/mediametrics/tests/mediametrics_tests.cpp
+++ b/services/mediametrics/tests/mediametrics_tests.cpp
@@ -22,6 +22,7 @@
 #include <unordered_set>
 #include <vector>
 
+#include <audio_utils/StringUtils.h>
 #include <gtest/gtest.h>
 #include <media/MediaMetricsItem.h>
 #include <mediametricsservice/AudioTypes.h>
@@ -31,7 +32,7 @@
 #include <system/audio.h>
 
 using namespace android;
-using android::mediametrics::stringutils::parseVector;
+using android::audio_utils::stringutils::parseVector;
 
 static size_t countNewlines(const char *s) {
     size_t count = 0;
@@ -59,35 +60,6 @@
   ASSERT_EQ(false, android::mediametrics::startsWith(s, std::string("est")));
 }
 
-TEST(mediametrics_tests, parseVector) {
-    {
-        std::vector<int32_t> values;
-        EXPECT_EQ(true, parseVector("0{4,300,0,-112343,350}9", &values));
-        EXPECT_EQ(values, std::vector<int32_t>({0, 4, 300, 0, -112343, 350, 9}));
-    }
-    {
-        std::vector<int32_t> values;
-        EXPECT_EQ(true, parseVector("53", &values));
-        EXPECT_EQ(values, std::vector<int32_t>({53}));
-    }
-    {
-        std::vector<int32_t> values;
-        EXPECT_EQ(false, parseVector("5{3,6*3}3", &values));
-        EXPECT_EQ(values, std::vector<int32_t>({}));
-    }
-    {
-        std::vector<int32_t> values = {1}; // should still be this when parsing fails
-        std::vector<int32_t> expected = {1};
-        EXPECT_EQ(false, parseVector("51342abcd,1232", &values));
-        EXPECT_EQ(values, std::vector<int32_t>({1}));
-    }
-    {
-        std::vector<int32_t> values = {2}; // should still be this when parsing fails
-        EXPECT_EQ(false, parseVector("12345678901234,12345678901234", &values));
-        EXPECT_EQ(values, std::vector<int32_t>({2}));
-    }
-}
-
 TEST(mediametrics_tests, defer) {
   bool check = false;
   {
@@ -934,62 +906,6 @@
   }
 }
 
-TEST(mediametrics_tests, device_parsing) {
-    auto devaddr = android::mediametrics::stringutils::getDeviceAddressPairs("(DEVICE, )");
-    ASSERT_EQ((size_t)1, devaddr.size());
-    ASSERT_EQ("DEVICE", devaddr[0].first);
-    ASSERT_EQ("", devaddr[0].second);
-
-    devaddr = android::mediametrics::stringutils::getDeviceAddressPairs(
-            "(DEVICE1, A)|(D, ADDRB)");
-    ASSERT_EQ((size_t)2, devaddr.size());
-    ASSERT_EQ("DEVICE1", devaddr[0].first);
-    ASSERT_EQ("A", devaddr[0].second);
-    ASSERT_EQ("D", devaddr[1].first);
-    ASSERT_EQ("ADDRB", devaddr[1].second);
-
-    devaddr = android::mediametrics::stringutils::getDeviceAddressPairs(
-            "(A,B)|(C,D)");
-    ASSERT_EQ((size_t)2, devaddr.size());
-    ASSERT_EQ("A", devaddr[0].first);
-    ASSERT_EQ("B", devaddr[0].second);
-    ASSERT_EQ("C", devaddr[1].first);
-    ASSERT_EQ("D", devaddr[1].second);
-
-    devaddr = android::mediametrics::stringutils::getDeviceAddressPairs(
-            "  ( A1 , B )  | ( C , D2 )  ");
-    ASSERT_EQ((size_t)2, devaddr.size());
-    ASSERT_EQ("A1", devaddr[0].first);
-    ASSERT_EQ("B", devaddr[0].second);
-    ASSERT_EQ("C", devaddr[1].first);
-    ASSERT_EQ("D2", devaddr[1].second);
-
-    devaddr = android::mediametrics::stringutils::getDeviceAddressPairs(
-            " Z  ");
-    ASSERT_EQ((size_t)1, devaddr.size());
-    ASSERT_EQ("Z", devaddr[0].first);
-
-    devaddr = android::mediametrics::stringutils::getDeviceAddressPairs(
-            "  A | B|C  ");
-    ASSERT_EQ((size_t)3, devaddr.size());
-    ASSERT_EQ("A", devaddr[0].first);
-    ASSERT_EQ("", devaddr[0].second);
-    ASSERT_EQ("B", devaddr[1].first);
-    ASSERT_EQ("", devaddr[1].second);
-    ASSERT_EQ("C", devaddr[2].first);
-    ASSERT_EQ("", devaddr[2].second);
-
-    devaddr = android::mediametrics::stringutils::getDeviceAddressPairs(
-            "  A | (B1, 10) |C  ");
-    ASSERT_EQ((size_t)3, devaddr.size());
-    ASSERT_EQ("A", devaddr[0].first);
-    ASSERT_EQ("", devaddr[0].second);
-    ASSERT_EQ("B1", devaddr[1].first);
-    ASSERT_EQ("10", devaddr[1].second);
-    ASSERT_EQ("C", devaddr[2].first);
-    ASSERT_EQ("", devaddr[2].second);
-}
-
 TEST(mediametrics_tests, timed_action) {
     android::mediametrics::TimedAction timedAction;
     std::atomic_int value1 = 0;
diff --git a/services/oboeservice/AAudioEndpointManager.cpp b/services/oboeservice/AAudioEndpointManager.cpp
index b5ee2f2..243f1f1 100644
--- a/services/oboeservice/AAudioEndpointManager.cpp
+++ b/services/oboeservice/AAudioEndpointManager.cpp
@@ -119,8 +119,9 @@
         }
     }
 
-    ALOGV("findExclusiveEndpoint_l(), found %p for device = %d, sessionId = %d",
-          endpoint.get(), configuration.getDeviceId(), configuration.getSessionId());
+    ALOGV("findExclusiveEndpoint_l(), found %p for devices = %s, sessionId = %d",
+          endpoint.get(), toString(configuration.getDeviceIds()).c_str(),
+          configuration.getSessionId());
     return endpoint;
 }
 
@@ -137,8 +138,9 @@
         }
     }
 
-    ALOGV("findSharedEndpoint_l(), found %p for device = %d, sessionId = %d",
-          endpoint.get(), configuration.getDeviceId(), configuration.getSessionId());
+    ALOGV("findSharedEndpoint_l(), found %p for devices = %s, sessionId = %d",
+          endpoint.get(), toString(configuration.getDeviceIds()).c_str(),
+          configuration.getSessionId());
     return endpoint;
 }
 
@@ -192,8 +194,8 @@
     } else {
         const sp<AAudioServiceEndpointMMAP> endpointMMap =
                 new AAudioServiceEndpointMMAP(aaudioService);
-        ALOGV("%s(), no match so try to open MMAP %p for dev %d",
-              __func__, endpointMMap.get(), configuration.getDeviceId());
+        ALOGV("%s(), no match so try to open MMAP %p for devices %s",
+              __func__, endpointMMap.get(), toString(configuration.getDeviceIds()).c_str());
         endpoint = endpointMMap;
 
         const aaudio_result_t result = endpoint->open(request);
@@ -250,8 +252,9 @@
                 mSharedOpenCount++;
             }
         }
-        ALOGV("%s(), created endpoint %p, requested device = %d, dir = %d",
-              __func__, endpoint.get(), configuration.getDeviceId(), (int)direction);
+        ALOGV("%s(), created endpoint %p, requested device = %s, dir = %d",
+              __func__, endpoint.get(), android::toString(configuration.getDeviceIds()).c_str(),
+              (int)direction);
         IPCThreadState::self()->restoreCallingIdentity(token);
     }
 
@@ -289,8 +292,9 @@
 
         serviceEndpoint->close();
         mExclusiveCloseCount++;
-        ALOGV("%s() %p for device %d",
-              __func__, serviceEndpoint.get(), serviceEndpoint->getDeviceId());
+        ALOGV("%s() %p for devices %s",
+              __func__, serviceEndpoint.get(),
+              android::toString(serviceEndpoint->getDeviceIds()).c_str());
     }
 }
 
@@ -313,7 +317,8 @@
         serviceEndpoint->close();
 
         mSharedCloseCount++;
-        ALOGV("%s(%p) closed for device %d",
-              __func__, serviceEndpoint.get(), serviceEndpoint->getDeviceId());
+        ALOGV("%s(%p) closed for device %s",
+              __func__, serviceEndpoint.get(),
+              android::toString(serviceEndpoint->getDeviceIds()).c_str());
     }
 }
diff --git a/services/oboeservice/AAudioServiceEndpoint.cpp b/services/oboeservice/AAudioServiceEndpoint.cpp
index e49e9e7..c677619 100644
--- a/services/oboeservice/AAudioServiceEndpoint.cpp
+++ b/services/oboeservice/AAudioServiceEndpoint.cpp
@@ -57,7 +57,7 @@
     result << "    Direction:            " << ((getDirection() == AAUDIO_DIRECTION_OUTPUT)
                                    ? "OUTPUT" : "INPUT") << "\n";
     result << "    Requested Device Id:  " << mRequestedDeviceId << "\n";
-    result << "    Device Id:            " << getDeviceId() << "\n";
+    result << "    Device Ids:           " << android::toString(getDeviceIds()).c_str() << "\n";
     result << "    Sample Rate:          " << getSampleRate() << "\n";
     result << "    Channel Count:        " << getSamplesPerFrame() << "\n";
     result << "    Channel Mask:         0x" << std::hex << getChannelMask() << std::dec << "\n";
@@ -155,8 +155,8 @@
     if (configuration.getDirection() != getDirection()) {
         return false;
     }
-    if (configuration.getDeviceId() != AAUDIO_UNSPECIFIED &&
-        configuration.getDeviceId() != getDeviceId()) {
+    if (!configuration.getDeviceIds().empty() &&
+        !android::areDeviceIdsEqual(configuration.getDeviceIds(), getDeviceIds())) {
         return false;
     }
     if (configuration.getSessionId() != AAUDIO_SESSION_ID_ALLOCATE &&
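The matches() change above replaces the single-device comparison with a vector comparison: an empty requested set (previously AAUDIO_UNSPECIFIED) matches any endpoint, otherwise the full device id sets must be equal. A standalone sketch of that rule with plain vectors; the real code uses android::DeviceIdVector and android::areDeviceIdsEqual(), which may compare the ids order-insensitively:

    #include <cstdint>
    #include <vector>

    using DeviceIds = std::vector<int32_t>;  // stand-in for android::DeviceIdVector

    // Sketch of the endpoint matching rule above.
    static bool deviceIdsMatch(const DeviceIds& requested, const DeviceIds& endpoint) {
        // No requested device means "any device".
        if (requested.empty()) return true;
        // Order-sensitive comparison here for simplicity; the real helper may
        // treat the ids as a set.
        return requested == endpoint;
    }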
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.cpp b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
index 59bb98e..66918c1 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.cpp
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
@@ -105,7 +105,7 @@
     aaudio_result_t result = AAUDIO_OK;
     mAudioDataWrapper = std::make_unique<SharedMemoryWrapper>();
     copyFrom(request.getConstantConfiguration());
-    mRequestedDeviceId = getDeviceId();
+    mRequestedDeviceId = android::getFirstDeviceId(getDeviceIds());
 
     mMmapClient.attributionSource = request.getAttributionSource();
     // TODO b/182392769: use attribution source util
@@ -173,11 +173,13 @@
         audio_config_base_t* config) {
     aaudio_result_t result = AAUDIO_OK;
     audio_config_base_t currentConfig = *config;
-    audio_port_handle_t deviceId;
+    android::DeviceIdVector deviceIds;
 
     const audio_attributes_t attributes = getAudioAttributesFrom(this);
 
-    deviceId = mRequestedDeviceId;
+    if (mRequestedDeviceId != AAUDIO_UNSPECIFIED) {
+        deviceIds.push_back(mRequestedDeviceId);
+    }
 
     const aaudio_direction_t direction = getDirection();
 
@@ -202,14 +204,9 @@
 
     // Open HAL stream. Set mMmapStream
     ALOGD("%s trying to open MMAP stream with format=%#x, "
-          "sample_rate=%u, channel_mask=%#x, device=%d",
+          "sample_rate=%u, channel_mask=%#x, device=%s",
           __func__, config->format, config->sample_rate,
-          config->channel_mask, deviceId);
-
-    android::DeviceIdVector deviceIds;
-    if (deviceId != AAUDIO_UNSPECIFIED) {
-        deviceIds.push_back(deviceId);
-    }
+          config->channel_mask, android::toString(deviceIds).c_str());
 
     const std::lock_guard<std::mutex> lock(mMmapStreamLock);
     const status_t status = MmapStreamInterface::openMmapStream(streamDirection,
@@ -233,12 +230,11 @@
         config->channel_mask = currentConfig.channel_mask;
         return AAUDIO_ERROR_UNAVAILABLE;
     }
-    deviceId = android::getFirstDeviceId(deviceIds);
 
-    if (deviceId == AAUDIO_UNSPECIFIED) {
-        ALOGW("%s() - openMmapStream() failed to set deviceId", __func__);
+    if (deviceIds.empty()) {
+        ALOGW("%s() - openMmapStream() failed to set deviceIds", __func__);
     }
-    setDeviceId(deviceId);
+    setDeviceIds(deviceIds);
 
     if (sessionId == AUDIO_SESSION_ALLOCATE) {
         ALOGW("%s() - openMmapStream() failed to set sessionId", __func__);
@@ -250,8 +246,8 @@
             : (aaudio_session_id_t) sessionId;
     setSessionId(actualSessionId);
 
-    ALOGD("%s(format = 0x%X) deviceId = %d, sessionId = %d",
-          __func__, config->format, getDeviceId(), getSessionId());
+    ALOGD("%s(format = 0x%X) deviceIds = %s, sessionId = %d",
+          __func__, config->format, toString(getDeviceIds()).c_str(), getSessionId());
 
     // Create MMAP/NOIRQ buffer.
     result = createMmapBuffer_l();
@@ -280,9 +276,9 @@
 
     mDataReportOffsetNanos = ((int64_t)mTimestampGracePeriodMs) * AAUDIO_NANOS_PER_MILLISECOND;
 
-    ALOGD("%s() got rate = %d, channels = %d channelMask = %#x, deviceId = %d, capacity = %d\n",
+    ALOGD("%s() got rate = %d, channels = %d channelMask = %#x, deviceIds = %s, capacity = %d\n",
           __func__, getSampleRate(), getSamplesPerFrame(), getChannelMask(),
-          deviceId, getBufferCapacity());
+          android::toString(deviceIds).c_str(), getBufferCapacity());
 
     ALOGD("%s() got format = 0x%X = %s, frame size = %d, burst size = %d",
           __func__, getFormat(), audio_format_to_string(getFormat()),
@@ -293,7 +289,11 @@
 error:
     close_l();
     // restore original requests
-    setDeviceId(mRequestedDeviceId);
+    android::DeviceIdVector requestedDeviceIds;
+    if (mRequestedDeviceId != AAUDIO_UNSPECIFIED) {
+        requestedDeviceIds.push_back(mRequestedDeviceId);
+    }
+    setDeviceIds(requestedDeviceIds);
     setSessionId(requestedSessionId);
     return result;
 }
@@ -491,27 +491,26 @@
 };
 
 void AAudioServiceEndpointMMAP::onRoutingChanged(const android::DeviceIdVector& deviceIds) {
-    const auto deviceId = android::getFirstDeviceId(deviceIds);
-    // TODO(b/367816690): Compare the new and saved device sets.
-    ALOGD("%s() called with dev %d, old = %d", __func__, deviceId, getDeviceId());
-    if (getDeviceId() != deviceId) {
-        if (getDeviceId() != AUDIO_PORT_HANDLE_NONE) {
+    ALOGD("%s() called with dev %s, old = %s", __func__, android::toString(deviceIds).c_str(),
+          android::toString(getDeviceIds()).c_str());
+    if (!android::areDeviceIdsEqual(getDeviceIds(), deviceIds)) {
+        if (!getDeviceIds().empty()) {
             // When there is a routing changed, mmap stream should be disconnected. Set `mConnected`
-            // as false here so that there won't be a new stream connect to this endpoint.
+            // as false here so that there won't be a new stream connected to this endpoint.
             mConnected.store(false);
             const android::sp<AAudioServiceEndpointMMAP> holdEndpoint(this);
-            std::thread asyncTask([holdEndpoint, deviceId]() {
+            std::thread asyncTask([holdEndpoint, deviceIds]() {
                 ALOGD("onRoutingChanged() asyncTask launched");
                 // When routing changed, the stream is disconnected and cannot be used except for
                 // closing. In that case, it should be safe to release all registered streams.
                 // This can help release service side resource in case the client doesn't close
                 // the stream after receiving disconnect event.
                 holdEndpoint->releaseRegisteredStreams();
-                holdEndpoint->setDeviceId(deviceId);
+                holdEndpoint->setDeviceIds(deviceIds);
             });
             asyncTask.detach();
         } else {
-            setDeviceId(deviceId);
+            setDeviceIds(deviceIds);
         }
     }
 };
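The routing-change handler above keeps the endpoint alive across the asynchronous teardown by capturing a strong pointer in the detached worker. A condensed, self-contained illustration of that pattern using std::shared_ptr in place of android::sp; Endpoint is a stand-in, not the real class:

    #include <chrono>
    #include <memory>
    #include <thread>

    struct Endpoint : std::enable_shared_from_this<Endpoint> {
        void releaseRegisteredStreams() { /* disconnect registered clients here */ }

        void onRoutingChanged() {
            // Capturing a strong reference keeps *this alive until the detached
            // task finishes, even if the owner drops its reference meanwhile.
            auto holdEndpoint = shared_from_this();
            std::thread asyncTask([holdEndpoint]() {
                holdEndpoint->releaseRegisteredStreams();
            });
            asyncTask.detach();
        }
    };

    int main() {
        std::make_shared<Endpoint>()->onRoutingChanged();
        // Give the detached task a moment to finish before the process exits.
        std::this_thread::sleep_for(std::chrono::milliseconds(50));
    }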
diff --git a/services/oboeservice/AAudioServiceEndpointShared.cpp b/services/oboeservice/AAudioServiceEndpointShared.cpp
index 5e1e594..f54de5e 100644
--- a/services/oboeservice/AAudioServiceEndpointShared.cpp
+++ b/services/oboeservice/AAudioServiceEndpointShared.cpp
@@ -64,7 +64,7 @@
     const AAudioStreamConfiguration &configuration = request.getConstantConfiguration();
 
     copyFrom(configuration);
-    mRequestedDeviceId = configuration.getDeviceId();
+    mRequestedDeviceId = android::getFirstDeviceId(configuration.getDeviceIds());
 
     AudioStreamBuilder builder;
     builder.copyFrom(configuration);
@@ -79,7 +79,7 @@
 
     setSampleRate(mStreamInternal->getSampleRate());
     setChannelMask(mStreamInternal->getChannelMask());
-    setDeviceId(mStreamInternal->getDeviceId());
+    setDeviceIds(mStreamInternal->getDeviceIds());
     setSessionId(mStreamInternal->getSessionId());
     setFormat(AUDIO_FORMAT_PCM_FLOAT); // force for mixer
     setHardwareSampleRate(mStreamInternal->getHardwareSampleRate());
@@ -220,7 +220,7 @@
 void AAudioServiceEndpointShared::handleDisconnectRegisteredStreamsAsync() {
     android::sp<AAudioServiceEndpointShared> holdEndpoint(this);
     // When there is a routing changed, mmap stream should be disconnected. Set `mConnected`
-    // as false here so that there won't be a new stream connect to this endpoint.
+    // as false here so that there won't be a new stream connected to this endpoint.
     mConnected.store(false);
     std::thread asyncTask([holdEndpoint]() {
         // When handling disconnection, the service side has disconnected. In that case,
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index 78cf706..1c24f18 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -127,7 +127,8 @@
         .set(AMEDIAMETRICS_PROP_DIRECTION,
                 AudioGlobal_convertDirectionToText(getDirection()))
         .set(AMEDIAMETRICS_PROP_ENCODING, toString(getFormat()).c_str())
-        .set(AMEDIAMETRICS_PROP_ROUTEDDEVICEID, (int32_t)getDeviceId())
+        .set(AMEDIAMETRICS_PROP_ROUTEDDEVICEID, android::getFirstDeviceId(getDeviceIds()))
+        .set(AMEDIAMETRICS_PROP_ROUTEDDEVICEIDS, android::toString(getDeviceIds()).c_str())
         .set(AMEDIAMETRICS_PROP_SAMPLERATE, (int32_t)getSampleRate())
         .set(AMEDIAMETRICS_PROP_SESSIONID, (int32_t)getSessionId())
         .set(AMEDIAMETRICS_PROP_SOURCE, toString(attributes.source).c_str())
diff --git a/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp b/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
index f5c2e6c..e80f51d 100644
--- a/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
+++ b/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
@@ -372,7 +372,8 @@
     request.setAttributionSource(attributionSource);
     request.setInService(fdp.ConsumeBool());
 
-    request.getConfiguration().setDeviceId(fdp.ConsumeIntegral<int32_t>());
+    android::DeviceIdVector deviceIds = { fdp.ConsumeIntegral<int32_t>() };
+    request.getConfiguration().setDeviceIds(deviceIds);
     request.getConfiguration().setSampleRate(fdp.ConsumeIntegral<int32_t>());
     request.getConfiguration().setChannelMask((aaudio_channel_mask_t)(
         fdp.ConsumeBool()