Merge "Add ValidatedAttributionSourceState" into main
diff --git a/Android.bp b/Android.bp
index fa043b2..c11e38a 100644
--- a/Android.bp
+++ b/Android.bp
@@ -163,3 +163,19 @@
     frozen: true,
 
 }
+
+latest_av_audio_types_aidl = "av-audio-types-aidl-V1"
+
+cc_defaults {
+    name: "latest_av_audio_types_aidl_ndk_shared",
+    shared_libs: [
+        latest_av_audio_types_aidl + "-ndk",
+    ],
+}
+
+cc_defaults {
+    name: "latest_av_audio_types_aidl_ndk_static",
+    static_libs: [
+        latest_av_audio_types_aidl + "-ndk",
+    ],
+}
diff --git a/camera/Android.bp b/camera/Android.bp
index d0f8e7e..75c2999 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -169,7 +169,6 @@
     srcs: [
         "aidl/android/hardware/CameraExtensionSessionStats.aidl",
         "aidl/android/hardware/ICameraService.aidl",
-        "aidl/android/hardware/CameraIdRemapping.aidl",
         "aidl/android/hardware/ICameraServiceListener.aidl",
         "aidl/android/hardware/ICameraServiceProxy.aidl",
         "aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl",
diff --git a/camera/Camera.cpp b/camera/Camera.cpp
index 5d32871..8018390 100644
--- a/camera/Camera.cpp
+++ b/camera/Camera.cpp
@@ -70,11 +70,11 @@
 }
 
 sp<Camera> Camera::connect(int cameraId, const std::string& clientPackageName,
-        int clientUid, int clientPid, int targetSdkVersion, bool overrideToPortrait,
+        int clientUid, int clientPid, int targetSdkVersion, int rotationOverride,
         bool forceSlowJpegMode, int32_t deviceId, int32_t devicePolicy)
 {
     return CameraBaseT::connect(cameraId, clientPackageName, clientUid,
-            clientPid, targetSdkVersion, overrideToPortrait, forceSlowJpegMode, deviceId,
+            clientPid, targetSdkVersion, rotationOverride, forceSlowJpegMode, deviceId,
             devicePolicy);
 }
 
diff --git a/camera/CameraBase.cpp b/camera/CameraBase.cpp
index b2f7cc7..d7415a3 100644
--- a/camera/CameraBase.cpp
+++ b/camera/CameraBase.cpp
@@ -163,7 +163,7 @@
 sp<TCam> CameraBase<TCam, TCamTraits>::connect(int cameraId,
                                                const std::string& clientPackageName,
                                                int clientUid, int clientPid, int targetSdkVersion,
-                                               bool overrideToPortrait, bool forceSlowJpegMode,
+                                               int rotationOverride, bool forceSlowJpegMode,
                                                int32_t deviceId, int32_t devicePolicy)
 {
     ALOGV("%s: connect", __FUNCTION__);
@@ -174,10 +174,10 @@
     binder::Status ret;
     if (cs != nullptr) {
         TCamConnectService fnConnectService = TCamTraits::fnConnectService;
-        ALOGI("Connect camera (legacy API) - overrideToPortrait %d, forceSlowJpegMode %d",
-                overrideToPortrait, forceSlowJpegMode);
+        ALOGI("Connect camera (legacy API) - rotationOverride %d, forceSlowJpegMode %d",
+                rotationOverride, forceSlowJpegMode);
         ret = (cs.get()->*fnConnectService)(cl, cameraId, clientPackageName, clientUid,
-                clientPid, targetSdkVersion, overrideToPortrait, forceSlowJpegMode, deviceId,
+                clientPid, targetSdkVersion, rotationOverride, forceSlowJpegMode, deviceId,
                 devicePolicy, /*out*/ &c->mCamera);
     }
     if (ret.isOk() && c->mCamera != nullptr) {
@@ -279,11 +279,11 @@
 // this can be in BaseCamera but it should be an instance method
 template <typename TCam, typename TCamTraits>
 status_t CameraBase<TCam, TCamTraits>::getCameraInfo(int cameraId,
-        bool overrideToPortrait, int32_t deviceId, int32_t devicePolicy,
+        int rotationOverride, int32_t deviceId, int32_t devicePolicy,
         struct hardware::CameraInfo* cameraInfo) {
     const sp<::android::hardware::ICameraService> cs = getCameraService();
     if (cs == 0) return UNKNOWN_ERROR;
-    binder::Status res = cs->getCameraInfo(cameraId, overrideToPortrait, deviceId, devicePolicy,
+    binder::Status res = cs->getCameraInfo(cameraId, rotationOverride, deviceId, devicePolicy,
             cameraInfo);
     return res.isOk() ? OK : res.serviceSpecificErrorCode();
 }
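
A quick caller-side sketch of the updated legacy connect() path, for orientation: the bool overrideToPortrait argument is gone and an int rotationOverride takes its place. The camera id, package name, and SDK level below are placeholders, the constants come from ICameraService (ROTATION_OVERRIDE_NONE is introduced in the AIDL change later in this patch), and this snippet is illustrative only, not part of the change itself.

    // Hypothetical caller of Camera::connect() after the signature change.
    #include <android/hardware/ICameraService.h>
    #include <camera/Camera.h>

    using ::android::Camera;
    using ::android::sp;
    using ::android::hardware::ICameraService;

    sp<Camera> openBackCamera() {
        return Camera::connect(/*cameraId*/ 0,
                               /*clientPackageName*/ "com.example.app",  // placeholder
                               ICameraService::USE_CALLING_UID,
                               ICameraService::USE_CALLING_PID,
                               /*targetSdkVersion*/ 34,                  // placeholder
                               ICameraService::ROTATION_OVERRIDE_NONE,   // was overrideToPortrait=false
                               /*forceSlowJpegMode*/ false);             // deviceId/devicePolicy use defaults
    }
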
diff --git a/camera/aidl/android/hardware/CameraExtensionSessionStats.aidl b/camera/aidl/android/hardware/CameraExtensionSessionStats.aidl
index 1c81831..a3c0e69 100644
--- a/camera/aidl/android/hardware/CameraExtensionSessionStats.aidl
+++ b/camera/aidl/android/hardware/CameraExtensionSessionStats.aidl
@@ -66,4 +66,9 @@
      * true if advanced extensions are being used, false otherwise
      */
     boolean isAdvanced = false;
+
+    /**
+     * Format of the image capture request.
+     */
+    int captureFormat;
 }
\ No newline at end of file
diff --git a/camera/aidl/android/hardware/CameraIdRemapping.aidl b/camera/aidl/android/hardware/CameraIdRemapping.aidl
deleted file mode 100644
index 453f696..0000000
--- a/camera/aidl/android/hardware/CameraIdRemapping.aidl
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright (C) 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.hardware;
-
-/**
- * Specifies a remapping of Camera Ids.
- *
- * Example: For a given package, a remapping of camera id0 to id1 specifies
- * that any operation to perform on id0 should instead be performed on id1.
- *
- * @hide
- */
-parcelable CameraIdRemapping {
-    /**
-     * Specifies remapping of Camera Ids per package.
-     */
-    parcelable PackageIdRemapping {
-        /** Package Name (e.g. com.android.xyz). */
-        @utf8InCpp String packageName;
-        /**
-         * Ordered list of Camera Ids to replace. Only Camera Ids present in this list will be
-         * affected.
-         */
-        @utf8InCpp List<String> cameraIdsToReplace;
-        /**
-         *  Ordered list of updated Camera Ids, where updatedCameraIds[i] corresponds to
-         *  the updated camera id for cameraIdsToReplace[i].
-         */
-        @utf8InCpp List<String> updatedCameraIds;
-    }
-
-    /**
-     * List of Camera Id remappings to perform.
-     */
-    List<PackageIdRemapping> packageIdRemappings;
-}
diff --git a/camera/aidl/android/hardware/ICameraService.aidl b/camera/aidl/android/hardware/ICameraService.aidl
index 885749d..d9a0934 100644
--- a/camera/aidl/android/hardware/ICameraService.aidl
+++ b/camera/aidl/android/hardware/ICameraService.aidl
@@ -30,7 +30,6 @@
 import android.hardware.camera2.impl.CameraMetadataNative;
 import android.hardware.ICameraServiceListener;
 import android.hardware.CameraInfo;
-import android.hardware.CameraIdRemapping;
 import android.hardware.CameraStatus;
 import android.hardware.CameraExtensionSessionStats;
 
@@ -76,11 +75,28 @@
     int getNumberOfCameras(int type, int deviceId, int devicePolicy);
 
     /**
+     * If changed, reflect in
+     * frameworks/base/core/java/android/hardware/camera2/CameraManager.java.
+     * We use an enum here because the decision to override to portrait mode / fetch the
+     * rotationOverride as it exists in CameraManager right now is based on a static system
+     * property, not something that changes dynamically, say on fold state. As a result, a
+     * boolean is not enough to distinguish the case where cameraserver should override to
+     * portrait (sensor orientation is 0, 180) from the case where it should only rotate the
+     * sensor feed (sensor orientation is 90, 270).
+     */
+    const int ROTATION_OVERRIDE_NONE = 0;
+    const int ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT = 1;
+    const int ROTATION_OVERRIDE_ROTATION_ONLY = 2;
+
+    /**
      * Fetch basic camera information for a camera.
      *
      * @param cameraId The ID of the camera to fetch information for.
-     * @param overrideToPortrait Whether to override the sensor orientation information to
-     *        correspond to portrait.
+     * @param rotationOverride How to override the sensor orientation information:
+     *        {@link ICameraService#ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT} overrides the sensor
+     *        orientation to portrait and rotates and crops the camera feed, while
+     *        {@link ICameraService#ROTATION_OVERRIDE_ROTATION_ONLY} rotates and crops the camera
+     *        feed without changing the sensor orientation.
      * @param deviceId The device id of the context associated with the caller.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
@@ -88,7 +104,7 @@
      *                     policy.
      * @return CameraInfo for the camera.
      */
-    CameraInfo getCameraInfo(int cameraId, boolean overrideToPortrait, int deviceId,
+    CameraInfo getCameraInfo(int cameraId, int rotationOverride, int deviceId,
             int devicePolicy);
 
     /**
@@ -105,8 +121,12 @@
      * @param opPackageName The package name to report for the app-ops.
      * @param clientUid UID for the calling client.
      * @param clientPid PID for the calling client.
-     * @param overrideToPortrait Whether to override the sensor orientation information to
-     *        correspond to portrait.
+     * @param targetSdkVersion the target sdk level of the application calling this function.
+     * @param rotationOverride How to override the sensor orientation information:
+     *        {@link ICameraService#ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT} overrides the sensor
+     *        orientation to portrait and rotates and crops the camera feed, while
+     *        {@link ICameraService#ROTATION_OVERRIDE_ROTATION_ONLY} rotates and crops the camera
+     *        feed without changing the sensor orientation.
      * @param forceSlowJpegMode Whether to force slow jpeg mode.
      * @param deviceId The device id of the context associated with the caller.
      * @param devicePolicy The camera policy of the device of the associated context (default
@@ -119,7 +139,7 @@
             @utf8InCpp String opPackageName,
             int clientUid, int clientPid,
             int targetSdkVersion,
-            boolean overrideToPortrait,
+            int rotationOverride,
             boolean forceSlowJpegMode,
             int deviceId,
             int devicePolicy);
@@ -131,8 +151,12 @@
      * @param cameraId The ID of the camera to open.
      * @param opPackageName The package name to report for the app-ops.
      * @param clientUid UID for the calling client.
-     * @param overrideToPortrait Whether to override the sensor orientation information to
-     *        correspond to portrait.
+     * @param targetSdkVersion the target sdk level of the application calling this function.
+     * @param rotationOverride How to override the sensor orientation information:
+     *        {@link ICameraService#ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT} overrides the sensor
+     *        orientation to portrait and rotates and crops the camera feed, while
+     *        {@link ICameraService#ROTATION_OVERRIDE_ROTATION_ONLY} rotates and crops the camera
+     *        feed without changing the sensor orientation.
      * @param deviceId The device id of the context associated with the caller.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
@@ -145,7 +169,7 @@
             @nullable @utf8InCpp String featureId,
             int clientUid, int oomScoreOffset,
             int targetSdkVersion,
-            boolean overrideToPortrait,
+            int rotationOverride,
             int deviceId,
             int devicePolicy);
 
@@ -165,35 +189,24 @@
     ConcurrentCameraIdCombination[] getConcurrentCameraIds();
 
     /**
-      * Check whether a particular set of session configurations are concurrently supported by the
-      * corresponding camera ids.
-      *
-      * @param sessions the set of camera id and session configuration pairs to be queried.
-      * @param targetSdkVersion the target sdk level of the application calling this function.
-      * @return true  - the set of concurrent camera id and stream combinations is supported.
-      *         false - the set of concurrent camera id and stream combinations is not supported
-      *                 OR the method was called with a set of camera ids not returned by
-      *                 getConcurrentCameraIds().
-      */
+     * Check whether a particular set of session configurations are concurrently supported by the
+     * corresponding camera ids.
+     *
+     * @param sessions the set of camera id and session configuration pairs to be queried.
+     * @param targetSdkVersion the target sdk level of the application calling this function.
+     * @param deviceId The device id of the context associated with the caller.
+     * @param devicePolicy The camera policy of the device of the associated context (default
+     *                     policy for default device context). Only virtual cameras would be exposed
+     *                     only for custom policy and only real cameras would be exposed for default
+     *                     policy.
+     * @return true  - the set of concurrent camera id and stream combinations is supported.
+     *         false - the set of concurrent camera id and stream combinations is not supported
+     *                 OR the method was called with a set of camera ids not returned by
+     *                 getConcurrentCameraIds().
+     */
     boolean isConcurrentSessionConfigurationSupported(
             in CameraIdAndSessionConfiguration[] sessions,
-            int targetSdkVersion);
-
-    /**
-     * Remap Camera Ids in the CameraService.
-     *
-     * Once this is in effect, all binder calls in the ICameraService that
-     * use logicalCameraId should consult remapping state to arrive at the
-     * correct cameraId to perform the operation on.
-     *
-     * Note: Before the new cameraIdRemapping state is applied, the previous
-     * state is cleared.
-     *
-     * @param cameraIdRemapping the camera ids to remap. Sending an unpopulated
-     *        cameraIdRemapping object will result in clearing of any previous
-     *        cameraIdRemapping state in the camera service.
-     */
-    void remapCameraIds(in CameraIdRemapping cameraIdRemapping);
+            int targetSdkVersion, int deviceId, int devicePolicy);
 
     /**
      * Inject Session Params into an existing camera session.
@@ -217,8 +230,12 @@
      * Only supported for device HAL versions >= 3.2
      *
      * @param cameraId The ID of the camera to fetch metadata for.
-     * @param overrideToPortrait Whether to override the sensor orientation information to
-     *        correspond to portrait.
+     * @param targetSdkVersion the target sdk level of the application calling this function.
+     * @param rotationOverride How to override the sensor orientation information:
+     *        {@link ICameraService#ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT} overrides the sensor
+     *        orientation to portrait and rotates and crops the camera feed, while
+     *        {@link ICameraService#ROTATION_OVERRIDE_ROTATION_ONLY} rotates and crops the camera
+     *        feed without changing the sensor orientation.
      * @param deviceId The device id of the context associated with the caller.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
@@ -227,7 +244,7 @@
      * @return Characteristics for the given camera.
      */
     CameraMetadataNative getCameraCharacteristics(@utf8InCpp String cameraId, int targetSdkVersion,
-            boolean overrideToPortrait, int deviceId, int devicePolicy);
+            int rotationOverride, int deviceId, int devicePolicy);
 
     /**
      * Read in the vendor tag descriptors from the camera module HAL.
@@ -403,8 +420,11 @@
      *
      * @param cameraId ID of the device for which the session characteristics must be fetched.
      * @param targetSdkVersion the target sdk level of the application calling this function.
-     * @param overrideToPortrait Whether to override the sensor orientation information to
-     *                           correspond to portrait.
+     * @param rotationOverride How to override the sensor orientation information:
+     *        {@link ICameraService#ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT} overrides the sensor
+     *        orientation to portrait and rotates and crops the camera feed, while
+     *        {@link ICameraService#ROTATION_OVERRIDE_ROTATION_ONLY} rotates and crops the camera
+     *        feed without changing the sensor orientation.
      * @param sessionConfiguration Session configuration for which the characteristics
      *                             must be fetched.
      * @param deviceId The device id of the context associated with the caller.
@@ -416,7 +436,7 @@
      */
     CameraMetadataNative getSessionCharacteristics(@utf8InCpp String cameraId,
             int targetSdkVersion,
-            boolean overrideToPortrait,
+            int rotationOverride,
             in SessionConfiguration sessionConfiguration,
             int deviceId,
             int devicePolicy);
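
One more note on the new ROTATION_OVERRIDE_* constants: the comment block above explains that the choice is driven by a static system property plus the sensor mounting orientation, so a boolean no longer suffices. A minimal sketch of that selection logic follows; the function name, parameters, and decision flow are assumptions used purely to illustrate the three values, not code from this change.

    // Illustrative mapping from the static portrait-override decision onto the new constants.
    #include <android/hardware/ICameraService.h>

    using ::android::hardware::ICameraService;

    int32_t chooseRotationOverride(bool portraitOverrideEnabled, int sensorOrientationDegrees) {
        if (!portraitOverrideEnabled) {
            return ICameraService::ROTATION_OVERRIDE_NONE;
        }
        if (sensorOrientationDegrees == 0 || sensorOrientationDegrees == 180) {
            // Sensor reports a landscape-natural orientation: override it to portrait.
            return ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT;
        }
        // Sensor orientation is 90/270: rotating and cropping the feed is enough.
        return ICameraService::ROTATION_OVERRIDE_ROTATION_ONLY;
    }
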
diff --git a/camera/camera2/ConcurrentCamera.cpp b/camera/camera2/ConcurrentCamera.cpp
index 67aa876..ac442ed 100644
--- a/camera/camera2/ConcurrentCamera.cpp
+++ b/camera/camera2/ConcurrentCamera.cpp
@@ -32,7 +32,8 @@
 ConcurrentCameraIdCombination::ConcurrentCameraIdCombination() = default;
 
 ConcurrentCameraIdCombination::ConcurrentCameraIdCombination(
-        std::vector<std::string> &&combination) : mConcurrentCameraIds(std::move(combination)) { }
+        std::vector<std::pair<std::string, int32_t>> &&combination)
+            : mConcurrentCameraIdDeviceIdPairs(std::move(combination)) { }
 
 ConcurrentCameraIdCombination::~ConcurrentCameraIdCombination() = default;
 
@@ -42,25 +43,29 @@
         return BAD_VALUE;
     }
     status_t err = OK;
-    mConcurrentCameraIds.clear();
-    int32_t cameraIdCount = 0;
-    if ((err = parcel->readInt32(&cameraIdCount)) != OK) {
-        ALOGE("%s: Failed to read the camera id count from parcel: %d", __FUNCTION__, err);
+    mConcurrentCameraIdDeviceIdPairs.clear();
+    int32_t cameraCount = 0;
+    if ((err = parcel->readInt32(&cameraCount)) != OK) {
+        ALOGE("%s: Failed to read the camera count from parcel: %d", __FUNCTION__, err);
         return err;
     }
-    for (int32_t i = 0; i < cameraIdCount; i++) {
-        String16 id;
-        if ((err = parcel->readString16(&id)) != OK) {
+    for (int32_t i = 0; i < cameraCount; i++) {
+        String16 cameraId;
+        if ((err = parcel->readString16(&cameraId)) != OK) {
             ALOGE("%s: Failed to read camera id!", __FUNCTION__);
             return err;
         }
-        mConcurrentCameraIds.push_back(toStdString(id));
+        int32_t deviceId;
+        if ((err = parcel->readInt32(&deviceId)) != OK) {
+            ALOGE("%s: Failed to read device id!", __FUNCTION__);
+            return err;
+        }
+        mConcurrentCameraIdDeviceIdPairs.push_back({toStdString(cameraId), deviceId});
     }
     return OK;
 }
 
 status_t ConcurrentCameraIdCombination::writeToParcel(android::Parcel* parcel) const {
-
     if (parcel == nullptr) {
         ALOGE("%s: Null parcel", __FUNCTION__);
         return BAD_VALUE;
@@ -68,16 +73,20 @@
 
     status_t err = OK;
 
-    if ((err = parcel->writeInt32(mConcurrentCameraIds.size())) != OK) {
+    if ((err = parcel->writeInt32(mConcurrentCameraIdDeviceIdPairs.size())) != OK) {
         ALOGE("%s: Failed to write the camera id count to parcel: %d", __FUNCTION__, err);
         return err;
     }
 
-    for (const auto &it : mConcurrentCameraIds) {
-        if ((err = parcel->writeString16(toString16(it))) != OK) {
+    for (const auto &it : mConcurrentCameraIdDeviceIdPairs) {
+        if ((err = parcel->writeString16(toString16(it.first))) != OK) {
             ALOGE("%s: Failed to write the camera id string to parcel: %d", __FUNCTION__, err);
             return err;
         }
+        if ((err = parcel->writeInt32(it.second)) != OK) {
+            ALOGE("%s: Failed to write the device id integer to parcel: %d", __FUNCTION__, err);
+            return err;
+        }
     }
     return OK;
 }
@@ -105,7 +114,6 @@
 }
 
 status_t CameraIdAndSessionConfiguration::writeToParcel(android::Parcel* parcel) const {
-
     if (parcel == nullptr) {
         ALOGE("%s: Null parcel", __FUNCTION__);
         return BAD_VALUE;
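
The parcel layout for ConcurrentCameraIdCombination now carries a (cameraId, deviceId) pair per entry instead of a bare camera id: an int32 count, then a String16 camera id followed by an int32 device id for each camera. A small round-trip sketch under that assumption; the fully qualified utils namespace and the device id values are assumed for illustration.

    // Round-trip the new (cameraId, deviceId) wire format; ids here are placeholders.
    #include <binder/Parcel.h>
    #include <camera/camera2/ConcurrentCamera.h>

    using ::android::Parcel;
    using ::android::hardware::camera2::utils::ConcurrentCameraIdCombination;

    bool roundTripCombination() {
        ConcurrentCameraIdCombination in({{"0", 0 /*default device*/}, {"4", 2 /*virtual device*/}});

        Parcel parcel;
        if (in.writeToParcel(&parcel) != android::OK) return false;

        parcel.setDataPosition(0);  // rewind before reading the data back
        ConcurrentCameraIdCombination out;
        return out.readFromParcel(&parcel) == android::OK &&
               out.mConcurrentCameraIdDeviceIdPairs == in.mConcurrentCameraIdDeviceIdPairs;
    }
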
diff --git a/camera/camera_platform.aconfig b/camera/camera_platform.aconfig
index 4fcceae..fe10e12 100644
--- a/camera/camera_platform.aconfig
+++ b/camera/camera_platform.aconfig
@@ -2,70 +2,70 @@
 container: "system"
 
 flag {
-     namespace: "camera_platform"
-     name: "camera_hsum_permission"
-     is_exported: true
-     description: "Camera access by headless system user"
-     bug: "273539631"
+    namespace: "camera_platform"
+    name: "camera_hsum_permission"
+    is_exported: true
+    description: "Camera access by headless system user"
+    bug: "273539631"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "concert_mode"
-     is_exported: true
-     description: "Introduces a new concert mode camera extension type"
-     bug: "297083874"
+    namespace: "camera_platform"
+    name: "concert_mode"
+    is_exported: true
+    description: "Introduces a new concert mode camera extension type"
+    bug: "297083874"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "feature_combination_query"
-     is_exported: true
-     description: "Query feature combination support and session specific characteristics"
-     bug: "309627704"
+    namespace: "camera_platform"
+    name: "feature_combination_query"
+    is_exported: true
+    description: "Query feature combination support and session specific characteristics"
+    bug: "309627704"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "watch_foreground_changes"
-     description: "Request AppOps to notify changes in the foreground status of the client"
-     bug: "290086710"
+    namespace: "camera_platform"
+    name: "watch_foreground_changes"
+    description: "Request AppOps to notify changes in the foreground status of the client"
+    bug: "290086710"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "log_ultrawide_usage"
-     description: "Enable measuring how much usage there is for ultrawide-angle cameras"
-     bug: "300515796"
+    namespace: "camera_platform"
+    name: "log_ultrawide_usage"
+    description: "Enable measuring how much usage there is for ultrawide-angle cameras"
+    bug: "300515796"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "camera_manual_flash_strength_control"
-     is_exported: true
-     description: "Flash brightness level control in manual flash mode"
-     bug: "238348881"
+    namespace: "camera_platform"
+    name: "camera_manual_flash_strength_control"
+    is_exported: true
+    description: "Flash brightness level control in manual flash mode"
+    bug: "238348881"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "lazy_aidl_wait_for_service"
-     description: "Use waitForService instead of getService with lazy AIDL HALs"
-     bug: "285546208"
+    namespace: "camera_platform"
+    name: "lazy_aidl_wait_for_service"
+    description: "Use waitForService instead of getService with lazy AIDL HALs"
+    bug: "285546208"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "log_zoom_override_usage"
-     description: "Enable measuring how much usage there is for zoom settings overrde"
-     bug: "307409002"
+    namespace: "camera_platform"
+    name: "log_zoom_override_usage"
+    description: "Enable measuring how much usage there is for zoom settings overrde"
+    bug: "307409002"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "session_hal_buf_manager"
-     description: "Enable or disable HAL buffer manager as requested by the camera HAL"
-     bug: "311263114"
+    namespace: "camera_platform"
+    name: "session_hal_buf_manager"
+    description: "Enable or disable HAL buffer manager as requested by the camera HAL"
+    bug: "311263114"
 }
 
 flag {
@@ -76,144 +76,174 @@
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "camera_ae_mode_low_light_boost"
-     is_exported: true
-     description: "An AE mode that enables increased brightening in low light scenes"
-     bug: "312803148"
+    namespace: "camera_platform"
+    name: "camera_ae_mode_low_light_boost"
+    is_exported: true
+    description: "An AE mode that enables increased brightening in low light scenes"
+    bug: "312803148"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "multiresolution_imagereader_usage_config"
-     description: "Enable creating MultiResolutionImageReader with usage flag configuration"
-     bug: "301588215"
+    namespace: "camera_platform"
+    name: "multiresolution_imagereader_usage_config"
+    description: "Enable creating MultiResolutionImageReader with usage flag configuration"
+    bug: "301588215"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "use_ro_board_api_level_for_vndk_version"
-     description: "Enable using ro.board.api_level instead of ro.vndk.version to get VNDK version"
-     bug: "312315580"
+    namespace: "camera_platform"
+    name: "use_ro_board_api_level_for_vndk_version"
+    description: "Enable using ro.board.api_level instead of ro.vndk.version to get VNDK version"
+    bug: "312315580"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "camera_extensions_characteristics_get"
-     is_exported: true
-     description: "Enable get extension specific camera characteristics API"
-     bug: "280649914"
+    namespace: "camera_platform"
+    name: "camera_extensions_characteristics_get"
+    is_exported: true
+    description: "Enable get extension specific camera characteristics API"
+    bug: "280649914"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "delay_lazy_hal_instantiation"
-     description: "Only trigger lazy HAL instantiation when the HAL is needed for an operation."
-     bug: "319735068"
+    namespace: "camera_platform"
+    name: "delay_lazy_hal_instantiation"
+    description: "Only trigger lazy HAL instantiation when the HAL is needed for an operation."
+    bug: "319735068"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "return_buffers_outside_locks"
-     description: "Enable returning graphics buffers to buffer queues without holding the in-flight mutex"
-     bug: "315526878"
+    namespace: "camera_platform"
+    name: "return_buffers_outside_locks"
+    description: "Enable returning graphics buffers to buffer queues without holding the in-flight mutex"
+    bug: "315526878"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "camera_device_setup"
-     is_exported: true
-     description: "Create an intermediate Camera Device class for limited CameraDevice access."
-     bug: "320741775"
+    namespace: "camera_platform"
+    name: "camera_device_setup"
+    is_exported: true
+    description: "Create an intermediate Camera Device class for limited CameraDevice access."
+    bug: "320741775"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "camera_privacy_allowlist"
-     is_exported: true
-     description: "Allowlisting to exempt safety-relevant cameras from privacy control for automotive devices"
-     bug: "282814430"
+    namespace: "camera_platform"
+    name: "camera_privacy_allowlist"
+    is_exported: true
+    description: "Allowlisting to exempt safety-relevant cameras from privacy control for automotive devices"
+    bug: "282814430"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "surface_ipc"
-     description: "Optimize Surface binder IPC"
-     bug: "323292530"
-     metadata {
-       purpose: PURPOSE_BUGFIX
-     }
+    namespace: "camera_platform"
+    name: "surface_ipc"
+    description: "Optimize Surface binder IPC"
+    bug: "323292530"
+    metadata {
+        purpose: PURPOSE_BUGFIX
+    }
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "extension_10_bit"
-     is_exported: true
-     description: "Enables 10-bit support in the camera extensions."
-     bug: "316375635"
+    namespace: "camera_platform"
+    name: "extension_10_bit"
+    is_exported: true
+    description: "Enables 10-bit support in the camera extensions."
+    bug: "316375635"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "single_thread_executor"
-     description: "Ensure device logic is run within one thread."
-     bug: "305857746"
-     metadata {
-       purpose: PURPOSE_BUGFIX
-     }
+    namespace: "camera_platform"
+    name: "single_thread_executor"
+    description: "Ensure device logic is run within one thread."
+    bug: "305857746"
+    metadata {
+        purpose: PURPOSE_BUGFIX
+    }
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "surface_leak_fix"
-     description: "Address Surface release leaks in CaptureRequest"
-     bug: "324071855"
-     metadata {
-       purpose: PURPOSE_BUGFIX
-     }
+    namespace: "camera_platform"
+    name: "surface_leak_fix"
+    description: "Address Surface release leaks in CaptureRequest"
+    bug: "324071855"
+    metadata {
+        purpose: PURPOSE_BUGFIX
+    }
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "concert_mode_api"
-     description: "Covers the eyes free videography public facing API"
-     bug: "297083874"
+    namespace: "camera_platform"
+    name: "concert_mode_api"
+    description: "Covers the eyes free videography public facing API"
+    bug: "297083874"
 }
 
 
 flag {
-     namespace: "camera_platform"
-     name: "cache_permission_services"
-     description: "Cache IPermissionController and IPermissionChecker in CameraService to reduce query latency."
-     bug: "326139956"
-     metadata {
-       purpose: PURPOSE_BUGFIX
-     }
+    namespace: "camera_platform"
+    name: "cache_permission_services"
+    description: "Cache IPermissionController and IPermissionChecker in CameraService to reduce query latency."
+    bug: "326139956"
+    metadata {
+        purpose: PURPOSE_BUGFIX
+    }
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "check_session_support_before_session_char"
-     description: "Validate that a SessionConfiguration is supported before fetching SessionCharacteristics."
-     bug: "327008530"
-     metadata {
-       purpose: PURPOSE_BUGFIX
-     }
+    namespace: "camera_platform"
+    name: "check_session_support_before_session_char"
+    description: "Validate that a SessionConfiguration is supported before fetching SessionCharacteristics."
+    bug: "327008530"
+    metadata {
+        purpose: PURPOSE_BUGFIX
+    }
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "calculate_perf_override_during_session_support"
-     description: "Dynamically calulate whether perf class override should be set in isSessionConfigurationWithParametersSupported."
-     bug: "332975108"
-     metadata {
-       purpose: PURPOSE_BUGFIX
-     }
+    namespace: "camera_platform"
+    name: "calculate_perf_override_during_session_support"
+    description: "Dynamically calulate whether perf class override should be set in isSessionConfigurationWithParametersSupported."
+    bug: "332975108"
+    metadata {
+        purpose: PURPOSE_BUGFIX
+    }
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "analytics_24q3"
-     description: "Miscellaneous camera platform metrics for 24Q3"
-     bug: "332557570"
+    namespace: "camera_platform"
+    name: "analytics_24q3"
+    description: "Miscellaneous camera platform metrics for 24Q3"
+    bug: "332557570"
+}
+
+flag {
+    namespace: "camera_platform"
+    name: "realtime_priority_bump"
+    description: "Bump the scheduling priority of performance critical code paths"
+    bug: "336628522"
+    metadata {
+        purpose: PURPOSE_BUGFIX
+    }
+}
+
+flag {
+    namespace: "camera_platform"
+    name: "use_system_api_for_vndk_version"
+    description: "ro.board.api_level isn't reliable. Use system api to replace ro.vndk.version"
+    bug: "312315580"
+    metadata {
+        purpose: PURPOSE_BUGFIX
+    }
+}
+
+flag {
+    namespace: "camera_platform"
+    name: "multi_res_raw_reprocessing"
+    description: "Allow multi-resolution raw reprocessing without reprocessing capability"
+    bug: "336922859"
+    metadata {
+        purpose: PURPOSE_BUGFIX
+    }
 }
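
The three new flags at the end (realtime_priority_bump, use_system_api_for_vndk_version, multi_res_raw_reprocessing) are read from native code through aconfig-generated accessors, in the same style as the virtualdevice flag library pulled in by the NDK changes below. A hedged sketch: the header and namespace follow the usual aconfig C++ codegen pattern, but the exact names depend on this file's package declaration, which sits above the hunk and is not shown here.

    // Assumed aconfig accessor names; gate the priority bump on the new flag.
    #include <com_android_internal_camera_flags.h>  // assumed generated header

    namespace flags = com::android::internal::camera::flags;  // assumed generated namespace

    void maybeBumpThreadPriority() {
        if (flags::realtime_priority_bump()) {
            // Performance-critical path: raise scheduling priority here (flag-guarded).
        }
    }
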
diff --git a/camera/include/camera/Camera.h b/camera/include/camera/Camera.h
index dfa53d2..3ecd10d 100644
--- a/camera/include/camera/Camera.h
+++ b/camera/include/camera/Camera.h
@@ -59,7 +59,7 @@
     typedef ::android::hardware::ICameraClient TCamCallbacks;
     typedef ::android::binder::Status (::android::hardware::ICameraService::*TCamConnectService)
         (const sp<::android::hardware::ICameraClient>&,
-        int, const std::string&, int, int, int, bool, bool, int32_t, int32_t,
+        int, const std::string&, int, int, int, int, bool, int32_t, int32_t,
         /*out*/
         sp<::android::hardware::ICamera>*);
     static TCamConnectService     fnConnectService;
@@ -83,7 +83,7 @@
     static  sp<Camera>  connect(int cameraId,
                                 const std::string& clientPackageName,
                                 int clientUid, int clientPid, int targetSdkVersion,
-                                bool overrideToPortrait, bool forceSlowJpegMode,
+                                int rotationOverride, bool forceSlowJpegMode,
                                 int32_t deviceId = kDefaultDeviceId, int32_t devicePolicy = 0);
 
             virtual     ~Camera();
diff --git a/camera/include/camera/CameraBase.h b/camera/include/camera/CameraBase.h
index 85ddbd6..3370b3d 100644
--- a/camera/include/camera/CameraBase.h
+++ b/camera/include/camera/CameraBase.h
@@ -125,7 +125,7 @@
     static sp<TCam>      connect(int cameraId,
                                  const std::string& clientPackageName,
                                  int clientUid, int clientPid, int targetSdkVersion,
-                                 bool overrideToPortrait, bool forceSlowJpegMode,
+                                 int rotationOverride, bool forceSlowJpegMode,
                                  int32_t deviceId, int32_t devicePolicy);
     virtual void         disconnect();
 
@@ -134,7 +134,7 @@
     static int           getNumberOfCameras(int32_t deviceId, int32_t devicePolicy);
 
     static status_t      getCameraInfo(int cameraId,
-                                       bool overrideToPortrait,
+                                       int rotationOverride,
                                        int32_t deviceId,
                                        int32_t devicePolicy,
                                        /*out*/
diff --git a/camera/include/camera/camera2/ConcurrentCamera.h b/camera/include/camera/camera2/ConcurrentCamera.h
index ac99fd5..2a65da8 100644
--- a/camera/include/camera/camera2/ConcurrentCamera.h
+++ b/camera/include/camera/camera2/ConcurrentCamera.h
@@ -28,9 +28,9 @@
 namespace utils {
 
 struct ConcurrentCameraIdCombination : public Parcelable {
-    std::vector<std::string> mConcurrentCameraIds;
+    std::vector<std::pair<std::string, int32_t>> mConcurrentCameraIdDeviceIdPairs;
     ConcurrentCameraIdCombination();
-    ConcurrentCameraIdCombination(std::vector<std::string> &&combination);
+    ConcurrentCameraIdCombination(std::vector<std::pair<std::string, int32_t>> &&combination);
     virtual ~ConcurrentCameraIdCombination();
 
     virtual status_t writeToParcel(android::Parcel *parcel) const override;
diff --git a/camera/ndk/Android.bp b/camera/ndk/Android.bp
index 421469a..5577775 100644
--- a/camera/ndk/Android.bp
+++ b/camera/ndk/Android.bp
@@ -77,6 +77,8 @@
         "impl/ACameraCaptureSession.cpp",
     ],
     shared_libs: [
+        "android.companion.virtual.virtualdevice_aidl-cpp",
+        "android.companion.virtualdevice.flags-aconfig-cc",
         "libbinder",
         "liblog",
         "libgui",
diff --git a/camera/ndk/NdkCameraManager.cpp b/camera/ndk/NdkCameraManager.cpp
index 2de4a50..1b3343e 100644
--- a/camera/ndk/NdkCameraManager.cpp
+++ b/camera/ndk/NdkCameraManager.cpp
@@ -68,7 +68,7 @@
 
 EXPORT
 camera_status_t ACameraManager_registerAvailabilityCallback(
-        ACameraManager*, const ACameraManager_AvailabilityCallbacks *callback) {
+        ACameraManager* manager, const ACameraManager_AvailabilityCallbacks* callback) {
     ATRACE_CALL();
     if (callback == nullptr) {
         ALOGE("%s: invalid argument! callback is null!", __FUNCTION__);
@@ -81,13 +81,13 @@
                callback->onCameraAvailable, callback->onCameraUnavailable);
         return ACAMERA_ERROR_INVALID_PARAMETER;
     }
-    CameraManagerGlobal::getInstance()->registerAvailabilityCallback(callback);
+    manager->registerAvailabilityCallback(callback);
     return ACAMERA_OK;
 }
 
 EXPORT
 camera_status_t ACameraManager_unregisterAvailabilityCallback(
-        ACameraManager*, const ACameraManager_AvailabilityCallbacks *callback) {
+        ACameraManager* manager, const ACameraManager_AvailabilityCallbacks* callback) {
     ATRACE_CALL();
     if (callback == nullptr) {
         ALOGE("%s: invalid argument! callback is null!", __FUNCTION__);
@@ -100,13 +100,13 @@
                callback->onCameraAvailable, callback->onCameraUnavailable);
         return ACAMERA_ERROR_INVALID_PARAMETER;
     }
-    CameraManagerGlobal::getInstance()->unregisterAvailabilityCallback(callback);
+    manager->unregisterAvailabilityCallback(callback);
     return ACAMERA_OK;
 }
 
 EXPORT
 camera_status_t ACameraManager_registerExtendedAvailabilityCallback(
-        ACameraManager* /*manager*/, const ACameraManager_ExtendedAvailabilityCallbacks *callback) {
+        ACameraManager* manager, const ACameraManager_ExtendedAvailabilityCallbacks* callback) {
     ATRACE_CALL();
     if (callback == nullptr) {
         ALOGE("%s: invalid argument! callback is null!", __FUNCTION__);
@@ -131,13 +131,13 @@
             return ACAMERA_ERROR_INVALID_PARAMETER;
         }
     }
-    CameraManagerGlobal::getInstance()->registerExtendedAvailabilityCallback(callback);
+    manager->registerExtendedAvailabilityCallback(callback);
     return ACAMERA_OK;
 }
 
 EXPORT
 camera_status_t ACameraManager_unregisterExtendedAvailabilityCallback(
-        ACameraManager* /*manager*/, const ACameraManager_ExtendedAvailabilityCallbacks *callback) {
+        ACameraManager* manager, const ACameraManager_ExtendedAvailabilityCallbacks* callback) {
     ATRACE_CALL();
     if (callback == nullptr) {
         ALOGE("%s: invalid argument! callback is null!", __FUNCTION__);
@@ -154,7 +154,7 @@
                callback->onCameraAccessPrioritiesChanged);
         return ACAMERA_ERROR_INVALID_PARAMETER;
     }
-    CameraManagerGlobal::getInstance()->unregisterExtendedAvailabilityCallback(callback);
+    manager->unregisterExtendedAvailabilityCallback(callback);
     return ACAMERA_OK;
 }
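
With the NDK entry points now dispatching through the ACameraManager instance rather than the CameraManagerGlobal singleton, per-manager state such as the device context added in ACameraManager.cpp below determines which callbacks a given manager sees. From an app's point of view the public API is unchanged; a minimal usage sketch:

    // Unchanged public NDK usage; callbacks now flow through the manager instance internally.
    #include <camera/NdkCameraManager.h>

    static void onAvailable(void* /*context*/, const char* /*cameraId*/) { /* camera became available */ }
    static void onUnavailable(void* /*context*/, const char* /*cameraId*/) { /* camera became unavailable */ }

    void watchCameras() {
        ACameraManager* manager = ACameraManager_create();

        ACameraManager_AvailabilityCallbacks cbs;
        cbs.context = nullptr;
        cbs.onCameraAvailable = onAvailable;
        cbs.onCameraUnavailable = onUnavailable;

        ACameraManager_registerAvailabilityCallback(manager, &cbs);
        // ... observe availability changes ...
        ACameraManager_unregisterAvailabilityCallback(manager, &cbs);
        ACameraManager_delete(manager);
    }
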
 
diff --git a/camera/ndk/impl/ACameraManager.cpp b/camera/ndk/impl/ACameraManager.cpp
index 0744992..f36a743 100644
--- a/camera/ndk/impl/ACameraManager.cpp
+++ b/camera/ndk/impl/ACameraManager.cpp
@@ -17,23 +17,108 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "ACameraManager"
 
-#include <memory>
 #include "ACameraManager.h"
-#include "ACameraMetadata.h"
-#include "ACameraDevice.h"
-#include <utils/Vector.h>
-#include <cutils/properties.h>
-#include <stdlib.h>
+#include <android_companion_virtualdevice_flags.h>
 #include <camera/CameraUtils.h>
 #include <camera/StringUtils.h>
 #include <camera/VendorTagDescriptor.h>
+#include <cutils/properties.h>
+#include <stdlib.h>
+#include <utils/Vector.h>
+#include <memory>
+#include "ACameraDevice.h"
+#include "ACameraMetadata.h"
 
 using namespace android::acam;
+namespace vd_flags = android::companion::virtualdevice::flags;
 
 namespace android {
 namespace acam {
+namespace {
 
-// TODO(b/291736219): Add device-awareness to ACameraManager.
+using ::android::binder::Status;
+using ::android::companion::virtualnative::IVirtualDeviceManagerNative;
+
+// Return binder connection to VirtualDeviceManager.
+//
+// Subsequent calls return the same cached instance.
+sp<IVirtualDeviceManagerNative> getVirtualDeviceManager() {
+    auto connectToVirtualDeviceManagerNative = []() {
+        sp<IBinder> binder =
+                defaultServiceManager()->checkService(String16("virtualdevice_native"));
+        if (binder == nullptr) {
+            ALOGW("%s: Cannot get virtualdevice_native service", __func__);
+            return interface_cast<IVirtualDeviceManagerNative>(nullptr);
+        }
+        return interface_cast<IVirtualDeviceManagerNative>(binder);
+    };
+
+    static sp<IVirtualDeviceManagerNative> vdm = connectToVirtualDeviceManagerNative();
+    return vdm;
+}
+
+// Returns the device id that the calling process is running on.
+// If the process cannot be attributed to a single virtual device id, returns the default device id.
+int getCurrentDeviceId() {
+    if (!vd_flags::camera_device_awareness()) {
+        return kDefaultDeviceId;
+    }
+
+    auto vdm = getVirtualDeviceManager();
+    if (vdm == nullptr) {
+        return kDefaultDeviceId;
+    }
+
+    const uid_t myUid = getuid();
+    std::vector<int> deviceIds;
+    Status status = vdm->getDeviceIdsForUid(myUid, &deviceIds);
+    if (!status.isOk() || deviceIds.empty()) {
+        ALOGE("%s: Failed to call getDeviceIdsForUid to determine device id for uid %d: %s",
+              __func__, myUid, status.toString8().c_str());
+        return kDefaultDeviceId;
+    }
+
+    // If the UID is associated with multiple virtual devices, use the default device's
+    // camera as we cannot disambiguate here. This effectively means that the app has
+    // activities on different devices at the same time.
+    if (deviceIds.size() != 1) {
+        return kDefaultDeviceId;
+    }
+    return deviceIds[0];
+}
+
+// Returns device policy for POLICY_TYPE_CAMERA corresponding to deviceId.
+DevicePolicy getDevicePolicyForDeviceId(const int deviceId) {
+    if (!vd_flags::camera_device_awareness() || deviceId == kDefaultDeviceId) {
+        return DevicePolicy::DEVICE_POLICY_DEFAULT;
+    }
+
+    auto vdm = getVirtualDeviceManager();
+    if (vdm == nullptr) {
+        return DevicePolicy::DEVICE_POLICY_DEFAULT;
+    }
+
+    int policy = IVirtualDeviceManagerNative::DEVICE_POLICY_DEFAULT;
+    Status status = vdm->getDevicePolicy(deviceId, IVirtualDeviceManagerNative::POLICY_TYPE_CAMERA,
+                                         &policy);
+    if (!status.isOk()) {
+        ALOGE("%s: Failed to call getDevicePolicy to determine camera policy for device id %d: %s",
+              __func__, deviceId, status.toString8().c_str());
+        return DevicePolicy::DEVICE_POLICY_DEFAULT;
+    }
+    return static_cast<DevicePolicy>(policy);
+}
+
+// Returns true if camera owned by device cameraDeviceId can be accessed within deviceContext.
+bool isCameraAccessible(const DeviceContext deviceContext, const int cameraDeviceId) {
+    if (!vd_flags::camera_device_awareness() ||
+        deviceContext.policy == DevicePolicy::DEVICE_POLICY_DEFAULT) {
+        return cameraDeviceId == kDefaultDeviceId;
+    }
+    return deviceContext.deviceId == cameraDeviceId;
+}
+
+}  // namespace
 
 // Static member definitions
 const char* CameraManagerGlobal::kCameraIdKey   = "CameraId";
@@ -44,6 +129,11 @@
 Mutex                CameraManagerGlobal::sLock;
 wp<CameraManagerGlobal> CameraManagerGlobal::sInstance = nullptr;
 
+DeviceContext::DeviceContext() {
+    deviceId = getCurrentDeviceId();
+    policy = getDevicePolicyForDeviceId(deviceId);
+}
+
 sp<CameraManagerGlobal> CameraManagerGlobal::getInstance() {
     Mutex::Autolock _l(sLock);
     sp<CameraManagerGlobal> instance = sInstance.promote();
@@ -128,17 +218,11 @@
         std::vector<hardware::CameraStatus> cameraStatuses{};
         mCameraService->addListener(mCameraServiceListener, &cameraStatuses);
         for (auto& c : cameraStatuses) {
-            // Skip callback for cameras not belonging to the default device, as NDK doesn't support
-            // device awareness yet.
-            if (c.deviceId != kDefaultDeviceId) {
-                continue;
-            }
-
-            onStatusChangedLocked(c.status, c.cameraId);
+            onStatusChangedLocked(c.status, c.deviceId, c.cameraId);
 
             for (auto& unavailablePhysicalId : c.unavailablePhysicalIds) {
                 onStatusChangedLocked(hardware::ICameraServiceListener::STATUS_NOT_PRESENT,
-                        c.cameraId, unavailablePhysicalId);
+                                      c.deviceId, c.cameraId, unavailablePhysicalId);
             }
         }
 
@@ -198,14 +282,15 @@
     sp<CameraManagerGlobal> cm = mCameraManager.promote();
     if (cm != nullptr) {
         AutoMutex lock(cm->mLock);
-        std::vector<std::string> cameraIdList;
+        std::vector<DeviceStatusMapKey> keysToRemove;
+        keysToRemove.reserve(cm->mDeviceStatusMap.size());
         for (auto& pair : cm->mDeviceStatusMap) {
-            cameraIdList.push_back(pair.first);
+            keysToRemove.push_back(pair.first);
         }
 
-        for (const std::string& cameraId : cameraIdList) {
-            cm->onStatusChangedLocked(
-                    CameraServiceListener::STATUS_NOT_PRESENT, cameraId);
+        for (const DeviceStatusMapKey& key : keysToRemove) {
+            cm->onStatusChangedLocked(CameraServiceListener::STATUS_NOT_PRESENT, key.deviceId,
+                                      key.cameraId);
         }
         cm->mCameraService.clear();
         // TODO: consider adding re-connect call here?
@@ -213,32 +298,35 @@
 }
 
 void CameraManagerGlobal::registerExtendedAvailabilityCallback(
-        const ACameraManager_ExtendedAvailabilityCallbacks *callback) {
-    return registerAvailCallback<ACameraManager_ExtendedAvailabilityCallbacks>(callback);
+        const DeviceContext& deviceContext,
+        const ACameraManager_ExtendedAvailabilityCallbacks* callback) {
+    return registerAvailCallback<ACameraManager_ExtendedAvailabilityCallbacks>(deviceContext,
+                                                                               callback);
 }
 
 void CameraManagerGlobal::unregisterExtendedAvailabilityCallback(
-        const ACameraManager_ExtendedAvailabilityCallbacks *callback) {
+        const DeviceContext& deviceContext,
+        const ACameraManager_ExtendedAvailabilityCallbacks* callback) {
     Mutex::Autolock _l(mLock);
 
     drainPendingCallbacksLocked();
 
-    Callback cb(callback);
+    Callback cb(deviceContext, callback);
     mCallbacks.erase(cb);
 }
 
 void CameraManagerGlobal::registerAvailabilityCallback(
-        const ACameraManager_AvailabilityCallbacks *callback) {
-    return registerAvailCallback<ACameraManager_AvailabilityCallbacks>(callback);
+        const DeviceContext& deviceContext, const ACameraManager_AvailabilityCallbacks* callback) {
+    return registerAvailCallback<ACameraManager_AvailabilityCallbacks>(deviceContext, callback);
 }
 
 void CameraManagerGlobal::unregisterAvailabilityCallback(
-        const ACameraManager_AvailabilityCallbacks *callback) {
+        const DeviceContext& deviceContext, const ACameraManager_AvailabilityCallbacks* callback) {
     Mutex::Autolock _l(mLock);
 
     drainPendingCallbacksLocked();
 
-    Callback cb(callback);
+    Callback cb(deviceContext, callback);
     mCallbacks.erase(cb);
 }
 
@@ -261,20 +349,24 @@
     }
 }
 
-template<class T>
-void CameraManagerGlobal::registerAvailCallback(const T *callback) {
+template <class T>
+void CameraManagerGlobal::registerAvailCallback(const DeviceContext& deviceContext,
+                                                const T* callback) {
     Mutex::Autolock _l(mLock);
     getCameraServiceLocked();
-    Callback cb(callback);
-    auto pair = mCallbacks.insert(cb);
+    Callback cb(deviceContext, callback);
+    const auto& [_, newlyRegistered] = mCallbacks.insert(cb);
     // Send initial callbacks if callback is newly registered
-    if (pair.second) {
-        for (auto& pair : mDeviceStatusMap) {
-            const std::string& cameraId = pair.first;
-            int32_t status = pair.second.getStatus();
+    if (newlyRegistered) {
+        for (auto& [key, statusAndHAL3Support] : mDeviceStatusMap) {
+            if (!isCameraAccessible(deviceContext, key.deviceId)) {
+                continue;
+            }
+            const std::string& cameraId = key.cameraId;
+            int32_t status = statusAndHAL3Support.getStatus();
             // Don't send initial callbacks for camera ids which don't support
             // camera2
-            if (!pair.second.supportsHAL3) {
+            if (!statusAndHAL3Support.supportsHAL3) {
                 continue;
             }
 
@@ -290,7 +382,7 @@
 
             // Physical camera unavailable callback
             std::set<std::string> unavailablePhysicalCameras =
-                    pair.second.getUnavailablePhysicalIds();
+                    statusAndHAL3Support.getUnavailablePhysicalIds();
             for (const auto& physicalCameraId : unavailablePhysicalCameras) {
                 sp<AMessage> msg = new AMessage(kWhatSendSinglePhysicalCameraCallback, mHandler);
                 ACameraManager_PhysicalCameraAvailabilityCallback cbFunc =
@@ -320,21 +412,26 @@
     return camera2Support;
 }
 
-void CameraManagerGlobal::getCameraIdList(std::vector<std::string>* cameraIds) {
+void CameraManagerGlobal::getCameraIdList(const DeviceContext& context,
+        std::vector<std::string>* cameraIds) {
     // Ensure that we have initialized/refreshed the list of available devices
     Mutex::Autolock _l(mLock);
     // Needed to make sure we're connected to cameraservice
     getCameraServiceLocked();
-    for(auto& deviceStatus : mDeviceStatusMap) {
-        int32_t status = deviceStatus.second.getStatus();
+    for (auto& [key, statusAndHAL3Support] : mDeviceStatusMap) {
+        if (!isCameraAccessible(context, key.deviceId)) {
+            continue;
+        }
+
+        int32_t status = statusAndHAL3Support.getStatus();
         if (status == hardware::ICameraServiceListener::STATUS_NOT_PRESENT ||
                 status == hardware::ICameraServiceListener::STATUS_ENUMERATING) {
             continue;
         }
-        if (!deviceStatus.second.supportsHAL3) {
+        if (!statusAndHAL3Support.supportsHAL3) {
             continue;
         }
-        cameraIds->push_back(deviceStatus.first);
+        cameraIds->push_back(key.cameraId);
     }
 }
 
@@ -471,36 +568,24 @@
 
 binder::Status CameraManagerGlobal::CameraServiceListener::onStatusChanged(
         int32_t status, const std::string& cameraId, int deviceId) {
-    // Skip callback for cameras not belonging to the default device, as NDK doesn't support
-    // device awareness yet.
-    if (deviceId != kDefaultDeviceId) {
-        return binder::Status::ok();
-    }
-
     sp<CameraManagerGlobal> cm = mCameraManager.promote();
     if (cm != nullptr) {
-        cm->onStatusChanged(status, cameraId);
-    } else {
-        ALOGE("Cannot deliver status change. Global camera manager died");
+        cm->onStatusChanged(status, deviceId, cameraId);
     }
+    ALOGE_IF(cm == nullptr,
+             "Cannot deliver physical camera status change. Global camera manager died");
     return binder::Status::ok();
 }
 
 binder::Status CameraManagerGlobal::CameraServiceListener::onPhysicalCameraStatusChanged(
         int32_t status, const std::string& cameraId, const std::string& physicalCameraId,
         int deviceId) {
-    // Skip callback for cameras not belonging to the default device, as NDK doesn't support
-    // device awareness yet.
-    if (deviceId != kDefaultDeviceId) {
-        return binder::Status::ok();
-    }
-
     sp<CameraManagerGlobal> cm = mCameraManager.promote();
     if (cm != nullptr) {
-        cm->onStatusChanged(status, cameraId, physicalCameraId);
-    } else {
-        ALOGE("Cannot deliver physical camera status change. Global camera manager died");
+        cm->onStatusChanged(status, deviceId, cameraId, physicalCameraId);
     }
+    ALOGE_IF(cm == nullptr,
+             "Cannot deliver physical camera status change. Global camera manager died");
     return binder::Status::ok();
 }
 
@@ -518,23 +603,24 @@
     }
 }
 
-void CameraManagerGlobal::onStatusChanged(
-        int32_t status, const std::string& cameraId) {
+void CameraManagerGlobal::onStatusChanged(int32_t status, const int deviceId,
+        const std::string& cameraId) {
     Mutex::Autolock _l(mLock);
-    onStatusChangedLocked(status, cameraId);
+    onStatusChangedLocked(status, deviceId, cameraId);
 }
 
-void CameraManagerGlobal::onStatusChangedLocked(
-        int32_t status, const std::string& cameraId) {
+void CameraManagerGlobal::onStatusChangedLocked(int32_t status, const int deviceId,
+        const std::string& cameraId) {
     if (!validStatus(status)) {
         ALOGE("%s: Invalid status %d", __FUNCTION__, status);
         return;
     }
 
-    bool firstStatus = (mDeviceStatusMap.count(cameraId) == 0);
-    int32_t oldStatus = firstStatus ?
-            status : // first status
-            mDeviceStatusMap[cameraId].getStatus();
+    DeviceStatusMapKey key{.deviceId = deviceId, .cameraId = cameraId};
+
+    bool firstStatus = (mDeviceStatusMap.count(key) == 0);
+    int32_t oldStatus = firstStatus ? status :  // first status
+                                mDeviceStatusMap[key].getStatus();
 
     if (!firstStatus &&
             isStatusAvailable(status) == isStatusAvailable(oldStatus)) {
@@ -544,15 +630,17 @@
 
     bool supportsHAL3 = supportsCamera2ApiLocked(cameraId);
     if (firstStatus) {
-        mDeviceStatusMap.emplace(std::piecewise_construct,
-                std::forward_as_tuple(cameraId),
-                std::forward_as_tuple(status, supportsHAL3));
+        mDeviceStatusMap.emplace(std::piecewise_construct, std::forward_as_tuple(key),
+                                 std::forward_as_tuple(status, supportsHAL3));
     } else {
-        mDeviceStatusMap[cameraId].updateStatus(status);
+        mDeviceStatusMap[key].updateStatus(status);
     }
     // Iterate through all registered callbacks
     if (supportsHAL3) {
         for (auto cb : mCallbacks) {
+            if (!isCameraAccessible(cb.mDeviceContext, deviceId)) {
+                continue;
+            }
             sp<AMessage> msg = new AMessage(kWhatSendSingleCallback, mHandler);
             ACameraManager_AvailabilityCallback cbFp = isStatusAvailable(status) ?
                     cb.mAvailable : cb.mUnavailable;
@@ -564,30 +652,31 @@
         }
     }
     if (status == hardware::ICameraServiceListener::STATUS_NOT_PRESENT) {
-        mDeviceStatusMap.erase(cameraId);
+        mDeviceStatusMap.erase(key);
     }
 }
 
-void CameraManagerGlobal::onStatusChanged(
-        int32_t status, const std::string& cameraId, const std::string& physicalCameraId) {
+void CameraManagerGlobal::onStatusChanged(int32_t status, const int deviceId,
+        const std::string& cameraId, const std::string& physicalCameraId) {
     Mutex::Autolock _l(mLock);
-    onStatusChangedLocked(status, cameraId, physicalCameraId);
+    onStatusChangedLocked(status, deviceId, cameraId, physicalCameraId);
 }
 
-void CameraManagerGlobal::onStatusChangedLocked(
-        int32_t status, const std::string& cameraId, const std::string& physicalCameraId) {
+void CameraManagerGlobal::onStatusChangedLocked(int32_t status, const int deviceId,
+        const std::string& cameraId, const std::string& physicalCameraId) {
     if (!validStatus(status)) {
         ALOGE("%s: Invalid status %d", __FUNCTION__, status);
         return;
     }
 
-    auto logicalStatus = mDeviceStatusMap.find(cameraId);
+    DeviceStatusMapKey key{.deviceId = deviceId, .cameraId = cameraId};
+    auto logicalStatus = mDeviceStatusMap.find(key);
     if (logicalStatus == mDeviceStatusMap.end()) {
         ALOGE("%s: Physical camera id %s status change on a non-present id %s",
                 __FUNCTION__, physicalCameraId.c_str(), cameraId.c_str());
         return;
     }
-    int32_t logicalCamStatus = mDeviceStatusMap[cameraId].getStatus();
+    int32_t logicalCamStatus = mDeviceStatusMap[key].getStatus();
     if (logicalCamStatus != hardware::ICameraServiceListener::STATUS_PRESENT &&
             logicalCamStatus != hardware::ICameraServiceListener::STATUS_NOT_AVAILABLE) {
         ALOGE("%s: Physical camera id %s status %d change for an invalid logical camera state %d",
@@ -599,14 +688,17 @@
 
     bool updated = false;
     if (status == hardware::ICameraServiceListener::STATUS_PRESENT) {
-        updated = mDeviceStatusMap[cameraId].removeUnavailablePhysicalId(physicalCameraId);
+        updated = mDeviceStatusMap[key].removeUnavailablePhysicalId(physicalCameraId);
     } else {
-        updated = mDeviceStatusMap[cameraId].addUnavailablePhysicalId(physicalCameraId);
+        updated = mDeviceStatusMap[key].addUnavailablePhysicalId(physicalCameraId);
     }
 
     // Iterate through all registered callbacks
     if (supportsHAL3 && updated) {
         for (auto cb : mCallbacks) {
+            if (!isCameraAccessible(cb.mDeviceContext, deviceId)) {
+                continue;
+            }
             sp<AMessage> msg = new AMessage(kWhatSendSinglePhysicalCameraCallback, mHandler);
             ACameraManager_PhysicalCameraAvailabilityCallback cbFp = isStatusAvailable(status) ?
                     cb.mPhysicalCamAvailable : cb.mPhysicalCamUnavailable;
@@ -660,7 +752,7 @@
     Mutex::Autolock _l(mLock);
 
     std::vector<std::string> idList;
-    CameraManagerGlobal::getInstance()->getCameraIdList(&idList);
+    mGlobalManager->getCameraIdList(mDeviceContext, &idList);
 
     int numCameras = idList.size();
     ACameraIdList *out = new ACameraIdList;
@@ -710,7 +802,7 @@
         const char* cameraIdStr, sp<ACameraMetadata>* characteristics) {
     Mutex::Autolock _l(mLock);
 
-    sp<hardware::ICameraService> cs = CameraManagerGlobal::getInstance()->getCameraService();
+    sp<hardware::ICameraService> cs = mGlobalManager->getCameraService();
     if (cs == nullptr) {
         ALOGE("%s: Cannot reach camera service!", __FUNCTION__);
         return ACAMERA_ERROR_CAMERA_DISCONNECTED;
@@ -719,7 +811,8 @@
     CameraMetadata rawMetadata;
     int targetSdkVersion = android_get_application_target_sdk_version();
     binder::Status serviceRet = cs->getCameraCharacteristics(cameraIdStr,
-            targetSdkVersion, /*overrideToPortrait*/false, kDefaultDeviceId, /*devicePolicy*/0,
+            targetSdkVersion, /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+            mDeviceContext.deviceId, static_cast<int32_t>(mDeviceContext.policy),
             &rawMetadata);
     if (!serviceRet.isOk()) {
         switch(serviceRet.serviceSpecificErrorCode()) {
@@ -757,7 +850,7 @@
 
     ACameraDevice* device = new ACameraDevice(cameraId, callback, chars);
 
-    sp<hardware::ICameraService> cs = CameraManagerGlobal::getInstance()->getCameraService();
+    sp<hardware::ICameraService> cs = mGlobalManager->getCameraService();
     if (cs == nullptr) {
         ALOGE("%s: Cannot reach camera service!", __FUNCTION__);
         delete device;
@@ -772,7 +865,8 @@
     binder::Status serviceRet = cs->connectDevice(
             callbacks, cameraId, "", {},
             hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/0,
-            targetSdkVersion, /*overrideToPortrait*/false, kDefaultDeviceId, /*devicePolicy*/0,
+            targetSdkVersion, /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+            mDeviceContext.deviceId, static_cast<int32_t>(mDeviceContext.policy),
             /*out*/&deviceRemote);
 
     if (!serviceRet.isOk()) {
@@ -820,6 +914,22 @@
     return ACAMERA_OK;
 }
 
-ACameraManager::~ACameraManager() {
+void ACameraManager::registerAvailabilityCallback(
+        const ACameraManager_AvailabilityCallbacks* callback) {
+    mGlobalManager->registerAvailabilityCallback(mDeviceContext, callback);
+}
 
+void ACameraManager::unregisterAvailabilityCallback(
+        const ACameraManager_AvailabilityCallbacks* callback) {
+    mGlobalManager->unregisterAvailabilityCallback(mDeviceContext, callback);
+}
+
+void ACameraManager::registerExtendedAvailabilityCallback(
+        const ACameraManager_ExtendedAvailabilityCallbacks* callback) {
+    mGlobalManager->registerExtendedAvailabilityCallback(mDeviceContext, callback);
+}
+
+void ACameraManager::unregisterExtendedAvailabilityCallback(
+        const ACameraManager_ExtendedAvailabilityCallbacks* callback) {
+    mGlobalManager->unregisterExtendedAvailabilityCallback(mDeviceContext, callback);
 }
diff --git a/camera/ndk/impl/ACameraManager.h b/camera/ndk/impl/ACameraManager.h
index c6e2bf9..d8bf6b1 100644
--- a/camera/ndk/impl/ACameraManager.h
+++ b/camera/ndk/impl/ACameraManager.h
@@ -20,6 +20,7 @@
 #include <camera/NdkCameraManager.h>
 
 #include <android-base/parseint.h>
+#include <android/companion/virtualnative/IVirtualDeviceManagerNative.h>
 #include <android/hardware/ICameraService.h>
 #include <android/hardware/BnCameraServiceListener.h>
 #include <camera/CameraMetadata.h>
@@ -37,6 +38,36 @@
 namespace android {
 namespace acam {
 
+enum class DevicePolicy {
+  DEVICE_POLICY_DEFAULT =
+    ::android::companion::virtualnative::IVirtualDeviceManagerNative::DEVICE_POLICY_DEFAULT,
+  DEVICE_POLICY_CUSTOM =
+    ::android::companion::virtualnative::IVirtualDeviceManagerNative::DEVICE_POLICY_CUSTOM
+};
+
+/**
+ * Device context within which cameras are accessed.
+ *
+ * When constructed, the device id is set to the id of the virtual device corresponding to the
+ * caller's UID (or to the default device id if the current app process is not running on a
+ * virtual device).
+ *
+ * See getDeviceId() in Context.java for more context (no pun intended).
+ */
+struct DeviceContext {
+    DeviceContext();
+
+    // Id of the virtual device associated with this context (or DEFAULT_DEVICE_ID = 0 in case
+    // the caller UID is not running on a virtual device).
+    int deviceId;
+    // Device policy corresponding to VirtualDeviceParams.POLICY_TYPE_CAMERA:
+    //
+    // Can be either:
+    // * (0) DEVICE_POLICY_DEFAULT - virtual devices have access to default device cameras.
+    // * (1) DEVICE_POLICY_CUSTOM - virtual devices do not have access to default device cameras
+    //                              and can only access virtual cameras owned by the same device.
+    DevicePolicy policy;
+};
+
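The isCameraAccessible() checks added to onStatusChangedLocked() earlier in this patch apply exactly the policy described in the comment above. The helper below is only a minimal sketch of that rule (the real implementation lives outside this hunk); kDefaultDeviceId mirrors the constant this patch already uses elsewhere.

    // Sketch only: is a camera owned by cameraOwnerDeviceId visible to this context?
    static bool isCameraAccessibleSketch(const DeviceContext& context, int cameraOwnerDeviceId) {
        constexpr int kDefaultDeviceId = 0;
        if (context.deviceId == cameraOwnerDeviceId) {
            return true;  // same virtual (or default) device
        }
        // With DEVICE_POLICY_DEFAULT, a virtual-device context may still use the
        // default device's cameras; with DEVICE_POLICY_CUSTOM it may not.
        return context.policy == DevicePolicy::DEVICE_POLICY_DEFAULT &&
               cameraOwnerDeviceId == kDefaultDeviceId;
    }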
 /**
  * Per-process singleton instance of CameraManger. Shared by all ACameraManager
  * instances. Created when first ACameraManager is created and destroyed when
@@ -49,20 +80,22 @@
     static sp<CameraManagerGlobal> getInstance();
     sp<hardware::ICameraService> getCameraService();
 
-    void registerAvailabilityCallback(
-            const ACameraManager_AvailabilityCallbacks *callback);
-    void unregisterAvailabilityCallback(
-            const ACameraManager_AvailabilityCallbacks *callback);
+    void registerAvailabilityCallback(const DeviceContext& context,
+                                      const ACameraManager_AvailabilityCallbacks* callback);
+    void unregisterAvailabilityCallback(const DeviceContext& context,
+                                        const ACameraManager_AvailabilityCallbacks* callback);
 
     void registerExtendedAvailabilityCallback(
+            const DeviceContext& context,
             const ACameraManager_ExtendedAvailabilityCallbacks* callback);
     void unregisterExtendedAvailabilityCallback(
+            const DeviceContext& context,
             const ACameraManager_ExtendedAvailabilityCallbacks* callback);
 
     /**
      * Return camera IDs that support camera2
      */
-    void getCameraIdList(std::vector<std::string> *cameraIds);
+    void getCameraIdList(const DeviceContext& deviceContext, std::vector<std::string>* cameraIds);
 
   private:
     sp<hardware::ICameraService> mCameraService;
@@ -70,8 +103,8 @@
     const char*                  kCameraServiceName      = "media.camera";
     Mutex                        mLock;
 
-    template<class T>
-    void registerAvailCallback(const T *callback);
+    template <class T>
+    void registerAvailCallback(const DeviceContext& deviceContext, const T* callback);
 
     class DeathNotifier : public IBinder::DeathRecipient {
       public:
@@ -115,29 +148,34 @@
 
     // Wrapper of ACameraManager_AvailabilityCallbacks so we can store it in std::set
     struct Callback {
-        explicit Callback(const ACameraManager_AvailabilityCallbacks *callback) :
-            mAvailable(callback->onCameraAvailable),
-            mUnavailable(callback->onCameraUnavailable),
-            mAccessPriorityChanged(nullptr),
-            mPhysicalCamAvailable(nullptr),
-            mPhysicalCamUnavailable(nullptr),
-            mContext(callback->context) {}
+        explicit Callback(const DeviceContext& deviceContext,
+                 const ACameraManager_AvailabilityCallbacks* callback)
+            : mDeviceContext(deviceContext),
+              mAvailable(callback->onCameraAvailable),
+              mUnavailable(callback->onCameraUnavailable),
+              mAccessPriorityChanged(nullptr),
+              mPhysicalCamAvailable(nullptr),
+              mPhysicalCamUnavailable(nullptr),
+              mContext(callback->context) {}
 
-        explicit Callback(const ACameraManager_ExtendedAvailabilityCallbacks *callback) :
-            mAvailable(callback->availabilityCallbacks.onCameraAvailable),
-            mUnavailable(callback->availabilityCallbacks.onCameraUnavailable),
-            mAccessPriorityChanged(callback->onCameraAccessPrioritiesChanged),
-            mPhysicalCamAvailable(callback->onPhysicalCameraAvailable),
-            mPhysicalCamUnavailable(callback->onPhysicalCameraUnavailable),
-            mContext(callback->availabilityCallbacks.context) {}
+        explicit Callback(const DeviceContext& deviceContext,
+                 const ACameraManager_ExtendedAvailabilityCallbacks* callback)
+            : mDeviceContext(deviceContext),
+              mAvailable(callback->availabilityCallbacks.onCameraAvailable),
+              mUnavailable(callback->availabilityCallbacks.onCameraUnavailable),
+              mAccessPriorityChanged(callback->onCameraAccessPrioritiesChanged),
+              mPhysicalCamAvailable(callback->onPhysicalCameraAvailable),
+              mPhysicalCamUnavailable(callback->onPhysicalCameraUnavailable),
+              mContext(callback->availabilityCallbacks.context) {}
 
         bool operator == (const Callback& other) const {
-            return (mAvailable == other.mAvailable &&
-                    mUnavailable == other.mUnavailable &&
+            return (mAvailable == other.mAvailable && mUnavailable == other.mUnavailable &&
                     mAccessPriorityChanged == other.mAccessPriorityChanged &&
                     mPhysicalCamAvailable == other.mPhysicalCamAvailable &&
                     mPhysicalCamUnavailable == other.mPhysicalCamUnavailable &&
-                    mContext == other.mContext);
+                    mContext == other.mContext &&
+                    mDeviceContext.deviceId == other.mDeviceContext.deviceId &&
+                    mDeviceContext.policy == other.mDeviceContext.policy);
         }
         bool operator != (const Callback& other) const {
             return !(*this == other);
@@ -146,6 +184,9 @@
 #pragma GCC diagnostic push
 #pragma GCC diagnostic ignored "-Wordered-compare-function-pointers"
             if (*this == other) return false;
+            if (mDeviceContext.deviceId != other.mDeviceContext.deviceId) {
+                return mDeviceContext.deviceId < other.mDeviceContext.deviceId;
+            }
             if (mContext != other.mContext) return mContext < other.mContext;
             if (mPhysicalCamAvailable != other.mPhysicalCamAvailable) {
                 return mPhysicalCamAvailable < other.mPhysicalCamAvailable;
@@ -163,6 +204,7 @@
         bool operator > (const Callback& other) const {
             return (*this != other && !(*this < other));
         }
+        DeviceContext mDeviceContext;
         ACameraManager_AvailabilityCallback mAvailable;
         ACameraManager_AvailabilityCallback mUnavailable;
         ACameraManager_AccessPrioritiesChangedCallback mAccessPriorityChanged;
@@ -204,37 +246,17 @@
 
     sp<hardware::ICameraService> getCameraServiceLocked();
     void onCameraAccessPrioritiesChanged();
-    void onStatusChanged(int32_t status, const std::string& cameraId);
-    void onStatusChangedLocked(int32_t status, const std::string& cameraId);
-    void onStatusChanged(int32_t status, const std::string& cameraId, const std::string& physicalCameraId);
-    void onStatusChangedLocked(int32_t status, const std::string& cameraId,
-           const std::string& physicalCameraId);
+    void onStatusChanged(int32_t status, int deviceId, const std::string& cameraId);
+    void onStatusChangedLocked(int32_t status, int deviceId, const std::string& cameraId);
+    void onStatusChanged(int32_t status, int deviceId, const std::string& cameraId,
+                         const std::string& physicalCameraId);
+    void onStatusChangedLocked(int32_t status, int deviceId, const std::string& cameraId,
+                               const std::string& physicalCameraId);
     // Utils for status
     static bool validStatus(int32_t status);
     static bool isStatusAvailable(int32_t status);
     bool supportsCamera2ApiLocked(const std::string &cameraId);
 
-    // The sort logic must match the logic in
-    // libcameraservice/common/CameraProviderManager.cpp::getAPI1CompatibleCameraDeviceIds
-    struct CameraIdComparator {
-        bool operator()(const std::string& a, const std::string& b) const {
-            uint32_t aUint = 0, bUint = 0;
-            bool aIsUint = base::ParseUint(a.c_str(), &aUint);
-            bool bIsUint = base::ParseUint(b.c_str(), &bUint);
-
-            // Uint device IDs first
-            if (aIsUint && bIsUint) {
-                return aUint < bUint;
-            } else if (aIsUint) {
-                return true;
-            } else if (bIsUint) {
-                return false;
-            }
-            // Simple string compare if both id are not uint
-            return a < b;
-        }
-    };
-
     struct StatusAndHAL3Support {
       private:
         int32_t status = hardware::ICameraServiceListener::STATUS_NOT_PRESENT;
@@ -253,13 +275,40 @@
         std::set<std::string> getUnavailablePhysicalIds();
     };
 
-    // Map camera_id -> status
-    std::map<std::string, StatusAndHAL3Support, CameraIdComparator> mDeviceStatusMap;
+    struct DeviceStatusMapKey {
+        int deviceId;
+        std::string cameraId;
+
+        bool operator<(const DeviceStatusMapKey& other) const {
+            if (deviceId != other.deviceId) {
+                return deviceId < other.deviceId;
+            }
+
+            // The sort logic must match the logic in
+            // libcameraservice/common/CameraProviderManager.cpp::getAPI1CompatibleCameraDeviceIds
+            uint32_t cameraIdUint = 0, otherCameraIdUint = 0;
+            bool cameraIdIsUint = base::ParseUint(cameraId.c_str(), &cameraIdUint);
+            bool otherCameraIdIsUint = base::ParseUint(other.cameraId.c_str(), &otherCameraIdUint);
+
+            // Uint device IDs first
+            if (cameraIdIsUint && otherCameraIdIsUint) {
+                return cameraIdUint < otherCameraIdUint;
+            } else if (cameraIdIsUint) {
+                return true;
+            } else if (otherCameraIdIsUint) {
+                return false;
+            }
+            // Simple string compare if both ids are not uint
+            return cameraId < other.cameraId;
+        }
+    };
+
+    std::map<DeviceStatusMapKey, StatusAndHAL3Support> mDeviceStatusMap;
 
     // For the singleton instance
     static Mutex sLock;
     static wp<CameraManagerGlobal> sInstance;
-    CameraManagerGlobal() {};
+    CameraManagerGlobal() {}
     ~CameraManagerGlobal();
 };
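The DeviceStatusMapKey ordering above sorts by deviceId first, then numeric camera ids in numeric order, then the remaining ids lexicographically. A standalone restatement of that rule, for illustration only (it uses strtoul instead of base::ParseUint):

    #include <cstdlib>
    #include <string>

    static bool keyLess(int devA, const std::string& camA, int devB, const std::string& camB) {
        if (devA != devB) return devA < devB;               // group by device id first
        char* endA = nullptr;
        char* endB = nullptr;
        unsigned long a = std::strtoul(camA.c_str(), &endA, 10);
        unsigned long b = std::strtoul(camB.c_str(), &endB, 10);
        bool aIsUint = !camA.empty() && *endA == '\0';
        bool bIsUint = !camB.empty() && *endB == '\0';
        if (aIsUint && bIsUint) return a < b;               // numeric ids in numeric order
        if (aIsUint != bIsUint) return aIsUint;             // numeric ids before string ids
        return camA < camB;                                 // otherwise plain string compare
    }
    // Resulting order: (0,"0") < (0,"2") < (0,"10") < (0,"back") < (1,"0")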
 
@@ -271,9 +320,7 @@
  * Leave outside of android namespace because it's NDK struct
  */
 struct ACameraManager {
-    ACameraManager() :
-            mGlobalManager(android::acam::CameraManagerGlobal::getInstance()) {}
-    ~ACameraManager();
+    ACameraManager() : mGlobalManager(android::acam::CameraManagerGlobal::getInstance()) {}
     camera_status_t getCameraIdList(ACameraIdList** cameraIdList);
     static void     deleteCameraIdList(ACameraIdList* cameraIdList);
 
@@ -282,6 +329,12 @@
     camera_status_t openCamera(const char* cameraId,
                                ACameraDevice_StateCallbacks* callback,
                                /*out*/ACameraDevice** device);
+    void registerAvailabilityCallback(const ACameraManager_AvailabilityCallbacks* callback);
+    void unregisterAvailabilityCallback(const ACameraManager_AvailabilityCallbacks* callback);
+    void registerExtendedAvailabilityCallback(
+            const ACameraManager_ExtendedAvailabilityCallbacks* callback);
+    void unregisterExtendedAvailabilityCallback(
+            const ACameraManager_ExtendedAvailabilityCallbacks* callback);
 
   private:
     enum {
@@ -289,6 +342,7 @@
     };
     android::Mutex         mLock;
     android::sp<android::acam::CameraManagerGlobal> mGlobalManager;
+    const android::acam::DeviceContext mDeviceContext;
 };
 
 #endif //_ACAMERA_MANAGER_H
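Since ACameraManager now carries a per-instance DeviceContext and forwards callback (un)registration through it, the public NDK surface is unchanged. As a reminder of how that surface is consumed (standard NdkCameraManager.h API; error handling elided):

    #include <camera/NdkCameraManager.h>
    #include <cstdio>

    static void onAvailable(void* /*context*/, const char* id)   { std::printf("camera %s available\n", id); }
    static void onUnavailable(void* /*context*/, const char* id) { std::printf("camera %s unavailable\n", id); }

    void watchCameras() {
        ACameraManager* mgr = ACameraManager_create();
        ACameraManager_AvailabilityCallbacks cbs = {/*context=*/nullptr, onAvailable, onUnavailable};
        // With this patch, delivery is filtered by the manager's DeviceContext.
        ACameraManager_registerAvailabilityCallback(mgr, &cbs);
        // ... run ...
        ACameraManager_unregisterAvailabilityCallback(mgr, &cbs);
        ACameraManager_delete(mgr);
    }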
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index c97059d..645a3d4 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -603,7 +603,7 @@
      * ACAMERA_SENSOR_FRAME_DURATION.</p>
      * <p>Note that the actual achievable max framerate also depends on the minimum frame
      * duration of the output streams. The max frame rate will be
-     * <code>min(aeTargetFpsRange.maxFps, 1 / max(individual stream min durations)</code>. For example,
+     * <code>min(aeTargetFpsRange.maxFps, 1 / max(individual stream min durations))</code>. For example,
      * if the application sets this key to <code>{60, 60}</code>, but the maximum minFrameDuration among
      * all configured streams is 33ms, the maximum framerate won't be 60fps, but will be
      * 30fps.</p>
@@ -2287,6 +2287,8 @@
      * <p>When low light boost is enabled by setting the AE mode to
      * 'ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY', it can dynamically apply a low light
      * boost when the light level threshold is exceeded.</p>
+     * <p>This field is present in the CaptureResult when the AE mode is set to
+     * 'ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY'. Otherwise, the field is not present.</p>
      * <p>This state indicates when low light boost is 'ACTIVE' and applied. Similarly, it can
      * indicate when it is not being applied by returning 'INACTIVE'.</p>
      * <p>This key will be absent from the CaptureResult if AE mode is not set to
@@ -2427,29 +2429,33 @@
      *
      * <p>Flash strength level to use in capture mode i.e. when the applications control
      * flash with either SINGLE or TORCH mode.</p>
-     * <p>Use android.flash.info.singleStrengthMaxLevel and
-     * android.flash.info.torchStrengthMaxLevel to check whether the device supports
+     * <p>Use ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL and
+     * ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL to check whether the device supports
      * flash strength control or not.
      * If the values of android.flash.info.singleStrengthMaxLevel and
-     * android.flash.info.torchStrengthMaxLevel are greater than 1,
+     * ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL are greater than 1,
      * then the device supports manual flash strength control.</p>
      * <p>If the ACAMERA_FLASH_MODE <code>==</code> TORCH the value must be &gt;= 1
-     * and &lt;= android.flash.info.torchStrengthMaxLevel.
+     * and &lt;= ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL.
      * If the application doesn't set the key and
-     * android.flash.info.torchStrengthMaxLevel &gt; 1,
+     * ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL &gt; 1,
      * then the flash will be fired at the default level set by HAL in
-     * android.flash.info.torchStrengthDefaultLevel.
+     * ACAMERA_FLASH_TORCH_STRENGTH_DEFAULT_LEVEL.
      * If the ACAMERA_FLASH_MODE <code>==</code> SINGLE, then the value must be &gt;= 1
-     * and &lt;= android.flash.info.singleStrengthMaxLevel.
+     * and &lt;= ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL.
      * If the application does not set this key and
-     * android.flash.info.singleStrengthMaxLevel &gt; 1,
+     * ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL &gt; 1,
      * then the flash will be fired at the default level set by HAL
-     * in android.flash.info.singleStrengthDefaultLevel.
+     * in ACAMERA_FLASH_SINGLE_STRENGTH_DEFAULT_LEVEL.
      * If ACAMERA_CONTROL_AE_MODE is set to any of ON_AUTO_FLASH, ON_ALWAYS_FLASH,
      * ON_AUTO_FLASH_REDEYE, ON_EXTERNAL_FLASH values, then the strengthLevel will be ignored.</p>
      *
      * @see ACAMERA_CONTROL_AE_MODE
      * @see ACAMERA_FLASH_MODE
+     * @see ACAMERA_FLASH_SINGLE_STRENGTH_DEFAULT_LEVEL
+     * @see ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL
+     * @see ACAMERA_FLASH_TORCH_STRENGTH_DEFAULT_LEVEL
+     * @see ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL
      */
     ACAMERA_FLASH_STRENGTH_LEVEL =                              // int32
             ACAMERA_FLASH_START + 6,
@@ -2485,9 +2491,11 @@
      * </ul></p>
      *
      * <p>If flash unit is available this will be greater than or equal to 1 and less
-     * or equal to <code>android.flash.info.singleStrengthMaxLevel</code>.
+     * or equal to ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL.
      * Note for devices that do not support the manual flash strength control
      * feature, this level will always be equal to 1.</p>
+     *
+     * @see ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL
      */
     ACAMERA_FLASH_SINGLE_STRENGTH_DEFAULT_LEVEL =               // int32
             ACAMERA_FLASH_START + 8,
@@ -2507,12 +2515,14 @@
      * otherwise the value will be equal to 1.</p>
      * <p>Note that this level is just a number of supported levels(the granularity of control).
      * There is no actual physical power units tied to this level.
-     * There is no relation between android.flash.info.torchStrengthMaxLevel and
-     * android.flash.info.singleStrengthMaxLevel i.e. the ratio of
-     * android.flash.info.torchStrengthMaxLevel:android.flash.info.singleStrengthMaxLevel
+     * There is no relation between ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL and
+     * ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL i.e. the ratio of
+     * ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL:ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL
      * is not guaranteed to be the ratio of actual brightness.</p>
      *
      * @see ACAMERA_FLASH_MODE
+     * @see ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL
+     * @see ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL
      */
     ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL =                    // int32
             ACAMERA_FLASH_START + 9,
@@ -2527,9 +2537,11 @@
      * </ul></p>
      *
      * <p>If flash unit is available this will be greater than or equal to 1 and less
-     * or equal to android.flash.info.torchStrengthMaxLevel.
+     * or equal to ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL.
      * Note for the devices that do not support the manual flash strength control feature,
      * this level will always be equal to 1.</p>
+     *
+     * @see ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL
      */
     ACAMERA_FLASH_TORCH_STRENGTH_DEFAULT_LEVEL =                // int32
             ACAMERA_FLASH_START + 10,
@@ -8292,7 +8304,10 @@
      * FPS.</p>
      * <p>If the session configuration is not supported, the AE mode reported in the
      * CaptureResult will be 'ON' instead of 'ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY'.</p>
-     * <p>The application can observe the CapturerResult field
+     * <p>When this AE mode is enabled, the CaptureResult field
+     * ACAMERA_CONTROL_LOW_LIGHT_BOOST_STATE will be present and not null. Otherwise, the
+     * ACAMERA_CONTROL_LOW_LIGHT_BOOST_STATE field will not be present in the CaptureResult.</p>
+     * <p>The application can observe the CaptureResult field
      * ACAMERA_CONTROL_LOW_LIGHT_BOOST_STATE to determine when low light boost is 'ACTIVE' or
      * 'INACTIVE'.</p>
      * <p>The low light boost is 'ACTIVE' once the scene lighting condition is less than the
diff --git a/camera/ndk/ndk_vendor/impl/ACameraManager.cpp b/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
index 099786b..cdba8ff 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
@@ -574,7 +574,7 @@
 void CameraManagerGlobal::onStatusChangedLocked(
         const CameraDeviceStatus &status, const std::string &cameraId) {
     if (!validStatus(status)) {
-        ALOGE("%s: Invalid status %d", __FUNCTION__, status);
+        ALOGE("%s: Invalid status %d", __FUNCTION__, static_cast<int>(status));
         return;
     }
 
@@ -629,7 +629,7 @@
         const CameraDeviceStatus &status, const std::string& cameraId,
         const std::string& physicalCameraId) {
     if (!validStatus(status)) {
-        ALOGE("%s: Invalid status %d", __FUNCTION__, status);
+        ALOGE("%s: Invalid status %d", __FUNCTION__, static_cast<int>(status));
         return;
     }
 
@@ -643,7 +643,8 @@
     if (logicalCamStatus != CameraDeviceStatus::STATUS_PRESENT &&
             logicalCamStatus != CameraDeviceStatus::STATUS_NOT_AVAILABLE) {
         ALOGE("%s: Physical camera id %s status %d change for an invalid logical camera state %d",
-                __FUNCTION__, physicalCameraId.c_str(), status, logicalCamStatus);
+              __FUNCTION__, physicalCameraId.c_str(), static_cast<int>(status),
+              static_cast<int>(logicalCamStatus));
         return;
     }
 
@@ -866,6 +867,25 @@
     return status == OK ? ACAMERA_OK : ACAMERA_ERROR_METADATA_NOT_FOUND;
 }
 
-ACameraManager::~ACameraManager() {
+void ACameraManager::registerAvailabilityCallback(
+        const ACameraManager_AvailabilityCallbacks* callback) {
+    mGlobalManager->registerAvailabilityCallback(callback);
+}
 
+void ACameraManager::unregisterAvailabilityCallback(
+        const ACameraManager_AvailabilityCallbacks* callback) {
+    mGlobalManager->unregisterAvailabilityCallback(callback);
+}
+
+void ACameraManager::registerExtendedAvailabilityCallback(
+        const ACameraManager_ExtendedAvailabilityCallbacks* callback) {
+    mGlobalManager->registerExtendedAvailabilityCallback(callback);
+}
+
+void ACameraManager::unregisterExtendedAvailabilityCallback(
+        const ACameraManager_ExtendedAvailabilityCallbacks* callback) {
+    mGlobalManager->unregisterExtendedAvailabilityCallback(callback);
+}
+
+ACameraManager::~ACameraManager() {
 }
diff --git a/camera/ndk/ndk_vendor/impl/ACameraManager.h b/camera/ndk/ndk_vendor/impl/ACameraManager.h
index 85acee7..2d8eefa 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraManager.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraManager.h
@@ -265,6 +265,12 @@
                                ACameraDevice_StateCallbacks* callback,
                                /*out*/ACameraDevice** device);
     camera_status_t getTagFromName(const char *cameraId, const char *name, uint32_t *tag);
+    void registerAvailabilityCallback(const ACameraManager_AvailabilityCallbacks* callback);
+    void unregisterAvailabilityCallback(const ACameraManager_AvailabilityCallbacks* callback);
+    void registerExtendedAvailabilityCallback(
+            const ACameraManager_ExtendedAvailabilityCallbacks* callback);
+    void unregisterExtendedAvailabilityCallback(
+            const ACameraManager_ExtendedAvailabilityCallbacks* callback);
 
   private:
     enum {
diff --git a/camera/tests/fuzzer/camera_c2ConcurrentCamera_fuzzer.cpp b/camera/tests/fuzzer/camera_c2ConcurrentCamera_fuzzer.cpp
index 12b5bc3..c00f2ba 100644
--- a/camera/tests/fuzzer/camera_c2ConcurrentCamera_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_c2ConcurrentCamera_fuzzer.cpp
@@ -15,6 +15,7 @@
  */
 
 #include <camera2/ConcurrentCamera.h>
+#include <CameraUtils.h>
 #include <fuzzer/FuzzedDataProvider.h>
 #include "camera2common.h"
 
@@ -33,7 +34,8 @@
         size_t concurrentCameraIdSize = fdp.ConsumeIntegralInRange<size_t>(kRangeMin, kRangeMax);
         for (size_t idx = 0; idx < concurrentCameraIdSize; ++idx) {
             string concurrentCameraId = fdp.ConsumeRandomLengthString();
-            camIdCombination.mConcurrentCameraIds.push_back(concurrentCameraId);
+            camIdCombination.mConcurrentCameraIdDeviceIdPairs.push_back(
+                    {concurrentCameraId, kDefaultDeviceId});
         }
     }
 
diff --git a/camera/tests/fuzzer/camera_fuzzer.cpp b/camera/tests/fuzzer/camera_fuzzer.cpp
index 0812096..b0f59f1 100644
--- a/camera/tests/fuzzer/camera_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_fuzzer.cpp
@@ -89,6 +89,7 @@
     bool initCamera();
     void invokeCamera();
     void invokeSetParameters();
+    native_handle_t* createNativeHandle();
     sp<Camera> mCamera = nullptr;
     FuzzedDataProvider* mFDP = nullptr;
 
@@ -103,6 +104,18 @@
     };
 };
 
+native_handle_t* CameraFuzzer::createNativeHandle() {
+    int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kMinElements, kMaxElements);
+    int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
+    native_handle_t* handle = native_handle_create(numFds, numInts);
+    for (int32_t i = 0; i < numFds; ++i) {
+        std::string filename = mFDP->ConsumeRandomLengthString(kMaxBytes);
+        // O_CREAT requires an explicit file mode.
+        int32_t fd = open(filename.c_str(), O_RDWR | O_CREAT | O_TRUNC, 0644);
+        handle->data[i] = fd;
+    }
+    return handle;
+}
+
 bool CameraFuzzer::initCamera() {
     ProcessState::self()->startThreadPool();
     sp<IServiceManager> sm = defaultServiceManager();
@@ -291,15 +304,11 @@
                 },
                 [&]() {
                     int64_t timestamp = mFDP->ConsumeIntegral<int64_t>();
-                    int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                    int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                    native_handle_t* handle = native_handle_create(numFds, numInts);
+                    native_handle_t* handle = createNativeHandle();
                     mCamera->recordingFrameHandleCallbackTimestamp(timestamp, handle);
                 },
                 [&]() {
-                    int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                    int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                    native_handle_t* handle = native_handle_create(numFds, numInts);
+                    native_handle_t* handle = createNativeHandle();
                     mCamera->releaseRecordingFrameHandle(handle);
                 },
                 [&]() { mCamera->releaseRecordingFrame(iMem); },
@@ -308,9 +317,7 @@
                     for (int8_t i = 0;
                          i < mFDP->ConsumeIntegralInRange<int8_t>(kMinElements, kMaxElements);
                          ++i) {
-                        int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                        int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                        native_handle_t* handle = native_handle_create(numFds, numInts);
+                        native_handle_t* handle = createNativeHandle();
                         handles.push_back(handle);
                     }
                     mCamera->releaseRecordingFrameHandleBatch(handles);
@@ -320,9 +327,7 @@
                     for (int8_t i = 0;
                          i < mFDP->ConsumeIntegralInRange<int8_t>(kMinElements, kMaxElements);
                          ++i) {
-                        int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                        int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                        native_handle_t* handle = native_handle_create(numFds, numInts);
+                        native_handle_t* handle = createNativeHandle();
                         handles.push_back(handle);
                     }
                     std::vector<nsecs_t> timestamps;
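The createNativeHandle() helper above builds handles with native_handle_create() and populates one fd per slot. For reference, the matching teardown for such handles (from cutils/native_handle.h) is sketched below; whether the fuzzer relies on the callee to release them is outside this hunk.

    #include <cutils/native_handle.h>

    // Sketch: release a handle produced by a helper like createNativeHandle().
    static void releaseNativeHandle(native_handle_t* handle) {
        if (handle == nullptr) return;
        native_handle_close(handle);   // closes every fd in handle->data[0..numFds)
        native_handle_delete(handle);  // frees the native_handle_t itself
    }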
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index 28670b1..03c765a 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -362,8 +362,8 @@
         const ui::DisplayState& displayState,
         const sp<IGraphicBufferProducer>& bufferProducer,
         sp<IBinder>* pDisplayHandle, sp<SurfaceControl>* mirrorRoot) {
-    sp<IBinder> dpy = SurfaceComposerClient::createDisplay(
-            String8("ScreenRecorder"), gSecureDisplay);
+    static const std::string kDisplayName("ScreenRecorder");
+    sp<IBinder> dpy = SurfaceComposerClient::createVirtualDisplay(kDisplayName, gSecureDisplay);
     SurfaceComposerClient::Transaction t;
     t.setDisplaySurface(dpy, bufferProducer);
     setDisplayProjection(t, dpy, displayState);
@@ -797,7 +797,7 @@
     sp<Overlay> overlay;
 
     ~RecordingData() {
-        if (dpy != nullptr) SurfaceComposerClient::destroyDisplay(dpy);
+        if (dpy != nullptr) SurfaceComposerClient::destroyVirtualDisplay(dpy);
         if (overlay != nullptr) overlay->stop();
         if (encoder != nullptr) {
             encoder->stop();
diff --git a/media/OWNERS b/media/OWNERS
index 976fb9e..b926075 100644
--- a/media/OWNERS
+++ b/media/OWNERS
@@ -14,5 +14,8 @@
 taklee@google.com
 wonsik@google.com
 
+# For TEST_MAPPING tv-presubmit and tv-postsubmit configurations:
+per-file TEST_MAPPING = blindahl@google.com
+
 # go/android-fwk-media-solutions for info on areas of ownership.
 include platform/frameworks/av:/media/janitors/media_solutions_OWNERS
diff --git a/media/TEST_MAPPING b/media/TEST_MAPPING
index cd5d354..1a637ac 100644
--- a/media/TEST_MAPPING
+++ b/media/TEST_MAPPING
@@ -44,5 +44,16 @@
             ],
             "file_patterns": ["(?i)drm|crypto"]
         }
+    ],
+    // Postsubmit tests for TV devices
+    "tv-postsubmit": [
+        {
+            "name": "CtsMediaDecoderTestCases",
+            "options": [
+                {
+                    "include-filter": "android.media.decoder.cts.DecoderRenderTest"
+                }
+            ]
+        }
     ]
 }
diff --git a/media/aconfig/codec_fwk.aconfig b/media/aconfig/codec_fwk.aconfig
index b3c02eb..d662585 100644
--- a/media/aconfig/codec_fwk.aconfig
+++ b/media/aconfig/codec_fwk.aconfig
@@ -44,6 +44,16 @@
 }
 
 flag {
+  name: "input_surface_throttle"
+  namespace: "codec_fwk"
+  description: "Bugfix flag for input surface throttle"
+  bug: "342269852"
+  metadata {
+    purpose: PURPOSE_BUGFIX
+  }
+}
+
+flag {
   name: "large_audio_frame_finish"
   namespace: "codec_fwk"
   description: "Implementation flag for large audio frame finishing tasks"
@@ -101,7 +111,17 @@
   name: "set_state_early"
   namespace: "codec_fwk"
   description: "Bugfix flag for setting state early to avoid a race condition"
-  bug: "298613711"
+  bug: "298613712"
+  metadata {
+    purpose: PURPOSE_BUGFIX
+  }
+}
+
+flag {
+  name: "stop_hal_before_surface"
+  namespace: "codec_fwk"
+  description: "Bugfix flag for setting state early to avoid a race condition"
+  bug: "339247977"
   metadata {
     purpose: PURPOSE_BUGFIX
   }
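The new codec_fwk flags are read from C++ through the generated aconfig flag library (the aconfig_mediacodec_flags_c_lib dependency that appears later in this patch). The generated header and namespace are derived from the package declared at the top of codec_fwk.aconfig, which is outside this hunk, so the snippet below uses placeholder names purely for illustration:

    // Placeholder header/namespace; the real ones come from the aconfig package declaration.
    #include <codec_fwk_flags.h>

    void configureInputSurface() {
        if (codec_fwk_flags::input_surface_throttle()) {
            // flag-guarded bugfix path
        } else {
            // legacy behavior
        }
    }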
diff --git a/media/audio/aconfig/aaudio.aconfig b/media/audio/aconfig/aaudio.aconfig
index c160109..f9fb4c7 100644
--- a/media/audio/aconfig/aaudio.aconfig
+++ b/media/audio/aconfig/aaudio.aconfig
@@ -11,3 +11,10 @@
     description: "Enable the AAudio sample rate converter."
     bug: "219533889"
 }
+
+flag {
+    name: "start_stop_client_from_command_thread"
+    namespace: "media_audio"
+    description: "Start or stop client from command thread."
+    bug: "341627085"
+}
diff --git a/media/audio/aconfig/audio.aconfig b/media/audio/aconfig/audio.aconfig
index 1b845d2..c642a94 100644
--- a/media/audio/aconfig/audio.aconfig
+++ b/media/audio/aconfig/audio.aconfig
@@ -6,6 +6,14 @@
 container: "system"
 
 flag {
+    name: "abs_volume_index_fix"
+    namespace: "media_audio"
+    description:
+        "Fix double attenuation and index jumps in absolute volume mode"
+    bug: "340693050"
+}
+
+flag {
     name: "alarm_min_volume_zero"
     namespace: "media_audio"
     description: "Support configuring alarm min vol to zero"
@@ -52,6 +60,14 @@
 }
 
 flag {
+    name: "port_to_piid_simplification"
+    namespace: "media_audio"
+    description: "PAM only needs for each piid the last portId mapping"
+    bug: "335747248"
+
+}
+
+flag {
     name: "ringer_mode_affects_alarm"
     namespace: "media_audio"
     description:
diff --git a/media/audio/aconfig/audioserver.aconfig b/media/audio/aconfig/audioserver.aconfig
index 5c6504f..94b60e2 100644
--- a/media/audio/aconfig/audioserver.aconfig
+++ b/media/audio/aconfig/audioserver.aconfig
@@ -22,6 +22,15 @@
 }
 
 flag {
+    name: "fix_concurrent_playback_behavior_with_bit_perfect_client"
+    namespace: "media_audio"
+    description:
+        "Treat playback use cases differently when bit-perfect client is active to improve the "
+        "user experience with bit-perfect playback."
+    bug: "339515899"
+}
+
+flag {
     name: "mutex_priority_inheritance"
     namespace: "media_audio"
     description:
@@ -30,3 +39,12 @@
         "This feature helps reduce audio glitching caused by low priority blocking threads."
     bug: "209491695"
 }
+
+flag {
+    name: "fix_input_sharing_logic"
+    namespace: "media_audio"
+    description:
+        "Fix the audio policy logic that decides to reuse or close "
+        "input streams when resources are exhausted"
+    bug: "338446410"
+}
diff --git a/media/codec2/components/base/Android.bp b/media/codec2/components/base/Android.bp
index 4b189b4..2b59ee3 100644
--- a/media/codec2/components/base/Android.bp
+++ b/media/codec2/components/base/Android.bp
@@ -43,7 +43,7 @@
     ],
 
     static_libs: [
-        "libyuv_static", // for conversion routines
+        "libyuv", // for conversion routines
     ],
 
     shared_libs: [
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.cpp b/media/codec2/components/flac/C2SoftFlacEnc.cpp
index 7b63e75..780660e 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.cpp
+++ b/media/codec2/components/flac/C2SoftFlacEnc.cpp
@@ -155,7 +155,7 @@
     mSignalledError = false;
     mSignalledOutputEos = false;
     mIsFirstFrame = true;
-    mAnchorTimeStamp = 0ull;
+    mAnchorTimeStamp = 0;
     mProcessedSamples = 0u;
     mEncoderWriteData = false;
     mEncoderReturnedNbBytes = 0;
@@ -186,7 +186,7 @@
     mSignalledError = false;
     mSignalledOutputEos = false;
     mIsFirstFrame = true;
-    mAnchorTimeStamp = 0ull;
+    mAnchorTimeStamp = 0;
     mProcessedSamples = 0u;
     mEncoderWriteData = false;
     mEncoderReturnedNbBytes = 0;
@@ -236,7 +236,7 @@
               inSize, (int)work->input.ordinal.timestamp.peeku(),
               (int)work->input.ordinal.frameIndex.peeku(), work->input.flags);
     if (mIsFirstFrame && inSize) {
-        mAnchorTimeStamp = work->input.ordinal.timestamp.peekull();
+        mAnchorTimeStamp = work->input.ordinal.timestamp.peekll();
         mIsFirstFrame = false;
     }
 
@@ -405,7 +405,7 @@
     C2WriteView wView = mOutputBlock->map().get();
     uint8_t* outData = wView.data();
     const uint32_t sampleRate = mIntf->getSampleRate();
-    const uint64_t outTimeStamp = mProcessedSamples * 1000000ll / sampleRate;
+    const int64_t outTimeStamp = mProcessedSamples * 1000000ll / sampleRate;
     ALOGV("writing %zu bytes of encoded data on output", bytes);
     // increment mProcessedSamples to maintain audio synchronization during
     // play back
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.h b/media/codec2/components/flac/C2SoftFlacEnc.h
index 1f3be3c..ed9c298 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.h
+++ b/media/codec2/components/flac/C2SoftFlacEnc.h
@@ -72,7 +72,7 @@
     bool mSignalledOutputEos;
     uint32_t mBlockSize;
     bool mIsFirstFrame;
-    uint64_t mAnchorTimeStamp;
+    int64_t mAnchorTimeStamp;
     uint64_t mProcessedSamples;
     // should the data received by the callback be written to the output port
     bool mEncoderWriteData;
diff --git a/media/codec2/components/gav1/Android.bp b/media/codec2/components/gav1/Android.bp
index 9781b6d..f22490d 100644
--- a/media/codec2/components/gav1/Android.bp
+++ b/media/codec2/components/gav1/Android.bp
@@ -23,7 +23,7 @@
     srcs: ["C2SoftGav1Dec.cpp"],
     static_libs: [
         "libgav1",
-        "libyuv_static",
+        "libyuv",
     ],
 
     apex_available: [
diff --git a/media/codec2/components/mpeg2/Android.bp b/media/codec2/components/mpeg2/Android.bp
index a58044c..e644ee3 100644
--- a/media/codec2/components/mpeg2/Android.bp
+++ b/media/codec2/components/mpeg2/Android.bp
@@ -14,6 +14,10 @@
         "libcodec2_soft_sanitize_signed-defaults",
     ],
 
+    cflags: [
+        "-DKEEP_THREADS_ACTIVE=0",
+    ],
+
     srcs: ["C2SoftMpeg2Dec.cpp"],
 
     static_libs: ["libmpeg2dec"],
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
index 491098d..562dcf5 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
@@ -16,6 +16,9 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "C2SoftMpeg2Dec"
+#ifndef KEEP_THREADS_ACTIVE
+#define KEEP_THREADS_ACTIVE 0
+#endif
 #include <log/log.h>
 
 #include <media/stagefright/foundation/MediaDefs.h>
@@ -433,7 +436,7 @@
 
     s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_size = sizeof(ivdext_fill_mem_rec_ip_t);
     s_fill_mem_ip.u4_share_disp_buf = 0;
-    s_fill_mem_ip.u4_keep_threads_active = 1;
+    s_fill_mem_ip.u4_keep_threads_active = KEEP_THREADS_ACTIVE;
     s_fill_mem_ip.e_output_format = mIvColorformat;
     s_fill_mem_ip.u4_deinterlace = 1;
     s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.e_cmd = IV_CMD_FILL_NUM_MEM_REC;
@@ -475,7 +478,7 @@
     s_init_ip.s_ivd_init_ip_t.u4_frm_max_ht = mHeight;
     s_init_ip.u4_share_disp_buf = 0;
     s_init_ip.u4_deinterlace = 1;
-    s_init_ip.u4_keep_threads_active = 1;
+    s_init_ip.u4_keep_threads_active = KEEP_THREADS_ACTIVE;
     s_init_ip.s_ivd_init_ip_t.u4_num_mem_rec = mNumMemRecords;
     s_init_ip.s_ivd_init_ip_t.e_output_format = mIvColorformat;
     s_init_op.s_ivd_init_op_t.u4_size = sizeof(ivdext_init_op_t);
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.cpp b/media/codec2/components/vpx/C2SoftVpxDec.cpp
index dab7b89..318f093 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxDec.cpp
@@ -446,6 +446,7 @@
     {
         IntfImpl::Lock lock = mIntf->lock();
         mPixelFormatInfo = mIntf->getPixelFormat_l();
+        mColorAspects = mIntf->getDefaultColorAspects_l();
     }
 
     mWidth = 320;
@@ -591,6 +592,41 @@
         return;
     }
 
+    // handle dynamic config parameters
+    {
+        IntfImpl::Lock lock = mIntf->lock();
+        std::shared_ptr<C2StreamColorAspectsTuning::output> defaultColorAspects =
+            mIntf->getDefaultColorAspects_l();
+        lock.unlock();
+
+        if (mColorAspects->range != defaultColorAspects->range ||
+            mColorAspects->primaries != defaultColorAspects->primaries ||
+            mColorAspects->matrix != defaultColorAspects->matrix ||
+            mColorAspects->transfer != defaultColorAspects->transfer) {
+
+            mColorAspects->range = defaultColorAspects->range;
+            mColorAspects->primaries = defaultColorAspects->primaries;
+            mColorAspects->matrix = defaultColorAspects->matrix;
+            mColorAspects->transfer = defaultColorAspects->transfer;
+
+            C2StreamColorAspectsTuning::output colorAspect(0u, defaultColorAspects->range,
+                defaultColorAspects->primaries, defaultColorAspects->transfer,
+                defaultColorAspects->matrix);
+            std::vector<std::unique_ptr<C2SettingResult>> failures;
+            c2_status_t err = mIntf->config({&colorAspect}, C2_MAY_BLOCK, &failures);
+            if (err == C2_OK) {
+                work->worklets.front()->output.configUpdate.push_back(
+                    C2Param::Copy(colorAspect));
+            } else {
+                ALOGE("Config update colorAspect failed");
+                mSignalledError = true;
+                work->workletsProcessed = 1u;
+                work->result = C2_CORRUPTED;
+                return;
+            }
+        }
+    }
+
     size_t inOffset = 0u;
     size_t inSize = 0u;
     C2ReadView rView = mDummyReadView;
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.h b/media/codec2/components/vpx/C2SoftVpxDec.h
index e9d6dc9..93cc213 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.h
+++ b/media/codec2/components/vpx/C2SoftVpxDec.h
@@ -66,6 +66,7 @@
     // configurations used by component in process
     // (TODO: keep this in intf but make them internal only)
     std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormatInfo;
+    std::shared_ptr<C2StreamColorAspectsTuning::output> mColorAspects;
 
     std::shared_ptr<IntfImpl> mIntf;
     vpx_codec_ctx_t *mCodecCtx;
diff --git a/media/codec2/core/Android.bp b/media/codec2/core/Android.bp
index 7d5740b..c205dcd 100644
--- a/media/codec2/core/Android.bp
+++ b/media/codec2/core/Android.bp
@@ -26,9 +26,6 @@
         "//apex_available:platform",
         "com.android.media.swcodec",
     ],
-    vndk: {
-        enabled: true,
-    },
     double_loadable: true,
 
     srcs: ["C2.cpp"],
diff --git a/media/codec2/fuzzer/Android.bp b/media/codec2/fuzzer/Android.bp
index b387b2c..ec77427 100644
--- a/media/codec2/fuzzer/Android.bp
+++ b/media/codec2/fuzzer/Android.bp
@@ -163,7 +163,7 @@
 
     static_libs: [
         "libgav1",
-        "libyuv_static",
+        "libyuv",
         "libcodec2_soft_av1dec_gav1",
     ],
 }
diff --git a/media/codec2/hal/aidl/Android.bp b/media/codec2/hal/aidl/Android.bp
index 48b6e21..e16e2b1 100644
--- a/media/codec2/hal/aidl/Android.bp
+++ b/media/codec2/hal/aidl/Android.bp
@@ -8,6 +8,7 @@
     name: "libcodec2_aidl_client",
 
     defaults: [
+        "aconfig_lib_cc_static_link.defaults",
         "libcodec2_hal_selection",
     ],
 
@@ -65,6 +66,7 @@
     ],
 
     defaults: [
+        "aconfig_lib_cc_static_link.defaults",
         "libcodec2_hal_selection",
     ],
 
diff --git a/media/codec2/hal/client/GraphicsTracker.cpp b/media/codec2/hal/client/GraphicsTracker.cpp
index 1c2a0fb..dbbabfe 100644
--- a/media/codec2/hal/client/GraphicsTracker.cpp
+++ b/media/codec2/hal/client/GraphicsTracker.cpp
@@ -173,7 +173,7 @@
 }
 
 GraphicsTracker::GraphicsTracker(int maxDequeueCount)
-    : mBufferCache(new BufferCache()), mMaxDequeue{maxDequeueCount},
+    : mBufferCache(new BufferCache()), mNumDequeueing{0}, mMaxDequeue{maxDequeueCount},
     mMaxDequeueCommitted{maxDequeueCount},
     mDequeueable{maxDequeueCount},
     mTotalDequeued{0}, mTotalCancelled{0}, mTotalDropped{0}, mTotalReleased{0},
@@ -235,6 +235,7 @@
         const sp<IGraphicBufferProducer>& igbp, uint32_t generation) {
     // TODO: wait until operations to previous IGBP is completed.
     std::shared_ptr<BufferCache> prevCache;
+    int prevDequeueRequested = 0;
     int prevDequeueCommitted;
 
     std::unique_lock<std::mutex> cl(mConfigLock);
@@ -243,6 +244,9 @@
         mInConfig = true;
         prevCache = mBufferCache;
         prevDequeueCommitted = mMaxDequeueCommitted;
+        if (mMaxDequeueRequested.has_value()) {
+            prevDequeueRequested = mMaxDequeueRequested.value();
+        }
     }
     // NOTE: Switching to the same surface is blocked from MediaCodec.
     // Switching to the same surface might not work if tried, since disconnect()
@@ -263,6 +267,11 @@
         mInConfig = false;
         return C2_BAD_VALUE;
     }
+    ALOGD("new surface in configuration: maxDequeueRequested(%d), maxDequeueCommitted(%d)",
+          prevDequeueRequested, prevDequeueCommitted);
+    if (prevDequeueRequested > 0 && prevDequeueRequested > prevDequeueCommitted) {
+        prevDequeueCommitted = prevDequeueRequested;
+    }
     if (igbp) {
         ret = igbp->setMaxDequeuedBufferCount(prevDequeueCommitted);
         if (ret != ::android::OK) {
@@ -280,6 +289,34 @@
         std::unique_lock<std::mutex> l(mLock);
         mInConfig = false;
         mBufferCache = newCache;
+        // {@code dequeued} is the number of currently dequeued buffers.
+        // {@code prevDequeueCommitted} is the maximum number of dequeued buffers
+        //  allowed at any moment by the new surface.
+        // {@code newDequeueable} is hence the current number of dequeueable buffers
+        //  if nothing else changes.
+        int dequeued = mDequeued.size() + mNumDequeueing;
+        int newDequeueable = prevDequeueCommitted - dequeued;
+        if (newDequeueable < 0) {
+            // This should not happen, but if it does, respect the value and
+            // try to continue.
+            ALOGE("calculated new dequeueable is negative: %d max(%d), dequeued(%d)",
+                  newDequeueable, prevDequeueCommitted, dequeued);
+        }
+
+        if (mMaxDequeueRequested.has_value() && mMaxDequeueRequested == prevDequeueCommitted) {
+            mMaxDequeueRequested.reset();
+        }
+        mMaxDequeue = mMaxDequeueCommitted = prevDequeueCommitted;
+
+        int delta = newDequeueable - mDequeueable;
+        if (delta > 0) {
+            writeIncDequeueableLocked(delta);
+        } else if (delta < 0) {
+            drainDequeueableLocked(-delta);
+        }
+        ALOGV("new surfcace dequeueable %d(delta %d), maxDequeue %d",
+              newDequeueable, delta, mMaxDequeue);
+        mDequeueable = newDequeueable;
     }
     return C2_OK;
 }
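The accounting above can be read as: dequeued = mDequeued.size() + mNumDequeueing, newDequeueable = prevDequeueCommitted - dequeued, and delta = newDequeueable - mDequeueable, where a positive delta writes additional dequeueable events and a negative delta drains them. A small illustrative sketch (not GraphicsTracker code):

    // Sketch of the slot accounting performed when switching surfaces.
    struct SlotAccounting {
        int dequeued;      // mDequeued.size() + mNumDequeueing
        int dequeueable;   // current mDequeueable
    };

    // Positive return values are written as extra "dequeueable" events; negative
    // values are drained from the waitable object.
    static int dequeueableDelta(const SlotAccounting& s, int newMaxDequeueCommitted) {
        int newDequeueable = newMaxDequeueCommitted - s.dequeued;
        return newDequeueable - s.dequeueable;
    }
    // Example: dequeueableDelta({/*dequeued=*/4, /*dequeueable=*/2}, /*newMax=*/8) == 2.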
@@ -529,6 +566,7 @@
             ALOGE("writing end for the waitable object seems to be closed");
             return C2_BAD_STATE;
         }
+        mNumDequeueing++;
         mDequeueable--;
         *cache = mBufferCache;
         return C2_OK;
@@ -543,6 +581,7 @@
                     bool cached, int slot, const sp<Fence> &fence,
                     std::shared_ptr<BufferItem> *pBuffer, bool *updateDequeue) {
     std::unique_lock<std::mutex> l(mLock);
+    mNumDequeueing--;
     if (res == C2_OK) {
         if (cached) {
             auto it = cache->mBuffers.find(slot);
@@ -655,7 +694,8 @@
             ALOGE("allocate by dequeueBuffer() successful, but requestBuffer() failed %d",
                   status);
             igbp->cancelBuffer(slotId, fence);
-            return C2_CORRUPTED;
+            // This might be due to life-cycle end and/or surface switching.
+            return C2_BLOCKING;
         }
         *buffer = std::make_shared<BufferItem>(generation, slotId, realloced, fence);
         if (!*buffer) {
diff --git a/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h b/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
index dd6c869..762030b 100644
--- a/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
+++ b/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
@@ -234,6 +234,7 @@
     // Maps bufferId to buffer
     std::map<uint64_t, std::shared_ptr<BufferItem>> mDequeued;
     std::set<uint64_t> mDeallocating;
+    int mNumDequeueing;
 
     // These member variables are read and modified accessed as follows.
     // 1. mConfigLock being held
diff --git a/media/codec2/hal/common/Android.bp b/media/codec2/hal/common/Android.bp
index 7d7b285..4c9da33 100644
--- a/media/codec2/hal/common/Android.bp
+++ b/media/codec2/hal/common/Android.bp
@@ -31,6 +31,10 @@
     ],
 
     static_libs: ["aconfig_mediacodec_flags_c_lib"],
+
+    defaults: [
+        "aconfig_lib_cc_static_link.defaults",
+    ],
 }
 
 cc_library_static {
diff --git a/media/codec2/hal/common/MultiAccessUnitHelper.cpp b/media/codec2/hal/common/MultiAccessUnitHelper.cpp
index 8086ef2..b1fa82f 100644
--- a/media/codec2/hal/common/MultiAccessUnitHelper.cpp
+++ b/media/codec2/hal/common/MultiAccessUnitHelper.cpp
@@ -27,6 +27,7 @@
 #include <C2Debug.h>
 #include <C2PlatformSupport.h>
 
+static inline constexpr uint32_t MAX_SUPPORTED_SIZE = (10 * 512000 * 8 * 2u);
 namespace android {
 
 static C2R MultiAccessUnitParamsSetter(
@@ -39,8 +40,6 @@
         res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.thresholdSize)));
     } else if (me.v.maxSize < me.v.thresholdSize) {
         me.set().maxSize = me.v.thresholdSize;
-    } else if (me.v.thresholdSize == 0 && me.v.maxSize > 0) {
-        me.set().thresholdSize = me.v.maxSize;
     }
     std::vector<std::unique_ptr<C2SettingResult>> failures;
     res.retrieveFailures(&failures);
@@ -61,9 +60,9 @@
             .withDefault(new C2LargeFrame::output(0u, 0, 0))
             .withFields({
                 C2F(mLargeFrameParams, maxSize).inRange(
-                        0, c2_min(UINT_MAX, 10 * 512000 * 8 * 2u)),
+                        0, c2_min(UINT_MAX, MAX_SUPPORTED_SIZE)),
                 C2F(mLargeFrameParams, thresholdSize).inRange(
-                        0, c2_min(UINT_MAX, 10 * 512000 * 8 * 2u))
+                        0, c2_min(UINT_MAX, MAX_SUPPORTED_SIZE))
             })
             .withSetter(MultiAccessUnitParamsSetter)
             .build());
@@ -115,6 +114,18 @@
     return false;
 }
 
+bool MultiAccessUnitInterface::getMaxInputSize(
+        C2StreamMaxBufferSizeInfo::input* const maxInputSize) const {
+    if (maxInputSize == nullptr || mC2ComponentIntf == nullptr) {
+        return false;
+    }
+    c2_status_t err = mC2ComponentIntf->query_vb({maxInputSize}, {}, C2_MAY_BLOCK, nullptr);
+    if (err != C2_OK) {
+        return false;
+    }
+    return true;
+}
+
 //C2MultiAccessUnitBuffer
 class C2MultiAccessUnitBuffer : public C2Buffer {
     public:
@@ -128,6 +139,7 @@
 MultiAccessUnitHelper::MultiAccessUnitHelper(
         const std::shared_ptr<MultiAccessUnitInterface>& intf,
         std::shared_ptr<C2BlockPool>& linearPool):
+        mMultiAccessOnOffAllowed(true),
         mInit(false),
         mInterface(intf),
         mLinearPool(linearPool) {
@@ -152,6 +164,63 @@
     return result;
 }
 
+bool MultiAccessUnitHelper::tryReconfigure(const std::unique_ptr<C2Param> &param) {
+    C2LargeFrame::output *lfp = C2LargeFrame::output::From(param.get());
+    if (lfp == nullptr) {
+        return false;
+    }
+    bool isDecoder = (mInterface->kind() == C2Component::KIND_DECODER);
+    if (!isDecoder) {
+        C2StreamMaxBufferSizeInfo::input maxInputSize(0);
+        if (!mInterface->getMaxInputSize(&maxInputSize)) {
+            LOG(ERROR) << "Error in reconfigure: "
+                    << "Encoder failed to respond with a valid max input size";
+            return false;
+        }
+        // This assumes a worst-case compression ratio of 1:1.
+        // In no case should the encoder produce more output than
+        // the input provided to it in a single call.
+        if (lfp->maxSize < maxInputSize.value) {
+            lfp->maxSize = maxInputSize.value;
+        }
+    }
+    lfp->maxSize =
+            (lfp->maxSize > MAX_SUPPORTED_SIZE) ? MAX_SUPPORTED_SIZE :
+                    (lfp->maxSize < 0) ? 0 : lfp->maxSize;
+    lfp->thresholdSize =
+            (lfp->thresholdSize > MAX_SUPPORTED_SIZE) ? MAX_SUPPORTED_SIZE :
+                    (lfp->thresholdSize < 0) ? 0 : lfp->thresholdSize;
+    C2LargeFrame::output currentConfig = mInterface->getLargeFrameParam();
+    if ((currentConfig.maxSize == lfp->maxSize)
+            && (currentConfig.thresholdSize == lfp->thresholdSize)) {
+        // no need to update
+        return false;
+    }
+    if (isDecoder) {
+        bool isOnOffTransition =
+                (currentConfig.maxSize == 0 && lfp->maxSize != 0)
+                || (currentConfig.maxSize != 0 && lfp->maxSize == 0);
+        if (isOnOffTransition && !mMultiAccessOnOffAllowed) {
+            LOG(ERROR) << "Setting new configs not allowed"
+                    << " MaxSize: " << lfp->maxSize
+                    << " ThresholdSize: " << lfp->thresholdSize;
+            return false;
+        }
+    }
+    std::vector<C2Param*> config{lfp};
+    std::vector<std::unique_ptr<C2SettingResult>> failures;
+    if (C2_OK != mInterface->config(config, C2_MAY_BLOCK, &failures)) {
+        LOG(ERROR) << "Dynamic config not applied for"
+                << " MaxSize: " << lfp->maxSize
+                << " ThresholdSize: " << lfp->thresholdSize;
+        return false;
+    }
+    LOG(DEBUG) << "Updated from param maxSize "
+            << lfp->maxSize
+            << " ThresholdSize " << lfp->thresholdSize;
+    return true;
+}
+
 std::shared_ptr<MultiAccessUnitInterface> MultiAccessUnitHelper::getInterface() {
     return mInterface;
 }
@@ -163,6 +232,7 @@
 void MultiAccessUnitHelper::reset() {
     std::lock_guard<std::mutex> l(mLock);
     mFrameHolder.clear();
+    mMultiAccessOnOffAllowed = true;
 }
 
 c2_status_t MultiAccessUnitHelper::error(
@@ -181,6 +251,7 @@
         }
     }
     mFrameHolder.clear();
+    mMultiAccessOnOffAllowed = true;
     return C2_OK;
 }
 
@@ -232,16 +303,23 @@
         uint64_t newFrameIdx = mFrameIndex++;
         // TODO: Do not split buffers if the component inherently supports MultipleFrames.
         // If that is the case, only replace the frame index.
-        auto cloneInputWork = [&newFrameIdx](std::unique_ptr<C2Work>& inWork, uint32_t flags) {
+        auto cloneInputWork = [&frameInfo, &newFrameIdx, this]
+                (std::unique_ptr<C2Work>& inWork, uint32_t flags) -> std::unique_ptr<C2Work> {
             std::unique_ptr<C2Work> newWork(new C2Work);
             newWork->input.flags = (C2FrameData::flags_t)flags;
             newWork->input.ordinal = inWork->input.ordinal;
             newWork->input.ordinal.frameIndex = newFrameIdx;
             if (!inWork->input.configUpdate.empty()) {
                 for (std::unique_ptr<C2Param>& param : inWork->input.configUpdate) {
-                    newWork->input.configUpdate.push_back(
-                            std::move(C2Param::Copy(*(param.get()))));
+                    if (param->index() == C2LargeFrame::output::PARAM_TYPE) {
+                        if (tryReconfigure(param)) {
+                            frameInfo.mConfigUpdate.push_back(std::move(param));
+                        }
+                    } else {
+                        newWork->input.configUpdate.push_back(std::move(param));
+                    }
                 }
+                inWork->input.configUpdate.clear();
             }
             newWork->input.infoBuffers = (inWork->input.infoBuffers);
             if (!inWork->worklets.empty() && inWork->worklets.front() != nullptr) {
@@ -331,6 +409,7 @@
             frameInfo.mLargeFrameTuning = multiAccessParams;
             std::lock_guard<std::mutex> l(mLock);
             mFrameHolder.push_back(std::move(frameInfo));
+            mMultiAccessOnOffAllowed = false;
         }
     }
     return C2_OK;
@@ -360,6 +439,7 @@
             std::list<MultiAccessUnitInfo>::iterator frame =
                     mFrameHolder.begin();
             while (!foundFrame && frame != mFrameHolder.end()) {
+                c2_status_t res = C2_OK;
                 auto it = frame->mComponentFrameIds.find(thisFrameIndex);
                 if (it != frame->mComponentFrameIds.end()) {
                     foundFrame = true;
@@ -369,8 +449,7 @@
                     if (work->result != C2_OK
                             || work->worklets.empty()
                             || !work->worklets.front()
-                            || (frame->mLargeFrameTuning.thresholdSize == 0
-                            || frame->mLargeFrameTuning.maxSize == 0)) {
+                            || frame->mLargeFrameTuning.maxSize == 0) {
                         if (removeEntry) {
                             frame->mComponentFrameIds.erase(it);
                             removeEntry = false;
@@ -388,10 +467,27 @@
                         addOutWork(frame->mLargeWork);
                         frame->reset();
                         if (workResult != C2_OK) {
-                            frame->mAccessUnitInfos.clear();
+                            frame->mComponentFrameIds.clear();
+                            removeEntry = false;
                         }
-                    } else if (C2_OK != processWorklets(*frame, work, addOutWork)) {
-                        LOG(DEBUG) << "Error while processing work";
+                    } else if (C2_OK != (res = processWorklets(*frame, work, addOutWork))) {
+                        // Upon an error while processing worklets, we return the work with
+                        // its result set to that error. This signals the error to the
+                        // framework so that it can take whatever handling is necessary.
+                        LOG(DEBUG) << "Error while processing worklets";
+                        if (frame->mLargeWork == nullptr) {
+                            frame->mLargeWork.reset(new C2Work);
+                            frame->mLargeWork->input.ordinal = frame->inOrdinal;
+                            frame->mLargeWork->input.ordinal.frameIndex =
+                                    frame->inOrdinal.frameIndex;
+                        }
+                        frame->mLargeWork->result = res;
+                        finalizeWork(*frame);
+                        addOutWork(frame->mLargeWork);
+                        frame->reset();
+                        frame->mComponentFrameIds.clear();
+                        removeEntry = false;
                     }
                     if (removeEntry) {
                         LOG(DEBUG) << "Removing entry: " << thisFrameIndex
@@ -528,9 +624,6 @@
 
         LOG(DEBUG) << "maxOutSize " << frame.mLargeFrameTuning.maxSize
                 << " threshold " << frame.mLargeFrameTuning.thresholdSize;
-        if ((*worklet)->output.buffers.size() > 0) {
-            allocateWork(frame, true, true);
-        }
         LOG(DEBUG) << "This worklet has " << (*worklet)->output.buffers.size() << " buffers"
                 << " ts: " << (*worklet)->output.ordinal.timestamp.peekull();
         int64_t workletTimestamp = (*worklet)->output.ordinal.timestamp.peekull();
@@ -552,43 +645,39 @@
                     inputSize -= (inputSize % frameSize);
                 }
                 while (inputOffset < inputSize) {
-                    if (frame.mWview->offset() >= frame.mLargeFrameTuning.thresholdSize) {
+                    if ((frame.mWview != nullptr)
+                            && (frame.mWview->offset() >= frame.mLargeFrameTuning.thresholdSize)) {
                         frame.mLargeWork->result = C2_OK;
                         finalizeWork(frame, flagsForCopy);
                         addWork(frame.mLargeWork);
                         frame.reset();
-                        allocateWork(frame, true, true);
                     }
                     if (mInterface->kind() == C2Component::KIND_ENCODER) {
                         if (inputSize > frame.mLargeFrameTuning.maxSize) {
-                            LOG(ERROR) << "Enc: Output buffer too small for AU, configured with "
-                                    << frame.mLargeFrameTuning.maxSize
-                                    << " block size: " << blocks.front().size()
-                                    << "alloc size " << frame.mWview->size();
-                            if (frame.mLargeWork
-                                    && frame.mWview && frame.mWview->offset() > 0) {
+                            LOG(WARNING) << "Encoder:"
+                                    << " output buffer too small for the configuration;"
+                                    << " configured max size " << frame.mLargeFrameTuning.maxSize
+                                    << " access unit size " << inputSize;
+                            if (frame.mLargeWork && (frame.mWview && frame.mWview->offset() > 0)) {
+                                frame.mLargeWork->result = C2_OK;
                                 finalizeWork(frame, flagsForCopy);
                                 addWork(frame.mLargeWork);
                                 frame.reset();
-                                allocateWork(frame, true, false);
                             }
-                            frame.mLargeWork->result = C2_NO_MEMORY;
-                            finalizeWork(frame, 0, true);
-                            addWork(frame.mLargeWork);
-                            frame.reset();
-                            return C2_NO_MEMORY;
-                        } else if (inputSize > frame.mWview->size()) {
+                            frame.mLargeFrameTuning.maxSize = inputSize;
+                        } else if ((frame.mWview != nullptr)
+                                && (inputSize > frame.mWview->size())) {
                             LOG(DEBUG) << "Enc: Large frame hitting bufer limit, current size "
                                 << frame.mWview->offset();
-                            if (frame.mLargeWork
-                                    && frame.mWview && frame.mWview->offset() > 0) {
+                            if (frame.mWview->offset() > 0) {
+                                frame.mLargeWork->result = C2_OK;
                                 finalizeWork(frame, flagsForCopy);
                                 addWork(frame.mLargeWork);
                                 frame.reset();
-                                allocateWork(frame, true, true);
                             }
                         }
                     }
+                    allocateWork(frame, true, true);
                     C2ReadView rView = blocks.front().map().get();
                     if (rView.error()) {
                         LOG(ERROR) << "Buffer read view error";
@@ -683,26 +772,39 @@
             frame.mWview->setOffset(0);
             std::shared_ptr<C2Buffer> c2Buffer = C2Buffer::CreateLinearBuffer(
                     frame.mBlock->share(0, size, ::C2Fence()));
-            if (frame.mAccessUnitInfos.size() > 0) {
-                if (finalFlags & C2FrameData::FLAG_END_OF_STREAM) {
-                    frame.mAccessUnitInfos.back().flags |=
-                            C2FrameData::FLAG_END_OF_STREAM;
-                }
-                std::shared_ptr<C2AccessUnitInfos::output> largeFrame =
-                        C2AccessUnitInfos::output::AllocShared(
-                        frame.mAccessUnitInfos.size(), 0u, frame.mAccessUnitInfos);
-                frame.mInfos.push_back(largeFrame);
-                frame.mAccessUnitInfos.clear();
-            }
-            for (auto &info : frame.mInfos) {
-                c2Buffer->setInfo(std::const_pointer_cast<C2Info>(info));
-            }
             frame.mLargeWork->worklets.front()->output.buffers.push_back(std::move(c2Buffer));
-            frame.mInfos.clear();
-            frame.mBlock.reset();
-            frame.mWview.reset();
+        }
+        if (frame.mLargeWork->worklets.front()->output.buffers.size() > 0) {
+            std::shared_ptr<C2Buffer>& c2Buffer =
+                frame.mLargeWork->worklets.front()->output.buffers.front();
+            if (c2Buffer != nullptr) {
+                if (frame.mAccessUnitInfos.size() > 0) {
+                    if (finalFlags & C2FrameData::FLAG_END_OF_STREAM) {
+                        frame.mAccessUnitInfos.back().flags |= C2FrameData::FLAG_END_OF_STREAM;
+                    }
+                    std::shared_ptr<C2AccessUnitInfos::output> largeFrame =
+                            C2AccessUnitInfos::output::AllocShared(
+                                    frame.mAccessUnitInfos.size(), 0u, frame.mAccessUnitInfos);
+                    frame.mInfos.push_back(largeFrame);
+                    frame.mAccessUnitInfos.clear();
+                }
+                for (auto &info : frame.mInfos) {
+                    c2Buffer->setInfo(std::const_pointer_cast<C2Info>(info));
+                }
+            }
+        }
+        if (frame.mConfigUpdate.size() > 0) {
+            outFrameData.configUpdate.insert(
+                    outFrameData.configUpdate.end(),
+                    make_move_iterator(frame.mConfigUpdate.begin()),
+                    make_move_iterator(frame.mConfigUpdate.end()));
         }
     }
+    frame.mConfigUpdate.clear();
+    frame.mInfos.clear();
+    frame.mBlock.reset();
+    frame.mWview.reset();
+
     LOG(DEBUG) << "Multi access-unitflag setting as " << finalFlags;
     return C2_OK;
 }
@@ -735,6 +837,7 @@
     mBlock.reset();
     mWview.reset();
     mInfos.clear();
+    mConfigUpdate.clear();
     mAccessUnitInfos.clear();
     mLargeWork.reset();
 }
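For reference, a minimal standalone sketch of the clamping rule applied in tryReconfigure() above; it assumes MAX_SUPPORTED_SIZE equals the literal bound (10 * 512000 * 8 * 2) that the earlier hunk replaced, and the helper name is illustrative, not part of this patch:

#include <algorithm>
#include <cstdint>

// Assumed to match the literal previously used in the inRange() setters.
constexpr uint32_t MAX_SUPPORTED_SIZE = 10 * 512000 * 8 * 2u;

// Hypothetical helper: both maxSize and thresholdSize are limited to
// [0, MAX_SUPPORTED_SIZE] before the config is forwarded to the component.
uint32_t clampLargeFrameSize(uint32_t requested) {
    return std::min(requested, MAX_SUPPORTED_SIZE);
}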
diff --git a/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h b/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h
index bb4464c..070a1f5 100644
--- a/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h
+++ b/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h
@@ -46,6 +46,7 @@
 protected:
     bool getDecoderSampleRateAndChannelCount(
             uint32_t * const sampleRate_, uint32_t * const channelCount_) const;
+    bool getMaxInputSize(C2StreamMaxBufferSizeInfo::input* const maxInputSize) const;
     const std::shared_ptr<C2ComponentInterface> mC2ComponentIntf;
     std::shared_ptr<C2LargeFrame::output> mLargeFrameParams;
     C2ComponentKindSetting mKind;
@@ -140,6 +141,11 @@
         std::vector<std::shared_ptr<const C2Info>> mInfos;
 
         /*
+         * Vector for holding config updates from the wrapper
+         */
+        std::vector<std::unique_ptr<C2Param>> mConfigUpdate;
+
+        /*
          * C2AccessUnitInfos for the current buffer
          */
         std::vector<C2AccessUnitInfosStruct> mAccessUnitInfos;
@@ -170,6 +176,11 @@
     };
 
     /*
+     * Attempts to apply a C2LargeFrame config update from the client.
+     * Returns true if the component's large-frame configuration was changed.
+     */
+    bool tryReconfigure(const std::unique_ptr<C2Param> &p);
+
+    /*
      * Creates a linear block to be used with work
      */
     c2_status_t createLinearBlock(MultiAccessUnitInfo &frame);
@@ -195,6 +206,14 @@
             uint32_t size,
             int64_t timestamp);
 
+    // Flag that controls whether this helper can be dynamically switched
+    // on or off. Once the feature is enabled and buffers are in transit,
+    // it is not possible to turn this module off by setting the max output
+    // value to 0, because the skip-cut buffer expects the metadata to
+    // always be present along with a valid buffer. This flag tracks that
+    // state of the module.
+    bool mMultiAccessOnOffAllowed;
+
     bool mInit;
 
     // Interface of this module
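A small sketch of the on/off guard that tryReconfigure() enforces for decoders once this flag is cleared; the function name is illustrative and not part of the patch:

#include <cstdint>

// Once frames are in flight (onOffAllowed == false), a large-frame config
// update may resize the output but must not toggle the feature on or off,
// i.e. maxSize must not change between zero and non-zero.
bool isAllowedLargeFrameUpdate(uint32_t currentMaxSize, uint32_t newMaxSize,
                               bool onOffAllowed) {
    const bool isOnOffTransition = (currentMaxSize == 0) != (newMaxSize == 0);
    return onOffAllowed || !isOnOffTransition;
}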
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
index ab47b7c..36907e1 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
@@ -263,9 +263,6 @@
     ALOGV("mComponent->reset() timeConsumed=%" PRId64 " us", timeConsumed);
     ASSERT_EQ(err, C2_OK);
 
-    err = mComponent->start();
-    ASSERT_EQ(err, C2_OK);
-
     // Query supported params by the component
     std::vector<std::shared_ptr<C2ParamDescriptor>> params;
     startTime = getNowUs();
@@ -298,6 +295,9 @@
               timeConsumed);
     }
 
+    err = mComponent->start();
+    ASSERT_EQ(err, C2_OK);
+
     std::list<std::unique_ptr<C2Work>> workList;
     startTime = getNowUs();
     err = mComponent->queue(&workList);
diff --git a/media/codec2/hal/plugin/FilterWrapper.cpp b/media/codec2/hal/plugin/FilterWrapper.cpp
index 197d6e7..b926150 100644
--- a/media/codec2/hal/plugin/FilterWrapper.cpp
+++ b/media/codec2/hal/plugin/FilterWrapper.cpp
@@ -49,11 +49,6 @@
             std::weak_ptr<FilterWrapper> filterWrapper)
         : mIntf(intf), mFilterWrapper(filterWrapper) {
         takeFilters(std::move(filters));
-        for (size_t i = 0; i < mFilters.size(); ++i) {
-            mControlParamTypes.insert(
-                    mFilters[i].desc.controlParams.begin(),
-                    mFilters[i].desc.controlParams.end());
-        }
     }
 
     ~WrappedDecoderInterface() override = default;
@@ -91,6 +86,12 @@
 
         // TODO: documentation
         mFilters = std::move(filters);
+        mControlParamTypes.clear();
+        for (size_t i = 0; i < mFilters.size(); ++i) {
+            mControlParamTypes.insert(
+                    mFilters[i].desc.controlParams.begin(),
+                    mFilters[i].desc.controlParams.end());
+        }
         mTypeToIndexForQuery.clear();
         mTypeToIndexForConfig.clear();
         for (size_t i = 0; i < mFilters.size(); ++i) {
diff --git a/media/codec2/sfplugin/Android.bp b/media/codec2/sfplugin/Android.bp
index 362373e..7076bac 100644
--- a/media/codec2/sfplugin/Android.bp
+++ b/media/codec2/sfplugin/Android.bp
@@ -91,6 +91,10 @@
         "libcodec2_client",
     ],
 
+    defaults: [
+        "aconfig_lib_cc_static_link.defaults",
+    ],
+
     sanitize: {
         cfi: true,
         misc_undefined: [
diff --git a/media/codec2/sfplugin/C2AidlNode.cpp b/media/codec2/sfplugin/C2AidlNode.cpp
index 93c9d8b..0f23205 100644
--- a/media/codec2/sfplugin/C2AidlNode.cpp
+++ b/media/codec2/sfplugin/C2AidlNode.cpp
@@ -105,6 +105,10 @@
     return mImpl->onInputBufferDone(index);
 }
 
+void C2AidlNode::onInputBufferEmptied() {
+    return mImpl->onInputBufferEmptied();
+}
+
 android_dataspace C2AidlNode::getDataspace() {
     return mImpl->getDataspace();
 }
diff --git a/media/codec2/sfplugin/C2AidlNode.h b/media/codec2/sfplugin/C2AidlNode.h
index 365a41d..9dd3504 100644
--- a/media/codec2/sfplugin/C2AidlNode.h
+++ b/media/codec2/sfplugin/C2AidlNode.h
@@ -68,13 +68,19 @@
     void setFrameSize(uint32_t width, uint32_t height);
 
     /**
-     * Clean up work item reference.
+     * Notify that the input buffer reference is no longer needed by the component.
+     * Clean up if necessary.
      *
      * \param index input work index
      */
     void onInputBufferDone(c2_cntr64_t index);
 
     /**
+     * Notify that an input buffer has been emptied.
+     */
+    void onInputBufferEmptied();
+
+    /**
      * Returns dataspace information from GraphicBufferSource.
      */
     android_dataspace getDataspace();
diff --git a/media/codec2/sfplugin/C2NodeImpl.cpp b/media/codec2/sfplugin/C2NodeImpl.cpp
index 6f53e0f..585072d 100644
--- a/media/codec2/sfplugin/C2NodeImpl.cpp
+++ b/media/codec2/sfplugin/C2NodeImpl.cpp
@@ -25,6 +25,7 @@
 #include <C2Debug.h>
 #include <C2PlatformSupport.h>
 
+#include <android_media_codec.h>
 #include <android/fdsan.h>
 #include <media/stagefright/foundation/ColorUtils.h>
 #include <ui/Fence.h>
@@ -373,7 +374,10 @@
     }
     work->worklets.clear();
     work->worklets.emplace_back(new C2Worklet);
-    mBufferIdsInUse.lock()->emplace(work->input.ordinal.frameIndex.peeku(), buffer);
+    {
+        Mutexed<BuffersTracker>::Locked buffers(mBuffersTracker);
+        buffers->mIdsInUse.emplace(work->input.ordinal.frameIndex.peeku(), buffer);
+    }
     mQueueThread->queue(comp, fenceFd, std::move(work), std::move(fd0), std::move(fd1));
 
     return OK;
@@ -405,29 +409,74 @@
 }
 
 void C2NodeImpl::onInputBufferDone(c2_cntr64_t index) {
-    if (mAidlHal) {
-        if (!mAidlBufferSource) {
-            ALOGD("Buffer source not set (index=%llu)", index.peekull());
-            return;
-        }
-    } else {
-        if (!mBufferSource) {
-            ALOGD("Buffer source not set (index=%llu)", index.peekull());
-            return;
-        }
-    }
-
-    int32_t bufferId = 0;
-    {
-        decltype(mBufferIdsInUse)::Locked bufferIds(mBufferIdsInUse);
-        auto it = bufferIds->find(index.peeku());
-        if (it == bufferIds->end()) {
+    if (android::media::codec::provider_->input_surface_throttle()) {
+        Mutexed<BuffersTracker>::Locked buffers(mBuffersTracker);
+        auto it = buffers->mIdsInUse.find(index.peeku());
+        if (it == buffers->mIdsInUse.end()) {
             ALOGV("Untracked input index %llu (maybe already removed)", index.peekull());
             return;
         }
-        bufferId = it->second;
-        (void)bufferIds->erase(it);
+        int32_t bufferId = it->second;
+        (void)buffers->mIdsInUse.erase(it);
+        buffers->mAvailableIds.push_back(bufferId);
+    } else {
+        if (!hasBufferSource()) {
+            return;
+        }
+        int32_t bufferId = 0;
+        {
+            Mutexed<BuffersTracker>::Locked buffers(mBuffersTracker);
+            auto it = buffers->mIdsInUse.find(index.peeku());
+            if (it == buffers->mIdsInUse.end()) {
+                ALOGV("Untracked input index %llu (maybe already removed)", index.peekull());
+                return;
+            }
+            bufferId = it->second;
+            (void)buffers->mIdsInUse.erase(it);
+        }
+        notifyInputBufferEmptied(bufferId);
     }
+}
+
+void C2NodeImpl::onInputBufferEmptied() {
+    if (!android::media::codec::provider_->input_surface_throttle()) {
+        ALOGE("onInputBufferEmptied should not be called "
+              "when input_surface_throttle is false");
+        return;
+    }
+    if (!hasBufferSource()) {
+        return;
+    }
+    int32_t bufferId = 0;
+    {
+        Mutexed<BuffersTracker>::Locked buffers(mBuffersTracker);
+        if (buffers->mAvailableIds.empty()) {
+            ALOGV("The codec is ready to take more input buffers "
+                    "but no input buffers are ready yet.");
+            return;
+        }
+        bufferId = buffers->mAvailableIds.front();
+        buffers->mAvailableIds.pop_front();
+    }
+    notifyInputBufferEmptied(bufferId);
+}
+
+bool C2NodeImpl::hasBufferSource() {
+    if (mAidlHal) {
+        if (!mAidlBufferSource) {
+            ALOGD("Buffer source not set");
+            return false;
+        }
+    } else {
+        if (!mBufferSource) {
+            ALOGD("Buffer source not set");
+            return false;
+        }
+    }
+    return true;
+}
+
+void C2NodeImpl::notifyInputBufferEmptied(int32_t bufferId) {
     if (mAidlHal) {
         ::ndk::ScopedFileDescriptor nullFence;
         (void)mAidlBufferSource->onInputBufferEmptied(bufferId, nullFence);
diff --git a/media/codec2/sfplugin/C2NodeImpl.h b/media/codec2/sfplugin/C2NodeImpl.h
index e060fd8..cc826b4 100644
--- a/media/codec2/sfplugin/C2NodeImpl.h
+++ b/media/codec2/sfplugin/C2NodeImpl.h
@@ -73,13 +73,19 @@
     void setFrameSize(uint32_t width, uint32_t height);
 
     /**
-     * Clean up work item reference.
+     * Notify that the input buffer reference is no longer needed by the component.
+     * Clean up if necessary.
      *
      * \param index input work index
      */
     void onInputBufferDone(c2_cntr64_t index);
 
     /**
+     * Notify that an input buffer has been emptied.
+     */
+    void onInputBufferEmptied();
+
+    /**
      * Returns dataspace information from GraphicBufferSource.
      */
     android_dataspace getDataspace();
@@ -118,12 +124,24 @@
     c2_cntr64_t mPrevInputTimestamp; // input timestamp for previous frame
     c2_cntr64_t mPrevCodecTimestamp; // adjusted (codec) timestamp for previous frame
 
-    Mutexed<std::map<uint64_t, uint32_t>> mBufferIdsInUse;
+    // Tracks the status of buffers
+    struct BuffersTracker {
+        BuffersTracker() = default;
+
+        // Keeps track of buffers that are used by the component. Maps frame index -> buffer ID
+        std::map<uint64_t, uint32_t> mIdsInUse;
+        // Keeps track of the buffer IDs that are available after being released from the component.
+        std::list<uint32_t> mAvailableIds;
+    };
+    Mutexed<BuffersTracker> mBuffersTracker;
 
     class QueueThread;
     sp<QueueThread> mQueueThread;
 
     bool mAidlHal;
+
+    bool hasBufferSource();
+    void notifyInputBufferEmptied(int32_t bufferId);
 };
 
 }  // namespace android
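For reference, a simplified sketch of the BuffersTracker flow used by the input-surface throttle above; locking, the flag check, and the buffer-source notification are omitted, and the struct name is illustrative:

#include <cstdint>
#include <list>
#include <map>
#include <optional>

struct BuffersTrackerSketch {
    std::map<uint64_t, uint32_t> mIdsInUse;   // frame index -> buffer ID
    std::list<uint32_t> mAvailableIds;        // IDs released by the component

    // Called when the component no longer needs the input reference.
    void onInputBufferDone(uint64_t frameIndex) {
        auto it = mIdsInUse.find(frameIndex);
        if (it == mIdsInUse.end()) return;    // untracked (maybe already removed)
        mAvailableIds.push_back(it->second);
        mIdsInUse.erase(it);
    }

    // Called when the codec can accept more input; returns an ID to empty, if any.
    std::optional<uint32_t> onInputBufferEmptied() {
        if (mAvailableIds.empty()) return std::nullopt;
        uint32_t id = mAvailableIds.front();
        mAvailableIds.pop_front();
        return id;
    }
};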
diff --git a/media/codec2/sfplugin/C2OMXNode.cpp b/media/codec2/sfplugin/C2OMXNode.cpp
index ce02c88..98e25e2 100644
--- a/media/codec2/sfplugin/C2OMXNode.cpp
+++ b/media/codec2/sfplugin/C2OMXNode.cpp
@@ -291,6 +291,10 @@
     return mImpl->onInputBufferDone(index);
 }
 
+void C2OMXNode::onInputBufferEmptied() {
+    return mImpl->onInputBufferEmptied();
+}
+
 android_dataspace C2OMXNode::getDataspace() {
     return mImpl->getDataspace();
 }
diff --git a/media/codec2/sfplugin/C2OMXNode.h b/media/codec2/sfplugin/C2OMXNode.h
index d077202..5549b88 100644
--- a/media/codec2/sfplugin/C2OMXNode.h
+++ b/media/codec2/sfplugin/C2OMXNode.h
@@ -86,13 +86,19 @@
     void setFrameSize(uint32_t width, uint32_t height);
 
     /**
-     * Clean up work item reference.
+     * Notify that the input buffer reference is no longer needed by the component.
+     * Clean up if necessary.
      *
      * \param index input work index
      */
     void onInputBufferDone(c2_cntr64_t index);
 
     /**
+     * Notify that an input buffer has been emptied.
+     */
+    void onInputBufferEmptied();
+
+    /**
      * Returns dataspace information from GraphicBufferSource.
      */
     android_dataspace getDataspace();
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 463b63f..ca0aabb 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -444,6 +444,10 @@
         mNode->onInputBufferDone(index);
     }
 
+    void onInputBufferEmptied() override {
+        mNode->onInputBufferEmptied();
+    }
+
     android_dataspace getDataspace() override {
         return mNode->getDataspace();
     }
@@ -663,6 +667,10 @@
         mNode->onInputBufferDone(index);
     }
 
+    void onInputBufferEmptied() override {
+        mNode->onInputBufferEmptied();
+    }
+
     android_dataspace getDataspace() override {
         return mNode->getDataspace();
     }
@@ -2227,8 +2235,23 @@
     // So we reverse their order for stopUseOutputSurface() to notify C2Fence waiters
     // prior to comp->stop().
     // See also b/300350761.
-    mChannel->stopUseOutputSurface(pushBlankBuffer);
-    status_t err = comp->stop();
+    //
+    // The workaround is no longer needed with the fetchGraphicBlock & C2Fence
+    // changes, so we revert to the logical sequence of operations when AIDL
+    // HALs are selected.
+    // When HIDL HALs are selected, we retain the workaround (the reversed
+    // order) by default in order to keep the legacy behavior.
+    bool stopHalBeforeSurface =
+            Codec2Client::IsAidlSelected() ||
+            property_get_bool("debug.codec2.stop_hal_before_surface", false);
+    status_t err = C2_OK;
+    if (stopHalBeforeSurface && android::media::codec::provider_->stop_hal_before_surface()) {
+        err = comp->stop();
+        mChannel->stopUseOutputSurface(pushBlankBuffer);
+    } else {
+        mChannel->stopUseOutputSurface(pushBlankBuffer);
+        err = comp->stop();
+    }
     if (err != C2_OK) {
         // TODO: convert err into status_t
         mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
@@ -2323,8 +2346,22 @@
     // So we reverse their order for stopUseOutputSurface() to notify C2Fence waiters
     // prior to comp->release().
     // See also b/300350761.
-    mChannel->stopUseOutputSurface(pushBlankBuffer);
-    comp->release();
+    //
+    // The workaround is no longer needed with the fetchGraphicBlock & C2Fence
+    // changes, so we revert to the logical sequence of operations when AIDL
+    // HALs are selected.
+    // When HIDL HALs are selected, we retain the workaround (the reversed
+    // order) by default in order to keep the legacy behavior.
+    bool stopHalBeforeSurface =
+            Codec2Client::IsAidlSelected() ||
+            property_get_bool("debug.codec2.stop_hal_before_surface", false);
+    if (stopHalBeforeSurface && android::media::codec::provider_->stop_hal_before_surface()) {
+        comp->release();
+        mChannel->stopUseOutputSurface(pushBlankBuffer);
+    } else {
+        mChannel->stopUseOutputSurface(pushBlankBuffer);
+        comp->release();
+    }
 
     {
         Mutexed<State>::Locked state(mState);
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 3984b83..f0a4180 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -1069,6 +1069,10 @@
             return;
         }
     }
+    if (android::media::codec::provider_->input_surface_throttle()
+            && mInputSurface != nullptr) {
+        mInputSurface->onInputBufferEmptied();
+    }
     size_t numActiveSlots = 0;
     while (!mPipelineWatcher.lock()->pipelineFull()) {
         sp<MediaCodecBuffer> inBuffer;
@@ -2784,7 +2788,16 @@
 }
 
 void CCodecBufferChannel::setInfoBuffer(const std::shared_ptr<C2InfoBuffer> &buffer) {
-    mInfoBuffers.push_back(buffer);
+    if (mInputSurface == nullptr) {
+        mInfoBuffers.push_back(buffer);
+    } else {
+        std::list<std::unique_ptr<C2Work>> items;
+        std::unique_ptr<C2Work> work(new C2Work);
+        work->input.infoBuffers.emplace_back(*buffer);
+        work->worklets.emplace_back(new C2Worklet);
+        items.push_back(std::move(work));
+        (void)mComponent->queue(&items);
+    }
 }
 
 status_t toStatusT(c2_status_t c2s, c2_operation_t c2op) {
diff --git a/media/codec2/sfplugin/InputSurfaceWrapper.h b/media/codec2/sfplugin/InputSurfaceWrapper.h
index 4bf6cd0..c158c5b 100644
--- a/media/codec2/sfplugin/InputSurfaceWrapper.h
+++ b/media/codec2/sfplugin/InputSurfaceWrapper.h
@@ -102,6 +102,7 @@
     }
 
     /**
+     * Notify that the input buffer reference is no longer needed.
      * Clean up C2Work related references if necessary. No-op by default.
      *
      * \param index index of input work.
@@ -109,6 +110,12 @@
     virtual void onInputBufferDone(c2_cntr64_t /* index */) {}
 
     /**
+     * Signal one input buffer as emptied.
+     * No-op by default.
+     */
+    virtual void onInputBufferEmptied() {}
+
+    /**
      * Returns dataspace information from GraphicBufferSource.
      */
     virtual android_dataspace getDataspace() { return mDataSpace; }
diff --git a/media/codec2/sfplugin/utils/Android.bp b/media/codec2/sfplugin/utils/Android.bp
index 54a6fb1..bed594c 100644
--- a/media/codec2/sfplugin/utils/Android.bp
+++ b/media/codec2/sfplugin/utils/Android.bp
@@ -54,7 +54,7 @@
 
     static_libs: [
         "libarect",
-        "libyuv_static",
+        "libyuv",
     ],
 
     sanitize: {
diff --git a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
index 77a76e8..7a33af4 100644
--- a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
@@ -32,10 +32,15 @@
 namespace android {
 
 
-static bool isAtLeast(int version, const char *codeName) {
-    char deviceCodeName[PROP_VALUE_MAX];
-    __system_property_get("ro.build.version.codename", deviceCodeName);
-    return android_get_device_api_level() >= version || !strcmp(deviceCodeName, codeName);
+static bool isAtLeast(int version, const std::string& codeName) {
+    static std::once_flag sCheckOnce;
+    static std::string sDeviceCodeName;
+    static int sDeviceApiLevel;
+    std::call_once(sCheckOnce, [&](){
+        sDeviceCodeName = base::GetProperty("ro.build.version.codename", "");
+        sDeviceApiLevel = android_get_device_api_level();
+    });
+    return sDeviceApiLevel >= version || sDeviceCodeName == codeName;
 }
 
 bool isAtLeastT() {
diff --git a/media/codec2/vndk/Android.bp b/media/codec2/vndk/Android.bp
index 9f57bfd..dc06ee6 100644
--- a/media/codec2/vndk/Android.bp
+++ b/media/codec2/vndk/Android.bp
@@ -53,6 +53,7 @@
     ],
 
     defaults: [
+        "aconfig_lib_cc_static_link.defaults",
         "libcodec2_hal_selection",
     ],
 
diff --git a/media/libaaudio/examples/loopback/src/loopback.cpp b/media/libaaudio/examples/loopback/src/loopback.cpp
index 4affaed..6a35ced 100644
--- a/media/libaaudio/examples/loopback/src/loopback.cpp
+++ b/media/libaaudio/examples/loopback/src/loopback.cpp
@@ -323,7 +323,6 @@
     printf("      -C{channels}      number of input channels\n");
     printf("      -D{deviceId}      input device ID\n");
     printf("      -F{0,1,2}         input format, 1=I16, 2=FLOAT\n");
-    printf("      -g{gain}          recirculating loopback gain\n");
     printf("      -h{hangMillis}    occasionally hang in the callback\n");
     printf("      -P{inPerf}        set input AAUDIO_PERFORMANCE_MODE*\n");
     printf("          n for _NONE\n");
@@ -436,7 +435,6 @@
     int                   written                    = 0;
 
     int                   testMode                   = TEST_LATENCY;
-    double                gain                       = 1.0;
     int                   hangTimeMillis             = 0;
     std::string           report;
 
@@ -468,9 +466,6 @@
                     case 'F':
                         requestedInputFormat = atoi(&arg[2]);
                         break;
-                    case 'g':
-                        gain = atof(&arg[2]);
-                        break;
                     case 'h':
                         // Was there a number after the "-h"?
                         if (arg[2]) {
diff --git a/media/libaaudio/src/binding/AAudioBinderClient.cpp b/media/libaaudio/src/binding/AAudioBinderClient.cpp
index 5f34a75..439d5af 100644
--- a/media/libaaudio/src/binding/AAudioBinderClient.cpp
+++ b/media/libaaudio/src/binding/AAudioBinderClient.cpp
@@ -71,18 +71,10 @@
     {
         Mutex::Autolock _l(mServiceLock);
         if (mAdapter == nullptr) {
-            sp<IBinder> binder;
             sp<IServiceManager> sm = defaultServiceManager();
-            // Try several times to get the service.
-            int retries = 4;
-            do {
-                binder = sm->getService(String16(AAUDIO_SERVICE_NAME)); // This will wait a while.
-                if (binder.get() != nullptr) {
-                    break;
-                }
-            } while (retries-- > 0);
+            sp<IBinder> binder = sm->waitForService(String16(AAUDIO_SERVICE_NAME));
 
-            if (binder.get() != nullptr) {
+            if (binder != nullptr) {
                 // Ask for notification if the service dies.
                 status_t status = binder->linkToDeath(mAAudioClient);
                 // TODO review what we should do if this fails
diff --git a/media/libaaudio/src/core/AudioStreamBuilder.cpp b/media/libaaudio/src/core/AudioStreamBuilder.cpp
index ac4e2b3..9bbaf3e 100644
--- a/media/libaaudio/src/core/AudioStreamBuilder.cpp
+++ b/media/libaaudio/src/core/AudioStreamBuilder.cpp
@@ -24,6 +24,7 @@
 
 #include <aaudio/AAudio.h>
 #include <aaudio/AAudioTesting.h>
+#include <android/media/audio/common/AudioMMapPolicy.h>
 #include <android/media/audio/common/AudioMMapPolicyInfo.h>
 #include <android/media/audio/common/AudioMMapPolicyType.h>
 #include <media/AudioSystem.h>
@@ -40,11 +41,14 @@
 
 using namespace aaudio;
 
+using android::media::audio::common::AudioMMapPolicy;
 using android::media::audio::common::AudioMMapPolicyInfo;
 using android::media::audio::common::AudioMMapPolicyType;
 
 #define AAUDIO_MMAP_POLICY_DEFAULT             AAUDIO_POLICY_NEVER
 #define AAUDIO_MMAP_EXCLUSIVE_POLICY_DEFAULT   AAUDIO_POLICY_NEVER
+#define AAUDIO_MMAP_POLICY_DEFAULT_AIDL        AudioMMapPolicy::NEVER
+#define AAUDIO_MMAP_EXCLUSIVE_POLICY_DEFAULT_AIDL AudioMMapPolicy::NEVER
 
 // These values are for a pre-check before we ask the lower level service to open a stream.
 // So they are just outside the maximum conceivable range of value,
@@ -116,7 +120,8 @@
     aaudio_policy_t mmapPolicy = AudioGlobal_getMMapPolicy();
     if (android::AudioSystem::getMmapPolicyInfo(
             AudioMMapPolicyType::DEFAULT, &policyInfos) == NO_ERROR) {
-        aaudio_policy_t systemMmapPolicy = AAudio_getAAudioPolicy(policyInfos);
+        aaudio_policy_t systemMmapPolicy = AAudio_getAAudioPolicy(
+                policyInfos, AAUDIO_MMAP_POLICY_DEFAULT_AIDL);
         if (mmapPolicy == AAUDIO_POLICY_ALWAYS && systemMmapPolicy == AAUDIO_POLICY_NEVER) {
             // No need to try as AAudioService is not created and the client only wants MMAP path.
             return AAUDIO_ERROR_NO_SERVICE;
@@ -145,7 +150,8 @@
     aaudio_policy_t mmapExclusivePolicy = AAUDIO_UNSPECIFIED;
     if (android::AudioSystem::getMmapPolicyInfo(
             AudioMMapPolicyType::EXCLUSIVE, &policyInfos) == NO_ERROR) {
-        mmapExclusivePolicy = AAudio_getAAudioPolicy(policyInfos);
+        mmapExclusivePolicy = AAudio_getAAudioPolicy(
+                policyInfos, AAUDIO_MMAP_EXCLUSIVE_POLICY_DEFAULT_AIDL);
     }
     if (mmapExclusivePolicy == AAUDIO_UNSPECIFIED) {
         mmapExclusivePolicy = AAUDIO_MMAP_EXCLUSIVE_POLICY_DEFAULT;
diff --git a/media/libaaudio/src/flowgraph/SampleRateConverter.cpp b/media/libaaudio/src/flowgraph/SampleRateConverter.cpp
index a15fcb8..890057d 100644
--- a/media/libaaudio/src/flowgraph/SampleRateConverter.cpp
+++ b/media/libaaudio/src/flowgraph/SampleRateConverter.cpp
@@ -28,7 +28,8 @@
 
 void SampleRateConverter::reset() {
     FlowGraphNode::reset();
-    mInputCursor = kInitialCallCount;
+    mInputCallCount = kInitialCallCount;
+    mInputCursor = 0;
 }
 
 // Return true if there is a sample available.
diff --git a/media/libaaudio/src/flowgraph/SampleRateConverter.h b/media/libaaudio/src/flowgraph/SampleRateConverter.h
index f883e6c..a4318f0 100644
--- a/media/libaaudio/src/flowgraph/SampleRateConverter.h
+++ b/media/libaaudio/src/flowgraph/SampleRateConverter.h
@@ -54,7 +54,7 @@
     int32_t mNumValidInputFrames = 0; // number of valid frames currently in the input port buffer
     // We need our own callCount for upstream calls because calls occur at a different rate.
     // This means we cannot have cyclic graphs or merges that contain an SRC.
-    int64_t mInputCallCount = 0;
+    int64_t mInputCallCount = kInitialCallCount;
 
 };
 
diff --git a/media/libaaudio/src/utility/AAudioUtilities.cpp b/media/libaaudio/src/utility/AAudioUtilities.cpp
index 0cbf79d..3df23ee 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.cpp
+++ b/media/libaaudio/src/utility/AAudioUtilities.cpp
@@ -680,12 +680,16 @@
 
 } // namespace
 
-aaudio_policy_t AAudio_getAAudioPolicy(const std::vector<AudioMMapPolicyInfo>& policyInfos) {
-    if (policyInfos.empty()) return AAUDIO_POLICY_AUTO;
-    for (size_t i = 1; i < policyInfos.size(); ++i) {
-        if (policyInfos.at(i).mmapPolicy != policyInfos.at(0).mmapPolicy) {
+aaudio_policy_t AAudio_getAAudioPolicy(const std::vector<AudioMMapPolicyInfo>& policyInfos,
+                                       AudioMMapPolicy defaultPolicy) {
+    AudioMMapPolicy policy = defaultPolicy;
+    for (const auto& policyInfo : policyInfos) {
+        if (policyInfo.mmapPolicy == AudioMMapPolicy::NEVER) {
+            policy = policyInfo.mmapPolicy;
+        } else if (policyInfo.mmapPolicy == AudioMMapPolicy::AUTO ||
+                   policyInfo.mmapPolicy == AudioMMapPolicy::ALWAYS) {
             return AAUDIO_POLICY_AUTO;
         }
     }
-    return aidl2legacy_aaudio_policy(policyInfos.at(0).mmapPolicy);
+    return aidl2legacy_aaudio_policy(policy);
 }
diff --git a/media/libaaudio/src/utility/AAudioUtilities.h b/media/libaaudio/src/utility/AAudioUtilities.h
index d44bbab..7c351e1 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.h
+++ b/media/libaaudio/src/utility/AAudioUtilities.h
@@ -348,9 +348,19 @@
     AAUDIO_CHANNEL_INDEX_MASK_24 = AAUDIO_CHANNEL_BIT_INDEX | (1 << 24) - 1,
 };
 
-// The aaudio policy will be ALWAYS, NEVER, UNSPECIFIED only when all policy info are
-// ALWAYS, NEVER or UNSPECIFIED. Otherwise, the aaudio policy will be AUTO.
+/**
+ * Returns the aaudio mmap policy based on the vector of mmap policy info. The rules are:
+ * 1. Return AUTO if any policy is AUTO or ALWAYS.
+ * 2. Return NEVER if at least one policy is NEVER and none is AUTO or ALWAYS.
+ * 3. Return the default policy if all of the policies are UNSPECIFIED.
+ *
+ * @param policyInfos the mmap policy info to evaluate
+ * @param defaultPolicy the policy to return when every entry is UNSPECIFIED
+ * @return the resolved aaudio mmap policy
+ */
 aaudio_policy_t AAudio_getAAudioPolicy(
-        const std::vector<android::media::audio::common::AudioMMapPolicyInfo>& policyInfos);
+        const std::vector<android::media::audio::common::AudioMMapPolicyInfo>& policyInfos,
+        android::media::audio::common::AudioMMapPolicy defaultPolicy =
+                android::media::audio::common::AudioMMapPolicy::NEVER);
 
 #endif //UTILITY_AAUDIO_UTILITIES_H
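An illustrative sketch of the policy-resolution rules documented above, using a local enum in place of the AIDL AudioMMapPolicy type; all names here are assumptions, not part of the patch:

#include <vector>

enum class Policy { UNSPECIFIED, NEVER, AUTO, ALWAYS };

Policy resolveMMapPolicy(const std::vector<Policy>& infos, Policy defaultPolicy) {
    Policy policy = defaultPolicy;
    for (Policy p : infos) {
        if (p == Policy::AUTO || p == Policy::ALWAYS) {
            return Policy::AUTO;           // rule 1: any AUTO/ALWAYS wins
        }
        if (p == Policy::NEVER) {
            policy = Policy::NEVER;        // rule 2: NEVER sticks unless rule 1 fires
        }
    }
    return policy;                         // rule 3: all UNSPECIFIED -> default
}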
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index 9ccabe9..5785537 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -315,6 +315,7 @@
         "aidl/android/media/DeviceConnectedState.aidl",
         "aidl/android/media/EffectDescriptor.aidl",
         "aidl/android/media/SurroundSoundConfig.aidl",
+        "aidl/android/media/TrackInternalMuteInfo.aidl",
         "aidl/android/media/TrackSecondaryOutputInfo.aidl",
     ],
     defaults: [
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index aa51652..3f4fcfd 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -1289,6 +1289,21 @@
     (void) status;
 }
 
+status_t AudioSystem::setDeviceAbsoluteVolumeEnabled(audio_devices_t deviceType,
+                                                     const char *address,
+                                                     bool enabled,
+                                                     audio_stream_type_t streamToDriveAbs) {
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
+    if (aps == nullptr) return PERMISSION_DENIED;
+
+    AudioDevice deviceAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_device_AudioDevice(deviceType, address));
+    AudioStreamType streamToDriveAbsAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_stream_type_t_AudioStreamType(streamToDriveAbs));
+    return statusTFromBinderStatus(
+            aps->setDeviceAbsoluteVolumeEnabled(deviceAidl, enabled, streamToDriveAbsAidl));
+}
+
 status_t AudioSystem::initStreamVolume(audio_stream_type_t stream,
                                        int indexMin,
                                        int indexMax) {
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 5d96d8e..6772201 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -1680,14 +1680,14 @@
         mSelectedDeviceId = deviceId;
         if (mStatus == NO_ERROR) {
             if (isOffloadedOrDirect_l()) {
-                if (mState == STATE_STOPPED || mState == STATE_FLUSHED) {
-                    ALOGD("%s(%d): creating a new AudioTrack", __func__, mPortId);
-                    result = restoreTrack_l("setOutputDevice", true /* forceRestore */);
-                } else {
+                if (isPlaying_l()) {
                     ALOGW("%s(%d). Offloaded or Direct track is not STOPPED or FLUSHED. "
                           "State: %s.",
                             __func__, mPortId, stateToString(mState));
                     result = INVALID_OPERATION;
+                } else {
+                    ALOGD("%s(%d): creating a new AudioTrack", __func__, mPortId);
+                    result = restoreTrack_l("setOutputDevice", true /* forceRestore */);
                 }
             } else {
                 // allow track invalidation when track is not playing to propagate
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index aa5c840..cf3b43a 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -918,6 +918,11 @@
     return OK;
 }
 
+status_t AudioFlingerClientAdapter::setTracksInternalMute(
+        const std::vector<media::TrackInternalMuteInfo>& tracksInternalMuted) {
+    return statusTFromBinderStatus(mDelegate->setTracksInternalMute(tracksInternalMuted));
+}
+
 ////////////////////////////////////////////////////////////////////////////////////////////////////
 // AudioFlingerServerAdapter
 AudioFlingerServerAdapter::AudioFlingerServerAdapter(
@@ -1477,4 +1482,9 @@
     return Status::ok();
 }
 
+Status AudioFlingerServerAdapter::setTracksInternalMute(
+        const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) {
+    return Status::fromStatusT(mDelegate->setTracksInternalMute(tracksInternalMute));
+}
+
 } // namespace android
diff --git a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
index 31d3af5..e8fcf77 100644
--- a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
@@ -41,6 +41,7 @@
 import android.media.ISoundDoseCallback;
 import android.media.MicrophoneInfoFw;
 import android.media.RenderPosition;
+import android.media.TrackInternalMuteInfo;
 import android.media.TrackSecondaryOutputInfo;
 import android.media.audio.common.AudioChannelLayout;
 import android.media.audio.common.AudioFormatDescription;
@@ -293,6 +294,11 @@
      */
     AudioPortFw getAudioMixPort(in AudioPortFw devicePort, in AudioPortFw mixPort);
 
+    /**
+     * Set internal mute for a list of tracks.
+     */
+    void setTracksInternalMute(in TrackInternalMuteInfo[] tracksInternalMute);
+
     // When adding a new method, please review and update
     // IAudioFlinger.h AudioFlingerServerAdapter::Delegate::TransactionCode
     // AudioFlinger.cpp AudioFlinger::onTransactWrapper()
diff --git a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
index 7894699..ac42ea9 100644
--- a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
@@ -116,6 +116,10 @@
 
     void releaseInput(int /* audio_port_handle_t */ portId);
 
+    oneway void setDeviceAbsoluteVolumeEnabled(in AudioDevice device,
+                                               boolean enabled,
+                                               AudioStreamType streamToDriveAbs);
+
     void initStreamVolume(AudioStreamType stream,
                           int indexMin,
                           int indexMax);
diff --git a/media/libaudioclient/aidl/android/media/TrackInternalMuteInfo.aidl b/media/libaudioclient/aidl/android/media/TrackInternalMuteInfo.aidl
new file mode 100644
index 0000000..05b1fa4
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/TrackInternalMuteInfo.aidl
@@ -0,0 +1,23 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+parcelable TrackInternalMuteInfo {
+    /* Interpreted as audio_port_handle_t. */
+    int portId;
+    boolean muted;
+}
diff --git a/media/libaudioclient/aidl/fuzzer/Android.bp b/media/libaudioclient/aidl/fuzzer/Android.bp
index f58be0b..11955c9 100644
--- a/media/libaudioclient/aidl/fuzzer/Android.bp
+++ b/media/libaudioclient/aidl/fuzzer/Android.bp
@@ -79,7 +79,7 @@
     ],
     fuzz_config: {
         cc: [
-            "android-media-fuzzing-reports@google.com",
+            "android-audio-fuzzing-reports@google.com",
         ],
         componentid: 155276,
         hotlists: ["4593311"],
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 5c9a7c6..9cfd540 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -374,9 +374,13 @@
     static status_t startInput(audio_port_handle_t portId);
     static status_t stopInput(audio_port_handle_t portId);
     static void releaseInput(audio_port_handle_t portId);
+    static status_t setDeviceAbsoluteVolumeEnabled(audio_devices_t deviceType,
+                                                   const char *address,
+                                                   bool enabled,
+                                                   audio_stream_type_t streamToDriveAbs);
     static status_t initStreamVolume(audio_stream_type_t stream,
-                                      int indexMin,
-                                      int indexMax);
+                                     int indexMin,
+                                     int indexMax);
     static status_t setStreamVolumeIndex(audio_stream_type_t stream,
                                          int index,
                                          audio_devices_t device);
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index 5a1e037..860a0bc 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -60,6 +60,7 @@
 #include "android/media/OpenInputResponse.h"
 #include "android/media/OpenOutputRequest.h"
 #include "android/media/OpenOutputResponse.h"
+#include "android/media/TrackInternalMuteInfo.h"
 #include "android/media/TrackSecondaryOutputInfo.h"
 
 namespace android {
@@ -388,6 +389,9 @@
 
     virtual status_t getAudioMixPort(const struct audio_port_v7 *devicePort,
                                      struct audio_port_v7 *mixPort) const = 0;
+
+    virtual status_t setTracksInternalMute(
+            const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) = 0;
 };
 
 /**
@@ -504,6 +508,8 @@
     status_t getAudioPolicyConfig(media::AudioPolicyConfig* output) override;
     status_t getAudioMixPort(const struct audio_port_v7 *devicePort,
                              struct audio_port_v7 *mixPort) const override;
+    status_t setTracksInternalMute(
+            const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) override;
 
 private:
     const sp<media::IAudioFlingerService> mDelegate;
@@ -606,6 +612,7 @@
             GET_AUDIO_POLICY_CONFIG =
                     media::BnAudioFlingerService::TRANSACTION_getAudioPolicyConfig,
             GET_AUDIO_MIX_PORT = media::BnAudioFlingerService::TRANSACTION_getAudioMixPort,
+            SET_TRACKS_INTERNAL_MUTE =
+                    media::BnAudioFlingerService::TRANSACTION_setTracksInternalMute,
         };
 
     protected:
@@ -742,6 +749,8 @@
     Status getAudioMixPort(const media::AudioPortFw& devicePort,
                            const media::AudioPortFw& mixPort,
                            media::AudioPortFw* _aidl_return) override;
+    Status setTracksInternalMute(
+            const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) override;
 private:
     const sp<AudioFlingerServerAdapter::Delegate> mDelegate;
 };
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index dd8f021..1a6b949 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -227,11 +227,11 @@
         "latest_android_hardware_audio_core_sounddose_ndk_shared",
         "latest_android_hardware_audio_effect_ndk_shared",
         "latest_android_media_audio_common_types_ndk_shared",
+        "latest_av_audio_types_aidl_ndk_shared",
     ],
     shared_libs: [
         "android.hardware.common-V2-ndk",
         "android.hardware.common.fmq-V1-ndk",
-        "av-audio-types-aidl-V1-ndk",
         "libaudio_aidl_conversion_common_cpp",
         "libaudio_aidl_conversion_common_ndk",
         "libaudio_aidl_conversion_common_ndk_cpp",
diff --git a/media/libaudiohal/impl/DeviceHalAidl.cpp b/media/libaudiohal/impl/DeviceHalAidl.cpp
index 032533c..1990858 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalAidl.cpp
@@ -387,7 +387,7 @@
         return runCb([](CbRef cb) { cb->onWriteReady(); });
     }
     ndk::ScopedAStatus onError() override {
-        return runCb([](CbRef cb) { cb->onError(); });
+        return runCb([](CbRef cb) { cb->onError(true /*isHardError*/); });
     }
     ndk::ScopedAStatus onDrainReady() override {
         return runCb([](CbRef cb) { cb->onDrainReady(); });
diff --git a/media/libaudiohal/impl/EffectConversionHelperAidl.cpp b/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
index ff6126d..a13903b 100644
--- a/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
+++ b/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
@@ -29,6 +29,7 @@
 #include <system/audio_effects/effect_visualizer.h>
 
 #include <utils/Log.h>
+#include <Utils.h>
 
 #include "EffectConversionHelperAidl.h"
 #include "EffectProxy.h"
@@ -37,18 +38,20 @@
 namespace effect {
 
 using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::common::getChannelCount;
 using ::aidl::android::hardware::audio::effect::CommandId;
 using ::aidl::android::hardware::audio::effect::Descriptor;
 using ::aidl::android::hardware::audio::effect::Flags;
 using ::aidl::android::hardware::audio::effect::IEffect;
 using ::aidl::android::hardware::audio::effect::Parameter;
 using ::aidl::android::hardware::audio::effect::State;
+using ::aidl::android::media::audio::common::AudioChannelLayout;
 using ::aidl::android::media::audio::common::AudioDeviceDescription;
 using ::aidl::android::media::audio::common::AudioMode;
 using ::aidl::android::media::audio::common::AudioSource;
-using ::android::hardware::EventFlag;
 using android::effect::utils::EffectParamReader;
 using android::effect::utils::EffectParamWriter;
+using android::hardware::EventFlag;
 
 using ::android::status_t;
 
@@ -519,5 +522,15 @@
     return OK;
 }
 
+size_t EffectConversionHelperAidl::getAudioChannelCount() const {
+    return getChannelCount(mCommon.input.base.channelMask,
+                           ~AudioChannelLayout::LAYOUT_HAPTIC_AB /* mask */);
+}
+
+size_t EffectConversionHelperAidl::getHapticChannelCount() const {
+    return getChannelCount(mCommon.input.base.channelMask,
+                           AudioChannelLayout::LAYOUT_HAPTIC_AB /* mask */);
+}
+
 }  // namespace effect
 }  // namespace android
diff --git a/media/libaudiohal/impl/EffectConversionHelperAidl.h b/media/libaudiohal/impl/EffectConversionHelperAidl.h
index 29c5a83..50b47a9 100644
--- a/media/libaudiohal/impl/EffectConversionHelperAidl.h
+++ b/media/libaudiohal/impl/EffectConversionHelperAidl.h
@@ -49,6 +49,9 @@
     ::aidl::android::hardware::audio::effect::Descriptor getDescriptor() const;
     status_t reopen();
 
+    size_t getAudioChannelCount() const;
+    size_t getHapticChannelCount() const;
+
     uint8_t mOutputAccessMode = EFFECT_BUFFER_ACCESS_WRITE;
 
   protected:
diff --git a/media/libaudiohal/impl/EffectHalAidl.cpp b/media/libaudiohal/impl/EffectHalAidl.cpp
index 87d24a5..ea4dbf6 100644
--- a/media/libaudiohal/impl/EffectHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectHalAidl.cpp
@@ -15,6 +15,7 @@
  */
 
 #include <cstddef>
+#include <cstring>
 #define LOG_TAG "EffectHalAidl"
 //#define LOG_NDEBUG 0
 
@@ -58,7 +59,9 @@
 using ::aidl::android::hardware::audio::effect::Descriptor;
 using ::aidl::android::hardware::audio::effect::IEffect;
 using ::aidl::android::hardware::audio::effect::IFactory;
+using ::aidl::android::hardware::audio::effect::kEventFlagDataMqNotEmpty;
 using ::aidl::android::hardware::audio::effect::kEventFlagDataMqUpdate;
+using ::aidl::android::hardware::audio::effect::kEventFlagNotEmpty;
 using ::aidl::android::hardware::audio::effect::kReopenSupportedVersion;
 using ::aidl::android::hardware::audio::effect::State;
 
@@ -126,6 +129,7 @@
                ::aidl::android::hardware::audio::effect::getEffectTypeUuidHapticGenerator()) {
         mConversion = std::make_unique<android::effect::AidlConversionHapticGenerator>(
                 effect, sessionId, ioId, desc, mIsProxyEffect);
+        mIsHapticGenerator = true;
     } else if (typeUuid ==
                ::aidl::android::hardware::audio::effect::getEffectTypeUuidLoudnessEnhancer()) {
         mConversion = std::make_unique<android::effect::AidlConversionLoudnessEnhancer>(
@@ -198,8 +202,9 @@
                               ::android::OK == efGroup->wait(kEventFlagDataMqUpdate, &efState,
                                                              1 /* ns */, true /* retry */) &&
                               efState & kEventFlagDataMqUpdate) {
-        ALOGV("%s %s V%d receive dataMQUpdate eventFlag from HAL", __func__, effectName.c_str(),
+        ALOGD("%s %s V%d receive dataMQUpdate eventFlag from HAL", __func__, effectName.c_str(),
               halVersion);
+
         mConversion->reopen();
     }
     auto statusQ = mConversion->getStatusMQ();
@@ -213,7 +218,7 @@
     }
 
     size_t available = inputQ->availableToWrite();
-    size_t floatsToWrite = std::min(available, mInBuffer->getSize() / sizeof(float));
+    const size_t floatsToWrite = std::min(available, mInBuffer->getSize() / sizeof(float));
     if (floatsToWrite == 0) {
         ALOGE("%s not able to write, floats in buffer %zu, space in FMQ %zu", __func__,
               mInBuffer->getSize() / sizeof(float), available);
@@ -225,17 +230,27 @@
               floatsToWrite, mInBuffer->audioBuffer(), inputQ->availableToWrite());
         return INVALID_OPERATION;
     }
-    efGroup->wake(aidl::android::hardware::audio::effect::kEventFlagNotEmpty);
+
+    // For the V2 audio effect HAL, use a different EventFlag bit to avoid conflict with FMQ_NOT_EMPTY
+    efGroup->wake(halVersion >= kReopenSupportedVersion ? kEventFlagDataMqNotEmpty
+                                                        : kEventFlagNotEmpty);
 
     IEffect::Status retStatus{};
-    if (!statusQ->readBlocking(&retStatus, 1) || retStatus.status != OK ||
-        (size_t)retStatus.fmqConsumed != floatsToWrite || retStatus.fmqProduced == 0) {
-        ALOGE("%s read status failed: %s", __func__, retStatus.toString().c_str());
+    if (!statusQ->readBlocking(&retStatus, 1)) {
+        ALOGE("%s %s V%d read status from status FMQ failed", __func__, effectName.c_str(),
+              halVersion);
+        return INVALID_OPERATION;
+    }
+    if (retStatus.status != OK || (size_t)retStatus.fmqConsumed != floatsToWrite ||
+        retStatus.fmqProduced == 0) {
+        ALOGE("%s read status failed: %s, consumed %d (of %zu) produced %d", __func__,
+              retStatus.toString().c_str(), retStatus.fmqConsumed, floatsToWrite,
+              retStatus.fmqProduced);
         return INVALID_OPERATION;
     }
 
     available = outputQ->availableToRead();
-    size_t floatsToRead = std::min(available, mOutBuffer->getSize() / sizeof(float));
+    const size_t floatsToRead = std::min(available, mOutBuffer->getSize() / sizeof(float));
     if (floatsToRead == 0) {
         ALOGE("%s not able to read, buffer space %zu, floats in FMQ %zu", __func__,
               mOutBuffer->getSize() / sizeof(float), available);
@@ -244,7 +259,8 @@
 
     float *outputRawBuffer = mOutBuffer->audioBuffer()->f32;
     std::vector<float> tempBuffer;
-    if (mConversion->mOutputAccessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
+    // Keep the original data in the output buffer for accumulate mode or the HapticGenerator effect.
+    if (mConversion->mOutputAccessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE || mIsHapticGenerator) {
         tempBuffer.resize(floatsToRead);
         outputRawBuffer = tempBuffer.data();
     }
@@ -254,7 +270,31 @@
               mOutBuffer->audioBuffer());
         return INVALID_OPERATION;
     }
-    if (mConversion->mOutputAccessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
+
+    // HapticGenerator needs special handling because the generated haptic samples must be appended
+    // to the end of the audio samples. The HAL returns the generated haptic data in the output FMQ
+    // at the same offset as in the input buffer, so here we skip the audio samples in the output
+    // FMQ and append the haptic samples to the end of the input buffer.
+    if (mIsHapticGenerator) {
+        static constexpr float kHalFloatSampleLimit = 2.0f;
+        assert(floatsToRead == floatsToWrite);
+        const auto audioChNum = mConversion->getAudioChannelCount();
+        const auto audioSamples =
+                floatsToWrite * audioChNum / (audioChNum + mConversion->getHapticChannelCount());
+        // Accumulate or copy the input audio into the output; the haptic samples remain all zero.
+        if (mConversion->mOutputAccessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
+            accumulate_float(mOutBuffer->audioBuffer()->f32, mInBuffer->audioBuffer()->f32,
+                             audioSamples);
+        } else {
+            memcpy_to_float_from_float_with_clamping(mOutBuffer->audioBuffer()->f32,
+                                                     mInBuffer->audioBuffer()->f32, audioSamples,
+                                                     kHalFloatSampleLimit);
+        }
+        // Append the haptic samples at the end of the input audio samples.
+        memcpy_to_float_from_float_with_clamping(mInBuffer->audioBuffer()->f32 + audioSamples,
+                                                 outputRawBuffer + audioSamples,
+                                                 floatsToRead - audioSamples, kHalFloatSampleLimit);
+    } else if (mConversion->mOutputAccessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
         accumulate_float(mOutBuffer->audioBuffer()->f32, outputRawBuffer, floatsToRead);
     }
 
diff --git a/media/libaudiohal/impl/EffectHalAidl.h b/media/libaudiohal/impl/EffectHalAidl.h
index bbcb7e2..4f7de7c 100644
--- a/media/libaudiohal/impl/EffectHalAidl.h
+++ b/media/libaudiohal/impl/EffectHalAidl.h
@@ -73,6 +73,7 @@
     const int32_t mSessionId;
     const int32_t mIoId;
     const bool mIsProxyEffect;
+    bool mIsHapticGenerator = false;
 
     std::unique_ptr<EffectConversionHelperAidl> mConversion;
 
diff --git a/media/libaudiohal/impl/Hal2AidlMapper.cpp b/media/libaudiohal/impl/Hal2AidlMapper.cpp
index 263e3e9..cbade70 100644
--- a/media/libaudiohal/impl/Hal2AidlMapper.cpp
+++ b/media/libaudiohal/impl/Hal2AidlMapper.cpp
@@ -136,8 +136,8 @@
     // 'sinks' will not be updated because 'setAudioPatch' only needs IDs. Here we log
     // the source arguments, where only the audio configuration and device specifications
     // are relevant.
-    ALOGD("%s: [disregard IDs] sources: %s, sinks: %s",
-            __func__, ::android::internal::ToString(sources).c_str(),
+    ALOGD("%s: patch ID: %d, [disregard IDs] sources: %s, sinks: %s",
+            __func__, *patchId, ::android::internal::ToString(sources).c_str(),
             ::android::internal::ToString(sinks).c_str());
     auto fillPortConfigs = [&](
             const std::vector<AudioPortConfig>& configs,
@@ -181,7 +181,9 @@
     };
     // When looking up port configs, the destinationPortId is only used for mix ports.
     // Thus, we process device port configs first, and look up the destination port ID from them.
-    bool sourceIsDevice = std::any_of(sources.begin(), sources.end(),
+    const bool sourceIsDevice = std::any_of(sources.begin(), sources.end(),
+            [](const auto& config) { return config.ext.getTag() == AudioPortExt::device; });
+    const bool sinkIsDevice = std::any_of(sinks.begin(), sinks.end(),
             [](const auto& config) { return config.ext.getTag() == AudioPortExt::device; });
     const std::vector<AudioPortConfig>& devicePortConfigs =
             sourceIsDevice ? sources : sinks;
@@ -202,10 +204,29 @@
         existingPatchIt->second = patch;
     } else {
         bool created = false;
-        RETURN_STATUS_IF_ERROR(findOrCreatePatch(patch, &patch, &created));
+        // When the framework does not specify a patch ID, only the mix port config
+        // is used for finding an existing patch. That's because the framework assumes
+        // that there can only be one patch for an I/O thread.
+        PatchMatch match = sourceIsDevice && sinkIsDevice ?
+                MATCH_BOTH : (sourceIsDevice ? MATCH_SINKS : MATCH_SOURCES);
+        auto requestedPatch = patch;
+        RETURN_STATUS_IF_ERROR(findOrCreatePatch(patch, match,
+                                                 &patch, &created));
         // No cleanup of the patch is needed, it is managed by the framework.
         *patchId = patch.id;
         if (!created) {
+            requestedPatch.id = patch.id;
+            if (patch != requestedPatch) {
+                ALOGI("%s: Updating transient patch. Current: %s, new: %s",
+                        __func__, patch.toString().c_str(), requestedPatch.toString().c_str());
+                // Since matching may be done by mix port only, update the patch if the device port
+                // config has changed.
+                patch = requestedPatch;
+                RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+                                mModule->setAudioPatch(patch, &patch)));
+                existingPatchIt = mPatches.find(patch.id);
+                existingPatchIt->second = patch;
+            }
             // The framework might have "created" a patch which already existed due to
             // stream creation. Need to release the ownership from the stream.
             for (auto& s : mStreams) {
@@ -274,18 +295,18 @@
 }
 
 status_t Hal2AidlMapper::findOrCreatePatch(
-        const AudioPatch& requestedPatch, AudioPatch* patch, bool* created) {
+        const AudioPatch& requestedPatch, PatchMatch match, AudioPatch* patch, bool* created) {
     std::set<int32_t> sourcePortConfigIds(requestedPatch.sourcePortConfigIds.begin(),
             requestedPatch.sourcePortConfigIds.end());
     std::set<int32_t> sinkPortConfigIds(requestedPatch.sinkPortConfigIds.begin(),
             requestedPatch.sinkPortConfigIds.end());
-    return findOrCreatePatch(sourcePortConfigIds, sinkPortConfigIds, patch, created);
+    return findOrCreatePatch(sourcePortConfigIds, sinkPortConfigIds, match, patch, created);
 }
 
 status_t Hal2AidlMapper::findOrCreatePatch(
         const std::set<int32_t>& sourcePortConfigIds, const std::set<int32_t>& sinkPortConfigIds,
-        AudioPatch* patch, bool* created) {
-    auto patchIt = findPatch(sourcePortConfigIds, sinkPortConfigIds);
+        PatchMatch match, AudioPatch* patch, bool* created) {
+    auto patchIt = findPatch(sourcePortConfigIds, sinkPortConfigIds, match);
     if (patchIt == mPatches.end()) {
         AudioPatch requestedPatch, appliedPatch;
         requestedPatch.sourcePortConfigIds.insert(requestedPatch.sourcePortConfigIds.end(),
@@ -456,7 +477,8 @@
 }
 
 Hal2AidlMapper::Patches::iterator Hal2AidlMapper::findPatch(
-        const std::set<int32_t>& sourcePortConfigIds, const std::set<int32_t>& sinkPortConfigIds) {
+        const std::set<int32_t>& sourcePortConfigIds, const std::set<int32_t>& sinkPortConfigIds,
+        PatchMatch match) {
     return std::find_if(mPatches.begin(), mPatches.end(),
             [&](const auto& pair) {
                 const auto& p = pair.second;
@@ -464,7 +486,15 @@
                         p.sourcePortConfigIds.begin(), p.sourcePortConfigIds.end());
                 std::set<int32_t> patchSinks(
                         p.sinkPortConfigIds.begin(), p.sinkPortConfigIds.end());
-                return sourcePortConfigIds == patchSrcs && sinkPortConfigIds == patchSinks; });
+                switch (match) {
+                    case MATCH_SOURCES:
+                        return sourcePortConfigIds == patchSrcs;
+                    case MATCH_SINKS:
+                        return sinkPortConfigIds == patchSinks;
+                    case MATCH_BOTH:
+                        return sourcePortConfigIds == patchSrcs && sinkPortConfigIds == patchSinks;
+                }
+            });
 }
 
 Hal2AidlMapper::Ports::iterator Hal2AidlMapper::findPort(const AudioDevice& device) {
@@ -816,10 +846,10 @@
     }
     if (isInput) {
         RETURN_STATUS_IF_ERROR(findOrCreatePatch(
-                        {devicePortConfigId}, {mixPortConfig->id}, patch, &created));
+                        {devicePortConfigId}, {mixPortConfig->id}, MATCH_BOTH, patch, &created));
     } else {
         RETURN_STATUS_IF_ERROR(findOrCreatePatch(
-                        {mixPortConfig->id}, {devicePortConfigId}, patch, &created));
+                        {mixPortConfig->id}, {devicePortConfigId}, MATCH_BOTH, patch, &created));
     }
     if (created) {
         cleanups->add(&Hal2AidlMapper::resetPatch, patch->id);
diff --git a/media/libaudiohal/impl/Hal2AidlMapper.h b/media/libaudiohal/impl/Hal2AidlMapper.h
index f302c23..c70c8af 100644
--- a/media/libaudiohal/impl/Hal2AidlMapper.h
+++ b/media/libaudiohal/impl/Hal2AidlMapper.h
@@ -133,6 +133,8 @@
     using Streams = std::map<wp<StreamHalInterface>,
             std::pair<int32_t /*mix port config ID*/, int32_t /*patch ID*/>>;
 
+    enum PatchMatch { MATCH_SOURCES, MATCH_SINKS, MATCH_BOTH };
+
     const std::string mInstance;
     const std::shared_ptr<::aidl::android::hardware::audio::core::IModule> mModule;
 
@@ -150,11 +152,13 @@
             ::aidl::android::media::audio::common::AudioPortConfig* result, bool *created);
     void eraseConnectedPort(int32_t portId);
     status_t findOrCreatePatch(
-        const std::set<int32_t>& sourcePortConfigIds,
-        const std::set<int32_t>& sinkPortConfigIds,
+            const std::set<int32_t>& sourcePortConfigIds,
+            const std::set<int32_t>& sinkPortConfigIds,
+            PatchMatch match,
         ::aidl::android::hardware::audio::core::AudioPatch* patch, bool* created);
     status_t findOrCreatePatch(
         const ::aidl::android::hardware::audio::core::AudioPatch& requestedPatch,
+        PatchMatch match,
         ::aidl::android::hardware::audio::core::AudioPatch* patch, bool* created);
     status_t findOrCreateDevicePortConfig(
             const ::aidl::android::media::audio::common::AudioDevice& device,
@@ -175,7 +179,7 @@
         const std::set<int32_t>& destinationPortIds,
         ::aidl::android::media::audio::common::AudioPortConfig* portConfig, bool* created);
     Patches::iterator findPatch(const std::set<int32_t>& sourcePortConfigIds,
-            const std::set<int32_t>& sinkPortConfigIds);
+            const std::set<int32_t>& sinkPortConfigIds, PatchMatch match);
     Ports::iterator findPort(const ::aidl::android::media::audio::common::AudioDevice& device);
     Ports::iterator findPort(
             const ::aidl::android::media::audio::common::AudioConfig& config,
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
index 46f4f13..2fb4756 100644
--- a/media/libaudiohal/impl/StreamHalAidl.cpp
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -202,8 +202,12 @@
     StreamDescriptor::Reply reply;
     switch (state) {
         case StreamDescriptor::State::ACTIVE:
+        case StreamDescriptor::State::DRAINING:
+        case StreamDescriptor::State::TRANSFERRING:
             RETURN_STATUS_IF_ERROR(pause(&reply));
-            if (reply.state != StreamDescriptor::State::PAUSED) {
+            if (reply.state != StreamDescriptor::State::PAUSED &&
+                    reply.state != StreamDescriptor::State::DRAIN_PAUSED &&
+                    reply.state != StreamDescriptor::State::TRANSFER_PAUSED) {
                 ALOGE("%s: unexpected stream state: %s (expected PAUSED)",
                         __func__, toString(reply.state).c_str());
                 return INVALID_OPERATION;
@@ -211,6 +215,7 @@
             FALLTHROUGH_INTENDED;
         case StreamDescriptor::State::PAUSED:
         case StreamDescriptor::State::DRAIN_PAUSED:
+        case StreamDescriptor::State::TRANSFER_PAUSED:
             if (mIsInput) return flush();
             RETURN_STATUS_IF_ERROR(flush(&reply));
             if (reply.state != StreamDescriptor::State::IDLE) {
@@ -252,20 +257,71 @@
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    const auto state = getState();
-    StreamDescriptor::Reply reply;
-    if (state == StreamDescriptor::State::STANDBY) {
-        RETURN_STATUS_IF_ERROR(sendCommand(makeHalCommand<HalCommand::Tag::start>(), &reply, true));
-        return sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), &reply, true);
+    if (!mContext.isMmapped()) {
+        return BAD_VALUE;
     }
-
-    return INVALID_OPERATION;
+    StreamDescriptor::Reply reply;
+    RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
+    switch (reply.state) {
+        case StreamDescriptor::State::STANDBY:
+            RETURN_STATUS_IF_ERROR(
+                    sendCommand(makeHalCommand<HalCommand::Tag::start>(), &reply, true));
+            if (reply.state != StreamDescriptor::State::IDLE) {
+                ALOGE("%s: unexpected stream state: %s (expected IDLE)",
+                        __func__, toString(reply.state).c_str());
+                return INVALID_OPERATION;
+            }
+            FALLTHROUGH_INTENDED;
+        case StreamDescriptor::State::IDLE:
+            RETURN_STATUS_IF_ERROR(
+                    sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), &reply, true));
+            if (reply.state != StreamDescriptor::State::ACTIVE) {
+                ALOGE("%s: unexpected stream state: %s (expected ACTIVE)",
+                        __func__, toString(reply.state).c_str());
+                return INVALID_OPERATION;
+            }
+            FALLTHROUGH_INTENDED;
+        case StreamDescriptor::State::ACTIVE:
+            return OK;
+        case StreamDescriptor::State::DRAINING:
+            RETURN_STATUS_IF_ERROR(
+                    sendCommand(makeHalCommand<HalCommand::Tag::start>(), &reply, true));
+            if (reply.state != StreamDescriptor::State::ACTIVE) {
+                ALOGE("%s: unexpected stream state: %s (expected ACTIVE)",
+                        __func__, toString(reply.state).c_str());
+                return INVALID_OPERATION;
+            }
+            return OK;
+        default:
+            ALOGE("%s: not supported from %s stream state %s",
+                    __func__, mIsInput ? "input" : "output", toString(reply.state).c_str());
+            return INVALID_OPERATION;
+    }
 }
 
 status_t StreamHalAidl::stop() {
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    TIME_CHECK();
     if (!mStream) return NO_INIT;
-    return standby();
+    if (!mContext.isMmapped()) {
+        return BAD_VALUE;
+    }
+    StreamDescriptor::Reply reply;
+    RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
+    if (const auto state = reply.state; state == StreamDescriptor::State::ACTIVE) {
+        return drain(false /*earlyNotify*/, nullptr);
+    } else if (state == StreamDescriptor::State::DRAINING) {
+        RETURN_STATUS_IF_ERROR(pause());
+        return flush();
+    } else if (state == StreamDescriptor::State::PAUSED) {
+        return flush();
+    } else if (state != StreamDescriptor::State::IDLE &&
+            state != StreamDescriptor::State::STANDBY) {
+        ALOGE("%s: not supported from %s stream state %s",
+                __func__, mIsInput ? "input" : "output", toString(state).c_str());
+        return INVALID_OPERATION;
+    }
+    return OK;
 }
 
 status_t StreamHalAidl::getLatency(uint32_t *latency) {
@@ -280,11 +336,12 @@
     return OK;
 }
 
-status_t StreamHalAidl::getObservablePosition(int64_t *frames, int64_t *timestamp) {
+status_t StreamHalAidl::getObservablePosition(int64_t* frames, int64_t* timestamp,
+        StatePositions* statePositions) {
     ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
     if (!mStream) return NO_INIT;
     StreamDescriptor::Reply reply;
-    RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
+    RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply, statePositions));
     *frames = std::max<int64_t>(0, reply.observable.frames);
     *timestamp = std::max<int64_t>(0, reply.observable.timeNs);
     return OK;
@@ -327,8 +384,11 @@
             return INVALID_OPERATION;
         }
     }
+    StreamContextAidl::DataMQ::Error fmqError = StreamContextAidl::DataMQ::Error::NONE;
+    std::string fmqErrorMsg;
     if (!mIsInput) {
-        bytes = std::min(bytes, mContext.getDataMQ()->availableToWrite());
+        bytes = std::min(bytes,
+                mContext.getDataMQ()->availableToWrite(&fmqError, &fmqErrorMsg));
     }
     StreamDescriptor::Command burst =
             StreamDescriptor::Command::make<StreamDescriptor::Command::Tag::burst>(bytes);
@@ -345,12 +405,14 @@
         LOG_ALWAYS_FATAL_IF(*transferred > bytes,
                 "%s: HAL module read %zu bytes, which exceeds requested count %zu",
                 __func__, *transferred, bytes);
-        if (auto toRead = mContext.getDataMQ()->availableToRead();
+        if (auto toRead = mContext.getDataMQ()->availableToRead(&fmqError, &fmqErrorMsg);
                 toRead != 0 && !mContext.getDataMQ()->read(static_cast<int8_t*>(buffer), toRead)) {
             ALOGE("%s: failed to read %zu bytes to data MQ", __func__, toRead);
             return NOT_ENOUGH_DATA;
         }
     }
+    LOG_ALWAYS_FATAL_IF(fmqError != StreamContextAidl::DataMQ::Error::NONE,
+            "%s", fmqErrorMsg.c_str());
     mStreamPowerLog.log(buffer, *transferred);
     return OK;
 }
@@ -383,10 +445,12 @@
                 return INVALID_OPERATION;
             }
             return OK;
-        } else if (state == StreamDescriptor::State::PAUSED) {
+        } else if (state == StreamDescriptor::State::PAUSED ||
+                   state == StreamDescriptor::State::TRANSFER_PAUSED ||
+                   state == StreamDescriptor::State::DRAIN_PAUSED) {
             return sendCommand(makeHalCommand<HalCommand::Tag::start>(), reply);
         } else {
-            ALOGE("%s: unexpected stream state: %s (expected IDLE or PAUSED)",
+            ALOGE("%s: unexpected stream state: %s (expected IDLE or one of *PAUSED states)",
                         __func__, toString(state).c_str());
             return INVALID_OPERATION;
         }
@@ -434,8 +498,12 @@
     if (auto state = getState(); state == StreamDescriptor::State::DRAINING) {
         // Retrieve the current state together with position counters unconditionally
         // to ensure that the state on our side gets updated.
-        sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(),
-                nullptr, true /*safeFromNonWorkerThread */);
+        sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(), nullptr,
+                    true /*safeFromNonWorkerThread */);
+        // For compatibility with HIDL behavior, apply a "soft" position reset
+        // after receiving the "drain ready" callback.
+        std::lock_guard l(mLock);
+        mStatePositions.framesAtFlushOrDrain = mLastReply.observable.frames;
     } else {
         ALOGW("%s: unexpected onDrainReady in the state %s", __func__, toString(state).c_str());
     }
@@ -443,15 +511,8 @@
 
 void StreamHalAidl::onAsyncError() {
     std::lock_guard l(mLock);
-    if (mLastReply.state == StreamDescriptor::State::IDLE ||
-        mLastReply.state == StreamDescriptor::State::DRAINING ||
-        mLastReply.state == StreamDescriptor::State::TRANSFERRING) {
-        mLastReply.state = StreamDescriptor::State::ERROR;
-        ALOGW("%s: onError received", __func__);
-    } else {
-        ALOGW("%s: unexpected onError in the state %s", __func__,
-                toString(mLastReply.state).c_str());
-    }
+    ALOGW("%s: received in the state %s", __func__, toString(mLastReply.state).c_str());
+    mLastReply.state = StreamDescriptor::State::ERROR;
 }
 
 status_t StreamHalAidl::createMmapBuffer(int32_t minSizeFrames __unused,
@@ -502,9 +563,9 @@
 }
 
 status_t StreamHalAidl::sendCommand(
-        const ::aidl::android::hardware::audio::core::StreamDescriptor::Command &command,
+        const ::aidl::android::hardware::audio::core::StreamDescriptor::Command& command,
         ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply,
-        bool safeFromNonWorkerThread) {
+        bool safeFromNonWorkerThread, StatePositions* statePositions) {
     // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
     if (!safeFromNonWorkerThread) {
         const pid_t workerTid = mWorkerTid.load(std::memory_order_acquire);
@@ -536,6 +597,23 @@
             }
             mLastReply = *reply;
             mLastReplyExpirationNs = uptimeNanos() + mLastReplyLifeTimeNs;
+            if (!mIsInput && reply->status == STATUS_OK) {
+                if (command.getTag() == StreamDescriptor::Command::standby &&
+                        reply->state == StreamDescriptor::State::STANDBY) {
+                    mStatePositions.framesAtStandby = reply->observable.frames;
+                } else if (command.getTag() == StreamDescriptor::Command::flush &&
+                           reply->state == StreamDescriptor::State::IDLE) {
+                    mStatePositions.framesAtFlushOrDrain = reply->observable.frames;
+                } else if (!mContext.isAsynchronous() &&
+                        command.getTag() == StreamDescriptor::Command::drain &&
+                        (reply->state == StreamDescriptor::State::IDLE ||
+                                reply->state == StreamDescriptor::State::DRAINING)) {
+                    mStatePositions.framesAtFlushOrDrain = reply->observable.frames;
+                } // for asynchronous drain, the frame count is saved in 'onAsyncDrainReady'
+            }
+            if (statePositions != nullptr) {
+                *statePositions = mStatePositions;
+            }
         }
     }
     switch (reply->status) {
@@ -551,7 +629,8 @@
 }
 
 status_t StreamHalAidl::updateCountersIfNeeded(
-        ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply) {
+        ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply,
+        StatePositions* statePositions) {
     bool doUpdate = false;
     {
         std::lock_guard l(mLock);
@@ -561,10 +640,13 @@
         // Since updates are paced, it is OK to perform them from any thread, they should
         // not interfere with I/O operations of the worker.
         return sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(),
-                reply, true /*safeFromNonWorkerThread */);
+                reply, true /*safeFromNonWorkerThread */, statePositions);
     } else if (reply != nullptr) {  // provide cached reply
         std::lock_guard l(mLock);
         *reply = mLastReply;
+        if (statePositions != nullptr) {
+            *statePositions = mStatePositions;
+        }
     }
     return OK;
 }
@@ -622,7 +704,7 @@
 status_t StreamOutHalAidl::setVolume(float left, float right) {
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    size_t channelCount = audio_channel_out_mask_from_count(mConfig.channel_mask);
+    size_t channelCount = audio_channel_count_from_out_mask(mConfig.channel_mask);
     if (channelCount == 0) channelCount = 2;
     std::vector<float> volumes(channelCount);
     if (channelCount == 1) {
@@ -651,21 +733,27 @@
     return transfer(const_cast<void*>(buffer), bytes, written);
 }
 
-status_t StreamOutHalAidl::getRenderPosition(uint32_t *dspFrames) {
+status_t StreamOutHalAidl::getRenderPosition(uint64_t *dspFrames) {
     if (dspFrames == nullptr) {
         return BAD_VALUE;
     }
     int64_t aidlFrames = 0, aidlTimestamp = 0;
-    RETURN_STATUS_IF_ERROR(getObservablePosition(&aidlFrames, &aidlTimestamp));
-    *dspFrames = static_cast<uint32_t>(aidlFrames);
+    StatePositions statePositions{};
+    RETURN_STATUS_IF_ERROR(
+            getObservablePosition(&aidlFrames, &aidlTimestamp, &statePositions));
+    // Number of audio frames since the stream has exited standby.
+    // See the table at the start of 'StreamHalInterface' on when it needs to reset.
+    int64_t mostRecentResetPoint;
+    if (!mContext.isAsynchronous() && audio_has_proportional_frames(mConfig.format)) {
+        mostRecentResetPoint = statePositions.framesAtStandby;
+    } else {
+        mostRecentResetPoint =
+                std::max(statePositions.framesAtStandby, statePositions.framesAtFlushOrDrain);
+    }
+    *dspFrames = aidlFrames <= mostRecentResetPoint ? 0 : aidlFrames - mostRecentResetPoint;
     return OK;
 }
 
-status_t StreamOutHalAidl::getNextWriteTimestamp(int64_t *timestamp __unused) {
-    // Obsolete, use getPresentationPosition.
-    return INVALID_OPERATION;
-}
-
 status_t StreamOutHalAidl::setCallback(wp<StreamOutHalInterfaceCallback> callback) {
     ALOGD("%p %s", this, __func__);
     TIME_CHECK();
@@ -719,13 +807,26 @@
         return BAD_VALUE;
     }
     int64_t aidlFrames = 0, aidlTimestamp = 0;
-    RETURN_STATUS_IF_ERROR(getObservablePosition(&aidlFrames, &aidlTimestamp));
-    *frames = aidlFrames;
+    StatePositions statePositions{};
+    RETURN_STATUS_IF_ERROR(getObservablePosition(&aidlFrames, &aidlTimestamp, &statePositions));
+    // See the table at the start of 'StreamHalInterface'.
+    if (!mContext.isAsynchronous() && audio_has_proportional_frames(mConfig.format)) {
+        *frames = aidlFrames;
+    } else {
+        const int64_t mostRecentResetPoint =
+                std::max(statePositions.framesAtStandby, statePositions.framesAtFlushOrDrain);
+        *frames = aidlFrames <= mostRecentResetPoint ? 0 : aidlFrames - mostRecentResetPoint;
+    }
     timestamp->tv_sec = aidlTimestamp / NANOS_PER_SECOND;
     timestamp->tv_nsec = aidlTimestamp - timestamp->tv_sec * NANOS_PER_SECOND;
     return OK;
 }
 
+status_t StreamOutHalAidl::presentationComplete() {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    return OK;
+}
+
 status_t StreamOutHalAidl::updateSourceMetadata(
         const StreamOutHalInterface::SourceMetadata& sourceMetadata) {
     TIME_CHECK();
@@ -855,10 +956,10 @@
     }
 }
 
-void StreamOutHalAidl::onError() {
+void StreamOutHalAidl::onError(bool isHardError) {
     onAsyncError();
     if (auto clientCb = mClientCallback.load().promote(); clientCb != nullptr) {
-        clientCb->onError();
+        clientCb->onError(isHardError);
     }
 }
 
diff --git a/media/libaudiohal/impl/StreamHalAidl.h b/media/libaudiohal/impl/StreamHalAidl.h
index b20eb00..9cb2cff 100644
--- a/media/libaudiohal/impl/StreamHalAidl.h
+++ b/media/libaudiohal/impl/StreamHalAidl.h
@@ -95,7 +95,8 @@
     size_t getBufferSizeBytes() const { return mFrameSizeBytes * mBufferSizeFrames; }
     size_t getBufferSizeFrames() const { return mBufferSizeFrames; }
     size_t getBufferDurationMs(int32_t sampleRate) const {
-        return sampleRate != 0 ? mBufferSizeFrames * MILLIS_PER_SECOND / sampleRate : 0;
+        auto bufferSize = mIsMmapped ? getMmapBurstSize() : mBufferSizeFrames;
+        return sampleRate != 0 ? bufferSize * MILLIS_PER_SECOND / sampleRate : 0;
     }
     CommandMQ* getCommandMQ() const { return mCommandMQ.get(); }
     DataMQ* getDataMQ() const { return mDataMQ.get(); }
@@ -104,7 +105,7 @@
     bool isAsynchronous() const { return mIsAsynchronous; }
     bool isMmapped() const { return mIsMmapped; }
     const MmapBufferDescriptor& getMmapBufferDescriptor() const { return mMmapBufferDescriptor; }
-
+    size_t getMmapBurstSize() const { return mMmapBufferDescriptor.burstSizeFrames; }
   private:
     static std::unique_ptr<DataMQ> maybeCreateDataMQ(
             const ::aidl::android::hardware::audio::core::StreamDescriptor& descriptor) {
@@ -194,6 +195,11 @@
     // For tests.
     friend class sp<StreamHalAidl>;
 
+    struct StatePositions {
+        int64_t framesAtFlushOrDrain;
+        int64_t framesAtStandby;
+    };
+
     template<class T>
     static std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon> getStreamCommon(
             const std::shared_ptr<T>& stream);
@@ -212,7 +218,8 @@
     status_t getLatency(uint32_t *latency);
 
     // Always returns non-negative values.
-    status_t getObservablePosition(int64_t *frames, int64_t *timestamp);
+    status_t getObservablePosition(int64_t* frames, int64_t* timestamp,
+            StatePositions* statePositions = nullptr);
 
     // Always returns non-negative values.
     status_t getHardwarePosition(int64_t *frames, int64_t *timestamp);
@@ -268,11 +275,13 @@
     // Note: Since `sendCommand` takes mLock while holding mCommandReplyLock, never call
     // it with `mLock` being held.
     status_t sendCommand(
-            const ::aidl::android::hardware::audio::core::StreamDescriptor::Command &command,
+            const ::aidl::android::hardware::audio::core::StreamDescriptor::Command& command,
             ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply = nullptr,
-            bool safeFromNonWorkerThread = false);
+            bool safeFromNonWorkerThread = false,
+            StatePositions* statePositions = nullptr);
     status_t updateCountersIfNeeded(
-            ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply = nullptr);
+            ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply = nullptr,
+            StatePositions* statePositions = nullptr);
 
     const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon> mStream;
     const std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension> mVendorExt;
@@ -280,6 +289,9 @@
     std::mutex mLock;
     ::aidl::android::hardware::audio::core::StreamDescriptor::Reply mLastReply GUARDED_BY(mLock);
     int64_t mLastReplyExpirationNs GUARDED_BY(mLock) = 0;
+    // Cached values of observable positions when the stream last entered certain state.
+    // Updated for output streams only.
+    StatePositions mStatePositions GUARDED_BY(mLock) = {};
     // mStreamPowerLog is used for audio signal power logging.
     StreamPowerLog mStreamPowerLog;
     std::atomic<pid_t> mWorkerTid = -1;
@@ -308,10 +320,7 @@
 
     // Return the number of audio frames written by the audio dsp to DAC since
     // the output has exited standby.
-    status_t getRenderPosition(uint32_t *dspFrames) override;
-
-    // Get the local time at which the next write to the audio driver will be presented.
-    status_t getNextWriteTimestamp(int64_t *timestamp) override;
+    status_t getRenderPosition(uint64_t *dspFrames) override;
 
     // Set the callback for notifying completion of non-blocking write and drain.
     status_t setCallback(wp<StreamOutHalInterfaceCallback> callback) override;
@@ -331,12 +340,19 @@
     // Requests notification when data buffered by the driver/hardware has been played.
     status_t drain(bool earlyNotify) override;
 
-    // Notifies to the audio driver to flush the queued data.
+    // Notifies the audio driver to flush (that is, drop) the queued data. The stream must
+    // already be paused before calling 'flush'.
     status_t flush() override;
 
     // Return a recent count of the number of audio frames presented to an external observer.
+    // This excludes frames which have been written but are still in the pipeline. See the
+    // table at the start of the 'StreamOutHalInterface' for the specification of the frame
+    // count behavior w.r.t. 'flush', 'drain' and 'standby' operations.
     status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp) override;
 
+    // Notifies the HAL layer that the framework considers the current playback as completed.
+    status_t presentationComplete() override;
+
     // Called when the metadata of the stream's source has been changed.
     status_t updateSourceMetadata(const SourceMetadata& sourceMetadata) override;
 
@@ -370,7 +386,7 @@
     // StreamOutHalInterfaceCallback
     void onWriteReady() override;
     void onDrainReady() override;
-    void onError() override;
+    void onError(bool isHardError) override;
 
   private:
     friend class sp<StreamOutHalAidl>;
@@ -413,6 +429,7 @@
 
     // Return a recent count of the number of audio frames received and
     // the clock time associated with that frame count.
+    // The count must not reset to zero when a PCM input enters standby.
     status_t getCapturePosition(int64_t *frames, int64_t *time) override;
 
     // Get active microphones
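Note on getBufferDurationMs above: for MMAP streams the burst size, not the whole shared buffer, is what the duration now reflects. A simplified sketch of the computation (MILLIS_PER_SECOND assumed to be 1000; the names are not the real class members):

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    size_t bufferDurationMs(bool isMmapped, size_t bufferSizeFrames, size_t mmapBurstFrames,
                            int32_t sampleRate) {
        // For MMAP streams use the burst size, as in StreamContextAidl above.
        const size_t frames = isMmapped ? mmapBurstFrames : bufferSizeFrames;
        return sampleRate != 0 ? frames * 1000 / sampleRate : 0;
    }

    int main() {
        assert(bufferDurationMs(false, 960, 0, 48000) == 20);   // 960 frames @ 48 kHz -> 20 ms
        assert(bufferDurationMs(true, 960, 480, 48000) == 10);  // 480-frame burst -> 10 ms
        return 0;
    }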
diff --git a/media/libaudiohal/impl/StreamHalHidl.cpp b/media/libaudiohal/impl/StreamHalHidl.cpp
index 77c75db..a931fdd 100644
--- a/media/libaudiohal/impl/StreamHalHidl.cpp
+++ b/media/libaudiohal/impl/StreamHalHidl.cpp
@@ -17,6 +17,8 @@
 #define LOG_TAG "StreamHalHidl"
 //#define LOG_NDEBUG 0
 
+#include <cinttypes>
+
 #include <android/hidl/manager/1.0/IServiceManager.h>
 #include <hwbinder/IPCThreadState.h>
 #include <media/AudioParameter.h>
@@ -589,32 +591,39 @@
     return OK;
 }
 
-status_t StreamOutHalHidl::getRenderPosition(uint32_t *dspFrames) {
+status_t StreamOutHalHidl::getRenderPosition(uint64_t *dspFrames) {
     // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
     if (mStream == 0) return NO_INIT;
     Result retval;
+    uint32_t halPosition = 0;
     Return<void> ret = mStream->getRenderPosition(
             [&](Result r, uint32_t d) {
                 retval = r;
                 if (retval == Result::OK) {
-                    *dspFrames = d;
+                    halPosition = d;
                 }
             });
-    return processReturn("getRenderPosition", ret, retval);
-}
+    status_t status = processReturn("getRenderPosition", ret, retval);
+    if (status != OK) {
+        return status;
+    }
+    // Maintain a 64-bit render position using the 32-bit result from the HAL.
+    // This delta calculation relies on the arithmetic overflow behavior
+    // of integers. For example (100 - 0xFFFFFFF0) = 116.
+    std::lock_guard l(mPositionMutex);
+    const auto truncatedPosition = (uint32_t)mRenderPosition;
+    int32_t deltaHalPosition; // initialization not needed, overwritten by __builtin_sub_overflow()
+    (void) __builtin_sub_overflow(halPosition, truncatedPosition, &deltaHalPosition);
 
-status_t StreamOutHalHidl::getNextWriteTimestamp(int64_t *timestamp) {
-    TIME_CHECK();
-    if (mStream == 0) return NO_INIT;
-    Result retval;
-    Return<void> ret = mStream->getNextWriteTimestamp(
-            [&](Result r, int64_t t) {
-                retval = r;
-                if (retval == Result::OK) {
-                    *timestamp = t;
-                }
-            });
-    return processReturn("getRenderPosition", ret, retval);
+    if (deltaHalPosition >= 0) {
+        mRenderPosition += deltaHalPosition;
+    } else if (mExpectRetrograde) {
+        mExpectRetrograde = false;
+        mRenderPosition -= static_cast<uint64_t>(-deltaHalPosition);
+        ALOGW("Retrograde motion of %" PRId32 " frames", -deltaHalPosition);
+    }
+    *dspFrames = mRenderPosition;
+    return OK;
 }
 
 status_t StreamOutHalHidl::setCallback(wp<StreamOutHalInterfaceCallback> callback) {
@@ -667,9 +676,23 @@
 status_t StreamOutHalHidl::flush() {
     TIME_CHECK();
     if (mStream == 0) return NO_INIT;
+    {
+        std::lock_guard l(mPositionMutex);
+        mRenderPosition = 0;
+        mExpectRetrograde = false;
+    }
     return processReturn("pause", mStream->flush());
 }
 
+status_t StreamOutHalHidl::standby() {
+    {
+        std::lock_guard l(mPositionMutex);
+        mRenderPosition = 0;
+        mExpectRetrograde = false;
+    }
+    return StreamHalHidl::standby();
+}
+
 status_t StreamOutHalHidl::getPresentationPosition(uint64_t *frames, struct timespec *timestamp) {
     // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
     if (mStream == 0) return NO_INIT;
@@ -696,6 +719,16 @@
     }
 }
 
+status_t StreamOutHalHidl::presentationComplete() {
+    // Avoid suppressing retrograde motion in mRenderPosition for gapless offload/direct when
+    // transitioning between tracks.
+    // The HAL resets the frame position without flush/stop being called, but calls back prior to
+    // this event. So, on the next occurrence of retrograde motion, we permit backwards movement of
+    // mRenderPosition.
+    mExpectRetrograde = true;
+    return OK;
+}
+
 #if MAJOR_VERSION == 2
 status_t StreamOutHalHidl::updateSourceMetadata(
         const StreamOutHalInterface::SourceMetadata& /* sourceMetadata */) {
@@ -964,7 +997,7 @@
     sp<StreamOutHalInterfaceCallback> callback = mCallback.load().promote();
     if (callback == 0) return;
     ALOGV("asyncCallback onError");
-    callback->onError();
+    callback->onError(false /*isHardError*/);
 }
 
 void StreamOutHalHidl::onCodecFormatChanged(const std::vector<uint8_t>& metadataBs) {
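Note on the HIDL render-position change above: the 32-bit HAL counter is extended to 64 bits by accumulating wrapped deltas, and presentationComplete() arms a one-shot allowance for retrograde motion. A condensed sketch of the wraparound part only, with a worked example matching the comment's 100 - 0xFFFFFFF0 = 116 case:

    #include <cassert>
    #include <cstdint>

    // Expands successive 32-bit positions into a monotonically increasing 64-bit
    // counter; the retrograde (mExpectRetrograde) branch is omitted here.
    uint64_t expandPosition(uint64_t current64, uint32_t hal32) {
        const uint32_t truncated = static_cast<uint32_t>(current64);
        int32_t delta = 0;
        (void)__builtin_sub_overflow(hal32, truncated, &delta);
        return delta >= 0 ? current64 + static_cast<uint64_t>(delta) : current64;
    }

    int main() {
        uint64_t pos = 0xFFFFFFF0u;       // just before the 32-bit counter wraps
        pos = expandPosition(pos, 100u);  // HAL reports 100 after wrapping around
        assert(pos == 0x100000064u);      // 0xFFFFFFF0 + 116
        return 0;
    }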
diff --git a/media/libaudiohal/impl/StreamHalHidl.h b/media/libaudiohal/impl/StreamHalHidl.h
index 48da633..433e0a3 100644
--- a/media/libaudiohal/impl/StreamHalHidl.h
+++ b/media/libaudiohal/impl/StreamHalHidl.h
@@ -18,10 +18,12 @@
 #define ANDROID_HARDWARE_STREAM_HAL_HIDL_H
 
 #include <atomic>
+#include <mutex>
 
 #include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/IStream.h)
 #include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/IStreamIn.h)
 #include PATH(android/hardware/audio/FILE_VERSION/IStreamOut.h)
+#include <android-base/thread_annotations.h>
 #include <fmq/EventFlag.h>
 #include <fmq/MessageQueue.h>
 #include <media/audiohal/EffectHalInterface.h>
@@ -119,6 +121,9 @@
 
 class StreamOutHalHidl : public StreamOutHalInterface, public StreamHalHidl {
   public:
+    // Put the audio hardware input/output into standby mode (from StreamHalInterface).
+    status_t standby() override;
+
     // Return the frame size (number of bytes per sample) of a stream.
     virtual status_t getFrameSize(size_t *size);
 
@@ -136,10 +141,7 @@
 
     // Return the number of audio frames written by the audio dsp to DAC since
     // the output has exited standby.
-    virtual status_t getRenderPosition(uint32_t *dspFrames);
-
-    // Get the local time at which the next write to the audio driver will be presented.
-    virtual status_t getNextWriteTimestamp(int64_t *timestamp);
+    virtual status_t getRenderPosition(uint64_t *dspFrames);
 
     // Set the callback for notifying completion of non-blocking write and drain.
     virtual status_t setCallback(wp<StreamOutHalInterfaceCallback> callback);
@@ -159,12 +161,19 @@
     // Requests notification when data buffered by the driver/hardware has been played.
     virtual status_t drain(bool earlyNotify);
 
-    // Notifies to the audio driver to flush the queued data.
+    // Notifies the audio driver to flush (that is, drop) the queued data. The stream must
+    // already be paused before calling 'flush'.
     virtual status_t flush();
 
     // Return a recent count of the number of audio frames presented to an external observer.
+    // This excludes frames which have been written but are still in the pipeline. See the
+    // table at the start of the 'StreamOutHalInterface' for the specification of the frame
+    // count behavior w.r.t. 'flush', 'drain' and 'standby' operations.
     virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp);
 
+    // Notifies the HAL layer that the framework considers the current playback as completed.
+    status_t presentationComplete() override;
+
     // Called when the metadata of the stream's source has been changed.
     status_t updateSourceMetadata(const SourceMetadata& sourceMetadata) override;
 
@@ -221,6 +230,10 @@
     std::unique_ptr<StatusMQ> mStatusMQ;
     std::atomic<pid_t> mWriterClient;
     EventFlag* mEfGroup;
+    std::mutex mPositionMutex;
+    // Used to correctly expand the 32-bit position reported by the HAL.
+    uint64_t mRenderPosition GUARDED_BY(mPositionMutex) = 0;
+    bool mExpectRetrograde GUARDED_BY(mPositionMutex) = false; // See 'presentationComplete'.
 
     // Can not be constructed directly by clients.
     StreamOutHalHidl(const sp<::android::hardware::audio::CPP_VERSION::IStreamOut>& stream);
@@ -250,6 +263,7 @@
 
     // Return a recent count of the number of audio frames received and
     // the clock time associated with that frame count.
+    // The count must not reset to zero when a PCM input enters standby.
     virtual status_t getCapturePosition(int64_t *frames, int64_t *time);
 
     // Get active microphones
diff --git a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
index 37615af..4bd7e3d 100644
--- a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
@@ -107,7 +107,7 @@
   public:
     virtual void onWriteReady() {}
     virtual void onDrainReady() {}
-    virtual void onError() {}
+    virtual void onError(bool /*isHardError*/) {}
 
   protected:
     StreamOutHalInterfaceCallback() = default;
@@ -135,6 +135,38 @@
     virtual ~StreamOutHalInterfaceLatencyModeCallback() = default;
 };
 
+/**
+ * On position reporting. There are two methods: 'getRenderPosition' and
+ * 'getPresentationPosition'. The first difference is that they may have a
+ * time offset because "render" position relates to what happens between
+ * ADSP and DAC, while "observable" position is relative to the external
+ * observer. The second difference is that 'getRenderPosition' always
+ * resets on standby (for all types of stream data) according to its
+ * definition. Since the original C definition of 'getRenderPosition' used
+ * 32-bit frame counters, and also because in complex playback chains that
+ * include wireless devices the "observable" position has more practical
+ * meaning, 'getRenderPosition' does not exist in the AIDL HAL interface.
+ * The table below summarizes frame count behavior for 'getPresentationPosition':
+ *
+ *               | Mixed      | Direct       | Direct
+ *               |            | non-offload  | offload
+ * ==============|============|==============|==============
+ *  PCM and      | Continuous |              |
+ *  encapsulated |            |              |
+ *  bitstream    |            |              |
+ * --------------|------------| Continuous†  |
+ *  Bitstream    |            |              | Reset on
+ *  encapsulated |            |              | flush, drain
+ *  into PCM     |            |              | and standby
+ *               | Not        |              |
+ * --------------| supported  |--------------|
+ *  Bitstream    |            | Reset on     |
+ *               |            | flush, drain |
+ *               |            | and standby  |
+ *               |            |              |
+ *
+ * † - on standby, reset of the frame count happens at the framework level.
+ */
 class StreamOutHalInterface : public virtual StreamHalInterface {
   public:
     // Return the audio hardware driver estimated latency in milliseconds.
@@ -151,10 +183,7 @@
 
     // Return the number of audio frames written by the audio dsp to DAC since
     // the output has exited standby.
-    virtual status_t getRenderPosition(uint32_t *dspFrames) = 0;
-
-    // Get the local time at which the next write to the audio driver will be presented.
-    virtual status_t getNextWriteTimestamp(int64_t *timestamp) = 0;
+    virtual status_t getRenderPosition(uint64_t *dspFrames) = 0;
 
     // Set the callback for notifying completion of non-blocking write and drain.
     // The callback must be owned by someone else. The output stream does not own it
@@ -176,12 +205,19 @@
     // Requests notification when data buffered by the driver/hardware has been played.
     virtual status_t drain(bool earlyNotify) = 0;
 
-    // Notifies to the audio driver to flush the queued data.
+    // Notifies the audio driver to flush (that is, drop) the queued data. The stream must
+    // already be paused before calling 'flush'.
     virtual status_t flush() = 0;
 
     // Return a recent count of the number of audio frames presented to an external observer.
+    // This excludes frames which have been written but are still in the pipeline. See the
+    // table at the start of the 'StreamOutHalInterface' for the specification of the frame
+    // count behavior w.r.t. 'flush', 'drain' and 'standby' operations.
     virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp) = 0;
 
+    // Notifies the HAL layer that the framework considers the current playback as completed.
+    virtual status_t presentationComplete() = 0;
+
     struct SourceMetadata {
         std::vector<playback_track_metadata_v7_t> tracks;
     };
@@ -270,6 +306,7 @@
 
     // Return a recent count of the number of audio frames received and
     // the clock time associated with that frame count.
+    // The count must not reset to zero when a PCM input enters standby.
     virtual status_t getCapturePosition(int64_t *frames, int64_t *time) = 0;
 
     // Get active microphones
diff --git a/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
index 5106874..0bd6fb0 100644
--- a/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
+++ b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
@@ -39,6 +39,7 @@
 using ::aidl::android::hardware::audio::core::VendorParameter;
 using ::aidl::android::media::audio::common::AudioChannelLayout;
 using ::aidl::android::media::audio::common::AudioConfig;
+using ::aidl::android::media::audio::common::AudioDevice;
 using ::aidl::android::media::audio::common::AudioDeviceDescription;
 using ::aidl::android::media::audio::common::AudioDeviceType;
 using ::aidl::android::media::audio::common::AudioFormatDescription;
@@ -160,6 +161,24 @@
             createProfile(PcmType::INT_16_BIT, {AudioChannelLayout::LAYOUT_STEREO}, {48000})};
     Configuration c;
 
+    AudioPort micInDevice =
+            createPort(c.nextPortId++, "Built-In Mic", 0, true,
+                       createPortDeviceExt(AudioDeviceType::IN_MICROPHONE,
+                                           1 << AudioPortDeviceExt::FLAG_INDEX_DEFAULT_DEVICE));
+    micInDevice.profiles = standardPcmAudioProfiles;
+    c.ports.push_back(micInDevice);
+
+    AudioPort micInBackDevice =
+            createPort(c.nextPortId++, "Built-In Back Mic", 0, true,
+                       createPortDeviceExt(AudioDeviceType::IN_MICROPHONE_BACK, 0));
+    micInBackDevice.profiles = standardPcmAudioProfiles;
+    c.ports.push_back(micInBackDevice);
+
+    AudioPort primaryInMix =
+            createPort(c.nextPortId++, "primary input", 0, true, createPortMixExt(0, 1));
+    primaryInMix.profiles = standardPcmAudioProfiles;
+    c.ports.push_back(primaryInMix);
+
     AudioPort btOutDevice =
             createPort(c.nextPortId++, "BT A2DP Out", 0, false,
                        createPortDeviceExt(AudioDeviceType::OUT_DEVICE, 0,
@@ -172,6 +191,7 @@
     btOutMix.profiles = standardPcmAudioProfiles;
     c.ports.push_back(btOutMix);
 
+    c.routes.push_back(createRoute({micInDevice, micInBackDevice}, primaryInMix));
     c.routes.push_back(createRoute({btOutMix}, btOutDevice));
 
     return c;
@@ -184,6 +204,11 @@
     explicit ModuleMock(const Configuration& config) : mConfig(config) {}
     bool isScreenTurnedOn() const { return mIsScreenTurnedOn; }
     ScreenRotation getScreenRotation() const { return mScreenRotation; }
+    std::vector<AudioPatch> getPatches() {
+        std::vector<AudioPatch> result;
+        getAudioPatches(&result);
+        return result;
+    }
 
   private:
     ndk::ScopedAStatus setModuleDebug(
@@ -1141,3 +1166,51 @@
     EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
     EXPECT_EQ(0, mMapper->findFwkPatch(newPatchId));
 }
+
+TEST_F(Hal2AidlMapperTest, ChangeTransientPatchDevice) {
+    std::mutex mutex;  // Only needed for cleanups.
+    auto mapperAccessor = std::make_unique<LockedAccessor<Hal2AidlMapper>>(*mMapper, mutex);
+    Hal2AidlMapper::Cleanups cleanups(*mapperAccessor);
+    AudioConfig config;
+    config.base.channelMask = AudioChannelLayout::make<AudioChannelLayout::layoutMask>(
+            AudioChannelLayout::LAYOUT_STEREO);
+    config.base.format =
+            AudioFormatDescription{.type = AudioFormatType::PCM, .pcm = PcmType::INT_16_BIT};
+    config.base.sampleRate = 48000;
+    AudioDevice defaultDevice;
+    defaultDevice.type.type = AudioDeviceType::IN_DEFAULT;
+    AudioPortConfig mixPortConfig;
+    AudioPatch transientPatch;
+    ASSERT_EQ(OK, mMapper->prepareToOpenStream(43 /*ioHandle*/, defaultDevice,
+                                               AudioIoFlags::make<AudioIoFlags::input>(0),
+                                               AudioSource::DEFAULT, &cleanups, &config,
+                                               &mixPortConfig, &transientPatch));
+    cleanups.disarmAll();
+    ASSERT_NE(0, transientPatch.id);
+    ASSERT_NE(0, mixPortConfig.id);
+    sp<StreamHalInterface> stream = sp<StreamHalMock>::make();
+    mMapper->addStream(stream, mixPortConfig.id, transientPatch.id);
+
+    AudioPatch patch{};
+    int32_t patchId;
+    AudioPortConfig backMicPortConfig;
+    backMicPortConfig.channelMask = config.base.channelMask;
+    backMicPortConfig.format = config.base.format;
+    backMicPortConfig.sampleRate = aidl::android::media::audio::common::Int{config.base.sampleRate};
+    backMicPortConfig.flags = AudioIoFlags::make<AudioIoFlags::input>(0);
+    backMicPortConfig.ext = createPortDeviceExt(AudioDeviceType::IN_MICROPHONE_BACK, 0);
+    ASSERT_EQ(OK, mMapper->createOrUpdatePatch({backMicPortConfig}, {mixPortConfig}, &patchId,
+                                               &cleanups));
+    cleanups.disarmAll();
+    ASSERT_EQ(android::OK,
+              mMapper->findPortConfig(backMicPortConfig.ext.get<AudioPortExt::device>().device,
+                                      &backMicPortConfig));
+    EXPECT_NE(0, backMicPortConfig.id);
+
+    EXPECT_EQ(transientPatch.id, patchId);
+    auto patches = mModule->getPatches();
+    auto patchIt = findById(patches, patchId);
+    ASSERT_NE(patchIt, patches.end());
+    EXPECT_EQ(std::vector<int32_t>{backMicPortConfig.id}, patchIt->sourcePortConfigIds);
+    EXPECT_EQ(std::vector<int32_t>{mixPortConfig.id}, patchIt->sinkPortConfigIds);
+}
diff --git a/media/libeffects/data/Android.bp b/media/libeffects/data/Android.bp
new file mode 100644
index 0000000..2acf229
--- /dev/null
+++ b/media/libeffects/data/Android.bp
@@ -0,0 +1,19 @@
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+prebuilt_etc {
+    name: "framework-audio_effects.xml",
+    src: "audio_effects.xml",
+    filename: "audio_effects.xml",
+}
diff --git a/media/libeffects/downmix/aidl/EffectDownmix.cpp b/media/libeffects/downmix/aidl/EffectDownmix.cpp
index de60ca4..883d41d 100644
--- a/media/libeffects/downmix/aidl/EffectDownmix.cpp
+++ b/media/libeffects/downmix/aidl/EffectDownmix.cpp
@@ -177,7 +177,10 @@
      * in the life cycle of workerThread (threadLoop).
      */
     uint32_t efState = 0;
-    if (!mEventFlag || ::android::OK != mEventFlag->wait(kEventFlagNotEmpty, &efState)) {
+    if (!mEventFlag ||
+        ::android::OK != mEventFlag->wait(mDataMqNotEmptyEf, &efState, 0 /* no timeout */,
+                                          true /* retry */) ||
+        !(efState & mDataMqNotEmptyEf)) {
         LOG(ERROR) << getEffectName() << __func__ << ": StatusEventFlag invalid";
     }
 
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
index fdc16e3..836e034 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
@@ -213,11 +213,12 @@
     RETURN_OK_IF(mState != State::INIT);
     mImplContext = createContext(common);
     RETURN_IF(!mContext || !mImplContext, EX_NULL_POINTER, "createContextFailed");
-    int version = 0;
-    RETURN_IF(!getInterfaceVersion(&version).isOk(), EX_UNSUPPORTED_OPERATION,
+    RETURN_IF(!getInterfaceVersion(&mVersion).isOk(), EX_UNSUPPORTED_OPERATION,
               "FailedToGetInterfaceVersion");
-    mImplContext->setVersion(version);
+    mImplContext->setVersion(mVersion);
     mEventFlag = mImplContext->getStatusEventFlag();
+    mDataMqNotEmptyEf =
+            mVersion >= kReopenSupportedVersion ? kEventFlagDataMqNotEmpty : kEventFlagNotEmpty;
 
     if (specific.has_value()) {
         RETURN_IF_ASTATUS_NOT_OK(setParameterSpecific(specific.value()), "setSpecParamErr");
@@ -231,8 +232,9 @@
 
     mState = State::IDLE;
     mContext->dupeFmq(ret);
-    RETURN_IF(createThread(getEffectName()) != RetCode::SUCCESS, EX_UNSUPPORTED_OPERATION,
-              "FailedToCreateWorker");
+    RETURN_IF(createThread(getEffectNameWithVersion()) != RetCode::SUCCESS,
+              EX_UNSUPPORTED_OPERATION, "FailedToCreateWorker");
+    LOG(INFO) << getEffectNameWithVersion() << __func__;
     return ndk::ScopedAStatus::ok();
 }
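
These two effect hunks (EffectDownmix and DynamicsProcessing) share one idea: the worker picks which event-flag bit to block on from the negotiated interface version, then treats a wait that returns without that bit as an error. Below is a minimal sketch of that pattern in plain C++; it uses standard-library primitives and made-up names (SimpleEventFlag, the bit constants, the version threshold) rather than the real android::hardware EventFlag API.

    #include <condition_variable>
    #include <cstdint>
    #include <iostream>
    #include <mutex>

    // Hypothetical flag bits and version threshold, mirroring the idea behind
    // kEventFlagNotEmpty / kEventFlagDataMqNotEmpty and kReopenSupportedVersion.
    constexpr uint32_t kOldNotEmptyBit = 1u << 0;
    constexpr uint32_t kNewDataMqNotEmptyBit = 1u << 1;
    constexpr int kReopenSupportedVersion = 2;

    struct SimpleEventFlag {
        std::mutex m;
        std::condition_variable cv;
        uint32_t state = 0;

        void set(uint32_t bits) {
            { std::lock_guard<std::mutex> l(m); state |= bits; }
            cv.notify_all();
        }
        // Block until any requested bit is set, then consume and return the state.
        uint32_t wait(uint32_t bits) {
            std::unique_lock<std::mutex> l(m);
            cv.wait(l, [&] { return (state & bits) != 0; });
            uint32_t s = state;
            state &= ~bits;
            return s;
        }
    };

    int main() {
        int version = 2;  // pretend this came from getInterfaceVersion()
        const uint32_t dataMqNotEmptyBit =
                version >= kReopenSupportedVersion ? kNewDataMqNotEmptyBit : kOldNotEmptyBit;

        SimpleEventFlag ef;
        ef.set(dataMqNotEmptyBit);  // producer signals "data queue not empty"
        const uint32_t efState = ef.wait(dataMqNotEmptyBit);
        if (!(efState & dataMqNotEmptyBit)) {  // same sanity check as in the hunks above
            std::cerr << "event flag invalid\n";
        }
        return 0;
    }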
 
diff --git a/media/libeffects/hapticgenerator/Android.bp b/media/libeffects/hapticgenerator/Android.bp
index 9975f75..e4ac38e 100644
--- a/media/libeffects/hapticgenerator/Android.bp
+++ b/media/libeffects/hapticgenerator/Android.bp
@@ -37,6 +37,14 @@
     header_libs: [
         "libaudioeffects",
     ],
+    cflags: [
+        // This is needed for the non-zero coefficients optimization for
+        // BiquadFilter. Try the biquad_filter_benchmark test in audio_utils
+        // with/without `-ffast-math` for more context.
+        "-ffast-math",
+        "-fhonor-infinities",
+        "-fhonor-nans",
+    ],
     relative_install_path: "soundfx",
 }
 
@@ -59,10 +67,6 @@
         "-O2",
         "-Wall",
         "-Werror",
-        // This is needed for the non-zero coefficients optimization for
-        // BiquadFilter. Try the biquad_filter_benchmark test in audio_utils
-        // with/without `-ffast-math` for more context.
-        "-ffast-math",
         "-fvisibility=hidden",
     ],
 }
diff --git a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
index 0a04250..9b2f443 100644
--- a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
+++ b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
@@ -14,30 +14,47 @@
  * limitations under the License.
  */
 
-#include <cstddef>
 #define LOG_TAG "AHAL_HapticGeneratorContext"
 
-#include <Utils.h>
+#include "HapticGeneratorContext.h"
 #include <android-base/logging.h>
 #include <android-base/parsedouble.h>
 #include <android-base/properties.h>
+#include <audio_utils/primitives.h>
+#include <audio_utils/safe_math.h>
+#include <Utils.h>
 
-#include "HapticGeneratorContext.h"
+#include <cstddef>
+
+using aidl::android::hardware::audio::common::getChannelCount;
+using aidl::android::hardware::audio::common::getPcmSampleSizeInBytes;
+using aidl::android::media::audio::common::AudioChannelLayout;
 
 namespace aidl::android::hardware::audio::effect {
 
 HapticGeneratorContext::HapticGeneratorContext(int statusDepth, const Parameter::Common& common)
     : EffectContext(statusDepth, common) {
     mState = HAPTIC_GENERATOR_STATE_UNINITIALIZED;
-    mSampleRate = common.input.base.sampleRate;
-    mFrameCount = common.input.frameCount;
-    init_params(common.input.base.channelMask, common.output.base.channelMask);
+
+    mParams.mMaxVibratorScale = HapticGenerator::VibratorScale::MUTE;
+    mParams.mVibratorInfo.resonantFrequencyHz = DEFAULT_RESONANT_FREQUENCY;
+    mParams.mVibratorInfo.qFactor = DEFAULT_BSF_ZERO_Q;
+    mParams.mVibratorInfo.maxAmplitude = 0.f;
+
+    init_params(common);
+    mState = HAPTIC_GENERATOR_STATE_INITIALIZED;
 }
 
 HapticGeneratorContext::~HapticGeneratorContext() {
     mState = HAPTIC_GENERATOR_STATE_UNINITIALIZED;
 }
 
+// Override EffectImpl::setCommon for HapticGenerator because init_params must run again here
+RetCode HapticGeneratorContext::setCommon(const Parameter::Common& common) {
+    init_params(common);
+    return EffectContext::setCommon(common);
+}
+
 RetCode HapticGeneratorContext::enable() {
     if (mState != HAPTIC_GENERATOR_STATE_INITIALIZED) {
         return RetCode::ERROR_EFFECT_LIB_ERROR;
@@ -75,14 +92,15 @@
     for (const auto& [id, vibratorScale] : mParams.mHapticScales) {
         mParams.mMaxVibratorScale = std::max(mParams.mMaxVibratorScale, vibratorScale);
     }
+    LOG(INFO) << " HapticGenerator VibratorScale set to " << toString(mParams.mMaxVibratorScale);
     return RetCode::SUCCESS;
 }
 
-HapticGenerator::VibratorInformation HapticGeneratorContext::getHgVibratorInformation() {
+HapticGenerator::VibratorInformation HapticGeneratorContext::getHgVibratorInformation() const {
     return mParams.mVibratorInfo;
 }
 
-std::vector<HapticGenerator::HapticScale> HapticGeneratorContext::getHgHapticScales() {
+std::vector<HapticGenerator::HapticScale> HapticGeneratorContext::getHgHapticScales() const {
     std::vector<HapticGenerator::HapticScale> result;
     for (const auto& [id, vibratorScale] : mParams.mHapticScales) {
         result.push_back({id, vibratorScale});
@@ -93,6 +111,15 @@
 RetCode HapticGeneratorContext::setHgVibratorInformation(
         const HapticGenerator::VibratorInformation& vibratorInfo) {
     mParams.mVibratorInfo = vibratorInfo;
+    if (::android::audio_utils::safe_isnan(mParams.mVibratorInfo.resonantFrequencyHz)) {
+        LOG(WARNING) << __func__ << " resonantFrequencyHz reset from nan to "
+                     << DEFAULT_RESONANT_FREQUENCY;
+        mParams.mVibratorInfo.resonantFrequencyHz = DEFAULT_RESONANT_FREQUENCY;
+    }
+    if (::android::audio_utils::safe_isnan(mParams.mVibratorInfo.qFactor)) {
+        LOG(WARNING) << __func__ << " qFactor reset from nan to " << DEFAULT_BSF_ZERO_Q;
+        mParams.mVibratorInfo.qFactor = DEFAULT_BSF_ZERO_Q;
+    }
 
     if (mProcessorsRecord.bpf != nullptr) {
         mProcessorsRecord.bpf->setCoefficients(::android::audio_effect::haptic_generator::bpfCoefs(
@@ -117,15 +144,8 @@
     auto frameSize = getInputFrameSize();
     RETURN_VALUE_IF(0 == frameSize, status, "zeroFrameSize");
 
-    // The audio data must not be modified but just written to
-    // output buffer according the access mode.
-    if (in != out) {
-        for (int i = 0; i < samples; i++) {
-            out[i] = in[i];
-        }
-    }
-
     if (mState != HAPTIC_GENERATOR_STATE_ACTIVE) {
+        LOG(WARNING) << " HapticGenerator in wrong state " << mState;
         return status;
     }
 
@@ -135,7 +155,8 @@
     }
 
     // Resize buffer if the haptic sample count is greater than buffer size.
-    size_t hapticSampleCount = mFrameCount * mParams.mHapticChannelCount;
+    const size_t hapticSampleCount = mFrameCount * mParams.mHapticChannelCount;
+    const size_t audioSampleCount = mFrameCount * mParams.mAudioChannelCount;
     if (hapticSampleCount > mInputBuffer.size()) {
         // The inputBuffer and outputBuffer must have the same size, which must be at least
         // the haptic sample count.
@@ -155,45 +176,45 @@
             runProcessingChain(mInputBuffer.data(), mOutputBuffer.data(), mFrameCount);
     ::android::os::scaleHapticData(
             hapticOutBuffer, hapticSampleCount,
-            {/*level=*/static_cast<::android::os::HapticLevel>(mParams.mMaxVibratorScale) },
-            mParams.mVibratorInfo.qFactor);
+            {static_cast<::android::os::HapticLevel>(mParams.mMaxVibratorScale)} /* scale */,
+            mParams.mVibratorInfo.maxAmplitude /* limit */);
 
     // For haptic data, the haptic playback thread will copy the data from effect input
     // buffer, which contains haptic data at the end of the buffer, directly to sink buffer.
-    // In that case, copy haptic data to input buffer instead of output buffer.
-    // Note: this may not work with rpc/binder calls
-    for (size_t i = 0; i < hapticSampleCount; ++i) {
-        in[samples + i] = hapticOutBuffer[i];
-    }
-    return {STATUS_OK, samples, static_cast<int32_t>(samples + hapticSampleCount)};
+    // In AIDL only the output buffer is sent back to the audio framework via FMQ. Here the
+    // effect copies the generated haptic data to the target position in the output buffer, and
+    // the framework then appends it to the same position in the input buffer.
+    memcpy_to_float_from_float_with_clamping(out + audioSampleCount, hapticOutBuffer,
+                                             hapticSampleCount, 2.f /* absMax */);
+    return {STATUS_OK, samples, samples};
 }
 
-void HapticGeneratorContext::init_params(media::audio::common::AudioChannelLayout inputChMask,
-                                         media::audio::common::AudioChannelLayout outputChMask) {
-    mParams.mMaxVibratorScale = HapticGenerator::VibratorScale::MUTE;
-    mParams.mVibratorInfo.resonantFrequencyHz = DEFAULT_RESONANT_FREQUENCY;
-    mParams.mVibratorInfo.qFactor = DEFAULT_BSF_ZERO_Q;
+void HapticGeneratorContext::init_params(const Parameter::Common& common) {
+    mSampleRate = common.input.base.sampleRate;
+    mFrameCount = common.input.frameCount;
 
     mParams.mAudioChannelCount = ::aidl::android::hardware::audio::common::getChannelCount(
-            inputChMask, ~media::audio::common::AudioChannelLayout::LAYOUT_HAPTIC_AB);
+            common.input.base.channelMask,
+            ~media::audio::common::AudioChannelLayout::LAYOUT_HAPTIC_AB);
     mParams.mHapticChannelCount = ::aidl::android::hardware::audio::common::getChannelCount(
-            outputChMask, media::audio::common::AudioChannelLayout::LAYOUT_HAPTIC_AB);
+            common.output.base.channelMask,
+            media::audio::common::AudioChannelLayout::LAYOUT_HAPTIC_AB);
     LOG_ALWAYS_FATAL_IF(mParams.mHapticChannelCount > 2, "haptic channel count is too large");
     for (int i = 0; i < mParams.mHapticChannelCount; ++i) {
         // By default, use the first audio channel to generate haptic channels.
         mParams.mHapticChannelSource[i] = 0;
     }
-
-    mState = HAPTIC_GENERATOR_STATE_INITIALIZED;
+    configure();
+    LOG(DEBUG) << " HapticGenerator init context:\n" << contextToString();
 }
 
-float HapticGeneratorContext::getDistortionOutputGain() {
+float HapticGeneratorContext::getDistortionOutputGain() const {
     float distortionOutputGain = getFloatProperty(
             "vendor.audio.hapticgenerator.distortion.output.gain", DEFAULT_DISTORTION_OUTPUT_GAIN);
     return distortionOutputGain;
 }
 
-float HapticGeneratorContext::getFloatProperty(const std::string& key, float defaultValue) {
+float HapticGeneratorContext::getFloatProperty(const std::string& key, float defaultValue) const {
     float result;
     std::string value = ::android::base::GetProperty(key, "");
     if (!value.empty() && ::android::base::ParseFloat(value, &result)) {
@@ -322,4 +343,34 @@
     return in;
 }
 
+std::string HapticGeneratorContext::paramToString(const struct HapticGeneratorParam& param) const {
+    std::stringstream ss;
+    ss << "\t\tHapticGenerator Parameters:\n";
+    ss << "\t\t- mHapticChannelCount: " << param.mHapticChannelCount << '\n';
+    ss << "\t\t- mAudioChannelCount: " << param.mAudioChannelCount << '\n';
+    ss << "\t\t- mHapticChannelSource: " << param.mHapticChannelSource[0] << ", "
+       << param.mHapticChannelSource[1] << '\n';
+    ss << "\t\t- mMaxVibratorScale: " << ::android::internal::ToString(param.mMaxVibratorScale)
+       << '\n';
+    ss << "\t\t- mVibratorInfo: " << param.mVibratorInfo.toString() << '\n';
+    for (const auto& it : param.mHapticScales)
+        ss << "\t\t\t" << it.first << ": " << toString(it.second) << '\n';
+
+    return ss.str();
+}
+
+std::string HapticGeneratorContext::contextToString() const {
+    std::stringstream ss;
+    ss << "\t\tHapticGenerator Context:\n";
+    ss << "\t\t- state: " << mState << '\n';
+    ss << "\t\t- bpf Q: " << DEFAULT_BPF_Q << '\n';
+    ss << "\t\t- slow env normalization power: " << DEFAULT_SLOW_ENV_NORMALIZATION_POWER << '\n';
+    ss << "\t\t- distortion corner frequency: " << DEFAULT_DISTORTION_CORNER_FREQUENCY << '\n';
+    ss << "\t\t- distortion input gain: " << DEFAULT_DISTORTION_INPUT_GAIN << '\n';
+    ss << "\t\t- distortion cube threshold: " << DEFAULT_DISTORTION_CUBE_THRESHOLD << '\n';
+    ss << "\t\t- distortion output gain: " << getDistortionOutputGain() << '\n';
+    ss << "\t\tHapticGenerator Parameters:\n" << paramToString(mParams) << "\n";
+    return ss.str();
+}
+
 }  // namespace aidl::android::hardware::audio::effect
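
For context on the process() change above: the output buffer now carries the audio frames first and the generated haptic samples appended directly after them, clamped to a fixed absolute maximum, and the framework copies that tail back into the input buffer. A simplified sketch with standard containers follows; the real code uses memcpy_to_float_from_float_with_clamping from audio_utils, and the appendHapticData helper below is purely illustrative.

    #include <algorithm>
    #include <cstddef>
    #include <vector>

    // Append clamped haptic samples right after the audio samples in the output buffer.
    void appendHapticData(std::vector<float>& out, size_t audioSampleCount,
                          const std::vector<float>& haptic, float absMax) {
        out.resize(audioSampleCount + haptic.size());
        std::transform(haptic.begin(), haptic.end(), out.begin() + audioSampleCount,
                       [absMax](float s) { return std::clamp(s, -absMax, absMax); });
    }

    int main() {
        const size_t frameCount = 4, audioChannels = 2;
        std::vector<float> out(frameCount * audioChannels, 0.f);     // audio part already written
        const std::vector<float> haptic = {0.1f, 3.0f, -2.5f, 0.4f}; // generated haptic data
        appendHapticData(out, frameCount * audioChannels, haptic, 2.f /* absMax */);
        // out now holds [audio samples..., clamped haptic samples...]
        return 0;
    }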
diff --git a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
index 3a2ad1c..8a736e3 100644
--- a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
+++ b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
@@ -16,11 +16,13 @@
 
 #pragma once
 
-#include <vibrator/ExternalVibrationUtils.h>
-#include <map>
-
-#include "Processors.h"
 #include "effect-impl/EffectContext.h"
+#include "Processors.h"
+
+#include <vibrator/ExternalVibrationUtils.h>
+
+#include <cstddef>
+#include <map>
 
 namespace aidl::android::hardware::audio::effect {
 
@@ -39,7 +41,6 @@
     int mHapticChannelCount;
     int mAudioChannelCount;
 
-    HapticGenerator::HapticScale mHapticScale;
     std::map<int, HapticGenerator::VibratorScale> mHapticScales;
     // max intensity will be used to scale haptic data.
     HapticGenerator::VibratorScale mMaxVibratorScale;
@@ -69,13 +70,15 @@
     void reset();
 
     RetCode setHgHapticScales(const std::vector<HapticGenerator::HapticScale>& hapticScales);
-    std::vector<HapticGenerator::HapticScale> getHgHapticScales();
+    std::vector<HapticGenerator::HapticScale> getHgHapticScales() const;
 
     RetCode setHgVibratorInformation(const HapticGenerator::VibratorInformation& vibratorInfo);
-    HapticGenerator::VibratorInformation getHgVibratorInformation();
+    HapticGenerator::VibratorInformation getHgVibratorInformation() const;
 
     IEffect::Status process(float* in, float* out, int samples);
 
+    RetCode setCommon(const Parameter::Common& common) override;
+
   private:
     static constexpr float DEFAULT_RESONANT_FREQUENCY = 150.0f;
     static constexpr float DEFAULT_BSF_ZERO_Q = 8.0f;
@@ -108,15 +111,17 @@
     // intermediate buffer in the generating algorithm.
     std::vector<float> mOutputBuffer;
 
-    void init_params(media::audio::common::AudioChannelLayout inputChMask,
-                     media::audio::common::AudioChannelLayout outputChMask);
+    void init_params(const Parameter::Common& common);
     void configure();
 
-    float getDistortionOutputGain();
-    float getFloatProperty(const std::string& key, float defaultValue);
+    float getDistortionOutputGain() const;
+    float getFloatProperty(const std::string& key, float defaultValue) const;
     void addBiquadFilter(std::shared_ptr<HapticBiquadFilter> filter);
     void buildProcessingChain();
     float* runProcessingChain(float* buf1, float* buf2, size_t frameCount);
+
+    std::string paramToString(const struct HapticGeneratorParam& param) const;
+    std::string contextToString() const;
 };
 
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
index 44ea2a4..3ae3edc 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
@@ -19,6 +19,7 @@
 #define LOG_TAG "ReverbContext"
 #include <android-base/logging.h>
 #include <Utils.h>
+#include <audio_utils/primitives.h>
 
 #include "ReverbContext.h"
 #include "VectorArithmetic.h"
@@ -347,6 +348,15 @@
             mCommon.output.base.channelMask);
     int frameCount = mCommon.input.frameCount;
 
+    if (mBypass) {
+        if (isAuxiliary()) {
+            memset(out, 0, getOutputFrameSize() * frameCount);
+        } else {
+            memcpy_to_float_from_float_with_clamping(out, in, samples, 1);
+        }
+        return {STATUS_OK, samples, outChannels * frameCount};
+    }
+
     // Reverb only effects the stereo channels in multichannel source.
     if (channels < 1 || channels > LVM_MAX_CHANNELS) {
         LOG(ERROR) << __func__ << " process invalid PCM channels " << channels;
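
The bypass branch added above encodes a small but important distinction: an auxiliary reverb contributes a wet signal that is mixed in later, so a bypassed instance must output silence, while an insert reverb sits in-line and must pass the input through, clamped. A reduced sketch of that decision, assuming plain float buffers rather than the LVM types:

    #include <algorithm>
    #include <cstddef>
    #include <cstring>
    #include <vector>

    // Bypass behaviour: an auxiliary reverb outputs silence, an insert reverb
    // copies the input through, clamped to [-1, 1].
    void bypass(const float* in, float* out, size_t samples, bool auxiliary) {
        if (auxiliary) {
            std::memset(out, 0, samples * sizeof(float));
        } else {
            std::transform(in, in + samples, out,
                           [](float s) { return std::clamp(s, -1.f, 1.f); });
        }
    }

    int main() {
        const std::vector<float> in = {0.5f, 1.7f, -2.0f, 0.1f};
        std::vector<float> out(in.size());
        bypass(in.data(), out.data(), in.size(), /*auxiliary=*/false);  // clamped copy
        bypass(in.data(), out.data(), in.size(), /*auxiliary=*/true);   // silence
        return 0;
    }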
diff --git a/media/libheif/OWNERS b/media/libheif/OWNERS
new file mode 100644
index 0000000..a61ad21
--- /dev/null
+++ b/media/libheif/OWNERS
@@ -0,0 +1,2 @@
+include platform/frameworks/av:/media/janitors/avic_OWNERS
+include platform/frameworks/av:/media/janitors/codec_OWNERS
\ No newline at end of file
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index 840897f..9cd0e6e 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -81,9 +81,6 @@
 cc_library_shared {
     name: "libmedia_omx",
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
     double_loadable: true,
 
     srcs: [
diff --git a/media/libmediahelper/Android.bp b/media/libmediahelper/Android.bp
index 649f813..b5867a6 100644
--- a/media/libmediahelper/Android.bp
+++ b/media/libmediahelper/Android.bp
@@ -30,9 +30,6 @@
     name: "libmedia_helper",
     vendor_available: true,
     min_sdk_version: "29",
-    vndk: {
-        enabled: true,
-    },
     double_loadable: true,
     srcs: [
         "AudioParameter.cpp",
diff --git a/media/libmediaplayerservice/DeathNotifier.cpp b/media/libmediaplayerservice/DeathNotifier.cpp
index ab22f67..241c52d 100644
--- a/media/libmediaplayerservice/DeathNotifier.cpp
+++ b/media/libmediaplayerservice/DeathNotifier.cpp
@@ -17,11 +17,18 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "MediaPlayerService-DeathNotifier"
 #include <android-base/logging.h>
+#include <map>
 
 #include "DeathNotifier.h"
 
 namespace android {
 
+// Only dereference the cookie if it is still valid (that is, still present in this map).
+// Only used with the NDK binder.
+static uintptr_t sCookieKeyCounter = 0;
+static std::map<uintptr_t, wp<DeathNotifier::DeathRecipient>> sCookies;
+static std::mutex sCookiesMutex;
+
 class DeathNotifier::DeathRecipient :
         public IBinder::DeathRecipient,
         public hardware::hidl_death_recipient {
@@ -44,13 +51,32 @@
     }
 
     static void OnBinderDied(void *cookie) {
-        DeathRecipient *thiz = (DeathRecipient *)cookie;
-        thiz->mNotify();
+        std::unique_lock<std::mutex> guard(sCookiesMutex);
+        if (auto it = sCookies.find(reinterpret_cast<uintptr_t>(cookie)); it != sCookies.end()) {
+            sp<DeathRecipient> recipient = it->second.promote();
+            sCookies.erase(it);
+            guard.unlock();
+
+            if (recipient) {
+                LOG(INFO) << "Notifying DeathRecipient from OnBinderDied.";
+                recipient->mNotify();
+            } else {
+                LOG(INFO) <<
+                    "Tried to notify DeathRecipient from OnBinderDied but could not promote.";
+            }
+        }
     }
 
     AIBinder_DeathRecipient *getNdkRecipient() {
         return mNdkRecipient.get();;
     }
+    ~DeathRecipient() {
+        // The lock must be held so this object is not used concurrently by OnBinderDied.
+        std::lock_guard<std::mutex> guard(sCookiesMutex);
+        sCookies.erase(mCookieKey);
+    }
+
+    uintptr_t mCookieKey;
 
 private:
     Notify mNotify;
@@ -73,8 +99,15 @@
       : mService{std::in_place_index<3>, service},
         mDeathRecipient{new DeathRecipient(notify)} {
     mDeathRecipient->initNdk();
+    {
+        std::lock_guard<std::mutex> guard(sCookiesMutex);
+        mDeathRecipient->mCookieKey = sCookieKeyCounter++;
+        sCookies[mDeathRecipient->mCookieKey] = mDeathRecipient;
+    }
     AIBinder_linkToDeath(
-            service.get(), mDeathRecipient->getNdkRecipient(), mDeathRecipient.get());
+            service.get(),
+            mDeathRecipient->getNdkRecipient(),
+            reinterpret_cast<void*>(mDeathRecipient->mCookieKey));
 }
 
 DeathNotifier::DeathNotifier(DeathNotifier&& other)
@@ -94,10 +127,11 @@
         std::get<2>(mService)->unlinkToDeath(mDeathRecipient);
         break;
     case 3:
+
         AIBinder_unlinkToDeath(
                 std::get<3>(mService).get(),
                 mDeathRecipient->getNdkRecipient(),
-                mDeathRecipient.get());
+                reinterpret_cast<void*>(mDeathRecipient->mCookieKey));
         break;
     default:
         CHECK(false) << "Corrupted service type during destruction.";
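
The DeathNotifier change replaces the raw object-pointer cookie with an integer key that is looked up in a mutex-guarded map of weak references, so a death notification that arrives after the recipient has been destroyed is ignored rather than dereferenced. A standalone sketch of the same pattern using only the standard library (plain std::function callbacks and std::weak_ptr stand in for the binder types):

    #include <cstdint>
    #include <functional>
    #include <map>
    #include <memory>
    #include <mutex>

    static std::mutex gCookiesMutex;
    static uintptr_t gCookieKeyCounter = 0;
    static std::map<uintptr_t, std::weak_ptr<std::function<void()>>> gCookies;

    // Register a callback and hand out an opaque integer cookie.
    uintptr_t registerRecipient(const std::shared_ptr<std::function<void()>>& cb) {
        std::lock_guard<std::mutex> guard(gCookiesMutex);
        uintptr_t key = gCookieKeyCounter++;
        gCookies[key] = cb;
        return key;
    }

    void unregisterRecipient(uintptr_t key) {
        std::lock_guard<std::mutex> guard(gCookiesMutex);
        gCookies.erase(key);
    }

    // Death callback: only act if the cookie still maps to a live recipient.
    void onDied(void* cookie) {
        std::unique_lock<std::mutex> guard(gCookiesMutex);
        auto it = gCookies.find(reinterpret_cast<uintptr_t>(cookie));
        if (it == gCookies.end()) return;   // stale cookie: ignore
        auto recipient = it->second.lock();
        gCookies.erase(it);
        guard.unlock();
        if (recipient) (*recipient)();      // notify outside the lock
    }

    int main() {
        auto cb = std::make_shared<std::function<void()>>([] { /* handle death */ });
        uintptr_t key = registerRecipient(cb);
        onDied(reinterpret_cast<void*>(key));  // dispatched safely
        onDied(reinterpret_cast<void*>(key));  // second delivery is a no-op
        return 0;
    }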
diff --git a/media/libmediaplayerservice/DeathNotifier.h b/media/libmediaplayerservice/DeathNotifier.h
index 24e45a3..0fd7c65 100644
--- a/media/libmediaplayerservice/DeathNotifier.h
+++ b/media/libmediaplayerservice/DeathNotifier.h
@@ -37,10 +37,11 @@
     DeathNotifier(DeathNotifier&& other);
     ~DeathNotifier();
 
+    class DeathRecipient;
+
 private:
     std::variant<std::monostate, sp<IBinder>, sp<HBase>, ::ndk::SpAIBinder> mService;
 
-    class DeathRecipient;
     sp<DeathRecipient> mDeathRecipient;
 };
 
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 3ab32f0..f0e1b9e 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -2119,6 +2119,11 @@
 
     if (tsLayers > 1) {
         uint32_t bLayers = std::min(2u, tsLayers - 1); // use up-to 2 B-layers
+        // TODO(b/341121900): Remove this once B frames are handled correctly in the screen
+        // recorder use case when only the mic is used as the audio source
+        if (mAudioSource == AUDIO_SOURCE_MIC && mVideoSource == VIDEO_SOURCE_SURFACE) {
+            bLayers = 0;
+        }
         uint32_t pLayers = tsLayers - bLayers;
         format->setString(
                 "ts-schema", AStringPrintf("android.generic.%u+%u", pLayers, bLayers));
diff --git a/media/libmediaplayerservice/fuzzer/Android.bp b/media/libmediaplayerservice/fuzzer/Android.bp
index 74b0a85..78163e4 100644
--- a/media/libmediaplayerservice/fuzzer/Android.bp
+++ b/media/libmediaplayerservice/fuzzer/Android.bp
@@ -44,7 +44,7 @@
     ],
     fuzz_config: {
         cc: [
-            "android-media-fuzzing-reports@google.com",
+            "android-media-playback+bugs@google.com",
         ],
         componentid: 155276,
         hotlists: [
@@ -141,7 +141,6 @@
         "libplayerservice_datasource",
     ],
     shared_libs: [
-        "libmediaplayerservice",
         "libdatasource",
         "libdrmframework",
         "libstagefright_httplive",
@@ -159,10 +158,13 @@
         "libmediaplayerserviceFuzzer_defaults",
     ],
     static_libs: [
+        "libgmock",
+        "libgtest_ndk_c++",
         "libplayerservice_datasource",
         "libstagefright_nuplayer",
         "libstagefright_rtsp",
         "libstagefright_timedtext",
+        "libbinder_random_parcel",
     ],
     shared_libs: [
         "android.hardware.media.c2@1.0",
@@ -191,7 +193,10 @@
         "libpowermanager",
         "libstagefright_httplive",
         "libaudiohal@7.0",
+        "libmediaextractorservice",
     ],
+    corpus: ["corpus/*"],
+    include_dirs: ["frameworks/av/services/mediaextractor"],
 }
 
 cc_fuzz {
diff --git a/media/libmediaplayerservice/fuzzer/corpus/08873afe0bb32c29d6aed741d06c3ebfcfcf6204 b/media/libmediaplayerservice/fuzzer/corpus/08873afe0bb32c29d6aed741d06c3ebfcfcf6204
new file mode 100755
index 0000000..13e4732
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/08873afe0bb32c29d6aed741d06c3ebfcfcf6204
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/2c1e844c2b86f22075a69121efd280af3104e31f b/media/libmediaplayerservice/fuzzer/corpus/2c1e844c2b86f22075a69121efd280af3104e31f
new file mode 100755
index 0000000..591816e
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/2c1e844c2b86f22075a69121efd280af3104e31f
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/2e5297399ed949e919852cb0471cab25bcca82f4 b/media/libmediaplayerservice/fuzzer/corpus/2e5297399ed949e919852cb0471cab25bcca82f4
new file mode 100755
index 0000000..2acf349
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/2e5297399ed949e919852cb0471cab25bcca82f4
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/2f9bd1fce3b3422e31f2f5bb38db695e2ace7bb8 b/media/libmediaplayerservice/fuzzer/corpus/2f9bd1fce3b3422e31f2f5bb38db695e2ace7bb8
new file mode 100755
index 0000000..941885f
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/2f9bd1fce3b3422e31f2f5bb38db695e2ace7bb8
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/3f967d82b89c0b39e04727213ba71910801b85fa b/media/libmediaplayerservice/fuzzer/corpus/3f967d82b89c0b39e04727213ba71910801b85fa
new file mode 100755
index 0000000..a6920fa
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/3f967d82b89c0b39e04727213ba71910801b85fa
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/40a46cd4f323d9d5d8508188f21f94ddae5bfae6 b/media/libmediaplayerservice/fuzzer/corpus/40a46cd4f323d9d5d8508188f21f94ddae5bfae6
new file mode 100755
index 0000000..6b70ddd
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/40a46cd4f323d9d5d8508188f21f94ddae5bfae6
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/543adcc7136463c859c38fce066f87b1141bd622 b/media/libmediaplayerservice/fuzzer/corpus/543adcc7136463c859c38fce066f87b1141bd622
new file mode 100755
index 0000000..a919290
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/543adcc7136463c859c38fce066f87b1141bd622
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/874152a3db53b5e4c153655e8ee9443e6ec4c0b1 b/media/libmediaplayerservice/fuzzer/corpus/874152a3db53b5e4c153655e8ee9443e6ec4c0b1
new file mode 100755
index 0000000..1062677
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/874152a3db53b5e4c153655e8ee9443e6ec4c0b1
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/bf37374209d5417fa2a6a1cddcae6df44397b075 b/media/libmediaplayerservice/fuzzer/corpus/bf37374209d5417fa2a6a1cddcae6df44397b075
new file mode 100755
index 0000000..ed11aff
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/bf37374209d5417fa2a6a1cddcae6df44397b075
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/d537f5df0726fa840146a4c788855f8658b7aae0 b/media/libmediaplayerservice/fuzzer/corpus/d537f5df0726fa840146a4c788855f8658b7aae0
new file mode 100755
index 0000000..d82f45d
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/d537f5df0726fa840146a4c788855f8658b7aae0
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/e66a979713ba719fc30005b770b627c187a64607 b/media/libmediaplayerservice/fuzzer/corpus/e66a979713ba719fc30005b770b627c187a64607
new file mode 100755
index 0000000..32af6ee
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/e66a979713ba719fc30005b770b627c187a64607
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/e712688b60610ff05d48b7b74695993a05338112 b/media/libmediaplayerservice/fuzzer/corpus/e712688b60610ff05d48b7b74695993a05338112
new file mode 100755
index 0000000..abfba79
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/e712688b60610ff05d48b7b74695993a05338112
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/f5c60a210c8fbd5a9a3c131f6097d5d07fc45324 b/media/libmediaplayerservice/fuzzer/corpus/f5c60a210c8fbd5a9a3c131f6097d5d07fc45324
new file mode 100755
index 0000000..7fb1bca
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/f5c60a210c8fbd5a9a3c131f6097d5d07fc45324
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/mediaplayer_fuzzer.cpp b/media/libmediaplayerservice/fuzzer/mediaplayer_fuzzer.cpp
index a189d04..652b1ee 100644
--- a/media/libmediaplayerservice/fuzzer/mediaplayer_fuzzer.cpp
+++ b/media/libmediaplayerservice/fuzzer/mediaplayer_fuzzer.cpp
@@ -15,9 +15,13 @@
  *
  */
 
+#include <MediaExtractorService.h>
 #include <MediaPlayerService.h>
+#include <android/gui/BnSurfaceComposerClient.h>
 #include <camera/Camera.h>
 #include <datasource/FileSource.h>
+#include <fuzzbinder/random_binder.h>
+#include <gmock/gmock.h>
 #include <gui/Surface.h>
 #include <gui/SurfaceComposerClient.h>
 #include <media/IMediaCodecList.h>
@@ -31,40 +35,37 @@
 #include <media/stagefright/RemoteDataSource.h>
 #include <media/stagefright/foundation/base64.h>
 #include <thread>
+#include "android-base/stringprintf.h"
 #include "fuzzer/FuzzedDataProvider.h"
-
-constexpr int32_t kUuidSize = 16;
-constexpr int32_t kMaxSleepTimeInMs = 100;
-constexpr int32_t kMinSleepTimeInMs = 0;
-constexpr int32_t kPlayCountMin = 1;
-constexpr int32_t kPlayCountMax = 10;
-constexpr int32_t kMaxDimension = 8192;
-constexpr int32_t kMinDimension = 0;
-
 using namespace std;
 using namespace android;
 
-constexpr audio_session_t kSupportedAudioSessions[] = {
-    AUDIO_SESSION_DEVICE, AUDIO_SESSION_OUTPUT_STAGE, AUDIO_SESSION_OUTPUT_MIX};
+constexpr int32_t kUuidSize = 16;
+constexpr int32_t kMinSize = 0;
+constexpr int32_t kMaxSize = 100;
+constexpr int32_t kFourCCVal = android::FOURCC('m', 't', 'r', 'X');
+constexpr int32_t kFlagVal =
+        ISurfaceComposerClient::eCursorWindow | ISurfaceComposerClient::eOpaque;
 
-constexpr audio_timestretch_stretch_mode_t kAudioStretchModes[] = {
-    AUDIO_TIMESTRETCH_STRETCH_DEFAULT, AUDIO_TIMESTRETCH_STRETCH_VOICE};
+const char dumpFile[] = "OutputDumpFile";
 
-constexpr audio_timestretch_fallback_mode_t kAudioFallbackModes[] = {
-    AUDIO_TIMESTRETCH_FALLBACK_CUT_REPEAT, AUDIO_TIMESTRETCH_FALLBACK_DEFAULT,
-    AUDIO_TIMESTRETCH_FALLBACK_MUTE, AUDIO_TIMESTRETCH_FALLBACK_FAIL};
+enum DataSourceType { HTTP, FD, STREAM, FILETYPE, SOCKET, kMaxValue = SOCKET };
+
+constexpr PixelFormat kPixelFormat[] = {
+        PIXEL_FORMAT_UNKNOWN,       PIXEL_FORMAT_NONE,        PIXEL_FORMAT_CUSTOM,
+        PIXEL_FORMAT_TRANSLUCENT,   PIXEL_FORMAT_TRANSPARENT, PIXEL_FORMAT_OPAQUE,
+        PIXEL_FORMAT_RGBA_8888,     PIXEL_FORMAT_RGBX_8888,   PIXEL_FORMAT_RGB_888,
+        PIXEL_FORMAT_RGB_565,       PIXEL_FORMAT_BGRA_8888,   PIXEL_FORMAT_RGBA_5551,
+        PIXEL_FORMAT_RGBA_4444,     PIXEL_FORMAT_RGBA_FP16,   PIXEL_FORMAT_RGBA_1010102,
+        PIXEL_FORMAT_R_8,           PIXEL_FORMAT_R_16_UINT,   PIXEL_FORMAT_RG_1616_UINT,
+        PIXEL_FORMAT_RGBA_10101010,
+};
 
 constexpr media_parameter_keys kMediaParamKeys[] = {
     KEY_PARAMETER_CACHE_STAT_COLLECT_FREQ_MS, KEY_PARAMETER_AUDIO_CHANNEL_COUNT,
     KEY_PARAMETER_PLAYBACK_RATE_PERMILLE, KEY_PARAMETER_AUDIO_ATTRIBUTES,
     KEY_PARAMETER_RTP_ATTRIBUTES};
 
-constexpr audio_stream_type_t kAudioStreamTypes[] = {
-    AUDIO_STREAM_DEFAULT,      AUDIO_STREAM_VOICE_CALL,    AUDIO_STREAM_SYSTEM,
-    AUDIO_STREAM_RING,         AUDIO_STREAM_MUSIC,         AUDIO_STREAM_ALARM,
-    AUDIO_STREAM_NOTIFICATION, AUDIO_STREAM_BLUETOOTH_SCO, AUDIO_STREAM_ENFORCED_AUDIBLE,
-    AUDIO_STREAM_DTMF,         AUDIO_STREAM_TTS,           AUDIO_STREAM_ASSISTANT};
-
 constexpr media_event_type kMediaEventTypes[] = {MEDIA_NOP,
                                                  MEDIA_PREPARED,
                                                  MEDIA_PLAYBACK_COMPLETE,
@@ -140,9 +141,26 @@
     DISALLOW_EVIL_CONSTRUCTORS(TestMediaHTTPService);
 };
 
-class BinderDeathNotifier : public IBinder::DeathRecipient {
-   public:
-    void binderDied(const wp<IBinder> &) { abort(); }
+class FakeBnSurfaceComposerClient : public gui::BnSurfaceComposerClient {
+  public:
+    MOCK_METHOD(binder::Status, createSurface,
+                (const std::string& name, int32_t flags, const sp<IBinder>& parent,
+                 const gui::LayerMetadata& metadata, gui::CreateSurfaceResult* outResult),
+                (override));
+
+    MOCK_METHOD(binder::Status, clearLayerFrameStats, (const sp<IBinder>& handle), (override));
+
+    MOCK_METHOD(binder::Status, getLayerFrameStats,
+                (const sp<IBinder>& handle, gui::FrameStats* outStats), (override));
+
+    MOCK_METHOD(binder::Status, mirrorSurface,
+                (const sp<IBinder>& mirrorFromHandle, gui::CreateSurfaceResult* outResult),
+                (override));
+
+    MOCK_METHOD(binder::Status, mirrorDisplay,
+                (int64_t displayId, gui::CreateSurfaceResult* outResult), (override));
+
+    MOCK_METHOD(binder::Status, getSchedulingPolicy, (gui::SchedulingPolicy*), (override));
 };
 
 class MediaPlayerServiceFuzzer {
@@ -153,24 +171,40 @@
     void process(const uint8_t *data, size_t size);
 
    private:
-    bool setDataSource(const uint8_t *data, size_t size);
-    void invokeMediaPlayer();
-    FuzzedDataProvider mFdp;
-    sp<IMediaPlayer> mMediaPlayer = nullptr;
-    sp<IMediaPlayerClient> mMediaPlayerClient = nullptr;
-    const int32_t mDataSourceFd;
+     FuzzedDataProvider mFdp;
+     const int32_t mDataSourceFd;
+     sp<IMediaPlayer> mMediaPlayer = nullptr;
+     sp<IMediaPlayerClient> mMediaPlayerClient = nullptr;
+     void invokeMediaPlayer();
+     sp<SurfaceControl> makeSurfaceControl();
+     bool setDataSource(const uint8_t* data, size_t size);
 };
 
-bool MediaPlayerServiceFuzzer::setDataSource(const uint8_t *data, size_t size) {
-    status_t status = -1;
-    enum DataSourceType {http, fd, stream, file, socket, kMaxValue = socket};
-    switch (mFdp.ConsumeEnum<DataSourceType>()) {
-        case http: {
+sp<SurfaceControl> MediaPlayerServiceFuzzer::makeSurfaceControl() {
+     sp<IBinder> handle = getRandomBinder(&mFdp);
+     const sp<FakeBnSurfaceComposerClient> testClient(new FakeBnSurfaceComposerClient());
+     sp<SurfaceComposerClient> client = new SurfaceComposerClient(testClient);
+     uint32_t width = mFdp.ConsumeIntegral<uint32_t>();
+     uint32_t height = mFdp.ConsumeIntegral<uint32_t>();
+     uint32_t transformHint = mFdp.ConsumeIntegral<uint32_t>();
+     uint32_t flags = mFdp.ConsumeBool() ? kFlagVal : mFdp.ConsumeIntegral<uint32_t>();
+     int32_t format = mFdp.ConsumeBool() ? mFdp.ConsumeIntegral<uint32_t>()
+                                         : mFdp.PickValueInArray(kPixelFormat);
+     int32_t layerId = mFdp.ConsumeIntegral<int32_t>();
+     std::string layerName = android::base::StringPrintf("#%d", layerId);
+     return new SurfaceControl(client, handle, layerId, layerName, width, height, format,
+                               transformHint, flags);
+}
+
+bool MediaPlayerServiceFuzzer::setDataSource(const uint8_t* data, size_t size) {
+     status_t status = UNKNOWN_ERROR;
+     switch (mFdp.ConsumeEnum<DataSourceType>()) {
+        case HTTP: {
             KeyedVector<String8, String8> headers;
             headers.add(String8(mFdp.ConsumeRandomLengthString().c_str()),
                         String8(mFdp.ConsumeRandomLengthString().c_str()));
 
-            uint32_t dataBlobSize = mFdp.ConsumeIntegralInRange<uint16_t>(0, size);
+            uint32_t dataBlobSize = mFdp.ConsumeIntegralInRange<uint16_t>(kMinSize, size);
             vector<uint8_t> uriSuffix = mFdp.ConsumeBytes<uint8_t>(dataBlobSize);
 
             string uri(mFdp.PickValueInArray(kUrlPrefix));
@@ -183,18 +217,17 @@
                     mMediaPlayer->setDataSource(testService /*httpService*/, uri.c_str(), &headers);
             break;
         }
-        case fd: {
+        case FD: {
             write(mDataSourceFd, data, size);
-
             status = mMediaPlayer->setDataSource(mDataSourceFd, 0, size);
             break;
         }
-        case stream: {
+        case STREAM: {
             sp<IStreamSource> streamSource = sp<TestStreamSource>::make();
             status = mMediaPlayer->setDataSource(streamSource);
             break;
         }
-        case file: {
+        case FILETYPE: {
             write(mDataSourceFd, data, size);
 
             sp<DataSource> dataSource = new FileSource(dup(mDataSourceFd), 0, size);
@@ -205,7 +238,7 @@
             status = mMediaPlayer->setDataSource(iDataSource);
             break;
         }
-        case socket: {
+        case SOCKET: {
             String8 rtpParams = String8(mFdp.ConsumeRandomLengthString().c_str());
             struct sockaddr_in endpoint;
             endpoint.sin_family = mFdp.ConsumeIntegral<unsigned short>();
@@ -214,190 +247,239 @@
             status = mMediaPlayer->setDataSource(rtpParams);
             break;
         }
-    }
-
-    if (status != 0) {
+     }
+     if (status != OK) {
         return false;
-    }
-    return true;
+     }
+     return true;
 }
 
 void MediaPlayerServiceFuzzer::invokeMediaPlayer() {
-    sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
-    String8 name = String8(mFdp.ConsumeRandomLengthString().c_str());
-    uint32_t width = mFdp.ConsumeIntegralInRange<uint32_t>(kMinDimension, kMaxDimension);
-    uint32_t height = mFdp.ConsumeIntegralInRange<uint32_t>(kMinDimension, kMaxDimension);
-    uint32_t pixelFormat = mFdp.ConsumeIntegral<int32_t>();
-    uint32_t flags = mFdp.ConsumeIntegral<int32_t>();
-    sp<SurfaceControl> surfaceControl =
-        composerClient->createSurface(name, width, height, pixelFormat, flags);
-    if (surfaceControl) {
-        sp<Surface> surface = surfaceControl->getSurface();
-        mMediaPlayer->setVideoSurfaceTexture(surface->getIGraphicBufferProducer());
-    }
-
-    BufferingSettings buffering;
-    buffering.mInitialMarkMs = mFdp.ConsumeIntegral<int32_t>();
-    buffering.mResumePlaybackMarkMs = mFdp.ConsumeIntegral<int32_t>();
-    mMediaPlayer->setBufferingSettings(buffering);
-    mMediaPlayer->getBufferingSettings(&buffering);
-
-    mMediaPlayer->prepareAsync();
-    size_t playCount = mFdp.ConsumeIntegralInRange<size_t>(kPlayCountMin, kPlayCountMax);
-    for (size_t Idx = 0; Idx < playCount; ++Idx) {
-        mMediaPlayer->start();
-        this_thread::sleep_for(chrono::milliseconds(
-            mFdp.ConsumeIntegralInRange<int32_t>(kMinSleepTimeInMs, kMaxSleepTimeInMs)));
-        mMediaPlayer->pause();
-        this_thread::sleep_for(chrono::milliseconds(
-            mFdp.ConsumeIntegralInRange<int32_t>(kMinSleepTimeInMs, kMaxSleepTimeInMs)));
-        mMediaPlayer->stop();
-    }
-    bool state;
-    mMediaPlayer->isPlaying(&state);
-
-    AudioPlaybackRate rate;
-    rate.mSpeed = mFdp.ConsumeFloatingPoint<float>();
-    rate.mPitch = mFdp.ConsumeFloatingPoint<float>();
-    rate.mStretchMode = mFdp.PickValueInArray(kAudioStretchModes);
-    rate.mFallbackMode = mFdp.PickValueInArray(kAudioFallbackModes);
-    mMediaPlayer->setPlaybackSettings(rate);
-    mMediaPlayer->getPlaybackSettings(&rate);
-
-    AVSyncSettings *avSyncSettings = new AVSyncSettings();
-    float videoFpsHint = mFdp.ConsumeFloatingPoint<float>();
-    mMediaPlayer->setSyncSettings(*avSyncSettings, videoFpsHint);
-    mMediaPlayer->getSyncSettings(avSyncSettings, &videoFpsHint);
-    delete avSyncSettings;
-
-    mMediaPlayer->seekTo(mFdp.ConsumeIntegral<int32_t>());
-
-    int32_t msec;
-    mMediaPlayer->getCurrentPosition(&msec);
-    mMediaPlayer->getDuration(&msec);
-    mMediaPlayer->reset();
-
-    mMediaPlayer->notifyAt(mFdp.ConsumeIntegral<int64_t>());
-
-    mMediaPlayer->setAudioStreamType(mFdp.PickValueInArray(kAudioStreamTypes));
-    mMediaPlayer->setLooping(mFdp.ConsumeIntegral<int32_t>());
-    float left = mFdp.ConsumeFloatingPoint<float>();
-    float right = mFdp.ConsumeFloatingPoint<float>();
-    mMediaPlayer->setVolume(left, right);
-
-    Parcel request, reply;
-    request.writeInt32(mFdp.ConsumeIntegral<int32_t>());
-    request.setDataPosition(0);
-    mMediaPlayer->invoke(request, &reply);
-
-    Parcel filter;
-    filter.writeInt32(mFdp.ConsumeIntegral<int32_t>());
-    filter.setDataPosition(0);
-    mMediaPlayer->setMetadataFilter(filter);
-
-    bool updateOnly = mFdp.ConsumeBool();
-    bool applyFilter = mFdp.ConsumeBool();
-    mMediaPlayer->getMetadata(updateOnly, applyFilter, &reply);
-    mMediaPlayer->setAuxEffectSendLevel(mFdp.ConsumeFloatingPoint<float>());
-    mMediaPlayer->attachAuxEffect(mFdp.ConsumeIntegral<int32_t>());
-
-    int32_t key = mFdp.PickValueInArray(kMediaParamKeys);
-    request.writeInt32(mFdp.ConsumeIntegral<int32_t>());
-    request.setDataPosition(0);
-    mMediaPlayer->setParameter(key, request);
-    key = mFdp.PickValueInArray(kMediaParamKeys);
-    mMediaPlayer->getParameter(key, &reply);
-
-    struct sockaddr_in endpoint;
-    mMediaPlayer->getRetransmitEndpoint(&endpoint);
-
-    AttributionSourceState attributionSource;
-    attributionSource.packageName = mFdp.ConsumeRandomLengthString().c_str();
-    attributionSource.token = sp<BBinder>::make();
-    const sp<IMediaPlayerService> mpService(IMediaDeathNotifier::getMediaPlayerService());
-    sp<IMediaPlayer> mNextMediaPlayer = mpService->create(
-        mMediaPlayerClient, mFdp.PickValueInArray(kSupportedAudioSessions), attributionSource);
-    mMediaPlayer->setNextPlayer(mNextMediaPlayer);
-
-    const sp<media::VolumeShaper::Configuration> configuration =
-        sp<media::VolumeShaper::Configuration>::make();
-    const sp<media::VolumeShaper::Operation> operation = sp<media::VolumeShaper::Operation>::make();
-    mMediaPlayer->applyVolumeShaper(configuration, operation);
-
-    mMediaPlayer->getVolumeShaperState(mFdp.ConsumeIntegral<int32_t>());
-    uint8_t uuid[kUuidSize];
-    for (int32_t index = 0; index < kUuidSize; ++index) {
-        uuid[index] = mFdp.ConsumeIntegral<uint8_t>();
-    }
-    Vector<uint8_t> drmSessionId;
-    drmSessionId.push_back(mFdp.ConsumeIntegral<uint8_t>());
-    mMediaPlayer->prepareDrm(uuid, drmSessionId);
-    mMediaPlayer->releaseDrm();
-
-    audio_port_handle_t deviceId = mFdp.ConsumeIntegral<int32_t>();
-    mMediaPlayer->setOutputDevice(deviceId);
-    mMediaPlayer->getRoutedDeviceId(&deviceId);
-
-    mMediaPlayer->enableAudioDeviceCallback(mFdp.ConsumeBool());
-
-    sp<MediaPlayer> mediaPlayer = (MediaPlayer *)mMediaPlayer.get();
-
-    int32_t msg = mFdp.PickValueInArray(kMediaEventTypes);
-    int32_t ext1 = mFdp.PickValueInArray(kMediaInfoTypes);
-    int32_t ext2 = mFdp.ConsumeIntegral<int32_t>();
-    Parcel obj;
-    obj.writeInt32(mFdp.ConsumeIntegral<int32_t>());
-    obj.setDataPosition(0);
-    mediaPlayer->notify(msg, ext1, ext2, &obj);
-
-    int32_t mediaPlayerDumpFd = memfd_create("OutputDumpFile", MFD_ALLOW_SEALING);
-    Vector<String16> args;
-    args.push_back(String16(mFdp.ConsumeRandomLengthString().c_str()));
-    mediaPlayer->dump(mediaPlayerDumpFd, args);
-    close(mediaPlayerDumpFd);
-
-    mMediaPlayer->disconnect();
+     Parcel request, reply;
+     while (mFdp.remaining_bytes()) {
+        auto invokeMediaPlayerApi = mFdp.PickValueInArray<const std::function<void()>>({
+                [&]() {
+                    sp<SurfaceControl> surfaceControl = makeSurfaceControl();
+                    if (surfaceControl) {
+                        sp<Surface> surface = surfaceControl->getSurface();
+                        mMediaPlayer->setVideoSurfaceTexture(surface->getIGraphicBufferProducer());
+                    }
+                },
+                [&]() {
+                    BufferingSettings buffering;
+                    buffering.mInitialMarkMs = mFdp.ConsumeIntegral<int32_t>();
+                    buffering.mResumePlaybackMarkMs = mFdp.ConsumeIntegral<int32_t>();
+                    mMediaPlayer->setBufferingSettings(buffering);
+                },
+                [&]() {
+                    BufferingSettings buffering;
+                    mMediaPlayer->getBufferingSettings(&buffering);
+                },
+                [&]() {
+                    mMediaPlayer->prepareAsync();
+                    this_thread::sleep_for(chrono::milliseconds(100));  // Time to post message
+                },
+                [&]() {
+                    mMediaPlayer->start();
+                    this_thread::sleep_for(chrono::milliseconds(100));  // Time to post message
+                },
+                [&]() {
+                    mMediaPlayer->pause();
+                    this_thread::sleep_for(chrono::milliseconds(100));  // Time to post message
+                },
+                [&]() { mMediaPlayer->stop(); },
+                [&]() {
+                    bool state;
+                    mMediaPlayer->isPlaying(&state);
+                },
+                [&]() {
+                    AudioPlaybackRate rate;
+                    rate.mSpeed = mFdp.ConsumeFloatingPoint<float>();
+                    rate.mPitch = mFdp.ConsumeFloatingPoint<float>();
+                    rate.mStretchMode = mFdp.ConsumeBool() ? AUDIO_TIMESTRETCH_STRETCH_DEFAULT
+                                                           : AUDIO_TIMESTRETCH_STRETCH_VOICE;
+                    rate.mFallbackMode =
+                            (audio_timestretch_fallback_mode_t)mFdp.ConsumeIntegralInRange<int32_t>(
+                                    AUDIO_TIMESTRETCH_FALLBACK_CUT_REPEAT,
+                                    AUDIO_TIMESTRETCH_FALLBACK_FAIL);
+                    mMediaPlayer->setPlaybackSettings(rate);
+                    mMediaPlayer->getPlaybackSettings(&rate);
+                },
+                [&]() {
+                    AVSyncSettings* avSyncSettings = new AVSyncSettings();
+                    float videoFpsHint = mFdp.ConsumeFloatingPoint<float>();
+                    mMediaPlayer->setSyncSettings(*avSyncSettings, videoFpsHint);
+                    delete avSyncSettings;
+                },
+                [&]() {
+                    AVSyncSettings* avSyncSettings = new AVSyncSettings();
+                    float videoFpsHint = 0;
+                    mMediaPlayer->getSyncSettings(avSyncSettings, &videoFpsHint);
+                    delete avSyncSettings;
+                },
+                [&]() { mMediaPlayer->seekTo(mFdp.ConsumeIntegral<int32_t>()); },
+                [&]() {
+                    int32_t msec;
+                    mMediaPlayer->getCurrentPosition(&msec);
+                    mMediaPlayer->getDuration(&msec);
+                },
+                [&]() { mMediaPlayer->reset(); },
+                [&]() { mMediaPlayer->notifyAt(mFdp.ConsumeIntegral<uint64_t>()); },
+                [&]() {
+                    mMediaPlayer->setAudioStreamType(
+                            (audio_stream_type_t)mFdp.ConsumeIntegralInRange<int32_t>(
+                                    AUDIO_STREAM_VOICE_CALL, AUDIO_STREAM_CALL_ASSISTANT));
+                },
+                [&]() { mMediaPlayer->setLooping(mFdp.ConsumeIntegral<int32_t>()); },
+                [&]() {
+                    mMediaPlayer->setVolume(mFdp.ConsumeFloatingPoint<float>() /* left */,
+                                            mFdp.ConsumeFloatingPoint<float>() /* right */);
+                },
+                [&]() {
+                    request.writeInt32(mFdp.ConsumeIntegral<int32_t>());
+                    request.setDataPosition(0);
+                    mMediaPlayer->invoke(request, &reply);
+                },
+                [&]() {
+                    Parcel filter;
+                    filter.writeInt32(mFdp.ConsumeIntegral<int32_t>());
+                    filter.setDataPosition(0);
+                    mMediaPlayer->setMetadataFilter(filter);
+                },
+                [&]() {
+                    mMediaPlayer->getMetadata(mFdp.ConsumeBool() /* updateOnly */,
+                                              mFdp.ConsumeBool() /* applyFilter */, &reply);
+                },
+                [&]() { mMediaPlayer->setAuxEffectSendLevel(mFdp.ConsumeFloatingPoint<float>()); },
+                [&]() { mMediaPlayer->attachAuxEffect(mFdp.ConsumeIntegral<int32_t>()); },
+                [&]() {
+                    int32_t key = mFdp.PickValueInArray(kMediaParamKeys);
+                    request.writeInt32(mFdp.ConsumeIntegral<int32_t>());
+                    request.setDataPosition(0);
+                    mMediaPlayer->setParameter(key, request);
+                    key = mFdp.PickValueInArray(kMediaParamKeys);
+                    mMediaPlayer->getParameter(key, &reply);
+                },
+                [&]() {
+                    int32_t key =
+                            mFdp.ConsumeBool() ? kFourCCVal : mFdp.ConsumeIntegral<uint32_t>();
+                    mMediaPlayer->getParameter(key, &reply);
+                },
+                [&]() {
+                    struct sockaddr_in endpoint;
+                    mMediaPlayer->getRetransmitEndpoint(&endpoint);
+                },
+                [&]() {
+                    AttributionSourceState attributionSource;
+                    attributionSource.packageName = mFdp.ConsumeRandomLengthString().c_str();
+                    attributionSource.token = sp<BBinder>::make();
+                    const sp<IMediaPlayerService> mpService(
+                            IMediaDeathNotifier::getMediaPlayerService());
+                    audio_session_t audioSessionId =
+                            (audio_session_t)mFdp.ConsumeIntegralInRange<int32_t>(
+                                    AUDIO_SESSION_DEVICE, AUDIO_SESSION_OUTPUT_MIX);
+                    sp<IMediaPlayer> mNextMediaPlayer = mpService->create(
+                            mMediaPlayerClient, audioSessionId, attributionSource);
+                    mMediaPlayer->setNextPlayer(mNextMediaPlayer);
+                },
+                [&]() {
+                    const sp<media::VolumeShaper::Configuration> configuration =
+                            sp<media::VolumeShaper::Configuration>::make();
+                    const sp<media::VolumeShaper::Operation> operation =
+                            sp<media::VolumeShaper::Operation>::make();
+                    mMediaPlayer->applyVolumeShaper(configuration, operation);
+                },
+                [&]() { mMediaPlayer->getVolumeShaperState(mFdp.ConsumeIntegral<int32_t>()); },
+                [&]() {
+                    uint8_t uuid[kUuidSize];
+                    for (int32_t index = 0; index < kUuidSize; ++index) {
+                        uuid[index] = mFdp.ConsumeIntegral<uint8_t>();
+                    }
+                    Vector<uint8_t> drmSessionId;
+                    int32_t length = mFdp.ConsumeIntegralInRange<uint32_t>(kMinSize, kMaxSize);
+                    while (length--) {
+                        drmSessionId.push_back(mFdp.ConsumeIntegral<uint8_t>());
+                    }
+                    mMediaPlayer->prepareDrm(uuid, drmSessionId);
+                },
+                [&]() { mMediaPlayer->releaseDrm(); },
+                [&]() {
+                    audio_port_handle_t deviceId = mFdp.ConsumeIntegral<int32_t>();
+                    mMediaPlayer->setOutputDevice(deviceId);
+                },
+                [&]() {
+                    audio_port_handle_t deviceId;
+                    mMediaPlayer->getRoutedDeviceId(&deviceId);
+                },
+                [&]() { mMediaPlayer->enableAudioDeviceCallback(mFdp.ConsumeBool()); },
+                [&]() {
+                    sp<MediaPlayer> mediaPlayer = (MediaPlayer*)mMediaPlayer.get();
+                    Parcel obj;
+                    obj.writeInt32(mFdp.ConsumeIntegral<int32_t>());
+                    obj.setDataPosition(0);
+                    mediaPlayer->notify(mFdp.PickValueInArray(kMediaEventTypes) /* msg */,
+                                        mFdp.PickValueInArray(kMediaInfoTypes) /* ext1 */,
+                                        mFdp.ConsumeIntegral<int32_t>() /* ext2 */, &obj);
+                },
+                [&]() {
+                    sp<MediaPlayer> mediaPlayer = (MediaPlayer*)mMediaPlayer.get();
+                    int32_t mediaPlayerDumpFd = memfd_create(dumpFile, MFD_ALLOW_SEALING);
+                    Vector<String16> args;
+                    args.push_back(String16(mFdp.ConsumeRandomLengthString().c_str()));
+                    mediaPlayer->dump(mediaPlayerDumpFd, args);
+                    close(mediaPlayerDumpFd);
+                },
+                [&]() { mMediaPlayer->disconnect(); },
+        });
+        invokeMediaPlayerApi();
+     }
 }
 
-void MediaPlayerServiceFuzzer::process(const uint8_t *data, size_t size) {
-    MediaPlayerService::instantiate();
-
-    const sp<IMediaPlayerService> mpService(IMediaDeathNotifier::getMediaPlayerService());
-    if (!mpService) {
+void MediaPlayerServiceFuzzer::process(const uint8_t* data, size_t size) {
+     const sp<IMediaPlayerService> mpService(IMediaDeathNotifier::getMediaPlayerService());
+     if (!mpService) {
         return;
-    }
+     }
 
-    sp<IMediaCodecList> mediaCodecList = mpService->getCodecList();
+     sp<IMediaCodecList> mediaCodecList = mpService->getCodecList();
 
-    sp<IRemoteDisplayClient> remoteDisplayClient;
-    sp<IRemoteDisplay> remoteDisplay = mpService->listenForRemoteDisplay(
-        String16(mFdp.ConsumeRandomLengthString().c_str()) /*opPackageName*/, remoteDisplayClient,
-        String8(mFdp.ConsumeRandomLengthString().c_str()) /*iface*/);
+     sp<IRemoteDisplayClient> remoteDisplayClient;
+     sp<IRemoteDisplay> remoteDisplay = mpService->listenForRemoteDisplay(
+             String16(mFdp.ConsumeRandomLengthString().c_str()) /*opPackageName*/,
+             remoteDisplayClient, String8(mFdp.ConsumeRandomLengthString().c_str()) /*iface*/);
 
-    mpService->addBatteryData(mFdp.ConsumeIntegral<uint32_t>());
-    Parcel reply;
-    mpService->pullBatteryData(&reply);
+     mpService->addBatteryData(mFdp.ConsumeIntegral<uint32_t>());
+     Parcel reply;
+     mpService->pullBatteryData(&reply);
 
-    sp<MediaPlayerService> mediaPlayerService = (MediaPlayerService *)mpService.get();
-    AttributionSourceState attributionSource;
-    attributionSource.packageName = mFdp.ConsumeRandomLengthString().c_str();
-    attributionSource.token = sp<BBinder>::make();
-    mMediaPlayer = mediaPlayerService->create(
-        mMediaPlayerClient, mFdp.PickValueInArray(kSupportedAudioSessions), attributionSource);
+     sp<MediaPlayerService> mediaPlayerService = (MediaPlayerService*)mpService.get();
+     AttributionSourceState attributionSource;
+     attributionSource.packageName = mFdp.ConsumeRandomLengthString().c_str();
+     attributionSource.token = sp<BBinder>::make();
+     mMediaPlayer =
+             mediaPlayerService->create(mMediaPlayerClient,
+                                        (audio_session_t)mFdp.ConsumeIntegralInRange<int32_t>(
+                                                AUDIO_SESSION_DEVICE, AUDIO_SESSION_OUTPUT_MIX),
+                                        attributionSource);
 
-    int32_t mediaPlayerServiceDumpFd = memfd_create("OutputDumpFile", MFD_ALLOW_SEALING);
-    Vector<String16> args;
-    args.push_back(String16(mFdp.ConsumeRandomLengthString().c_str()));
-    mediaPlayerService->dump(mediaPlayerServiceDumpFd, args);
-    close(mediaPlayerServiceDumpFd);
+     int32_t mediaPlayerServiceDumpFd = memfd_create(dumpFile, MFD_ALLOW_SEALING);
+     Vector<String16> args;
+     args.push_back(String16(mFdp.ConsumeRandomLengthString().c_str()));
+     mediaPlayerService->dump(mediaPlayerServiceDumpFd, args);
+     close(mediaPlayerServiceDumpFd);
 
-    if (!mMediaPlayer) {
+     if (!mMediaPlayer) {
         return;
-    }
-
-    if (setDataSource(data, size)) {
+     }
+     if (setDataSource(data, size)) {
         invokeMediaPlayer();
-    }
+     }
+}
+
+extern "C" int LLVMFuzzerInitialize(int* /* argc */, char*** /* argv */) {
+     MediaPlayerService::instantiate();
+     MediaExtractorService::instantiate();
+     return 0;
 }
 
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
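
The fuzzer rewrite above replaces one fixed call sequence with a loop that keeps picking a single API call from a table of lambdas until the fuzz input runs out, so libFuzzer can explore call orderings as well as argument values. A minimal standalone sketch of that dispatch loop; it assumes only the standard fuzzer/FuzzedDataProvider.h header, and FakePlayer is a stand-in for the real IMediaPlayer interface, not part of the change above.

    #include <fuzzer/FuzzedDataProvider.h>

    #include <cstddef>
    #include <cstdint>
    #include <functional>

    // Stand-in for the object under test; not the real IMediaPlayer.
    struct FakePlayer {
        void start() {}
        void seekTo(int /*msec*/) {}
        void setVolume(float /*left*/, float /*right*/) {}
    };

    extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
        FuzzedDataProvider fdp(data, size);
        FakePlayer player;
        while (fdp.remaining_bytes()) {
            // One operation per iteration, so the byte stream also encodes call ordering.
            auto op = fdp.PickValueInArray<const std::function<void()>>({
                    [&]() { player.start(); },
                    [&]() { player.seekTo(fdp.ConsumeIntegral<int>()); },
                    [&]() {
                        player.setVolume(fdp.ConsumeFloatingPoint<float>(),
                                         fdp.ConsumeFloatingPoint<float>());
                    },
            });
            op();
        }
        return 0;
    }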
diff --git a/media/libnblog/Reader.cpp b/media/libnblog/Reader.cpp
index 71ebfd1..d5f16e8 100644
--- a/media/libnblog/Reader.cpp
+++ b/media/libnblog/Reader.cpp
@@ -93,7 +93,7 @@
     do {
         availToRead = mFifoReader->obtain(iovec, capacity, NULL /*timeout*/, &lostTemp);
         lost += lostTemp;
-    } while (availToRead < 0 || ++tries <= kMaxObtainTries);
+    } while (availToRead < 0 && ++tries <= kMaxObtainTries);
 
     if (availToRead <= 0) {
         ALOGW_IF(availToRead < 0, "NBLog Reader %s failed to catch up with Writer", mName.c_str());
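
The one-character Reader.cpp fix above changes the loop semantics substantially: with || a permanently failing obtain() spins forever, and even a successful obtain() is retried (its result overwritten) until the try budget runs out, whereas with && the loop retries only while obtain() fails and tries remain. A tiny sketch of the corrected bounded-retry shape, with a stubbed obtain() standing in for the FIFO call:

    #include <cstdio>

    // Stubbed obtain(): fails twice, then reports 8 items available.
    static int obtain() {
        static int calls = 0;
        return ++calls < 3 ? -1 : 8;
    }

    int main() {
        constexpr int kMaxObtainTries = 3;
        int tries = 0;
        int availToRead;
        do {
            availToRead = obtain();
        } while (availToRead < 0 && ++tries <= kMaxObtainTries);  // retry only on failure

        if (availToRead <= 0) {
            std::printf("reader failed to catch up with writer\n");
        } else {
            std::printf("got %d items after %d retries\n", availToRead, tries);
        }
        return 0;
    }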
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 1593aa0..e06efac 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -7575,6 +7575,22 @@
             return true;
         }
 
+        // When ACodec receives an error event in LoadedToIdleState, it does not release the
+        // allocated buffers, which causes a gralloc buffer leak. Release these buffers first,
+        // then process the error event.
+        case OMX_EventError:
+        {
+            if (mCodec->allYourBuffersAreBelongToUs(kPortIndexInput)) {
+                mCodec->freeBuffersOnPort(kPortIndexInput);
+            }
+
+            if (mCodec->allYourBuffersAreBelongToUs(kPortIndexOutput)) {
+                mCodec->freeBuffersOnPort(kPortIndexOutput);
+            }
+
+            return BaseState::onOMXEvent(event, data1, data2);
+        }
+
         default:
             return BaseState::onOMXEvent(event, data1, data2);
     }
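
The new OMX_EventError case frees any buffers this side still owns before falling through to the generic handling, so an error hit in LoadedToIdleState no longer leaks gralloc buffers. The shape of that pattern, reduced to plain C++ (the Port type and onError helper below are hypothetical, not the real ACodec/OMX interfaces):

    #include <functional>
    #include <vector>

    struct Port {
        std::vector<int> buffers;  // stand-in for the allocated buffers
        bool allBuffersOwnedByUs() const { return !buffers.empty(); }
        void freeBuffers() { buffers.clear(); }
    };

    // Release whatever this side still owns on both ports, then run the base handler.
    bool onError(Port& input, Port& output, const std::function<bool()>& baseOnError) {
        if (input.allBuffersOwnedByUs()) input.freeBuffers();
        if (output.allBuffersOwnedByUs()) output.freeBuffers();
        return baseOnError();
    }

    int main() {
        Port input{{1, 2, 3}}, output{{4, 5}};
        onError(input, output, [] { return true; });  // buffers released, error propagated
        return 0;
    }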
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 886285e..5b6848c 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -318,11 +318,15 @@
         "aconfig_mediacodec_flags_c_lib",
     ],
 
+    defaults: [
+        "aconfig_lib_cc_static_link.defaults",
+    ],
+
     static_libs: [
         "android.media.codec-aconfig-cc",
         "libstagefright_esds",
         "libstagefright_color_conversion",
-        "libyuv_static",
+        "libyuv",
         "libstagefright_webm",
         "libstagefright_timedtext",
         "libogg",
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 4441121..e229844 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -151,7 +151,8 @@
 
     if (camera == 0) {
         mCamera = Camera::connect(cameraId, clientName, clientUid, clientPid,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
                 /*forceSlowJpegMode*/false);
         if (mCamera == 0) return -EBUSY;
         mCameraFlags &= ~FLAGS_HOT_CAMERA;
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 4e378d4..0401e82 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -3975,6 +3975,15 @@
                     switch (mState) {
                         case INITIALIZING:
                         {
+                            // Resource error during INITIALIZING state needs to be logged
+                            // through metrics, to be able to track such occurrences.
+                            if (isResourceError(err)) {
+                                mediametrics_setInt32(mMetricsHandle, kCodecError, err);
+                                mediametrics_setCString(mMetricsHandle, kCodecErrorState,
+                                                        stateString(mState).c_str());
+                                flushMediametrics();
+                                initMediametrics();
+                            }
                             setState(UNINITIALIZED);
                             break;
                         }
@@ -4912,8 +4921,8 @@
                 if (flags & CONFIGURE_FLAG_USE_CRYPTO_ASYNC) {
                     mFlags |= kFlagUseCryptoAsync;
                     if ((mFlags & kFlagUseBlockModel)) {
-                        ALOGW("CrytoAsync not yet enabled for block model,\
-                                falling back to normal");
+                        ALOGW("CrytoAsync not yet enabled for block model, "
+                                "falling back to normal");
                     }
                 }
             }
@@ -4970,8 +4979,7 @@
 
             mDescrambler = static_cast<IDescrambler *>(descrambler);
             mBufferChannel->setDescrambler(mDescrambler);
-            if ((mFlags & kFlagUseCryptoAsync) &&
-                mCrypto  && (mDomain == DOMAIN_VIDEO)) {
+            if ((mFlags & kFlagUseCryptoAsync) && mCrypto) {
                 // set kFlagUseCryptoAsync but do-not use this for block model
                 // this is to propagate the error in onCryptoError()
                 // TODO (b/274628160): Enable Use of CONFIG_FLAG_USE_CRYPTO_ASYNC
@@ -6269,15 +6277,8 @@
         cryptoInfo->setInt32("secure", mFlags & kFlagIsSecure);
         sp<RefBase> obj;
         if (msg->findObject("cryptoInfos", &obj)) {
-            sp<CryptoInfosWrapper> infos{(CryptoInfosWrapper*)obj.get()};
-            sp<CryptoInfosWrapper> asyncInfos{
-                    new CryptoInfosWrapper(std::vector<std::unique_ptr<CodecCryptoInfo>>())};
-            for (std::unique_ptr<CodecCryptoInfo> &info : infos->value) {
-                if (info) {
-                    asyncInfos->value.emplace_back(new CryptoAsync::CryptoAsyncInfo(info));
-                }
-            }
-            buffer->meta()->setObject("cryptoInfos", asyncInfos);
+            // this object is a standalone object when created (no copy required here)
+            buffer->meta()->setObject("cryptoInfos", obj);
         } else {
             size_t key_len = (key != nullptr)? 16 : 0;
             size_t iv_len = (iv != nullptr)? 16 : 0;
@@ -6416,7 +6417,6 @@
             }
         }
         if (mCryptoAsync) {
-            // TODO b/316565675 - enable async path for audio
             // prepare a message and enqueue
             sp<AMessage> cryptoInfo = new AMessage();
             buildCryptoInfoAMessage(cryptoInfo, CryptoAsync::kActionDecrypt);
diff --git a/media/libstagefright/SurfaceUtils.cpp b/media/libstagefright/SurfaceUtils.cpp
index 604dcb0..714e312 100644
--- a/media/libstagefright/SurfaceUtils.cpp
+++ b/media/libstagefright/SurfaceUtils.cpp
@@ -111,8 +111,9 @@
         }
     }
 
-    int finalUsage = usage | consumerUsage;
-    ALOGV("gralloc usage: %#x(producer) + %#x(consumer) = %#x", usage, consumerUsage, finalUsage);
+    uint64_t finalUsage = (uint32_t) usage | (uint32_t) consumerUsage;
+    ALOGV("gralloc usage: %#x(producer) + %#x(consumer) = 0x%" PRIx64,
+            usage, consumerUsage, finalUsage);
     err = native_window_set_usage(nativeWindow, finalUsage);
     if (err != NO_ERROR) {
         ALOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err);
@@ -126,7 +127,7 @@
         return err;
     }
 
-    ALOGD("set up nativeWindow %p for %dx%d, color %#x, rotation %d, usage %#x",
+    ALOGD("set up nativeWindow %p for %dx%d, color %#x, rotation %d, usage 0x%" PRIx64,
             nativeWindow, width, height, format, rotation, finalUsage);
     return NO_ERROR;
 }
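
The logging fix above widens the combined usage value to 64 bits, so the plain %#x conversion no longer matches the argument; the <inttypes.h> PRIx64 macro keeps the format string correct on both 32- and 64-bit builds. A small sketch of the idiom with made-up values:

#include <cinttypes>
#include <cstdint>
#include <cstdio>

int main() {
    uint32_t producerUsage = 0x100;
    uint32_t consumerUsage = 0x2000;
    uint64_t finalUsage = static_cast<uint64_t>(producerUsage) | consumerUsage;
    std::printf("gralloc usage: %#x(producer) + %#x(consumer) = 0x%" PRIx64 "\n",
                producerUsage, consumerUsage, finalUsage);
    return 0;
}
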
diff --git a/media/libstagefright/VideoRenderQualityTracker.cpp b/media/libstagefright/VideoRenderQualityTracker.cpp
index eb9ac0f..bf29b1d 100644
--- a/media/libstagefright/VideoRenderQualityTracker.cpp
+++ b/media/libstagefright/VideoRenderQualityTracker.cpp
@@ -302,13 +302,6 @@
         mRenderDurationMs += (actualRenderTimeUs - mLastRenderTimeUs) / 1000;
     }
 
-    // Now that a frame has been rendered, the previously skipped frames can be processed as skipped
-    // frames since the app is not skipping them to terminate playback.
-    for (int64_t contentTimeUs : mPendingSkippedFrameContentTimeUsList) {
-        processMetricsForSkippedFrame(contentTimeUs);
-    }
-    mPendingSkippedFrameContentTimeUsList = {};
-
     // We can render a pending queued frame if it's the last frame of the video, so release it
     // immediately.
     if (contentTimeUs == mTunnelFrameQueuedContentTimeUs && mTunnelFrameQueuedContentTimeUs != -1) {
@@ -332,9 +325,25 @@
                   (long long) contentTimeUs, (long long) nextExpectedFrame.contentTimeUs);
             break;
         }
+        // Process all skipped frames before the dropped frame.
+        while (!mPendingSkippedFrameContentTimeUsList.empty()) {
+            if (mPendingSkippedFrameContentTimeUsList.front() >= nextExpectedFrame.contentTimeUs) {
+                break;
+            }
+            processMetricsForSkippedFrame(mPendingSkippedFrameContentTimeUsList.front());
+            mPendingSkippedFrameContentTimeUsList.pop_front();
+        }
         processMetricsForDroppedFrame(nextExpectedFrame.contentTimeUs,
                                       nextExpectedFrame.desiredRenderTimeUs);
     }
+    // Process all skipped frames before the rendered frame.
+    while (!mPendingSkippedFrameContentTimeUsList.empty()) {
+        if (mPendingSkippedFrameContentTimeUsList.front() >= nextExpectedFrame.contentTimeUs) {
+            break;
+        }
+        processMetricsForSkippedFrame(mPendingSkippedFrameContentTimeUsList.front());
+        mPendingSkippedFrameContentTimeUsList.pop_front();
+    }
     processMetricsForRenderedFrame(nextExpectedFrame.contentTimeUs,
                                    nextExpectedFrame.desiredRenderTimeUs, actualRenderTimeUs,
                                    freezeEventOut, judderEventOut);
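
The reworked tracker logic above replaces the bulk flush of pending skipped frames with an in-order drain: only timestamps that precede the frame currently being accounted for are processed, and the rest stay queued for the next pass. A standalone sketch of that drain loop (container and function names are illustrative):

#include <cstdint>
#include <cstdio>
#include <list>

static void processSkipped(int64_t contentTimeUs) {
    std::printf("skipped frame at %lld us\n", (long long)contentTimeUs);
}

static void drainSkippedBefore(std::list<int64_t>& pending, int64_t frameTimeUs) {
    while (!pending.empty() && pending.front() < frameTimeUs) {
        processSkipped(pending.front());
        pending.pop_front();
    }
}

int main() {
    std::list<int64_t> pending = {100, 200, 300};
    drainSkippedBefore(pending, 250);   // processes 100 and 200; 300 stays queued
    return 0;
}
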
diff --git a/media/libstagefright/colorconversion/Android.bp b/media/libstagefright/colorconversion/Android.bp
index 7ff9b10..4072bf9 100644
--- a/media/libstagefright/colorconversion/Android.bp
+++ b/media/libstagefright/colorconversion/Android.bp
@@ -36,7 +36,7 @@
         "media_plugin_headers",
     ],
 
-    static_libs: ["libyuv_static"],
+    static_libs: ["libyuv"],
 
     cflags: ["-Werror"],
 
diff --git a/media/libstagefright/colorconversion/fuzzer/Android.bp b/media/libstagefright/colorconversion/fuzzer/Android.bp
index 237e715..50a2477 100644
--- a/media/libstagefright/colorconversion/fuzzer/Android.bp
+++ b/media/libstagefright/colorconversion/fuzzer/Android.bp
@@ -27,7 +27,7 @@
 cc_defaults {
     name: "libcolorconversion_fuzzer_defaults",
     static_libs: [
-        "libyuv_static",
+        "libyuv",
         "libstagefright_color_conversion",
         "libstagefright",
         "liblog",
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index b3be24b..6fb9232 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -139,6 +139,7 @@
                 <Limit name="bitrate" range="1-40000000" />
             </Variant>
             <Feature name="adaptive-playback" />
+            <Feature name="dynamic-color-aspects" />
             <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.hevc.decoder" type="video/hevc" variant="slow-cpu,!slow-cpu">
@@ -160,6 +161,7 @@
                 <Limit name="bitrate" range="1-5000000" />
             </Variant>
             <Feature name="adaptive-playback" />
+            <Feature name="dynamic-color-aspects" />
             <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.vp8.decoder" type="video/x-vnd.on2.vp8" variant="slow-cpu,!slow-cpu">
@@ -178,6 +180,7 @@
                 <Limit name="bitrate" range="1-40000000" />
             </Variant>
             <Feature name="adaptive-playback" />
+            <Feature name="dynamic-color-aspects" />
             <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.vp9.decoder" type="video/x-vnd.on2.vp9" variant="slow-cpu,!slow-cpu">
@@ -197,9 +200,30 @@
                 <Limit name="bitrate" range="1-5000000" />
             </Variant>
             <Feature name="adaptive-playback" />
+            <Feature name="dynamic-color-aspects" />
             <Attribute name="software-codec" />
         </MediaCodec>
-        <MediaCodec name="c2.android.av1.decoder" type="video/av01" variant="slow-cpu,!slow-cpu">
+        <MediaCodec name="c2.android.av1-dav1d.decoder" type="video/av01" variant="slow-cpu,!slow-cpu">
+            <Limit name="alignment" value="1x1" />
+            <Limit name="block-size" value="16x16" />
+            <Variant name="!slow-cpu">
+                <Limit name="size" min="2x2" max="2048x2048" />
+                <Limit name="block-count" range="1-8192" /> <!-- max 2048x1024 -->
+                <Limit name="blocks-per-second" range="1-245760" />
+                <Limit name="bitrate" range="1-40000000" />
+            </Variant>
+            <Variant name="slow-cpu">
+                <Limit name="size" min="2x2" max="1280x1280" />
+                <Limit name="block-count" range="1-3600" /> <!-- max 1280x720 -->
+                <Limit name="blocks-per-second" range="1-108000" />
+                <Limit name="bitrate" range="1-5000000" />
+            </Variant>
+            <Feature name="adaptive-playback" />
+            <Feature name="dynamic-color-aspects" />
+            <Feature name="low-latency" />
+            <Attribute name="software-codec" />
+        </MediaCodec>
+        <MediaCodec name="c2.android.av1.decoder" type="video/av01" variant="slow-cpu,!slow-cpu" rank="1024">
             <!-- TODO: implement a mechanism to prevent AV1 Decoder usage on pre-U devices -->
             <Limit name="alignment" value="1x1" />
             <Limit name="block-size" value="16x16" />
@@ -216,25 +240,7 @@
                 <Limit name="bitrate" range="1-5000000" />
             </Variant>
             <Feature name="adaptive-playback" />
-            <Feature name="low-latency" />
-            <Attribute name="software-codec" />
-        </MediaCodec>
-        <MediaCodec name="c2.android.av1-dav1d.decoder" type="video/av01" variant="slow-cpu,!slow-cpu" rank="1024">
-            <Limit name="alignment" value="1x1" />
-            <Limit name="block-size" value="16x16" />
-            <Variant name="!slow-cpu">
-                <Limit name="size" min="2x2" max="2048x2048" />
-                <Limit name="block-count" range="1-8192" /> <!-- max 2048x1024 -->
-                <Limit name="blocks-per-second" range="1-245760" />
-                <Limit name="bitrate" range="1-40000000" />
-            </Variant>
-            <Variant name="slow-cpu">
-                <Limit name="size" min="2x2" max="1280x1280" />
-                <Limit name="block-count" range="1-3600" /> <!-- max 1280x720 -->
-                <Limit name="blocks-per-second" range="1-108000" />
-                <Limit name="bitrate" range="1-5000000" />
-            </Variant>
-            <Feature name="adaptive-playback" />
+            <Feature name="dynamic-color-aspects" />
             <Feature name="low-latency" />
             <Attribute name="software-codec" />
         </MediaCodec>
@@ -353,6 +359,7 @@
                 <Limit name="bitrate" range="1-20000000" />
             </Variant>
             <Feature name="bitrate-modes" value="VBR,CBR" />
+            <Feature name="qp-bounds" />
             <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.hevc.encoder" type="video/hevc" variant="!slow-cpu">
@@ -367,6 +374,7 @@
             <Limit name="complexity" range="0-10"  default="0" />
             <Limit name="quality" range="0-100"  default="80" />
             <Feature name="bitrate-modes" value="VBR,CBR,CQ" />
+            <Feature name="qp-bounds" />
             <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.vp9.encoder" type="video/x-vnd.on2.vp9" variant="!slow-cpu">
@@ -379,6 +387,7 @@
             <Limit name="block-count" range="1-3600" /> <!-- max 1280x720 -->
             <Limit name="bitrate" range="1-40000000" />
             <Feature name="bitrate-modes" value="VBR,CBR" />
+            <Feature name="qp-bounds" />
             <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.av1.encoder" type="video/av01" enabled="false" minsdk="34" variant="slow-cpu,!slow-cpu">
@@ -397,6 +406,7 @@
             <Limit name="quality" range="0-100"  default="80" />
             <Limit name="complexity" range="0-5"  default="0" />
             <Feature name="bitrate-modes" value="VBR,CBR,CQ" />
+            <Feature name="qp-bounds" />
             <Attribute name="software-codec" />
         </MediaCodec>
     </Encoders>
diff --git a/media/libstagefright/omx/Android.bp b/media/libstagefright/omx/Android.bp
index 79ab009..630817c 100644
--- a/media/libstagefright/omx/Android.bp
+++ b/media/libstagefright/omx/Android.bp
@@ -20,9 +20,6 @@
 cc_library_shared {
     name: "libstagefright_omx",
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
     double_loadable: true,
 
     srcs: [
@@ -218,9 +215,6 @@
 cc_library_shared {
     name: "libstagefright_omx_utils",
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
     double_loadable: true,
     srcs: ["OMXUtils.cpp"],
     export_include_dirs: [
diff --git a/media/libstagefright/renderfright/Android.bp b/media/libstagefright/renderfright/Android.bp
index 22b13f6..bb850ca 100644
--- a/media/libstagefright/renderfright/Android.bp
+++ b/media/libstagefright/renderfright/Android.bp
@@ -84,9 +84,6 @@
     name: "librenderfright",
     defaults: ["librenderfright_defaults"],
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
     double_loadable: true,
 
     cflags: [
diff --git a/media/libstagefright/rtsp/fuzzer/Android.bp b/media/libstagefright/rtsp/fuzzer/Android.bp
index a2791ba..ff64af5 100644
--- a/media/libstagefright/rtsp/fuzzer/Android.bp
+++ b/media/libstagefright/rtsp/fuzzer/Android.bp
@@ -29,11 +29,19 @@
     header_libs: [
         "libstagefright_rtsp_headers",
     ],
-    fuzz_config:{
+    fuzz_config: {
         cc: [
-            "android-media-fuzzing-reports@google.com",
+            "android-media-playback@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "This fuzzer targets the APIs of libstagefright_rtsp",
+        vector: "local_privileges_required",
+        service_privilege: "privileged",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
 
@@ -44,7 +52,7 @@
     ],
     defaults: [
         "libstagefright_rtsp_fuzzer_defaults",
-    ]
+    ],
 }
 
 cc_fuzz {
@@ -55,7 +63,7 @@
     defaults: [
         "libstagefright_rtsp_fuzzer_defaults",
     ],
-    shared_libs:[
+    shared_libs: [
         "libandroid_net",
         "libbase",
         "libstagefright",
diff --git a/media/libstagefright/tests/fuzzers/Android.bp b/media/libstagefright/tests/fuzzers/Android.bp
index 2bcfd67..43542c5 100644
--- a/media/libstagefright/tests/fuzzers/Android.bp
+++ b/media/libstagefright/tests/fuzzers/Android.bp
@@ -32,6 +32,15 @@
         "liblog",
         "media_permission-aidl-cpp",
     ],
+    fuzz_config: {
+        componentid: 42195,
+        hotlists: ["4593311"],
+        description: "The fuzzer targets the APIs of libstagefright",
+        vector: "local_no_privileges_required",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
+    },
 }
 
 cc_fuzz {
diff --git a/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp b/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp
index 70d73c8..5ac2a54 100644
--- a/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp
+++ b/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp
@@ -13,94 +13,221 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-// Authors: corbin.souffrant@leviathansecurity.com
-//          dylan.katz@leviathansecurity.com
 
-#include <MediaMuxerFuzzer.h>
-#include <cutils/ashmem.h>
 #include <fuzzer/FuzzedDataProvider.h>
 #include <media/stagefright/MediaMuxer.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/MediaDefs.h>
 
 namespace android {
+const uint8_t kMinSize = 0;
+const uint8_t kMinTrackCount = 0;
 
-// Can't seem to get setBuffer or setString working. It always segfaults on a
-// null pointer read or memleaks. So that functionality is missing.
-void createMessage(AMessage *msg, FuzzedDataProvider *fdp) {
-  size_t count = fdp->ConsumeIntegralInRange<size_t>(0, 32);
-  while (fdp->remaining_bytes() > 0 && count > 0) {
-    uint8_t function_id =
-        fdp->ConsumeIntegralInRange<uint8_t>(0, amessage_setvals.size() - 1);
-    amessage_setvals[function_id](msg, fdp);
-    count--;
-  }
+enum kBufferFlags { BUFFER_FLAG_SYNCFRAME = 1, BUFFER_FLAG_CODECCONFIG = 2, BUFFER_FLAG_EOS = 4 };
+
+constexpr char kMuxerFile[] = "MediaMuxer";
+
+const std::string kAudioMimeTypes[] = {
+        MEDIA_MIMETYPE_AUDIO_AMR_NB,
+        MEDIA_MIMETYPE_AUDIO_AMR_WB,
+        MEDIA_MIMETYPE_AUDIO_MPEG,
+        MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I,
+        MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II,
+        MEDIA_MIMETYPE_AUDIO_MIDI,
+        MEDIA_MIMETYPE_AUDIO_AAC,
+        MEDIA_MIMETYPE_AUDIO_QCELP,
+        MEDIA_MIMETYPE_AUDIO_VORBIS,
+        MEDIA_MIMETYPE_AUDIO_OPUS,
+        MEDIA_MIMETYPE_AUDIO_G711_ALAW,
+        MEDIA_MIMETYPE_AUDIO_G711_MLAW,
+        MEDIA_MIMETYPE_AUDIO_RAW,
+        MEDIA_MIMETYPE_AUDIO_FLAC,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS,
+        MEDIA_MIMETYPE_AUDIO_MSGSM,
+        MEDIA_MIMETYPE_AUDIO_AC3,
+        MEDIA_MIMETYPE_AUDIO_EAC3,
+        MEDIA_MIMETYPE_AUDIO_EAC3_JOC,
+        MEDIA_MIMETYPE_AUDIO_AC4,
+        MEDIA_MIMETYPE_AUDIO_MPEGH_MHA1,
+        MEDIA_MIMETYPE_AUDIO_MPEGH_MHM1,
+        MEDIA_MIMETYPE_AUDIO_MPEGH_BL_L3,
+        MEDIA_MIMETYPE_AUDIO_MPEGH_BL_L4,
+        MEDIA_MIMETYPE_AUDIO_MPEGH_LC_L3,
+        MEDIA_MIMETYPE_AUDIO_MPEGH_LC_L4,
+        MEDIA_MIMETYPE_AUDIO_SCRAMBLED,
+        MEDIA_MIMETYPE_AUDIO_ALAC,
+        MEDIA_MIMETYPE_AUDIO_WMA,
+        MEDIA_MIMETYPE_AUDIO_MS_ADPCM,
+        MEDIA_MIMETYPE_AUDIO_DVI_IMA_ADPCM,
+        MEDIA_MIMETYPE_AUDIO_DTS,
+        MEDIA_MIMETYPE_AUDIO_DTS_HD,
+        MEDIA_MIMETYPE_AUDIO_DTS_HD_MA,
+        MEDIA_MIMETYPE_AUDIO_DTS_UHD,
+        MEDIA_MIMETYPE_AUDIO_DTS_UHD_P1,
+        MEDIA_MIMETYPE_AUDIO_DTS_UHD_P2,
+        MEDIA_MIMETYPE_AUDIO_EVRC,
+        MEDIA_MIMETYPE_AUDIO_EVRCB,
+        MEDIA_MIMETYPE_AUDIO_EVRCWB,
+        MEDIA_MIMETYPE_AUDIO_EVRCNW,
+        MEDIA_MIMETYPE_AUDIO_AMR_WB_PLUS,
+        MEDIA_MIMETYPE_AUDIO_APTX,
+        MEDIA_MIMETYPE_AUDIO_DRA,
+        MEDIA_MIMETYPE_AUDIO_DOLBY_MAT,
+        MEDIA_MIMETYPE_AUDIO_DOLBY_MAT_1_0,
+        MEDIA_MIMETYPE_AUDIO_DOLBY_MAT_2_0,
+        MEDIA_MIMETYPE_AUDIO_DOLBY_MAT_2_1,
+        MEDIA_MIMETYPE_AUDIO_DOLBY_TRUEHD,
+        MEDIA_MIMETYPE_AUDIO_AAC_MP4,
+        MEDIA_MIMETYPE_AUDIO_AAC_MAIN,
+        MEDIA_MIMETYPE_AUDIO_AAC_LC,
+        MEDIA_MIMETYPE_AUDIO_AAC_SSR,
+        MEDIA_MIMETYPE_AUDIO_AAC_LTP,
+        MEDIA_MIMETYPE_AUDIO_AAC_HE_V1,
+        MEDIA_MIMETYPE_AUDIO_AAC_SCALABLE,
+        MEDIA_MIMETYPE_AUDIO_AAC_ERLC,
+        MEDIA_MIMETYPE_AUDIO_AAC_LD,
+        MEDIA_MIMETYPE_AUDIO_AAC_HE_V2,
+        MEDIA_MIMETYPE_AUDIO_AAC_ELD,
+        MEDIA_MIMETYPE_AUDIO_AAC_XHE,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADIF,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_MAIN,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LC,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_SSR,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LTP,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_HE_V1,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_SCALABLE,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_ERLC,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LD,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_HE_V2,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_ELD,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_XHE,
+        MEDIA_MIMETYPE_AUDIO_AAC_LATM_LC,
+        MEDIA_MIMETYPE_AUDIO_AAC_LATM_HE_V1,
+        MEDIA_MIMETYPE_AUDIO_AAC_LATM_HE_V2,
+        MEDIA_MIMETYPE_AUDIO_IEC61937,
+        MEDIA_MIMETYPE_AUDIO_IEC60958,
+};
+
+const std::string kVideoMimeTypes[] = {
+        MEDIA_MIMETYPE_VIDEO_VP8,       MEDIA_MIMETYPE_VIDEO_VP9,
+        MEDIA_MIMETYPE_VIDEO_AV1,       MEDIA_MIMETYPE_VIDEO_AVC,
+        MEDIA_MIMETYPE_VIDEO_HEVC,      MEDIA_MIMETYPE_VIDEO_MPEG4,
+        MEDIA_MIMETYPE_VIDEO_H263,      MEDIA_MIMETYPE_VIDEO_MPEG2,
+        MEDIA_MIMETYPE_VIDEO_RAW,       MEDIA_MIMETYPE_VIDEO_DOLBY_VISION,
+        MEDIA_MIMETYPE_VIDEO_SCRAMBLED, MEDIA_MIMETYPE_VIDEO_DIVX,
+        MEDIA_MIMETYPE_VIDEO_DIVX3,     MEDIA_MIMETYPE_VIDEO_XVID,
+        MEDIA_MIMETYPE_VIDEO_MJPEG,
+};
+
+void getSampleAudioFormat(FuzzedDataProvider& fdp, AMessage* format) {
+    std::string mimeType = fdp.PickValueInArray(kAudioMimeTypes);
+    format->setString("mime", mimeType.c_str(), mimeType.length());
+    format->setInt32("sample-rate", fdp.ConsumeIntegral<int32_t>());
+    format->setInt32("channel-count", fdp.ConsumeIntegral<int32_t>());
+}
+
+void getSampleVideoFormat(FuzzedDataProvider& fdp, AMessage* format) {
+    std::string mimeType = fdp.PickValueInArray(kVideoMimeTypes);
+    format->setString("mime", mimeType.c_str(), mimeType.length());
+    format->setInt32("height", fdp.ConsumeIntegral<int32_t>());
+    format->setInt32("width", fdp.ConsumeIntegral<int32_t>());
+    format->setInt32("time-lapse-fps", fdp.ConsumeIntegral<int32_t>());
 }
 
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
-  FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+    FuzzedDataProvider fdp(data, size);
 
-  size_t data_size = fdp.ConsumeIntegralInRange<size_t>(0, size);
-  int fd = ashmem_create_region("mediamuxer_fuzz_region", data_size);
-  if (fd < 0)
+    // memfd_create() creates an anonymous file and returns a file
+    // descriptor that refers to it. MFD_ALLOW_SEALING allows sealing
+    // operations on this file.
+    int32_t fd = memfd_create(kMuxerFile, MFD_ALLOW_SEALING);
+    if (fd == -1) {
+        ALOGE("memfd_create failed: %s", strerror(errno));
+        return 0;
+    }
+
+    auto outputFormat = (MediaMuxer::OutputFormat)fdp.ConsumeIntegralInRange<int32_t>(
+            MediaMuxer::OutputFormat::OUTPUT_FORMAT_MPEG_4,
+            MediaMuxer::OutputFormat::OUTPUT_FORMAT_LIST_END);
+
+    sp<MediaMuxer> mMuxer = MediaMuxer::create(fd, outputFormat);
+    if (mMuxer == nullptr) {
+        close(fd);
+        return 0;
+    }
+
+    // Consume at most 80% of the input as buffer data for writeSampleData(). This ensures
+    // the data is never completely exhausted and the remaining 20% stays available for
+    // fuzzing the other APIs.
+    const size_t kMaxSize = (size * 80) / 100;
+    while (fdp.remaining_bytes()) {
+        auto invokeMediaMuxerAPI = fdp.PickValueInArray<const std::function<void()>>({
+                [&]() {
+                    // Using 'return' here due to a timeout bug present in OGGWriter.cpp
+                    // (b/310316183).
+                    if (outputFormat == MediaMuxer::OutputFormat::OUTPUT_FORMAT_OGG) {
+                        return;
+                    }
+
+                    sp<AMessage> format = sp<AMessage>::make();
+                    fdp.ConsumeBool() ? getSampleAudioFormat(fdp, format.get())
+                                      : getSampleVideoFormat(fdp, format.get());
+
+                    mMuxer->addTrack(fdp.ConsumeBool() ? format : nullptr);
+                },
+                [&]() {
+                    mMuxer->setLocation(fdp.ConsumeIntegral<int32_t>() /* latitude */,
+                                        fdp.ConsumeIntegral<int32_t>() /* longitude */);
+                },
+                [&]() { mMuxer->setOrientationHint(fdp.ConsumeIntegral<int32_t>() /* degrees */); },
+                [&]() { mMuxer->start(); },
+                [&]() {
+                    std::vector<uint8_t> sample = fdp.ConsumeBytes<uint8_t>(
+                            fdp.ConsumeIntegralInRange<size_t>(kMinSize, kMaxSize));
+                    sp<ABuffer> buffer = sp<ABuffer>::make(sample.data(), sample.size());
+
+                    size_t offset = fdp.ConsumeIntegralInRange<size_t>(kMinSize, sample.size());
+                    size_t length =
+                            fdp.ConsumeIntegralInRange<size_t>(kMinSize, buffer->size() - offset);
+                    buffer->setRange(offset, length);
+
+                    sp<AMessage> meta = buffer->meta();
+                    meta->setInt64("sample-file-offset", fdp.ConsumeIntegral<int64_t>());
+                    meta->setInt64("last-sample-index-in-chunk", fdp.ConsumeIntegral<int64_t>());
+
+                    uint32_t flags = 0;
+                    if (fdp.ConsumeBool()) {
+                        flags |= kBufferFlags::BUFFER_FLAG_SYNCFRAME;
+                    }
+                    if (fdp.ConsumeBool()) {
+                        flags |= kBufferFlags::BUFFER_FLAG_CODECCONFIG;
+                    }
+                    if (fdp.ConsumeBool()) {
+                        flags |= kBufferFlags::BUFFER_FLAG_EOS;
+                    }
+
+                    size_t trackIndex = fdp.ConsumeBool()
+                                                ? fdp.ConsumeIntegralInRange<size_t>(
+                                                          kMinTrackCount, mMuxer->getTrackCount())
+                                                : fdp.ConsumeIntegral<size_t>();
+                    int64_t timeUs = fdp.ConsumeIntegral<int64_t>();
+                    mMuxer->writeSampleData(fdp.ConsumeBool() ? buffer : nullptr, trackIndex,
+                                            timeUs, flags);
+                },
+                [&]() {
+                    mMuxer->getTrackFormat(
+                            fdp.ConsumeBool() ? fdp.ConsumeIntegralInRange<size_t>(
+                                                        kMinTrackCount, mMuxer->getTrackCount())
+                                              : fdp.ConsumeIntegral<size_t>() /* idx */);
+                },
+                [&]() { mMuxer->stop(); },
+        });
+
+        invokeMediaMuxerAPI();
+    }
+
+    close(fd);
     return 0;
-
-  uint8_t *sh_data = static_cast<uint8_t *>(
-      mmap(NULL, data_size, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0));
-  if (sh_data == MAP_FAILED)
-    return 0;
-
-  MediaMuxer::OutputFormat format =
-      (MediaMuxer::OutputFormat)fdp.ConsumeIntegralInRange<int32_t>(0, 4);
-  sp<MediaMuxer> mMuxer = MediaMuxer::create(fd, format);
-  if (mMuxer == nullptr) {
-    return 0;
-  }
-
-  while (fdp.remaining_bytes() > 1) {
-    switch (fdp.ConsumeIntegralInRange<uint8_t>(0, 4)) {
-    case 0: {
-      // For some reason it only likes mp4s here...
-      if (format == 1 || format == 4)
-        break;
-
-      sp<AMessage> a_format(new AMessage);
-      createMessage(a_format.get(), &fdp);
-      mMuxer->addTrack(a_format);
-      break;
-    }
-    case 1: {
-      mMuxer->start();
-      break;
-    }
-    case 2: {
-      int degrees = fdp.ConsumeIntegral<int>();
-      mMuxer->setOrientationHint(degrees);
-      break;
-    }
-    case 3: {
-      int latitude = fdp.ConsumeIntegral<int>();
-      int longitude = fdp.ConsumeIntegral<int>();
-      mMuxer->setLocation(latitude, longitude);
-      break;
-    }
-    case 4: {
-      size_t buf_size = fdp.ConsumeIntegralInRange<size_t>(0, data_size);
-      sp<ABuffer> a_buffer(new ABuffer(buf_size));
-
-      size_t trackIndex = fdp.ConsumeIntegral<size_t>();
-      int64_t timeUs = fdp.ConsumeIntegral<int64_t>();
-      uint32_t flags = fdp.ConsumeIntegral<uint32_t>();
-      mMuxer->writeSampleData(a_buffer, trackIndex, timeUs, flags);
-    }
-    }
-  }
-
-  if (fdp.ConsumeBool())
-    mMuxer->stop();
-
-  munmap(sh_data, data_size);
-  close(fd);
-  return 0;
 }
 } // namespace android
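
The rewritten muxer fuzzer drives the API surface by repeatedly letting the data provider pick one lambda from a list and invoking it, so each input exercises a random interleaving of calls. A stripped-down sketch of that dispatch pattern, with a plain counter standing in for the object under test:

#include <fuzzer/FuzzedDataProvider.h>

#include <cstdint>
#include <functional>

extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
    FuzzedDataProvider fdp(data, size);
    int64_t counter = 0;   // stand-in for the object under test
    while (fdp.remaining_bytes()) {
        auto invokeApi = fdp.PickValueInArray<const std::function<void()>>({
                [&]() { counter += fdp.ConsumeIntegral<int8_t>(); },
                [&]() { counter *= 2; },
                [&]() { counter = 0; },
        });
        invokeApi();
    }
    return 0;
}
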
diff --git a/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.h b/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.h
deleted file mode 100644
index 7d4421d..0000000
--- a/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.h
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Copyright 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-// Authors: corbin.souffrant@leviathansecurity.com
-//          dylan.katz@leviathansecurity.com
-
-#pragma once
-
-#include <fuzzer/FuzzedDataProvider.h>
-#include <media/stagefright/foundation/AMessage.h>
-
-namespace android {
-
-// Mappings vectors are the list of attributes that the MediaMuxer
-// class looks for in the message.
-static std::vector<const char *> floatMappings{
-    "capture-rate",
-    "time-lapse-fps",
-    "frame-rate",
-};
-
-static std::vector<const char *> int64Mappings{
-    "exif-offset",    "exif-size", "target-time",
-    "thumbnail-time", "timeUs",    "durationUs",
-};
-
-static std::vector<const char *> int32Mappings{"loop",
-                                               "time-scale",
-                                               "crypto-mode",
-                                               "crypto-default-iv-size",
-                                               "crypto-encrypted-byte-block",
-                                               "crypto-skip-byte-block",
-                                               "frame-count",
-                                               "max-bitrate",
-                                               "pcm-big-endian",
-                                               "temporal-layer-count",
-                                               "temporal-layer-id",
-                                               "thumbnail-width",
-                                               "thumbnail-height",
-                                               "track-id",
-                                               "valid-samples",
-                                               "color-format",
-                                               "ca-system-id",
-                                               "is-sync-frame",
-                                               "bitrate",
-                                               "max-bitrate",
-                                               "width",
-                                               "height",
-                                               "sar-width",
-                                               "sar-height",
-                                               "display-width",
-                                               "display-height",
-                                               "is-default",
-                                               "tile-width",
-                                               "tile-height",
-                                               "grid-rows",
-                                               "grid-cols",
-                                               "rotation-degrees",
-                                               "channel-count",
-                                               "sample-rate",
-                                               "bits-per-sample",
-                                               "channel-mask",
-                                               "encoder-delay",
-                                               "encoder-padding",
-                                               "is-adts",
-                                               "frame-rate",
-                                               "max-height",
-                                               "max-width",
-                                               "max-input-size",
-                                               "haptic-channel-count",
-                                               "pcm-encoding",
-                                               "aac-profile"};
-
-static const std::vector<std::function<void(AMessage *, FuzzedDataProvider *)>>
-    amessage_setvals = {
-        [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
-          msg->setRect("crop", fdp->ConsumeIntegral<int32_t>(),
-                       fdp->ConsumeIntegral<int32_t>(),
-                       fdp->ConsumeIntegral<int32_t>(),
-                       fdp->ConsumeIntegral<int32_t>());
-        },
-        [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
-          msg->setFloat(floatMappings[fdp->ConsumeIntegralInRange<size_t>(
-                            0, floatMappings.size() - 1)],
-                        fdp->ConsumeFloatingPoint<float>());
-        },
-        [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
-          msg->setInt64(int64Mappings[fdp->ConsumeIntegralInRange<size_t>(
-                            0, int64Mappings.size() - 1)],
-                        fdp->ConsumeIntegral<int64_t>());
-        },
-        [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
-          msg->setInt32(int32Mappings[fdp->ConsumeIntegralInRange<size_t>(
-                            0, int32Mappings.size() - 1)],
-                        fdp->ConsumeIntegral<int32_t>());
-        }};
-} // namespace android
diff --git a/media/libstagefright/timedtext/test/fuzzer/Android.bp b/media/libstagefright/timedtext/test/fuzzer/Android.bp
index 6590ebb..8724d51 100644
--- a/media/libstagefright/timedtext/test/fuzzer/Android.bp
+++ b/media/libstagefright/timedtext/test/fuzzer/Android.bp
@@ -48,8 +48,16 @@
     ],
     fuzz_config: {
         cc: [
-            "android-media-fuzzing-reports@google.com",
+            "android-media-playback@google.com",
         ],
-        componentid: 155276,
+        componentid: 42195,
+        hotlists: [
+            "4593311",
+        ],
+        description: "This fuzzer targets the APIs of libstagefright_timedtext",
+        vector: "local_no_privileges_required",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
diff --git a/media/libstagefright/xmlparser/Android.bp b/media/libstagefright/xmlparser/Android.bp
index 2f204f9..2c5e81a 100644
--- a/media/libstagefright/xmlparser/Android.bp
+++ b/media/libstagefright/xmlparser/Android.bp
@@ -16,9 +16,6 @@
 cc_library_shared {
     name: "libstagefright_xmlparser",
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
     double_loadable: true,
 
     srcs: [
diff --git a/media/module/bqhelper/Android.bp b/media/module/bqhelper/Android.bp
index c4dadd0..f9b7dea 100644
--- a/media/module/bqhelper/Android.bp
+++ b/media/module/bqhelper/Android.bp
@@ -69,9 +69,6 @@
     name: "libstagefright_bufferqueue_helper",
     defaults: ["libstagefright_bufferqueue-defaults"],
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
     min_sdk_version: "29",
 
     shared_libs: [ "libgui" ],
diff --git a/media/module/bufferpool/2.0/AccessorImpl.cpp b/media/module/bufferpool/2.0/AccessorImpl.cpp
index 1d2562e..202d803 100644
--- a/media/module/bufferpool/2.0/AccessorImpl.cpp
+++ b/media/module/bufferpool/2.0/AccessorImpl.cpp
@@ -609,7 +609,7 @@
         }
         if (ret == false) {
             ALOGW("buffer status message processing failure - message : %d connection : %lld",
-                  message.newStatus, (long long)message.connectionId);
+                  (int)message.newStatus, (long long)message.connectionId);
         }
     }
     messages.clear();
diff --git a/media/module/bufferpool/2.0/Android.bp b/media/module/bufferpool/2.0/Android.bp
index 930b026..bdab103 100644
--- a/media/module/bufferpool/2.0/Android.bp
+++ b/media/module/bufferpool/2.0/Android.bp
@@ -60,7 +60,4 @@
     vendor_available: true,
     // TODO: b/147147992
     double_loadable: true,
-    vndk: {
-        enabled: true,
-    },
 }
diff --git a/media/module/foundation/Android.bp b/media/module/foundation/Android.bp
index dc8384d..edf4cb5 100644
--- a/media/module/foundation/Android.bp
+++ b/media/module/foundation/Android.bp
@@ -33,9 +33,6 @@
 cc_defaults {
     name: "libstagefright_foundation_defaults",
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
     host_supported: true,
     double_loadable: true,
 
diff --git a/media/module/id3/Android.bp b/media/module/id3/Android.bp
index bea3e34..e426796 100644
--- a/media/module/id3/Android.bp
+++ b/media/module/id3/Android.bp
@@ -17,6 +17,24 @@
     ],
 }
 
+cc_library_headers {
+    name: "libstagefright_id3_headers",
+    export_include_dirs: ["include"],
+    vendor_available: true,
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+        "com.android.media.swcodec",
+    ],
+    min_sdk_version: "29",
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
+
 cc_library_static {
     name: "libstagefright_id3",
     min_sdk_version: "29",
@@ -25,7 +43,6 @@
         "com.android.media",
     ],
 
-
     srcs: ["ID3.cpp"],
 
     header_libs: [
@@ -35,6 +52,8 @@
         "media_ndk_headers",
     ],
 
+    export_include_dirs: ["include"],
+
     cflags: [
         "-Werror",
         "-Wall",
diff --git a/media/libstagefright/include/ID3.h b/media/module/id3/include/ID3.h
similarity index 100%
rename from media/libstagefright/include/ID3.h
rename to media/module/id3/include/ID3.h
diff --git a/media/module/libmediatranscoding/TranscodingResourcePolicy.cpp b/media/module/libmediatranscoding/TranscodingResourcePolicy.cpp
index af53f64..43a4628 100644
--- a/media/module/libmediatranscoding/TranscodingResourcePolicy.cpp
+++ b/media/module/libmediatranscoding/TranscodingResourcePolicy.cpp
@@ -21,6 +21,7 @@
 #include <aidl/android/media/IResourceObserverService.h>
 #include <android/binder_manager.h>
 #include <android/binder_process.h>
+#include <map>
 #include <media/TranscodingResourcePolicy.h>
 #include <utils/Log.h>
 
@@ -66,11 +67,31 @@
     TranscodingResourcePolicy* mOwner;
 };
 
+// Cookie used for death recipients. The TranscodingResourcePolicy that a cookie
+// is associated with must outlive the cookie. The cookie is deleted either in
+// binderDied or in unregisterSelf, which is also called from the destructor of
+// TranscodingResourcePolicy.
+class TranscodingResourcePolicyCookie {
+ public:
+    TranscodingResourcePolicyCookie(TranscodingResourcePolicy* policy) : mPolicy(policy) {}
+    TranscodingResourcePolicyCookie() = delete;
+    TranscodingResourcePolicy* mPolicy;
+};
+
+static std::map<uintptr_t, std::unique_ptr<TranscodingResourcePolicyCookie>> sCookies;
+static uintptr_t sCookieKeyCounter;
+static std::mutex sCookiesMutex;
+
 // static
 void TranscodingResourcePolicy::BinderDiedCallback(void* cookie) {
-    TranscodingResourcePolicy* owner = reinterpret_cast<TranscodingResourcePolicy*>(cookie);
-    if (owner != nullptr) {
-        owner->unregisterSelf();
+    std::lock_guard<std::mutex> guard(sCookiesMutex);
+    if (auto it = sCookies.find(reinterpret_cast<uintptr_t>(cookie)); it != sCookies.end()) {
+        ALOGI("BinderDiedCallback unregistering TranscodingResourcePolicy");
+        auto policy = reinterpret_cast<TranscodingResourcePolicy*>(it->second->mPolicy);
+        if (policy) {
+            policy->unregisterSelf();
+        }
+        sCookies.erase(it);
     }
     // TODO(chz): retry to connecting to IResourceObserverService after failure.
     // Also need to have back-up logic if IResourceObserverService is offline for
@@ -88,6 +109,23 @@
 }
 
 TranscodingResourcePolicy::~TranscodingResourcePolicy() {
+    {
+        std::lock_guard<std::mutex> guard(sCookiesMutex);
+
+        // delete all of the cookies associated with this TranscodingResourcePolicy
+        // instance since they are holding pointers to this object that will no
+        // longer be valid.
+        std::erase_if(sCookies, [this](const auto& cookieEntry) {
+            auto const& [key, cookie] = cookieEntry;
+            std::lock_guard guard(mCookieKeysLock);
+            if (const auto& it = mCookieKeys.find(key); it != mCookieKeys.end()) {
+                // No longer need to track this cookie
+                mCookieKeys.erase(key);
+                return true;
+            }
+            return false;
+        });
+    }
     unregisterSelf();
 }
 
@@ -123,7 +161,17 @@
         return;
     }
 
-    AIBinder_linkToDeath(binder.get(), mDeathRecipient.get(), reinterpret_cast<void*>(this));
+    std::unique_ptr<TranscodingResourcePolicyCookie> cookie =
+            std::make_unique<TranscodingResourcePolicyCookie>(this);
+    void* cookiePtr = static_cast<void*>(cookie.get());
+    uintptr_t cookieKey = sCookieKeyCounter++;
+    sCookies.emplace(cookieKey, std::move(cookie));
+    {
+        std::lock_guard guard(mCookieKeysLock);
+        mCookieKeys.insert(cookieKey);
+    }
+
+    AIBinder_linkToDeath(binder.get(), mDeathRecipient.get(), reinterpret_cast<void*>(cookieKey));
 
     ALOGD("@@@ registered observer");
     mRegistered = true;
@@ -141,7 +189,6 @@
     ::ndk::SpAIBinder binder = mService->asBinder();
     if (binder.get() != nullptr) {
         Status status = mService->unregisterObserver(mObserver);
-        AIBinder_unlinkToDeath(binder.get(), mDeathRecipient.get(), reinterpret_cast<void*>(this));
     }
 
     mService = nullptr;
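
The death-recipient change above stops passing a raw `this` pointer as the binder cookie; each registration instead stores a heap-owned cookie in a global map keyed by a counter, and the callback looks the key up and simply ignores it if the owner has already cleaned up. A simplified sketch of that ownership pattern under those assumptions (names below are illustrative, not the patch's):

#include <cstdint>
#include <map>
#include <memory>
#include <mutex>

struct Owner;                        // the object whose death notifications we route
struct Cookie { Owner* owner; };

static std::map<uintptr_t, std::unique_ptr<Cookie>> sCookies;
static uintptr_t sNextKey = 0;
static std::mutex sCookiesMutex;

// Called when registering the death recipient; the returned key (cast to void*)
// is what gets handed to the binder API instead of a raw object pointer.
uintptr_t registerCookie(Owner* owner) {
    std::lock_guard<std::mutex> guard(sCookiesMutex);
    uintptr_t key = sNextKey++;
    sCookies.emplace(key, std::make_unique<Cookie>(Cookie{owner}));
    return key;
}

// Called from the death callback with the key it was registered with.
void onBinderDied(void* cookie) {
    std::lock_guard<std::mutex> guard(sCookiesMutex);
    auto it = sCookies.find(reinterpret_cast<uintptr_t>(cookie));
    if (it == sCookies.end()) {
        return;                      // owner already unregistered: nothing to touch
    }
    // ... notify it->second->owner here ...
    sCookies.erase(it);
}
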
diff --git a/media/module/libmediatranscoding/include/media/TranscodingResourcePolicy.h b/media/module/libmediatranscoding/include/media/TranscodingResourcePolicy.h
index ee232e7..4d762b5 100644
--- a/media/module/libmediatranscoding/include/media/TranscodingResourcePolicy.h
+++ b/media/module/libmediatranscoding/include/media/TranscodingResourcePolicy.h
@@ -22,6 +22,7 @@
 #include <utils/Condition.h>
 
 #include <mutex>
+#include <set>
 namespace aidl {
 namespace android {
 namespace media {
@@ -48,6 +49,8 @@
     bool mRegistered GUARDED_BY(mRegisteredLock);
     std::shared_ptr<IResourceObserverService> mService GUARDED_BY(mRegisteredLock);
     std::shared_ptr<ResourceObserver> mObserver;
+    mutable std::mutex mCookieKeysLock;
+    std::set<uintptr_t> mCookieKeys;
 
     mutable std::mutex mCallbackLock;
     std::weak_ptr<ResourcePolicyCallbackInterface> mResourcePolicyCallback
@@ -59,6 +62,7 @@
     static void BinderDiedCallback(void* cookie);
 
     void registerSelf();
+    // must delete the associated TranscodingResourcePolicyCookie any time this is called
     void unregisterSelf();
     void onResourceAvailable(pid_t pid);
 };  // class TranscodingUidPolicy
diff --git a/media/mtp/MtpServer.cpp b/media/mtp/MtpServer.cpp
index d917772..80fe51a 100644
--- a/media/mtp/MtpServer.cpp
+++ b/media/mtp/MtpServer.cpp
@@ -132,6 +132,10 @@
 }
 
 MtpServer::~MtpServer() {
+    if (mHandle) {
+        delete mHandle;
+        mHandle = NULL;
+    }
 }
 
 void MtpServer::addStorage(MtpStorage* storage) {
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index 9ec7700..3d873df 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -192,7 +192,6 @@
     header_libs: [
         "libstagefright_headers",
         "libmedia_headers",
-        "libstagefright_headers",
     ],
 
     shared_libs: [
diff --git a/media/utils/Android.bp b/media/utils/Android.bp
index 5b7319a..e340b40 100644
--- a/media/utils/Android.bp
+++ b/media/utils/Android.bp
@@ -113,6 +113,7 @@
 
     export_shared_lib_headers: [
         "libpermission",
+        "packagemanager_aidl-cpp",
     ],
 
     required: [
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index d5d778f..c1d1a63 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -1935,10 +1935,11 @@
     if (mPrimaryHardwareDev == nullptr) {
         return 0;
     }
+    if (mInputBufferSizeOrderedDevs.empty()) {
+        return 0;
+    }
     mHardwareStatus = AUDIO_HW_GET_INPUT_BUFFER_SIZE;
 
-    sp<DeviceHalInterface> dev = mPrimaryHardwareDev.load()->hwDevice();
-
     std::vector<audio_channel_mask_t> channelMasks = {channelMask};
     if (channelMask != AUDIO_CHANNEL_IN_MONO) {
         channelMasks.push_back(AUDIO_CHANNEL_IN_MONO);
@@ -1968,6 +1969,22 @@
 
     mHardwareStatus = AUDIO_HW_IDLE;
 
+    auto getInputBufferSize = [](const sp<DeviceHalInterface>& dev, audio_config_t config,
+                                 size_t* bytes) -> status_t {
+        if (!dev) {
+            return BAD_VALUE;
+        }
+        status_t result = dev->getInputBufferSize(&config, bytes);
+        if (result == BAD_VALUE) {
+            // Retry with the config suggested by the HAL.
+            result = dev->getInputBufferSize(&config, bytes);
+        }
+        if (result != OK || *bytes == 0) {
+            return BAD_VALUE;
+        }
+        return result;
+    };
+
     // Change parameters of the configuration each iteration until we find a
     // configuration that the device will support, or HAL suggests what it supports.
     audio_config_t config = AUDIO_CONFIG_INITIALIZER;
@@ -1979,16 +1996,15 @@
                 config.sample_rate = testSampleRate;
 
                 size_t bytes = 0;
-                audio_config_t loopConfig = config;
-                status_t result = dev->getInputBufferSize(&config, &bytes);
-                if (result == BAD_VALUE) {
-                    // Retry with the config suggested by the HAL.
-                    result = dev->getInputBufferSize(&config, &bytes);
+                ret = BAD_VALUE;
+                for (const AudioHwDevice* dev : mInputBufferSizeOrderedDevs) {
+                    ret = getInputBufferSize(dev->hwDevice(), config, &bytes);
+                    if (ret == OK) {
+                        break;
+                    }
                 }
-                if (result != OK || bytes == 0) {
-                    config = loopConfig;
-                    continue;
-                }
+                if (ret == BAD_VALUE) continue;
+
                 if (config.sample_rate != sampleRate || config.channel_mask != channelMask ||
                     config.format != format) {
                     uint32_t dstChannelCount = audio_channel_count_from_in_mask(channelMask);
@@ -2173,6 +2189,13 @@
     }
 }
 
+void AudioFlinger::onHardError(std::set<audio_port_handle_t>& trackPortIds) {
+    ALOGI("releasing tracks due to a hard error occurred on an I/O thread");
+    for (const auto portId : trackPortIds) {
+        AudioSystem::releaseOutput(portId);
+    }
+}
+
 // removeClient_l() must be called with AudioFlinger::clientMutex() held
 void AudioFlinger::removeClient_l(pid_t pid)
 {
@@ -2610,12 +2633,43 @@
     }
 
     mAudioHwDevs.add(handle, audioDevice);
+    if (strcmp(name, AUDIO_HARDWARE_MODULE_ID_STUB) != 0) {
+        mInputBufferSizeOrderedDevs.insert(audioDevice);
+    }
 
     ALOGI("loadHwModule() Loaded %s audio interface, handle %d", name, handle);
 
     return audioDevice;
 }
 
+// Sort the AudioHwDevices traversed by the getInputBufferSize call in the following order:
+// Primary, Usb, Bluetooth, A2DP, other modules, remote submix.
+/* static */
+bool AudioFlinger::inputBufferSizeDevsCmp(const AudioHwDevice* lhs, const AudioHwDevice* rhs) {
+    static const std::map<std::string_view, int> kPriorities = {
+        { AUDIO_HARDWARE_MODULE_ID_PRIMARY, 0 }, { AUDIO_HARDWARE_MODULE_ID_USB, 1 },
+        { AUDIO_HARDWARE_MODULE_ID_BLUETOOTH, 2 }, { AUDIO_HARDWARE_MODULE_ID_A2DP, 3 },
+        { AUDIO_HARDWARE_MODULE_ID_REMOTE_SUBMIX, std::numeric_limits<int>::max() }
+    };
+
+    const std::string_view lhsName = lhs->moduleName();
+    const std::string_view rhsName = rhs->moduleName();
+
+    auto lhsPriority = std::numeric_limits<int>::max() - 1;
+    if (const auto lhsIt = kPriorities.find(lhsName); lhsIt != kPriorities.end()) {
+        lhsPriority = lhsIt->second;
+    }
+    auto rhsPriority = std::numeric_limits<int>::max() - 1;
+    if (const auto rhsIt = kPriorities.find(rhsName); rhsIt != kPriorities.end()) {
+        rhsPriority = rhsIt->second;
+    }
+
+    if (lhsPriority != rhsPriority) {
+        return lhsPriority < rhsPriority;
+    }
+    return lhsName < rhsName;
+}
+
 // ----------------------------------------------------------------------------
 
 uint32_t AudioFlinger::getPrimaryOutputSamplingRate() const
@@ -4915,6 +4969,23 @@
     return mPatchPanel->getAudioMixPort_l(devicePort, mixPort);
 }
 
+status_t AudioFlinger::setTracksInternalMute(
+        const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) {
+    audio_utils::lock_guard _l(mutex());
+    ALOGV("%s", __func__);
+
+    std::map<audio_port_handle_t, bool> tracksInternalMuteMap;
+    for (const auto& trackInternalMute : tracksInternalMute) {
+        audio_port_handle_t portId = VALUE_OR_RETURN_STATUS(
+                aidl2legacy_int32_t_audio_port_handle_t(trackInternalMute.portId));
+        tracksInternalMuteMap.emplace(portId, trackInternalMute.muted);
+    }
+    for (size_t i = 0; i < mPlaybackThreads.size() && !tracksInternalMuteMap.empty(); i++) {
+        mPlaybackThreads.valueAt(i)->setTracksInternalMute(&tracksInternalMuteMap);
+    }
+    return NO_ERROR;
+}
+
 // ----------------------------------------------------------------------------
 
 status_t AudioFlinger::onTransactWrapper(TransactionCode code,
@@ -4949,6 +5020,7 @@
         case TransactionCode::INVALIDATE_TRACKS:
         case TransactionCode::GET_AUDIO_POLICY_CONFIG:
         case TransactionCode::GET_AUDIO_MIX_PORT:
+        case TransactionCode::SET_TRACKS_INTERNAL_MUTE:
             ALOGW("%s: transaction %d received from PID %d",
                   __func__, static_cast<int>(code), IPCThreadState::self()->getCallingPid());
             // return status only for non void methods
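
The new mInputBufferSizeOrderedDevs container above relies on a std::set whose comparator ranks modules by a small priority table and falls back to name order for everything else. A self-contained sketch of that comparator idea (the module names are placeholders, not the real AUDIO_HARDWARE_MODULE_ID_* constants):

#include <cstdio>
#include <limits>
#include <map>
#include <set>
#include <string>
#include <string_view>

static int priorityOf(std::string_view name) {
    static const std::map<std::string_view, int> kPriorities = {
            {"primary", 0}, {"usb", 1}, {"bluetooth", 2}, {"a2dp", 3},
            {"r_submix", std::numeric_limits<int>::max()},
    };
    const auto it = kPriorities.find(name);
    // Unknown modules sort after the named ones but before the remote submix.
    return it != kPriorities.end() ? it->second : std::numeric_limits<int>::max() - 1;
}

static bool byPriority(const std::string& lhs, const std::string& rhs) {
    const int lp = priorityOf(lhs);
    const int rp = priorityOf(rhs);
    return lp != rp ? lp < rp : lhs < rhs;
}

int main() {
    std::set<std::string, decltype(&byPriority)> modules(byPriority);
    modules.insert("r_submix");
    modules.insert("msd");
    modules.insert("primary");
    for (const auto& m : modules) {
        std::printf("%s\n", m.c_str());   // prints: primary, msd, r_submix
    }
    return 0;
}
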
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 4e46bea..a8f983b 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -259,6 +259,10 @@
     status_t getAudioMixPort(const struct audio_port_v7* devicePort,
                              struct audio_port_v7* mixPort) const final EXCLUDES_AudioFlinger_Mutex;
 
+    status_t setTracksInternalMute(
+            const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) final
+            EXCLUDES_AudioFlinger_Mutex;
+
     status_t onTransactWrapper(TransactionCode code, const Parcel& data, uint32_t flags,
             const std::function<status_t()>& delegate) final EXCLUDES_AudioFlinger_Mutex;
 
@@ -397,6 +401,8 @@
     void onSupportedLatencyModesChanged(
             audio_io_handle_t output, const std::vector<audio_latency_mode_t>& modes) final
             EXCLUDES_AudioFlinger_ClientMutex;
+    void onHardError(std::set<audio_port_handle_t>& trackPortIds) final
+            EXCLUDES_AudioFlinger_ClientMutex;
 
     // ---- end of IAfThreadCallback interface
 
@@ -630,6 +636,10 @@
     DefaultKeyedVector<audio_module_handle_t, AudioHwDevice*> mAudioHwDevs
             GUARDED_BY(hardwareMutex()) {nullptr /* defValue */};
 
+    static bool inputBufferSizeDevsCmp(const AudioHwDevice* lhs, const AudioHwDevice* rhs);
+    std::set<AudioHwDevice*, decltype(&inputBufferSizeDevsCmp)>
+            mInputBufferSizeOrderedDevs GUARDED_BY(hardwareMutex()) {inputBufferSizeDevsCmp};
+
      const sp<DevicesFactoryHalInterface> mDevicesFactoryHal =
              DevicesFactoryHalInterface::create();
      /* const */ sp<DevicesFactoryHalCallback> mDevicesFactoryHalCallback;  // set onFirstRef().
diff --git a/services/audioflinger/IAfThread.h b/services/audioflinger/IAfThread.h
index 4b6ab89..4518d48 100644
--- a/services/audioflinger/IAfThread.h
+++ b/services/audioflinger/IAfThread.h
@@ -116,9 +116,11 @@
             const sp<AudioIoDescriptor>& ioDesc,
             pid_t pid = 0) EXCLUDES_AudioFlinger_ClientMutex = 0;
     virtual void onNonOffloadableGlobalEffectEnable() EXCLUDES_AudioFlinger_Mutex = 0;
-    virtual void onSupportedLatencyModesChanged(
-            audio_io_handle_t output, const std::vector<audio_latency_mode_t>& modes)
+    virtual void onSupportedLatencyModesChanged(audio_io_handle_t output,
+                                                const std::vector<audio_latency_mode_t>& modes)
             EXCLUDES_AudioFlinger_ClientMutex = 0;
+
+    virtual void onHardError(std::set<audio_port_handle_t>& trackPortIds) = 0;
 };
 
 class IAfThreadBase : public virtual RefBase {
@@ -536,6 +538,9 @@
     virtual const std::atomic<int64_t>& framesWritten() const = 0;
 
     virtual bool usesHwAvSync() const = 0;
+
+    virtual void setTracksInternalMute(std::map<audio_port_handle_t, bool>* tracksInternalMute)
+            EXCLUDES_ThreadBase_Mutex = 0;
 };
 
 class IAfDirectOutputThread : public virtual IAfPlaybackThread {
diff --git a/services/audioflinger/IAfTrack.h b/services/audioflinger/IAfTrack.h
index 8ed44c6..168bcca 100644
--- a/services/audioflinger/IAfTrack.h
+++ b/services/audioflinger/IAfTrack.h
@@ -375,6 +375,8 @@
     virtual void triggerEvents(AudioSystem::sync_event_t type) = 0;
 
     virtual void disable() = 0;
+    virtual bool isDisabled() const = 0;
+
     virtual int& fastIndex() = 0;
     virtual bool isPlaybackRestricted() const = 0;
 
@@ -425,6 +427,10 @@
     virtual FillingStatus& fillingStatus() = 0;
     virtual int8_t& retryCount() = 0;
     virtual FastTrackUnderruns& fastTrackUnderruns() = 0;
+
+    // Internal mute; currently only used for bit-perfect playback.
+    virtual bool getInternalMute() const = 0;
+    virtual void setInternalMute(bool muted) = 0;
 };
 
 // playback track, used by DuplicatingThread
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 6c22e21..1e1b6d2 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -220,6 +220,8 @@
      */
     void processMuteEvent_l(const sp<IAudioManager>& audioManager, mute_state_t muteState) final;
 
+    bool getInternalMute() const final { return mInternalMute; }
+    void setInternalMute(bool muted) final { mInternalMute = muted; }
 protected:
 
     DISALLOW_COPY_AND_ASSIGN(Track);
@@ -275,6 +277,8 @@
     void triggerEvents(AudioSystem::sync_event_t type) final;
     void invalidate() final;
     void disable() final;
+    bool isDisabled() const final;
+
     int& fastIndex() final { return mFastIndex; }
     bool isPlaybackRestricted() const final {
         // The monitor is only created for tracks that can be silenced.
@@ -399,6 +403,8 @@
     // access these two variables only when holding player thread lock.
     std::unique_ptr<os::PersistableBundle> mMuteEventExtras;
     mute_state_t        mMuteState;
+
+    bool                mInternalMute = false;
 };  // end of Track
 
 
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index bcd8b99..66e89e4 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -33,6 +33,7 @@
 #include <afutils/Vibrator.h>
 #include <audio_utils/MelProcessor.h>
 #include <audio_utils/Metadata.h>
+#include <com_android_media_audioserver.h>
 #ifdef DEBUG_CPU_USAGE
 #include <audio_utils/Statistics.h>
 #include <cpustats/ThreadCpuUsage.h>
@@ -1486,8 +1487,8 @@
     }
 
     if (IAfEffectModule::isHapticGenerator(&desc->type) && mHapticChannelCount == 0) {
-        ALOGW("%s: thread doesn't support haptic playback while the effect is HapticGenerator",
-                __func__);
+        ALOGW("%s: thread (%s) doesn't support haptic playback while the effect is HapticGenerator",
+              __func__, threadTypeToString(mType));
         return BAD_VALUE;
     }
 
@@ -2696,14 +2697,17 @@
             }
         }
 
-        // Set DIRECT flag if current thread is DirectOutputThread. This can
-        // happen when the playback is rerouted to direct output thread by
+        // Set DIRECT/OFFLOAD flag if current thread is DirectOutputThread/OffloadThread.
+        // This can happen when the playback is rerouted to direct output/offload thread by
         // dynamic audio policy.
         // Do NOT report the flag changes back to client, since the client
-        // doesn't explicitly request a direct flag.
+        // doesn't explicitly request a direct/offload flag.
         audio_output_flags_t trackFlags = *flags;
         if (mType == DIRECT) {
             trackFlags = static_cast<audio_output_flags_t>(trackFlags | AUDIO_OUTPUT_FLAG_DIRECT);
+        } else if (mType == OFFLOAD) {
+            trackFlags = static_cast<audio_output_flags_t>(trackFlags |
+                                   AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD | AUDIO_OUTPUT_FLAG_DIRECT);
         }
         *afTrackFlags = trackFlags;
 
@@ -3003,6 +3007,23 @@
     }
 }
 
+std::set<audio_port_handle_t> PlaybackThread::getTrackPortIds_l()
+{
+    std::set<int32_t> result;
+    for (const auto& t : mTracks) {
+        if (t->isExternalTrack()) {
+            result.insert(t->portId());
+        }
+    }
+    return result;
+}
+
+std::set<audio_port_handle_t> PlaybackThread::getTrackPortIds()
+{
+    audio_utils::lock_guard _l(mutex());
+    return getTrackPortIds_l();
+}
+
 String8 PlaybackThread::getParameters(const String8& keys)
 {
     audio_utils::lock_guard _l(mutex());
@@ -3056,9 +3077,9 @@
     mCallbackThread->resetDraining();
 }
 
-void PlaybackThread::onError()
+void PlaybackThread::onError(bool isHardError)
 {
-    mCallbackThread->setAsyncError();
+    mCallbackThread->setAsyncError(isHardError);
 }
 
 void PlaybackThread::onCodecFormatChanged(
@@ -3401,9 +3422,9 @@
         return NO_ERROR;
     } else {
         status_t status;
-        uint32_t frames;
+        uint64_t frames = 0;
         status = mOutput->getRenderPosition(&frames);
-        *dspFrames = (size_t)frames;
+        *dspFrames = (uint32_t)frames;
         return status;
     }
 }
@@ -5424,11 +5445,15 @@
     broadcast_l();
 }
 
-void PlaybackThread::onAsyncError()
+void PlaybackThread::onAsyncError(bool isHardError)
 {
+    auto allTrackPortIds = getTrackPortIds();
     for (int i = AUDIO_STREAM_SYSTEM; i < (int)AUDIO_STREAM_CNT; i++) {
         invalidateTracks((audio_stream_type_t)i);
     }
+    if (isHardError) {
+        mAfThreadCallback->onHardError(allTrackPortIds);
+    }
 }
 
 void MixerThread::threadLoop_mix()
@@ -5771,6 +5796,11 @@
                 vlf *= volume;
                 vrf *= volume;
 
+                if (track->getInternalMute()) {
+                    vlf = 0.f;
+                    vrf = 0.f;
+                }
+
                 track->setFinalVolume(vlf, vrf);
                 ++fastTracks;
             } else {
@@ -5970,7 +6000,12 @@
                 vaf = v * sendLevel * (1. / MAX_GAIN_INT);
             }
 
-            track->setFinalVolume(vrf, vlf);
+            if (track->getInternalMute()) {
+                vrf = 0.f;
+                vlf = 0.f;
+            }
+
+            track->setFinalVolume(vlf, vrf);
 
             // Delegate volume control to effect in track effect chain if needed
             if (chain != 0 && chain->setVolume(&vl, &vr)) {
@@ -6159,8 +6194,8 @@
                 // No buffers for this track. Give it a few chances to
                 // fill a buffer, then remove it from active list.
                 if (--(track->retryCount()) <= 0) {
-                    ALOGI("BUFFER TIMEOUT: remove(%d) from active list on thread %p",
-                            trackId, this);
+                    ALOGI("%s BUFFER TIMEOUT: remove track(%d) from active list due to underrun"
+                          " on thread %d", __func__, trackId, mId);
                     tracksToRemove->add(track);
                     // indicate to client process that the track was disabled because of underrun;
                     // it will then automatically call start() when data is available
@@ -6917,7 +6952,8 @@
                     if (isTimestampAdvancing) { // HAL is still playing audio, give us more time.
                         track->retryCount() = kMaxTrackRetriesOffload;
                     } else {
-                        ALOGV("BUFFER TIMEOUT: remove track(%d) from active list", trackId);
+                        ALOGI("%s BUFFER TIMEOUT: remove track(%d) from active list due to"
+                              " underrun on thread %d", __func__, trackId, mId);
                         tracksToRemove->add(track);
                         // indicate to client process that the track was disabled because of
                         // underrun; it will then automatically call start() when data is available
@@ -7037,16 +7073,20 @@
 {
     bool trackPaused = false;
     bool trackStopped = false;
+    bool trackDisabled = false;
 
-    // do not put the HAL in standby when paused. AwesomePlayer clear the offloaded AudioTrack
+    // do not put the HAL in standby when paused. NuPlayer clears the offloaded AudioTrack
     // after a timeout and we will enter standby then.
+    // On offload threads, do not enter standby if the main track is still underrunning.
     if (mTracks.size() > 0) {
-        trackPaused = mTracks[mTracks.size() - 1]->isPaused();
-        trackStopped = mTracks[mTracks.size() - 1]->isStopped() ||
-                           mTracks[mTracks.size() - 1]->state() == IAfTrackBase::IDLE;
+        const auto& mainTrack = mTracks[mTracks.size() - 1];
+
+        trackPaused = mainTrack->isPaused();
+        trackStopped = mainTrack->isStopped() || mainTrack->state() == IAfTrackBase::IDLE;
+        trackDisabled = (mType == OFFLOAD) && mainTrack->isDisabled();
     }
 
-    return !mStandby && !(trackPaused || (mHwPaused && !trackStopped));
+    return !mStandby && !(trackPaused || (mHwPaused && !trackStopped) || trackDisabled);
 }
 
 // checkForNewParameter_l() must be called with ThreadBase::mutex() held
@@ -7145,11 +7185,14 @@
 {
     PlaybackThread::flushHw_l();
     mOutput->flush();
-    mHwPaused = false;
     mFlushPending = false;
     mTimestampVerifier.discontinuity(discontinuityForStandbyOrFlush());
     mTimestamp.clear();
     mMonotonicFrameCounter.onFlush();
+    // We do not reset mHwPaused, which is hidden from the Track client.
+    // Note: the client track in Tracks.cpp and AudioTrack.cpp
+    // has a FLUSHED state but the DirectOutputThread does not;
+    // those tracks will continue to show isStopped().
 }
 
 int64_t DirectOutputThread::computeWaitTimeNs_l() const {
@@ -7167,7 +7210,7 @@
         mPlaybackThread(playbackThread),
         mWriteAckSequence(0),
         mDrainSequence(0),
-        mAsyncError(false)
+        mAsyncError(ASYNC_ERROR_NONE)
 {
 }
 
@@ -7181,7 +7224,7 @@
     while (!exitPending()) {
         uint32_t writeAckSequence;
         uint32_t drainSequence;
-        bool asyncError;
+        AsyncError asyncError;
 
         {
             audio_utils::unique_lock _l(mutex());
@@ -7202,7 +7245,7 @@
             drainSequence = mDrainSequence;
             mDrainSequence &= ~1;
             asyncError = mAsyncError;
-            mAsyncError = false;
+            mAsyncError = ASYNC_ERROR_NONE;
         }
         {
             const sp<PlaybackThread> playbackThread = mPlaybackThread.promote();
@@ -7213,8 +7256,8 @@
                 if (drainSequence & 1) {
                     playbackThread->resetDraining(drainSequence >> 1);
                 }
-                if (asyncError) {
-                    playbackThread->onAsyncError();
+                if (asyncError != ASYNC_ERROR_NONE) {
+                    playbackThread->onAsyncError(asyncError == ASYNC_ERROR_HARD);
                 }
             }
         }
@@ -7264,10 +7307,10 @@
     }
 }
 
-void AsyncCallbackThread::setAsyncError()
+void AsyncCallbackThread::setAsyncError(bool isHardError)
 {
     audio_utils::lock_guard _l(mutex());
-    mAsyncError = true;
+    mAsyncError = isHardError ? ASYNC_ERROR_HARD : ASYNC_ERROR_SOFT;
     mWaitWorkCV.notify_one();
 }
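
The boolean mAsyncError becomes a three-state latch so onAsyncError() can tell a hard error apart from a soft one and forward the hard case to onHardError(). A minimal sketch of the latch-and-consume pattern, with illustrative names (ErrorLatch is not an AOSP class):

    #include <condition_variable>
    #include <mutex>

    enum AsyncError { ASYNC_ERROR_NONE, ASYNC_ERROR_SOFT, ASYNC_ERROR_HARD };

    // Illustrative class, not the AOSP AsyncCallbackThread.
    class ErrorLatch {
    public:
        // Producer side, analogous to setAsyncError(bool isHardError).
        void set(bool isHardError) {
            std::lock_guard<std::mutex> l(mMutex);
            mError = isHardError ? ASYNC_ERROR_HARD : ASYNC_ERROR_SOFT;
            mCondition.notify_one();
        }

        // Consumer side: read and reset under the same lock, as the callback thread loop does.
        AsyncError take() {
            std::lock_guard<std::mutex> l(mMutex);
            const AsyncError e = mError;
            mError = ASYNC_ERROR_NONE;
            return e;
        }

    private:
        std::mutex mMutex;
        std::condition_variable mCondition;
        AsyncError mError = ASYNC_ERROR_NONE;
    };

    int main() {
        ErrorLatch latch;
        latch.set(/*isHardError=*/true);
        // A hard error is forwarded as onAsyncError(true), which then reaches onHardError().
        return latch.take() == ASYNC_ERROR_HARD ? 0 : 1;
    }
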
 
@@ -7511,8 +7554,8 @@
                     if (isTimestampAdvancing) { // HAL is still playing audio, give us more time.
                         track->retryCount() = kMaxTrackRetriesOffload;
                     } else {
-                        ALOGV("OffloadThread: BUFFER TIMEOUT: remove track(%d) from active list",
-                                track->id());
+                        ALOGI("%s BUFFER TIMEOUT: remove track(%d) from active list due to"
+                              " underrun on thread %d", __func__, track->id(), mId);
                         tracksToRemove->add(track);
                         // tell client process that the track was disabled because of underrun;
                         // it will then automatically call start() when data is available
@@ -8239,6 +8282,7 @@
     for (int64_t loopCount = 0;; ++loopCount) {  // loopCount used for statistics tracking
         // Note: these sp<> are released at the end of the for loop outside of the mutex() lock.
         sp<IAfRecordTrack> activeTrack;
+        std::vector<sp<IAfRecordTrack>> oldActiveTracks;
         Vector<sp<IAfEffectChain>> effectChains;
 
         // activeTracks accumulates a copy of a subset of mActiveTracks
@@ -8288,7 +8332,9 @@
             bool doBroadcast = false;
             bool allStopped = true;
             for (size_t i = 0; i < size; ) {
-
+                if (activeTrack) {  // ensure track release is outside lock.
+                    oldActiveTracks.emplace_back(std::move(activeTrack));
+                }
                 activeTrack = mActiveTracks[i];
                 if (activeTrack->isTerminated()) {
                     if (activeTrack->isFastTrack()) {
@@ -9164,7 +9210,7 @@
     // This is needed for proper patchRecord peer release.
     while (recordTrack->state() == IAfTrackBase::PAUSING && !recordTrack->isInvalid()) {
         mWaitWorkCV.notify_all(); // signal thread to stop
-        mStartStopCV.wait(_l);
+        mStartStopCV.wait(_l, getTid());
     }
 
     if (recordTrack->state() == IAfTrackBase::PAUSED) { // successful stop
@@ -11342,14 +11388,15 @@
     // If there is only one active track and it is bit-perfect, enable tee buffer.
     float volumeLeft = 1.0f;
     float volumeRight = 1.0f;
-    if (mActiveTracks.size() == 1 && mActiveTracks[0]->isBitPerfect()) {
-        const int trackId = mActiveTracks[0]->id();
+    if (sp<IAfTrack> bitPerfectTrack = getTrackToStreamBitPerfectly_l();
+        bitPerfectTrack != nullptr) {
+        const int trackId = bitPerfectTrack->id();
         mAudioMixer->setParameter(
                     trackId, AudioMixer::TRACK, AudioMixer::TEE_BUFFER, (void *)mSinkBuffer);
         mAudioMixer->setParameter(
                     trackId, AudioMixer::TRACK, AudioMixer::TEE_BUFFER_FRAME_COUNT,
                     (void *)(uintptr_t)mNormalFrameCount);
-        mActiveTracks[0]->getFinalVolume(&volumeLeft, &volumeRight);
+        bitPerfectTrack->getFinalVolume(&volumeLeft, &volumeRight);
         mIsBitPerfect = true;
     } else {
         mIsBitPerfect = false;
@@ -11374,4 +11421,39 @@
     mHasDataCopiedToSinkBuffer = mIsBitPerfect;
 }
 
+void BitPerfectThread::setTracksInternalMute(
+        std::map<audio_port_handle_t, bool>* tracksInternalMute) {
+    for (auto& track : mTracks) {
+        if (auto it = tracksInternalMute->find(track->portId()); it != tracksInternalMute->end()) {
+            track->setInternalMute(it->second);
+            tracksInternalMute->erase(it);
+        }
+    }
+}
+
+sp<IAfTrack> BitPerfectThread::getTrackToStreamBitPerfectly_l() {
+    if (com::android::media::audioserver::
+                fix_concurrent_playback_behavior_with_bit_perfect_client()) {
+        sp<IAfTrack> bitPerfectTrack = nullptr;
+        bool allOtherTracksMuted = true;
+        // Return the bit-perfect track only if all other tracks are muted.
+        for (const auto& track : mActiveTracks) {
+            if (track->isBitPerfect()) {
+                bitPerfectTrack = track;
+            } else if (track->getFinalVolume() != 0.f) {
+                allOtherTracksMuted = false;
+                if (bitPerfectTrack != nullptr) {
+                    break;
+                }
+            }
+        }
+        return allOtherTracksMuted ? bitPerfectTrack : nullptr;
+    } else {
+        if (mActiveTracks.size() == 1 && mActiveTracks[0]->isBitPerfect()) {
+            return mActiveTracks[0];
+        }
+    }
+    return nullptr;
+}
+
 } // namespace android
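
setTracksInternalMute erases every entry it applies, so the caller can tell which port IDs were not owned by this thread. A standalone sketch of that consume-and-erase pattern with generic stand-in types (FakeTrack and applyInternalMute are illustrative only):

    #include <cstdint>
    #include <map>
    #include <vector>

    using PortId = int32_t;

    // Illustrative stand-in for a playback track; not the AOSP Track class.
    struct FakeTrack {
        PortId portId;
        bool internalMute = false;
    };

    // Apply the requested mute states to the tracks owned here and erase the handled
    // entries, leaving unknown port IDs in the map for the caller.
    static void applyInternalMute(std::vector<FakeTrack>& tracks,
                                  std::map<PortId, bool>* tracksInternalMute) {
        for (auto& track : tracks) {
            if (auto it = tracksInternalMute->find(track.portId);
                    it != tracksInternalMute->end()) {
                track.internalMute = it->second;
                tracksInternalMute->erase(it);
            }
        }
    }

    int main() {
        std::vector<FakeTrack> tracks{{/*portId=*/10}, {/*portId=*/11}};
        std::map<PortId, bool> request{{10, true}, {42, true}};  // 42 is owned elsewhere
        applyInternalMute(tracks, &request);
        // Port 10 is now muted; port 42 stays in the map so the caller knows it was not handled.
        return (tracks[0].internalMute && request.count(42) == 1 && request.count(10) == 0) ? 0 : 1;
    }
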
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 86e1894..4b2ade4 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -948,7 +948,7 @@
     // StreamOutHalInterfaceCallback implementation
     virtual     void        onWriteReady();
     virtual     void        onDrainReady();
-    virtual     void        onError();
+    virtual     void        onError(bool /*isHardError*/);
 
 public: // AsyncCallbackThread
                 void        resetWriteBlocked(uint32_t sequence);
@@ -960,7 +960,7 @@
     virtual bool shouldStandby_l() REQUIRES(mutex(), ThreadBase_ThreadLoop);
     virtual void onAddNewTrack_l() REQUIRES(mutex());
 public:  // AsyncCallbackThread
-                void        onAsyncError(); // error reported by AsyncCallbackThread
+                void        onAsyncError(bool isHardError); // error reported by AsyncCallbackThread
 protected:
     // StreamHalInterfaceCodecFormatCallback implementation
                 void        onCodecFormatChanged(
@@ -1200,6 +1200,11 @@
                     }
                     return mHalStarted;
                 }
+
+    void setTracksInternalMute(std::map<audio_port_handle_t, bool>* /* tracksInternalMute */)
+            override EXCLUDES_ThreadBase_Mutex {
+        // Do nothing. This is only used by the bit-perfect thread.
+    }
 protected:
     // updated by readOutputParameters_l()
     size_t                          mNormalFrameCount;  // normal mixer and effects
@@ -1373,6 +1378,8 @@
     bool destroyTrack_l(const sp<IAfTrack>& track) final REQUIRES(mutex());
 
     void removeTrack_l(const sp<IAfTrack>& track) REQUIRES(mutex());
+    std::set<audio_port_handle_t> getTrackPortIds_l() REQUIRES(mutex());
+    std::set<audio_port_handle_t> getTrackPortIds();
 
     void readOutputParameters_l() REQUIRES(mutex());
     MetadataUpdate updateMetadata_l() final REQUIRES(mutex());
@@ -1836,7 +1843,7 @@
             void        resetWriteBlocked();
             void        setDraining(uint32_t sequence);
             void        resetDraining();
-            void        setAsyncError();
+            void        setAsyncError(bool isHardError);
 
 private:
     const wp<PlaybackThread>   mPlaybackThread;
@@ -1850,7 +1857,8 @@
     uint32_t                   mDrainSequence;
     audio_utils::condition_variable mWaitWorkCV;
     mutable audio_utils::mutex mMutex{audio_utils::MutexOrder::kAsyncCallbackThread_Mutex};
-    bool                       mAsyncError;
+    enum AsyncError { ASYNC_ERROR_NONE, ASYNC_ERROR_SOFT, ASYNC_ERROR_HARD };
+    AsyncError                 mAsyncError;
 
     audio_utils::mutex& mutex() const RETURN_CAPABILITY(audio_utils::AsyncCallbackThread_Mutex) {
         return mMutex;
@@ -2449,12 +2457,17 @@
     BitPerfectThread(const sp<IAfThreadCallback>& afThreadCallback, AudioStreamOut *output,
                      audio_io_handle_t id, bool systemReady);
 
+    void setTracksInternalMute(std::map<audio_port_handle_t, bool>* tracksInternalMuted)
+            final EXCLUDES_ThreadBase_Mutex;
+
 protected:
     mixer_state prepareTracks_l(Vector<sp<IAfTrack>>* tracksToRemove) final
             REQUIRES(mutex(), ThreadBase_ThreadLoop);
     void threadLoop_mix() final REQUIRES(ThreadBase_ThreadLoop);
 
 private:
+    sp<IAfTrack> getTrackToStreamBitPerfectly_l() REQUIRES(mutex());
+
     // These variables are only accessed on the threadLoop; hence need no mutex.
     bool mIsBitPerfect GUARDED_BY(ThreadBase_ThreadLoop) = false;
     float mVolumeLeft GUARDED_BY(ThreadBase_ThreadLoop) = 0.f;
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 8fa4b06..f8cfc12 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -913,7 +913,7 @@
                         "  Format Chn mask  SRate "
                         "ST Usg CT "
                         " G db  L dB  R dB  VS dB "
-                        "  Server FrmCnt  FrmRdy F Underruns  Flushed BitPerfect"
+                        "  Server FrmCnt  FrmRdy F Underruns  Flushed BitPerfect InternalMute"
                         "%s\n",
                         isServerLatencySupported() ? "   Latency" : "");
 }
@@ -999,7 +999,7 @@
                         "%08X %08X %6u "
                         "%2u %3x %2x "
                         "%5.2g %5.2g %5.2g %5.2g%c "
-                        "%08X %6zu%c %6zu %c %9u%c %7u %10s",
+                        "%08X %6zu%c %6zu %c %9u%c %7u %10s %12s",
             active ? "yes" : "no",
             (mClient == 0) ? getpid() : mClient->pid(),
             mSessionId,
@@ -1029,7 +1029,8 @@
             mAudioTrackServerProxy->getUnderrunFrames(),
             nowInUnderrun,
             (unsigned)mAudioTrackServerProxy->framesFlushed() % 10000000,
-            isBitPerfect() ? "true" : "false"
+            isBitPerfect() ? "true" : "false",
+            getInternalMute() ? "true" : "false"
             );
 
     if (isServerLatencySupported()) {
@@ -1881,6 +1882,12 @@
     signalClientFlag(CBLK_DISABLED);
 }
 
+bool Track::isDisabled() const {
+    audio_track_cblk_t* cblk = mCblk;
+    return (cblk != nullptr)
+            && ((android_atomic_release_load(&cblk->mFlags) & CBLK_DISABLED) != 0);
+}
+
 void Track::signalClientFlag(int32_t flag)
 {
     // FIXME should use proxy, and needs work
diff --git a/services/audioflinger/datapath/AudioStreamIn.cpp b/services/audioflinger/datapath/AudioStreamIn.cpp
index 76618f4..165ac25 100644
--- a/services/audioflinger/datapath/AudioStreamIn.cpp
+++ b/services/audioflinger/datapath/AudioStreamIn.cpp
@@ -58,7 +58,7 @@
 
     if (mHalFormatHasProportionalFrames &&
             (flags & AUDIO_INPUT_FLAG_DIRECT) == AUDIO_INPUT_FLAG_DIRECT) {
-        // For DirectRecord reset timestamp to 0 on standby.
+        // For DirectRecord reset position to 0 on standby.
         const uint64_t adjustedPosition = (halPosition <= mFramesReadAtStandby) ?
                 0 : (halPosition - mFramesReadAtStandby);
         // Scale from HAL sample rate to application rate.
diff --git a/services/audioflinger/datapath/AudioStreamOut.cpp b/services/audioflinger/datapath/AudioStreamOut.cpp
index aad538f..a686ff6 100644
--- a/services/audioflinger/datapath/AudioStreamOut.cpp
+++ b/services/audioflinger/datapath/AudioStreamOut.cpp
@@ -51,42 +51,17 @@
         return NO_INIT;
     }
 
-    uint32_t halPosition = 0;
+    uint64_t halPosition = 0;
     const status_t status = stream->getRenderPosition(&halPosition);
     if (status != NO_ERROR) {
         return status;
     }
-
-    // Maintain a 64-bit render position using the 32-bit result from the HAL.
-    // This delta calculation relies on the arithmetic overflow behavior
-    // of integers. For example (100 - 0xFFFFFFF0) = 116.
-    const auto truncatedPosition = (uint32_t)mRenderPosition;
-    int32_t deltaHalPosition; // initialization not needed, overwitten by __builtin_sub_overflow()
-    (void) __builtin_sub_overflow(halPosition, truncatedPosition, &deltaHalPosition);
-
-    if (deltaHalPosition >= 0) {
-        mRenderPosition += deltaHalPosition;
-    } else if (mExpectRetrograde) {
-        mExpectRetrograde = false;
-        mRenderPosition -= static_cast<uint64_t>(-deltaHalPosition);
-    }
     // Scale from HAL sample rate to application rate.
-    *frames = mRenderPosition / mRateMultiplier;
+    *frames = halPosition / mRateMultiplier;
 
     return status;
 }
 
-// return bottom 32-bits of the render position
-status_t AudioStreamOut::getRenderPosition(uint32_t *frames)
-{
-    uint64_t position64 = 0;
-    const status_t status = getRenderPosition(&position64);
-    if (status == NO_ERROR) {
-        *frames = (uint32_t)position64;
-    }
-    return status;
-}
-
 status_t AudioStreamOut::getPresentationPosition(uint64_t *frames, struct timespec *timestamp)
 {
     if (stream == nullptr) {
@@ -101,7 +76,7 @@
 
     if (mHalFormatHasProportionalFrames &&
             (flags & AUDIO_OUTPUT_FLAG_DIRECT) == AUDIO_OUTPUT_FLAG_DIRECT) {
-        // For DirectTrack reset timestamp to 0 on standby.
+        // For DirectTrack reset position to 0 on standby.
         const uint64_t adjustedPosition = (halPosition <= mFramesWrittenAtStandby) ?
                 0 : (halPosition - mFramesWrittenAtStandby);
         // Scale from HAL sample rate to application rate.
@@ -179,8 +154,6 @@
 
 int AudioStreamOut::flush()
 {
-    mRenderPosition = 0;
-    mExpectRetrograde = false;
     mFramesWritten = 0;
     mFramesWrittenAtStandby = 0;
     const status_t result = stream->flush();
@@ -189,12 +162,14 @@
 
 int AudioStreamOut::standby()
 {
-    mRenderPosition = 0;
-    mExpectRetrograde = false;
     mFramesWrittenAtStandby = mFramesWritten;
     return stream->standby();
 }
 
+void AudioStreamOut::presentationComplete() {
+    stream->presentationComplete();
+}
+
 ssize_t AudioStreamOut::write(const void *buffer, size_t numBytes)
 {
     size_t bytesWritten;
diff --git a/services/audioflinger/datapath/AudioStreamOut.h b/services/audioflinger/datapath/AudioStreamOut.h
index ea41bba..2c9fb3e 100644
--- a/services/audioflinger/datapath/AudioStreamOut.h
+++ b/services/audioflinger/datapath/AudioStreamOut.h
@@ -51,9 +51,6 @@
 
     virtual ~AudioStreamOut();
 
-    // Get the bottom 32-bits of the 64-bit render position.
-    status_t getRenderPosition(uint32_t *frames);
-
     virtual status_t getRenderPosition(uint64_t *frames);
 
     virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp);
@@ -91,21 +88,14 @@
     virtual status_t flush();
     virtual status_t standby();
 
-    // Avoid suppressing retrograde motion in mRenderPosition for gapless offload/direct when
-    // transitioning between tracks.
-    // The HAL resets the frame position without flush/stop being called, but calls back prior to
-    // this event. So, on the next occurrence of retrograde motion, we permit backwards movement of
-    // mRenderPosition.
-    virtual void presentationComplete() { mExpectRetrograde = true; }
+    virtual void presentationComplete();
 
 protected:
     uint64_t mFramesWritten = 0; // reset by flush
     uint64_t mFramesWrittenAtStandby = 0;
-    uint64_t mRenderPosition = 0; // reset by flush, standby, or presentation complete
     int mRateMultiplier = 1;
     bool mHalFormatHasProportionalFrames = false;
     size_t mHalFrameSize = 0;
-    bool mExpectRetrograde = false; // see presentationComplete
 };
 
 } // namespace android
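
For reference, the deleted AudioStreamOut code maintained a 64-bit mRenderPosition from the HAL's wrapping 32-bit render counter; with getRenderPosition() now returning uint64_t end to end, that bookkeeping goes away. A standalone sketch of the retired technique (extendPosition is an illustrative name; the retrograde allowance tied to presentationComplete() is omitted):

    #include <cstdint>

    // Extend a wrapping 32-bit HAL counter into a monotonically increasing 64-bit position.
    // Unsigned subtraction wraps, so reinterpreting the difference as int32_t yields a signed
    // delta as long as successive reads are less than 2^31 frames apart, e.g.
    // 100 - 0xFFFFFFF0 == 116.
    uint64_t extendPosition(uint64_t position64, uint32_t hal32) {
        const uint32_t truncated = static_cast<uint32_t>(position64);
        int32_t delta;
        (void) __builtin_sub_overflow(hal32, truncated, &delta);
        if (delta >= 0) {
            position64 += delta;  // normal forward progress, including across a 32-bit wrap
        }
        // The removed code additionally allowed one retrograde step after
        // presentationComplete(); that special case is left out here.
        return position64;
    }

    int main() {
        uint64_t pos = 0xFFFFFFF0;        // just below the 32-bit wrap point
        pos = extendPosition(pos, 100u);  // the HAL counter wrapped around
        return pos == 0x100000064ULL ? 0 : 1;
    }
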
diff --git a/services/audioparameterparser/Android.bp b/services/audioparameterparser/Android.bp
index f5feece..1c1c1e1 100644
--- a/services/audioparameterparser/Android.bp
+++ b/services/audioparameterparser/Android.bp
@@ -35,10 +35,10 @@
     name: "android.hardware.audio.parameter_parser.example_defaults",
     defaults: [
         "latest_android_hardware_audio_core_ndk_shared",
+        "latest_av_audio_types_aidl_ndk_shared",
     ],
 
     shared_libs: [
-        "av-audio-types-aidl-V1-ndk",
         "libbase",
         "libbinder_ndk",
     ],
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index 8f17ffc..e91e2a3 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -18,6 +18,7 @@
 #define ANDROID_AUDIOPOLICY_INTERFACE_H
 
 #include <android/media/DeviceConnectedState.h>
+#include <android/media/TrackInternalMuteInfo.h>
 #include <media/AudioCommonTypes.h>
 #include <media/AudioContainers.h>
 #include <media/AudioDeviceTypeAddr.h>
@@ -179,10 +180,16 @@
     // volume control functions
     //
 
+    // notifies the audio policy manager that the absolute volume mode is enabled/disabled on
+    // the passed device. Also specifies the stream that is controlling the absolute volume.
+    virtual status_t setDeviceAbsoluteVolumeEnabled(audio_devices_t device,
+                                                    const char *address,
+                                                    bool enabled,
+                                                    audio_stream_type_t streamToDriveAbs) = 0;
     // initialises stream volume conversion parameters by specifying volume index range.
     virtual void initStreamVolume(audio_stream_type_t stream,
-                                      int indexMin,
-                                      int indexMax) = 0;
+                                  int indexMin,
+                                  int indexMax) = 0;
 
     // sets the new stream volume at a level corresponding to the supplied index for the
     // supplied device. By convention, specifying AUDIO_DEVICE_OUT_DEFAULT_FOR_VOLUME means
@@ -585,6 +592,9 @@
     // Get the attributes of the mix port when connecting to the given device port.
     virtual status_t getAudioMixPort(const struct audio_port_v7 *devicePort,
                                      struct audio_port_v7 *mixPort) = 0;
+
+    virtual status_t setTracksInternalMute(
+            const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) = 0;
 };
 
     // These are the signatures of createAudioPolicyManager/destroyAudioPolicyManager
diff --git a/services/audiopolicy/common/include/policy.h b/services/audiopolicy/common/include/policy.h
index 4643bd1..04d5362 100644
--- a/services/audiopolicy/common/include/policy.h
+++ b/services/audiopolicy/common/include/policy.h
@@ -17,6 +17,7 @@
 #pragma once
 
 #include <system/audio.h>
+#include <set>
 #include <vector>
 
 #include <media/AudioContainers.h>
@@ -27,6 +28,10 @@
 
 static const audio_attributes_t defaultAttr = AUDIO_ATTRIBUTES_INITIALIZER;
 
+static const std::set<audio_usage_t> gHighPriorityUseCases = {
+        AUDIO_USAGE_ALARM, AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE
+};
+
 } // namespace android
 
 static const audio_format_t gDynamicFormat = AUDIO_FORMAT_DEFAULT;
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index 00958aa..914f3fe 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -29,6 +29,7 @@
 #include "ClientDescriptor.h"
 #include "DeviceDescriptor.h"
 #include "PolicyAudioPort.h"
+#include "PreferredMixerAttributesInfo.h"
 #include <vector>
 
 namespace android {
@@ -477,6 +478,16 @@
 
     PortHandleVector getClientsForStream(audio_stream_type_t streamType) const;
 
+    bool isBitPerfect() const {
+        return (getFlags().output & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE;
+    }
+
+    /**
+     * Return true if there is any client with the same usage active on the given device.
+     * When the given device is null, return true if there is any client active.
+     */
+    bool isUsageActiveOnDevice(audio_usage_t usage, sp<DeviceDescriptor> device) const;
+
     virtual std::string info() const override;
 
     const sp<IOProfile> mProfile;          // I/O profile this output derives from
@@ -489,7 +500,7 @@
     audio_session_t mDirectClientSession; // session id of the direct output client
     bool mPendingReopenToQueryProfiles = false;
     audio_channel_mask_t mMixerChannelMask = AUDIO_CHANNEL_NONE;
-    bool mUsePreferredMixerAttributes = false;
+    sp<PreferredMixerAttributesInfo> mPreferredAttrInfo = nullptr;
 };
 
 // Audio output driven by an input device directly.
@@ -616,6 +627,8 @@
      */
     bool isAnyDeviceTypeActive(const DeviceTypeSet& deviceTypes) const;
 
+    bool isUsageActiveOnDevice(audio_usage_t usage, sp<DeviceDescriptor> device) const;
+
     void dump(String8 *dst) const;
 };
 
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioProfileVectorHelper.h b/services/audiopolicy/common/managerdefinitions/include/AudioProfileVectorHelper.h
index f84bda7..5fb0ad4 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioProfileVectorHelper.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioProfileVectorHelper.h
@@ -38,11 +38,40 @@
 void appendAudioProfiles(AudioProfileVector &audioProfileVector,
                          const AudioProfileVector &audioProfileVectorToAppend);
 
+/**
+ * Check if the profile vector contains a profile that matches the given sampling rate, channel
+ * mask and format. Note that this method uses `audio_formats_match` from policy.h, which will
+ * consider PCM formats to match if their bytes per sample are greater than 2.
+ *
+ * @param audioProfileVector
+ * @param samplingRate
+ * @param channelMask
+ * @param format
+ * @return NO_ERROR if the given profile vector is empty or it contains a profile that matches the
+ *         given sampling rate, channel mask and format. Otherwise, returns BAD_VALUE.
+ */
 status_t checkExactProfile(const AudioProfileVector &audioProfileVector,
                            const uint32_t samplingRate,
                            audio_channel_mask_t channelMask,
                            audio_format_t format);
 
+/**
+ * Check if the profile vector contains a profile that has exactly the same sampling rate, channel
+ * mask and format as the given values.
+ *
+ * @param audioProfileVector
+ * @param samplingRate
+ * @param channelMask
+ * @param format
+ * @return NO_ERROR if the given profile vector is empty or it contains a profile that has
+ *         exactly the same sampling rate, channel mask and format as the given values. Otherwise,
+ *         returns BAD_VALUE.
+ */
+status_t checkIdenticalProfile(const AudioProfileVector &audioProfileVector,
+                               const uint32_t samplingRate,
+                               audio_channel_mask_t channelMask,
+                               audio_format_t format);
+
 status_t checkCompatibleProfile(const AudioProfileVector &audioProfileVector,
                                 uint32_t &samplingRate,
                                 audio_channel_mask_t &channelMask,
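
The two new checks differ only in how formats are compared. A self-contained sketch with mock types, using a simplified rule modeled on the audio_formats_match behavior described in the comments above (PCM formats with more than 2 bytes per sample are treated as matching):

    #include <cstdint>

    // Illustrative stand-ins; the real code uses AudioProfile and formats from system/audio.h.
    enum Format { PCM_16_BIT, PCM_24_BIT_PACKED, PCM_32_BIT, PCM_FLOAT, AC3 };

    struct Profile { uint32_t rate; uint32_t channels; Format format; };

    static int bytesPerSample(Format f) {
        switch (f) {
            case PCM_16_BIT: return 2;
            case PCM_24_BIT_PACKED: return 3;
            case PCM_32_BIT:
            case PCM_FLOAT: return 4;
            default: return 0;  // compressed
        }
    }

    // Simplified form of the audio_formats_match rule described above:
    // PCM formats with more than 2 bytes per sample are treated as matching each other.
    static bool formatsMatch(Format a, Format b) {
        if (bytesPerSample(a) > 2 && bytesPerSample(b) > 2) return true;
        return a == b;
    }

    static bool exactMatch(const Profile& p, uint32_t rate, uint32_t ch, Format f) {
        return formatsMatch(p.format, f) && p.channels == ch && p.rate == rate;
    }

    static bool identicalMatch(const Profile& p, uint32_t rate, uint32_t ch, Format f) {
        return p.format == f && p.channels == ch && p.rate == rate;
    }

    int main() {
        const Profile p{48000, 2, PCM_FLOAT};
        // "Exact": PCM_32_BIT matches PCM_FLOAT (both wider than 2 bytes per sample).
        // "Identical": it does not; the format must be literally the same.
        const bool exact = exactMatch(p, 48000, 2, PCM_32_BIT);
        const bool identical = identicalMatch(p, 48000, 2, PCM_32_BIT);
        return (exact && !identical) ? 0 : 1;
    }
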
diff --git a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
index fe90a1e..a596c43 100644
--- a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
@@ -165,6 +165,18 @@
         mIsInvalid = true;
     }
 
+    bool getInternalMute() const { return mInternalMute; }
+
+    /**
+     * Set the internal mute for a client. Return true if the existing value is different from
+     * the given value.
+     */
+    bool setInternalMute(bool muted) {
+        const bool result = (mInternalMute != muted);
+        mInternalMute = muted;
+        return result;
+    }
+
 private:
     const audio_stream_type_t mStream;
     const product_strategy_t mStrategy;
@@ -178,6 +190,7 @@
      */
     uint32_t mActivityCount = 0;
     bool mIsInvalid = false;
+    bool mInternalMute = false;
 };
 
 class RecordClientDescriptor: public ClientDescriptor
diff --git a/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h b/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h
index acf787b..6b21e9f 100644
--- a/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h
+++ b/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h
@@ -56,9 +56,14 @@
     // Audio port IDs are in a different namespace than AudioFlinger unique IDs
     static audio_port_handle_t getNextUniqueId();
 
-    // searches for an exact match
+    // searches for an exact match; note that this method uses `audio_formats_match` from policy.h,
+    // which will consider PCM formats to match if their bytes per sample are greater than 2.
     virtual status_t checkExactAudioProfile(const struct audio_port_config *config) const;
 
+    // searches for an identical match; unlike `checkExactAudioProfile` above, this also
+    // requires the formats to be exactly the same.
+    virtual status_t checkIdenticalAudioProfile(const struct audio_port_config *config) const;
+
     // searches for a compatible match, currently implemented for input
     // parameters are input|output, returned value is the best match.
     status_t checkCompatibleAudioProfile(uint32_t &samplingRate,
@@ -100,6 +105,12 @@
                          const ChannelMaskSet &channelMasks) const;
     void pickSamplingRate(uint32_t &rate, const SampleRateSet &samplingRates) const;
 
+    status_t checkAudioProfile(const struct audio_port_config *config,
+                               std::function<status_t(const AudioProfileVector&,
+                                                      const uint32_t samplingRate,
+                                                      audio_channel_mask_t,
+                                                      audio_format_t)> checkProfile) const;
+
     sp<HwModule> mModule;     // audio HW module exposing this I/O stream
     AudioRouteVector mRoutes; // Routes involving this port
 };
diff --git a/services/audiopolicy/common/managerdefinitions/include/PreferredMixerAttributesInfo.h b/services/audiopolicy/common/managerdefinitions/include/PreferredMixerAttributesInfo.h
index 9472481..a493e3c 100644
--- a/services/audiopolicy/common/managerdefinitions/include/PreferredMixerAttributesInfo.h
+++ b/services/audiopolicy/common/managerdefinitions/include/PreferredMixerAttributesInfo.h
@@ -44,6 +44,17 @@
 
     void increaseActiveClient() { mActiveClientsCount++; }
     void decreaseActiveClient() { mActiveClientsCount--; }
+    void resetActiveClient() { mActiveClientsCount = 0; }
+
+    bool isBitPerfect() const {
+        return (getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE;
+    }
+
+    bool configMatches(const audio_config_t& config) const {
+        return config.format == mMixerAttributes.config.format &&
+                config.channel_mask == mMixerAttributes.config.channel_mask &&
+                config.sample_rate == mMixerAttributes.config.sample_rate;
+    }
 
     void dump(String8 *dst);
 
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index 6537a00..68b2e7b 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -561,6 +561,7 @@
             audio_port_config config = {};
             devicePort->toAudioPortConfig(&config);
             config.config_mask = AUDIO_PORT_CONFIG_GAIN;
+            config.gain.mode = gains[0]->getMode();
             config.gain.values[0] = gainValueMb;
             return mClientInterface->setAudioPortConfig(&config, 0) == NO_ERROR;
         }
@@ -791,6 +792,16 @@
     mDevices = devices;
 }
 
+bool SwAudioOutputDescriptor::isUsageActiveOnDevice(audio_usage_t usage,
+                                                    sp<android::DeviceDescriptor> device) const {
+    if (device != nullptr && !mDevices.contains(device)) {
+        return false;
+    }
+    return std::any_of(mActiveClients.begin(), mActiveClients.end(),
+                       [usage](sp<TrackClientDescriptor> client) {
+                           return client->attributes().usage == usage; });
+}
+
 // HwAudioOutputDescriptor implementation
 HwAudioOutputDescriptor::HwAudioOutputDescriptor(const sp<SourceClientDescriptor>& source,
                                                  AudioPolicyClientInterface *clientInterface)
@@ -1016,6 +1027,17 @@
     return clientsForStream;
 }
 
+bool SwAudioOutputCollection::isUsageActiveOnDevice(audio_usage_t usage,
+                                                    sp<android::DeviceDescriptor> device) const {
+    for (size_t i = 0; i < this->size(); i++) {
+        const sp<SwAudioOutputDescriptor> outputDesc = this->valueAt(i);
+        if (outputDesc->isUsageActiveOnDevice(usage, device)) {
+            return true;
+        }
+    }
+    return false;
+}
+
 std::string SwAudioOutputDescriptor::info() const {
     std::string result;
     result.append("[" );
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioProfileVectorHelper.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioProfileVectorHelper.cpp
index 82f51ad..164f70a 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioProfileVectorHelper.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioProfileVectorHelper.cpp
@@ -190,6 +190,18 @@
     return BAD_VALUE;
 }
 
+status_t checkIdentical(const sp<AudioProfile> &audioProfile,
+                        uint32_t samplingRate,
+                        audio_channel_mask_t channelMask,
+                        audio_format_t format) {
+    if (audioProfile->getFormat() == format &&
+        audioProfile->supportsChannels(channelMask) &&
+        audioProfile->supportsRate(samplingRate)) {
+        return NO_ERROR;
+    }
+    return BAD_VALUE;
+}
+
 status_t checkCompatibleSamplingRate(const sp<AudioProfile> &audioProfile,
                                      uint32_t samplingRate,
                                      uint32_t &updatedSamplingRate)
@@ -320,23 +332,43 @@
     return bestMatch > 0 ? NO_ERROR : BAD_VALUE;
 }
 
-status_t checkExactProfile(const AudioProfileVector& audioProfileVector,
-                           const uint32_t samplingRate,
-                           audio_channel_mask_t channelMask,
-                           audio_format_t format)
-{
+namespace {
+
+status_t checkProfile(const AudioProfileVector& audioProfileVector,
+                      const uint32_t samplingRate,
+                      audio_channel_mask_t channelMask,
+                      audio_format_t format,
+                      std::function<status_t(const sp<AudioProfile> &, uint32_t,
+                                             audio_channel_mask_t, audio_format_t)> check) {
     if (audioProfileVector.empty()) {
         return NO_ERROR;
     }
 
     for (const auto& profile : audioProfileVector) {
-        if (checkExact(profile, samplingRate, channelMask, format) == NO_ERROR) {
+        if (check(profile, samplingRate, channelMask, format) == NO_ERROR) {
             return NO_ERROR;
         }
     }
     return BAD_VALUE;
 }
 
+} // namespace
+
+status_t checkExactProfile(const AudioProfileVector& audioProfileVector,
+                           const uint32_t samplingRate,
+                           audio_channel_mask_t channelMask,
+                           audio_format_t format)
+{
+    return checkProfile(audioProfileVector, samplingRate, channelMask, format, checkExact);
+}
+
+status_t checkIdenticalProfile(const AudioProfileVector &audioProfileVector,
+                               const uint32_t samplingRate,
+                               audio_channel_mask_t channelMask,
+                               audio_format_t format) {
+    return checkProfile(audioProfileVector, samplingRate, channelMask, format, checkIdentical);
+}
+
 status_t checkCompatibleProfile(const AudioProfileVector &audioProfileVector,
                                 uint32_t &samplingRate,
                                 audio_channel_mask_t &channelMask,
diff --git a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
index 2aee501..667c189 100644
--- a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
@@ -57,8 +57,8 @@
 void TrackClientDescriptor::dump(String8 *dst, int spaces) const
 {
     ClientDescriptor::dump(dst, spaces);
-    dst->appendFormat("%*sStream: %d; Flags: %08x; Refcount: %d\n", spaces, "",
-            mStream, mFlags, mActivityCount);
+    dst->appendFormat("%*sStream: %d; Flags: %08x; Refcount: %d; InternalMute: %s\n",
+            spaces, "", mStream, mFlags, mActivityCount, mInternalMute ? "Yes" : "No");
     dst->appendFormat("%*sDAP Primary Mix: %p\n", spaces, "", mPrimaryMix.promote().get());
     if (!mSecondaryOutputs.empty()) {
         dst->appendFormat("%*sDAP Secondary Outputs: ", spaces - 2, "");
diff --git a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
index d9fbd89..991b103 100644
--- a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
@@ -73,7 +73,7 @@
     if (isRecordThread)
     {
         if ((flags & AUDIO_INPUT_FLAG_MMAP_NOIRQ) != 0) {
-            if (checkExactAudioProfile(&config) != NO_ERROR) {
+            if (checkIdenticalAudioProfile(&config) != NO_ERROR) {
                 return result;
             }
             result = EXACT_MATCH;
@@ -86,7 +86,13 @@
             return result;
         }
     } else {
-        if (checkExactAudioProfile(&config) == NO_ERROR) {
+        if ((flags & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) != 0 ||
+            (flags & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != 0) {
+            if (checkIdenticalAudioProfile(&config) != NO_ERROR) {
+                return result;
+            }
+            result = EXACT_MATCH;
+        } else if (checkExactAudioProfile(&config) == NO_ERROR) {
             result = EXACT_MATCH;
         } else {
             return result;
diff --git a/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp b/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp
index ce8178f..ae99191 100644
--- a/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp
@@ -63,21 +63,11 @@
 
 status_t PolicyAudioPort::checkExactAudioProfile(const struct audio_port_config *config) const
 {
-    status_t status = NO_ERROR;
-    auto config_mask = config->config_mask;
-    if (config_mask & AUDIO_PORT_CONFIG_GAIN) {
-        config_mask &= ~AUDIO_PORT_CONFIG_GAIN;
-        status = asAudioPort()->checkGain(&config->gain, config->gain.index);
-        if (status != NO_ERROR) {
-            return status;
-        }
-    }
-    if (config_mask != 0) {
-        // TODO should we check sample_rate / channel_mask / format separately?
-        status = checkExactProfile(asAudioPort()->getAudioProfiles(), config->sample_rate,
-                config->channel_mask, config->format);
-    }
-    return status;
+    return checkAudioProfile(config, checkExactProfile);
+}
+
+status_t PolicyAudioPort::checkIdenticalAudioProfile(const struct audio_port_config *config) const {
+    return checkAudioProfile(config, checkIdenticalProfile);
 }
 
 void PolicyAudioPort::pickSamplingRate(uint32_t &pickedRate,
@@ -266,4 +256,28 @@
             asAudioPort()->getName().c_str(), samplingRate, channelMask, format);
 }
 
+status_t PolicyAudioPort::checkAudioProfile(
+        const struct audio_port_config *config,
+        std::function<status_t(const AudioProfileVector &,
+                               const uint32_t,
+                               audio_channel_mask_t,
+                               audio_format_t)> checkProfile) const {
+    status_t status = NO_ERROR;
+    auto config_mask = config->config_mask;
+    if (config_mask & AUDIO_PORT_CONFIG_GAIN) {
+        config_mask &= ~AUDIO_PORT_CONFIG_GAIN;
+        status = asAudioPort()->checkGain(&config->gain, config->gain.index);
+        if (status != NO_ERROR) {
+            return status;
+        }
+    }
+    if (config_mask != 0) {
+        // TODO should we check sample_rate / channel_mask / format separately?
+        status = checkProfile(asAudioPort()->getAudioProfiles(), config->sample_rate,
+                   config->channel_mask, config->format);
+    }
+    return status;
+}
+
 } // namespace android
diff --git a/services/audiopolicy/engine/common/Android.bp b/services/audiopolicy/engine/common/Android.bp
index 9e07d79..d8aac37 100644
--- a/services/audiopolicy/engine/common/Android.bp
+++ b/services/audiopolicy/engine/common/Android.bp
@@ -62,4 +62,7 @@
         "com.android.media.audio-aconfig-cc",
         "server_configurable_flags",
     ],
+    defaults: [
+        "aconfig_lib_cc_static_link.defaults",
+    ],
 }
diff --git a/services/audiopolicy/engineconfigurable/Android.bp b/services/audiopolicy/engineconfigurable/Android.bp
index 66df930..a0a4bdf 100644
--- a/services/audiopolicy/engineconfigurable/Android.bp
+++ b/services/audiopolicy/engineconfigurable/Android.bp
@@ -54,4 +54,7 @@
         "libutils",
         "libxml2",
     ],
+    defaults: [
+        "aconfig_lib_cc_static_link.defaults",
+    ],
 }
diff --git a/services/audiopolicy/enginedefault/Android.bp b/services/audiopolicy/enginedefault/Android.bp
index 7810d63..799b8d9 100644
--- a/services/audiopolicy/enginedefault/Android.bp
+++ b/services/audiopolicy/enginedefault/Android.bp
@@ -41,4 +41,7 @@
         "libutils",
         "libxml2",
     ],
+    defaults: [
+        "aconfig_lib_cc_static_link.defaults",
+    ],
 }
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 6e9bd34..7ec5f48 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -338,7 +338,7 @@
                         && (!device_distinguishes_on_address(device->type())
                                 // always force when disconnecting (a non-duplicated device)
                                 || (state == AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE));
-                if (desc->mUsePreferredMixerAttributes && newDevices != desc->devices()) {
+                if (desc->mPreferredAttrInfo != nullptr && newDevices != desc->devices()) {
                     // If the device is using preferred mixer attributes, the output need to reopen
                     // with default configuration when the new selected devices are different from
                     // current routing devices
@@ -563,15 +563,31 @@
         }
     }
     auto musicStrategy = streamToStrategy(AUDIO_STREAM_MUSIC);
+    uint32_t muteWaitMs = 0;
     for (size_t i = 0; i < mOutputs.size(); i++) {
        sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
-       // mute media strategies and delay device switch by the largest
-       // This avoid sending the music tail into the earpiece or headset.
+       // mute media strategies to avoid sending the music tail into
+       // the earpiece or headset.
+       if (desc->isStrategyActive(musicStrategy)) {
+           uint32_t tempRecommendedMuteDuration = desc->getRecommendedMuteDurationMs();
+           uint32_t tempMuteDurationMs = tempRecommendedMuteDuration > 0 ?
+                        tempRecommendedMuteDuration : desc->latency() * 4;
+           if (muteWaitMs < tempMuteDurationMs) {
+               muteWaitMs = tempMuteDurationMs;
+           }
+       }
        setStrategyMute(musicStrategy, true, desc);
        setStrategyMute(musicStrategy, false, desc, MUTE_TIME_MS,
           mEngine->getOutputDevicesForAttributes(attributes_initializer(AUDIO_USAGE_MEDIA),
                                               nullptr, true /*fromCache*/).types());
     }
+    // Wait for the muted audio to propagate down the audio path; see checkDeviceMuteStrategies().
+    // We assume that MUTE_TIME_MS is way larger than muteWaitMs so that unmuting still
+    // happens after the actual device switch.
+    if (muteWaitMs > 0) {
+        ALOGW_IF(MUTE_TIME_MS < muteWaitMs * 2, "%s excessive mute wait %d", __func__, muteWaitMs);
+        usleep(muteWaitMs * 1000);
+    }
     // Toggle the device state: UNAVAILABLE -> AVAILABLE
     // This will force reading again the device configuration
     status_t status = setDeviceConnectionState(device,
@@ -917,15 +933,15 @@
     for (size_t i = 0; i < mOutputs.size(); i++) {
         sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
         DeviceVector newDevices = getNewOutputDevices(desc, true /*fromCache*/);
+        if (state != AUDIO_MODE_NORMAL && oldState == AUDIO_MODE_NORMAL
+                && desc->mPreferredAttrInfo != nullptr) {
+            // If the output is using preferred mixer attributes and the audio mode is not normal,
+            // the output needs to reopen with the default configuration.
+            outputsToReopen.emplace(mOutputs.keyAt(i), newDevices);
+            continue;
+        }
         if (state != AUDIO_MODE_IN_CALL || (desc != mPrimaryOutput && !isTelephonyRxOrTx(desc))) {
             bool forceRouting = !newDevices.isEmpty();
-            if (desc->mUsePreferredMixerAttributes && newDevices != desc->devices()) {
-                // If the device is using preferred mixer attributes, the output need to reopen
-                // with default configuration when the new selected devices are different from
-                // current routing devices.
-                outputsToReopen.emplace(mOutputs.keyAt(i), newDevices);
-                continue;
-            }
             setOutputDevices(__func__, desc, newDevices, forceRouting, 0 /*delayMs*/, nullptr,
                              true /*requiresMuteCheck*/, !forceRouting /*requiresVolumeCheck*/);
         }
@@ -1337,23 +1353,40 @@
             // Only use preferred mixer if the uid matches or the preferred mixer is bit-perfect
             // and it is currently active.
             if (info != nullptr && info->getUid() != uid &&
-                ((info->getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) == AUDIO_OUTPUT_FLAG_NONE ||
-                        info->getActiveClientCount() == 0)) {
+                (!info->isBitPerfect() || info->getActiveClientCount() == 0)) {
                 info = nullptr;
             }
+            if (com::android::media::audioserver::
+                    fix_concurrent_playback_behavior_with_bit_perfect_client()) {
+                if (info != nullptr && info->getUid() == uid &&
+                    info->configMatches(*config) &&
+                    (mEngine->getPhoneState() != AUDIO_MODE_NORMAL ||
+                            std::any_of(gHighPriorityUseCases.begin(), gHighPriorityUseCases.end(),
+                                        [this, &outputDevices](audio_usage_t usage) {
+                                            return mOutputs.isUsageActiveOnDevice(
+                                                    usage, outputDevices[0]); }))) {
+                    // Bit-perfect request is not allowed when the phone mode is not normal or
+                    // there is any higher priority use case active.
+                    return INVALID_OPERATION;
+                }
+            }
         }
         *output = getOutputForDevices(outputDevices, session, resultAttr, config,
                 flags, isSpatialized, info, resultAttr->flags & AUDIO_FLAG_MUTE_HAPTIC);
         // The client will be active if the client is currently preferred mixer owner and the
         // requested configuration matches the preferred mixer configuration.
         *isBitPerfect = (info != nullptr
-                && (info->getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE
+                && info->isBitPerfect()
                 && info->getUid() == uid
                 && *output != AUDIO_IO_HANDLE_NONE
                 // When bit-perfect output is selected for the preferred mixer attributes owner,
                 // only need to consider the config matches.
                 && mOutputs.valueFor(*output)->isConfigurationMatched(
                         clientConfig, AUDIO_OUTPUT_FLAG_NONE));
+
+        if (*isBitPerfect) {
+            *flags = (audio_output_flags_t)(*flags | AUDIO_OUTPUT_FLAG_BIT_PERFECT);
+        }
     }
     if (*output == AUDIO_IO_HANDLE_NONE) {
         AudioProfileVector profiles;
@@ -1577,6 +1610,11 @@
     outputDesc->mDirectClientSession = session;
 
     addOutput(*output, outputDesc);
+    setOutputDevices(__func__, outputDesc,
+                     devices,
+                     true,
+                     0,
+                     NULL);
     mPreviousOutputs = mOutputs;
     ALOGV("%s returns new direct output %d", __func__, *output);
     mpClientInterface->onAudioPortListUpdate();
@@ -1638,12 +1676,14 @@
     }
 
     // Use the spatializer output if the content can be spatialized, no preferred mixer
-    // was specified and offload or direct playback is not explicitly requested.
+    // was specified and offload or direct playback is not explicitly requested, and there is no
+    // haptic channel included in playback.
     *isSpatialized = false;
-    if (mSpatializerOutput != nullptr
-            && canBeSpatializedInt(attr, config, devices.toTypeAddrVector())
-            && prefMixerConfigInfo == nullptr
-            && ((*flags & (AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD | AUDIO_OUTPUT_FLAG_DIRECT)) == 0)) {
+    if (mSpatializerOutput != nullptr &&
+        canBeSpatializedInt(attr, config, devices.toTypeAddrVector()) &&
+        prefMixerConfigInfo == nullptr &&
+        ((*flags & (AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD | AUDIO_OUTPUT_FLAG_DIRECT)) == 0) &&
+        checkHapticCompatibilityOnSpatializerOutput(config, session)) {
         *isSpatialized = true;
         return mSpatializerOutput->mIoHandle;
     }
@@ -1701,6 +1741,24 @@
             // at this stage we should ignore the DIRECT flag as no direct output could be
             // found earlier
             *flags = (audio_output_flags_t) (*flags & ~AUDIO_OUTPUT_FLAG_DIRECT);
+            if (com::android::media::audioserver::
+                    fix_concurrent_playback_behavior_with_bit_perfect_client()) {
+                // If the preferred mixer attributes is null, do not select the bit-perfect output
+                // unless the bit-perfect output is the only output.
+                // The bit-perfect output can exist while the passed-in preferred mixer attributes
+                // info is null when the client is high priority. The high-priority clients are
+                // ringtone and alarm, which are not bit-perfect use cases.
+                size_t i = 0;
+                while (i < outputs.size() && outputs.size() > 1) {
+                    auto desc = mOutputs.valueFor(outputs[i]);
+                    // The output descriptor must not be null here.
+                    if (desc->isBitPerfect()) {
+                        outputs.removeItemsAt(i);
+                    } else {
+                        i += 1;
+                    }
+                }
+            }
             output = selectOutput(
                     outputs, *flags, config->format, channelMask, config->sample_rate, session);
         }
@@ -2103,7 +2161,7 @@
         // matching.
         if ((outputHapticChannelCount >= hapticChannelCount && format == outputDesc->getFormat() &&
              samplingRate == outputDesc->getSamplingRate()) ||
-            (hapticChannelCount == 0 && hasOrphanHaptic)) {
+            (outputHapticChannelCount != 0 && hasOrphanHaptic)) {
             currentMatchCriteria[0] = outputHapticChannelCount;
         }
 
@@ -2130,7 +2188,14 @@
 
         // sampling rate match
         if (samplingRate > SAMPLE_RATE_HZ_DEFAULT) {
-            currentMatchCriteria[4] = outputDesc->getSamplingRate();
+            int diff;  // avoid unsigned integer overflow.
+            __builtin_sub_overflow(outputDesc->getSamplingRate(), samplingRate, &diff);
+
+            // Prefer the closest output sampling rate greater than or equal to the target;
+            // if none exists, prefer the closest output sampling rate below the target.
+            //
+            // The criteria value is offset to keep it non-negative.
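+            // e.g. for a 44100 Hz target: an output at 44100 scores 200'000'000, one at 48000
+            // scores 199'996'100, and one at 22050 scores 99'977'950.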
+            currentMatchCriteria[4] = diff >= 0 ? -diff + 200'000'000 : diff + 100'000'000;
         }
 
         // performance flags match
@@ -2177,6 +2242,20 @@
     ALOGV("startOutput() output %d, stream %d, session %d",
           outputDesc->mIoHandle, client->stream(), client->session());
 
+    if (com::android::media::audioserver::fix_concurrent_playback_behavior_with_bit_perfect_client()
+            && gHighPriorityUseCases.count(client->attributes().usage) != 0
+            && outputDesc->isBitPerfect()) {
+        // Usually, APM selects bit-perfect output for high priority use cases only when
+        // bit-perfect output is the only output that can be routed to the selected device.
+        // However, there is no need to play high priority use cases such as ringtone and alarm
+        // on the bit-perfect path. Reopen the output and return DEAD_OBJECT so that the client
+        // can attach to the new output.
+        ALOGD("%s: reopen bit-perfect output as high priority use case(%d) is starting",
+              __func__, client->stream());
+        reopenOutput(outputDesc, nullptr /*config*/, AUDIO_OUTPUT_FLAG_NONE, __func__);
+        return DEAD_OBJECT;
+    }
+
     status_t status = outputDesc->start();
     if (status != NO_ERROR) {
         return status;
@@ -2195,7 +2274,6 @@
                 ALOGE("%s unable to open output with default config", __func__);
                 return status;
             }
-            desc->mUsePreferredMixerAttributes = true;
         }
         return status;
     }
@@ -2219,14 +2297,13 @@
                 if (desc == nullptr) {
                     return BAD_VALUE;
                 }
-                desc->mUsePreferredMixerAttributes = true;
+                desc->mPreferredAttrInfo = info;
                 // Intentionally return error to let the client side resending request for
                 // creating and starting.
                 return DEAD_OBJECT;
             }
             info->increaseActiveClient();
-            if (info->getActiveClientCount() == 1 &&
-                (info->getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE) {
+            if (info->getActiveClientCount() == 1 && info->isBitPerfect()) {
                 // If it is first bit-perfect client, reroute all clients that will be routed to
                 // the bit-perfect sink so that it is guaranteed only bit-perfect stream is active.
                 PortHandleVector clientsToInvalidate;
@@ -2256,6 +2333,15 @@
         usleep(delayMs * 1000);
     }
 
+    if (status == NO_ERROR &&
+        outputDesc->mPreferredAttrInfo != nullptr &&
+        outputDesc->isBitPerfect() &&
+        com::android::media::audioserver::
+                fix_concurrent_playback_behavior_with_bit_perfect_client()) {
+        // A new client has started on the bit-perfect output; update all clients' internal mute.
+        updateClientsInternalMute(outputDesc);
+    }
+
     return status;
 }
 
@@ -2360,6 +2446,11 @@
              followsSameRouting(clientAttr, attributes_initializer(AUDIO_USAGE_NOTIFICATION)) ||
              (beaconMuteLatency > 0));
         uint32_t waitMs = beaconMuteLatency;
+        const bool needToCloseBitPerfectOutput =
+                (com::android::media::audioserver::
+                        fix_concurrent_playback_behavior_with_bit_perfect_client() &&
+                gHighPriorityUseCases.count(clientAttr.usage) != 0);
+        std::vector<sp<SwAudioOutputDescriptor>> outputsToReopen;
         for (size_t i = 0; i < mOutputs.size(); i++) {
             sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
             if (desc != outputDesc) {
@@ -2396,15 +2487,22 @@
                 // Note restoring AudioTracks onto this output needs to invoke
                 // a volume ramp if there is no mute.
                 requiresMuteCheck |= sharedDevice && isActive;
+
+                if (needToCloseBitPerfectOutput && desc->isBitPerfect()) {
+                    outputsToReopen.push_back(desc);
+                }
             }
         }
 
-        if (outputDesc->mUsePreferredMixerAttributes && devices != outputDesc->devices()) {
+        if (outputDesc->mPreferredAttrInfo != nullptr && devices != outputDesc->devices()) {
             // If the output is open with preferred mixer attributes, but the routed device is
             // changed when calling this function, returning DEAD_OBJECT to indicate routing
             // changed.
             return DEAD_OBJECT;
         }
+        for (auto& outputToReopen : outputsToReopen) {
+            reopenOutput(outputToReopen, nullptr /*config*/, AUDIO_OUTPUT_FLAG_NONE, __func__);
+        }
         const uint32_t muteWaitMs =
                 setOutputDevices(__func__, outputDesc, devices, force, 0, nullptr,
                                  requiresMuteCheck);
@@ -2483,7 +2581,7 @@
                                     getAudioDeviceOutLeAudioUnicastSet()).isEmpty()))) {
                 DeviceVector newDevices = getNewOutputDevices(desc, false /*fromCache*/);
                 bool force = desc->devices() != newDevices;
-                if (desc->mUsePreferredMixerAttributes && force) {
+                if (desc->mPreferredAttrInfo != nullptr && force) {
                     // If the device is using preferred mixer attributes, the output need to reopen
                     // with default configuration when the new selected devices are different from
                     // current routing devices.
@@ -2531,12 +2629,21 @@
     if (outputDesc->devices().size() == 1) {
         sp<PreferredMixerAttributesInfo> info = getPreferredMixerAttributesInfo(
                 outputDesc->devices()[0]->getId(), client->strategy());
+        bool outputReopened = false;
         if (info != nullptr && info->getUid() == client->uid()) {
             info->decreaseActiveClient();
             if (info->getActiveClientCount() == 0) {
                 reopenOutput(outputDesc, nullptr /*config*/, AUDIO_OUTPUT_FLAG_NONE, __func__);
+                outputReopened = true;
             }
         }
+        if (com::android::media::audioserver::
+                    fix_concurrent_playback_behavior_with_bit_perfect_client() &&
+            !outputReopened && outputDesc->isBitPerfect()) {
+            // Only need to update the clients' internal mute when the output is bit-perfect and it
+            // is not reopened.
+            updateClientsInternalMute(outputDesc);
+        }
     }
     return status;
 }
@@ -2608,7 +2715,7 @@
                     DeviceVector newDevices2 = getNewOutputDevices(desc, false /*fromCache*/);
                     bool force = desc->devices() != newDevices2;
 
-                    if (desc->mUsePreferredMixerAttributes && force) {
+                    if (desc->mPreferredAttrInfo != nullptr && force) {
                         // If the device is using preferred mixer attributes, the output need to
                         // reopen with default configuration when the new selected devices are
                         // different from current routing devices.
@@ -3267,6 +3374,27 @@
     }
 }
 
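+// Records (or clears, when disabled) the stream whose volume curve drives absolute volume for
+// the given device type; the attributes derived from streamToDriveAbs are kept in
+// mAbsoluteVolumeDrivingStreams and consulted when computing volumes for that device.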
+status_t AudioPolicyManager::setDeviceAbsoluteVolumeEnabled(audio_devices_t deviceType,
+                                                            const char *address __unused,
+                                                            bool enabled,
+                                                            audio_stream_type_t streamToDriveAbs)
+{
+    audio_attributes_t attributesToDriveAbs = mEngine->getAttributesForStreamType(streamToDriveAbs);
+    if (attributesToDriveAbs == AUDIO_ATTRIBUTES_INITIALIZER) {
+        ALOGW("%s: no attributes for stream %s, bailing out", __func__,
+              toString(streamToDriveAbs).c_str());
+        return BAD_VALUE;
+    }
+
+    if (enabled) {
+        mAbsoluteVolumeDrivingStreams[deviceType] = attributesToDriveAbs;
+    } else {
+        mAbsoluteVolumeDrivingStreams.erase(deviceType);
+    }
+
+    return NO_ERROR;
+}
+
 void AudioPolicyManager::initStreamVolume(audio_stream_type_t stream, int indexMin, int indexMax)
 {
     ALOGV("initStreamVolume() stream %d, min %d, max %d", stream , indexMin, indexMax);
@@ -4100,7 +4228,7 @@
             // preventing the force re-routing in case of default dev that distinguishes on address.
             // Let's give back to engine full device choice decision however.
             bool forceRouting = !newDevices.isEmpty();
-            if (outputDesc->mUsePreferredMixerAttributes && newDevices != outputDesc->devices()) {
+            if (outputDesc->mPreferredAttrInfo != nullptr && newDevices != outputDesc->devices()) {
                 // If the device is using preferred mixer attributes, the output need to reopen
                 // with default configuration when the new selected devices are different from
                 // current routing devices.
@@ -4380,6 +4508,13 @@
 
     dst->appendFormat("\nPolicy Engine dump:\n");
     mEngine->dump(dst);
+
+    dst->appendFormat("\nAbsolute volume devices with driving streams:\n");
+    for (const auto& it : mAbsoluteVolumeDrivingStreams) {
+        dst->appendFormat("   - device type: %s, driving volume group %d\n",
+                          dumpDeviceTypes({it.first}).c_str(),
+                          mEngine->getVolumeGroupForAttributes(it.second));
+    }
 }
 
 status_t AudioPolicyManager::dump(int fd)
@@ -4701,7 +4836,7 @@
         const auto output = mOutputs.valueAt(i);
         if (output->mProfile == profile && output->devices().onlyContainsDevice(deviceDescriptor)) {
             if (output->isConfigurationMatched(mixerAttributes->config, flags)) {
-                output->mUsePreferredMixerAttributes = true;
+                output->mPreferredAttrInfo = mixerAttrInfo;
             } else {
                 for (const auto &client: output->getActiveClients()) {
                     if (client->uid() == uid && client->strategy() == strategy) {
@@ -4723,7 +4858,7 @@
             ALOGE("%s, failed to reopen output with preferred mixer attributes", __func__);
             continue;
         }
-        desc->mUsePreferredMixerAttributes = true;
+        desc->mPreferredAttrInfo = mixerAttrInfo;
     }
 
     return NO_ERROR;
@@ -4739,8 +4874,7 @@
     }
     if (activeBitPerfectPreferred) {
         for (auto [strategy, info] : it->second) {
-            if ((info->getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE
-                && info->getActiveClientCount() != 0) {
+            if (info->isBitPerfect() && info->getActiveClientCount() != 0) {
                 return info;
             }
         }
@@ -5540,7 +5674,7 @@
             invalidateStreams(mEngine->getStreamTypesForProductStrategy(ps));
         } else {
             DeviceVector newDevices = getNewOutputDevices(outputDesc, false /*fromCache*/);
-            if (outputDesc->mUsePreferredMixerAttributes && outputDesc->devices() != newDevices) {
+            if (outputDesc->mPreferredAttrInfo != nullptr && outputDesc->devices() != newDevices) {
                 // If the device is using preferred mixer attributes, the output need to reopen
                 // with default configuration when the new selected devices are different from
                 // current routing devices.
@@ -6110,6 +6244,34 @@
     return true;
 }
 
+// The Spatializer output is compatible with haptic use cases if:
+// 1. the Spatializer output thread supports haptics, and the format/sample rate match the
+//    client's when the client's haptic channel bits are set, or
+// 2. the Spatializer output thread does not support haptics, and the client did not request
+//    haptics, either by setting haptic channel bits or by creating a HapticGenerator effect
+//    for the same session.
+bool AudioPolicyManager::checkHapticCompatibilityOnSpatializerOutput(
+        const audio_config_t* config, audio_session_t sessionId) const {
+    const auto clientHapticChannel =
+            audio_channel_count_from_out_mask(config->channel_mask & AUDIO_CHANNEL_HAPTIC_ALL);
+    const auto threadOutputHapticChannel = audio_channel_count_from_out_mask(
+            mSpatializerOutput->getChannelMask() & AUDIO_CHANNEL_HAPTIC_ALL);
+
+    if (threadOutputHapticChannel) {
+        // Check that the format and sample rate match when the client haptic channel mask exists.
+        if (clientHapticChannel) {
+            return mSpatializerOutput->getFormat() == config->format &&
+                   mSpatializerOutput->getSamplingRate() == config->sample_rate;
+        }
+        return true;
+    } else {
+        // When the Spatializer output channel mask has no haptic channel bits, haptic use cases
+        // (a client channel mask that includes haptic bits, or a HapticGenerator effect created
+        // for this session) are not supported.
+        return clientHapticChannel == 0 &&
+               !mEffects.hasOrphanEffectsForSessionAndType(sessionId, FX_IID_HAPTICGENERATOR);
+    }
+}
+
 void AudioPolicyManager::checkVirtualizerClientRoutes() {
     std::set<audio_stream_type_t> streamsToInvalidate;
     for (size_t i = 0; i < mOutputs.size(); i++) {
@@ -6494,6 +6656,15 @@
         if ((desc->mFlags & AUDIO_OUTPUT_FLAG_SPATIALIZER) != 0
                 && !isOutputOnlyAvailableRouteToSomeDevice(desc)) {
             outputsClosed.push_back(desc->mIoHandle);
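+            // Release the audio patch attached to this spatializer output and notify listeners
+            // before closing it.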
+            nextAudioPortGeneration();
+            ssize_t index = mAudioPatches.indexOfKey(desc->getPatchHandle());
+            if (index >= 0) {
+                sp<AudioPatch> patchDesc = mAudioPatches.valueAt(index);
+                (void) /*status_t status*/ mpClientInterface->releaseAudioPatch(
+                            patchDesc->getAfHandle(), 0);
+                mAudioPatches.removeItemsAt(index);
+                mpClientInterface->onAudioPatchListUpdate();
+            }
             desc->close();
         }
     }
@@ -6874,8 +7045,7 @@
         closingOutput->stop();
     }
     closingOutput->close();
-    if ((closingOutput->getFlags().output & AUDIO_OUTPUT_FLAG_BIT_PERFECT)
-            == AUDIO_OUTPUT_FLAG_BIT_PERFECT) {
+    if (closingOutput->isBitPerfect()) {
         for (const auto device : closingOutput->devices()) {
             device->setPreferredConfig(nullptr);
         }
@@ -6907,6 +7077,10 @@
             setMsdOutputPatches();
         }
     }
+
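+    // The output is closing: reset the active client tracking on its preferred mixer
+    // attributes info.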
+    if (closingOutput->mPreferredAttrInfo != nullptr) {
+        closingOutput->mPreferredAttrInfo->resetActiveClient();
+    }
 }
 
 void AudioPolicyManager::closeInput(audio_io_handle_t input)
@@ -7840,14 +8014,57 @@
     return nullptr;
 }
 
+float AudioPolicyManager::adjustDeviceAttenuationForAbsVolume(IVolumeCurves &curves,
+                                                              VolumeSource volumeSource,
+                                                              int index,
+                                                              const DeviceTypeSet &deviceTypes)
+{
+    audio_devices_t volumeDevice = Volume::getDeviceForVolume(deviceTypes);
+    device_category deviceCategory = Volume::getDeviceCategory({volumeDevice});
+    float volumeDb = curves.volIndexToDb(deviceCategory, index);
+
+    if (com_android_media_audio_abs_volume_index_fix()) {
+        if (mAbsoluteVolumeDrivingStreams.find(volumeDevice) !=
+            mAbsoluteVolumeDrivingStreams.end()) {
+            audio_attributes_t attributesToDriveAbs = mAbsoluteVolumeDrivingStreams[volumeDevice];
+            auto groupToDriveAbs = mEngine->getVolumeGroupForAttributes(attributesToDriveAbs);
+            if (groupToDriveAbs == VOLUME_GROUP_NONE) {
+                ALOGD("%s: no group matching with %s", __FUNCTION__,
+                      toString(attributesToDriveAbs).c_str());
+                return volumeDb;
+            }
+
+            float volumeDbMax = curves.volIndexToDb(deviceCategory, curves.getVolumeIndexMax());
+            VolumeSource vsToDriveAbs = toVolumeSource(groupToDriveAbs);
+            if (vsToDriveAbs == volumeSource) {
+                // attenuation is applied by the abs volume controller
+                return volumeDbMax;
+            } else {
+                IVolumeCurves &curvesAbs = getVolumeCurves(vsToDriveAbs);
+                int indexAbs = curvesAbs.getVolumeIndex({volumeDevice});
+                float volumeDbAbs = curvesAbs.volIndexToDb(deviceCategory, indexAbs);
+                float volumeDbAbsMax = curvesAbs.volIndexToDb(deviceCategory,
+                                                              curvesAbs.getVolumeIndexMax());
+                float newVolumeDb = fminf(volumeDb + volumeDbAbsMax - volumeDbAbs, volumeDbMax);
+                ALOGV("%s: abs vol stream %d with attenuation %f is adjusting stream %d from "
+                      "attenuation %f to attenuation %f (max %f)", __func__, vsToDriveAbs,
+                      volumeDbAbs, volumeSource, volumeDb, newVolumeDb, volumeDbMax);
+                return newVolumeDb;
+            }
+        }
+        return volumeDb;
+    } else {
+        return volumeDb;
+    }
+}
+
 float AudioPolicyManager::computeVolume(IVolumeCurves &curves,
                                         VolumeSource volumeSource,
                                         int index,
                                         const DeviceTypeSet& deviceTypes,
                                         bool computeInternalInteraction)
 {
-    float volumeDb = curves.volIndexToDb(Volume::getDeviceCategory(deviceTypes), index);
-
+    float volumeDb = adjustDeviceAttenuationForAbsVolume(curves, volumeSource, index, deviceTypes);
     ALOGV("%s volume source %d, index %d,  devices %s, compute internal %b ", __func__,
           volumeSource, index, dumpDeviceTypes(deviceTypes).c_str(), computeInternalInteraction);
 
@@ -8562,7 +8779,11 @@
     }
 
     addOutput(output, desc);
-
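+    // Apply device routing to the newly added output.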
+    setOutputDevices(__func__, desc, devices, true /*force*/, 0 /*delayMs*/, nullptr);
     sp<DeviceDescriptor> speaker = mAvailableOutputDevices.getDevice(
             AUDIO_DEVICE_OUT_SPEAKER, String8(""), AUDIO_FORMAT_DEFAULT);
 
@@ -8760,4 +8981,60 @@
     mpClientInterface->invalidateTracks(clients);
 }
 
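+// On a bit-perfect output, system sounds are muted internally; if any non-system,
+// non-bit-perfect client is active, the bit-perfect client itself is muted internally.
+// Mute states that changed are pushed to audioflinger via setTracksInternalMute().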
+void AudioPolicyManager::updateClientsInternalMute(
+        const sp<android::SwAudioOutputDescriptor> &desc) {
+    if (!desc->isBitPerfect() ||
+        !com::android::media::audioserver::
+                fix_concurrent_playback_behavior_with_bit_perfect_client()) {
+        // This is currently only used for bit-perfect outputs.
+        return;
+    }
+    sp<TrackClientDescriptor> bitPerfectClient = nullptr;
+    bool bitPerfectClientInternalMute = false;
+    std::vector<media::TrackInternalMuteInfo> clientsInternalMute;
+    for (const sp<TrackClientDescriptor>& client : desc->getActiveClients()) {
+        if ((client->flags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE) {
+            bitPerfectClient = client;
+            continue;
+        }
+        bool muted = false;
+        if (client->stream() == AUDIO_STREAM_SYSTEM) {
+            // System sound is muted.
+            muted = true;
+        } else {
+            bitPerfectClientInternalMute = true;
+        }
+        if (client->setInternalMute(muted)) {
+            auto result = legacy2aidl_audio_port_handle_t_int32_t(client->portId());
+            if (!result.ok()) {
+                ALOGE("%s, failed to convert port id(%d) to aidl", __func__, client->portId());
+                continue;
+            }
+            media::TrackInternalMuteInfo info;
+            info.portId = result.value();
+            info.muted = client->getInternalMute();
+            clientsInternalMute.push_back(std::move(info));
+        }
+    }
+    if (bitPerfectClient != nullptr &&
+        bitPerfectClient->setInternalMute(bitPerfectClientInternalMute)) {
+        auto result = legacy2aidl_audio_port_handle_t_int32_t(bitPerfectClient->portId());
+        if (result.ok()) {
+            media::TrackInternalMuteInfo info;
+            info.portId = result.value();
+            info.muted = bitPerfectClient->getInternalMute();
+            clientsInternalMute.push_back(std::move(info));
+        } else {
+            ALOGE("%s, failed to convert port id(%d) of bit perfect client to aidl",
+                  __func__, bitPerfectClient->portId());
+        }
+    }
+    if (!clientsInternalMute.empty()) {
+        if (status_t status = mpClientInterface->setTracksInternalMute(clientsInternalMute);
+                status != NO_ERROR) {
+            ALOGE("%s, failed to update tracks internal mute, err=%d", __func__, status);
+        }
+    }
+}
+
 } // namespace android
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 50e8ed8..585f982 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -103,7 +103,7 @@
         virtual status_t setDeviceConnectionState(audio_policy_dev_state_t state,
                 const android::media::audio::common::AudioPort& port, audio_format_t encodedFormat);
         virtual audio_policy_dev_state_t getDeviceConnectionState(audio_devices_t device,
-                                                                              const char *device_address);
+                                                                  const char *device_address);
         virtual status_t handleDeviceConfigChange(audio_devices_t device,
                                                   const char *device_address,
                                                   const char *device_name,
@@ -151,6 +151,10 @@
         virtual status_t stopInput(audio_port_handle_t portId);
         virtual void releaseInput(audio_port_handle_t portId);
         virtual void checkCloseInputs();
+        virtual status_t setDeviceAbsoluteVolumeEnabled(audio_devices_t deviceType,
+                                                        const char *address,
+                                                        bool enabled,
+                                                        audio_stream_type_t streamToDriveAbs);
         /**
          * @brief initStreamVolume: even if the engine volume files provides min and max, keep this
          * api for compatibility reason.
@@ -1383,6 +1387,20 @@
 
         PortHandleVector getClientsForStream(audio_stream_type_t streamType) const;
         void invalidateStreams(StreamTypeVector streams) const;
+
+        bool checkHapticCompatibilityOnSpatializerOutput(const audio_config_t* config,
+                                                         audio_session_t sessionId) const;
+
+        void updateClientsInternalMute(const sp<SwAudioOutputDescriptor>& desc);
+
+        float adjustDeviceAttenuationForAbsVolume(IVolumeCurves &curves,
+                                                  VolumeSource volumeSource,
+                                                  int index,
+                                                  const DeviceTypeSet &deviceTypes);
+
+        // For each device type that supports absolute volume, the audio attributes
+        // corresponding to the stream that is driving the volume changes.
+        std::unordered_map<audio_devices_t, audio_attributes_t> mAbsoluteVolumeDrivingStreams;
 };
 
 };
diff --git a/services/audiopolicy/service/AudioPolicyClientImpl.cpp b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
index 6de71a3..f70dc52 100644
--- a/services/audiopolicy/service/AudioPolicyClientImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
@@ -352,4 +352,13 @@
     return af->getAudioMixPort(devicePort, port);
 }
 
+status_t AudioPolicyService::AudioPolicyClient::setTracksInternalMute(
+        const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) {
+    sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
+    if (af == nullptr) {
+        return PERMISSION_DENIED;
+    }
+    return af->setTracksInternalMute(tracksInternalMute);
+}
+
 } // namespace android
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 106fbf6..8ba2814 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -1022,6 +1022,32 @@
     return Status::ok();
 }
 
+Status AudioPolicyService::setDeviceAbsoluteVolumeEnabled(const AudioDevice& deviceAidl,
+                                                          bool enabled,
+                                                          AudioStreamType streamToDriveAbsAidl) {
+    audio_stream_type_t streamToDriveAbs = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioStreamType_audio_stream_type_t(streamToDriveAbsAidl));
+    audio_devices_t deviceType;
+    std::string address;
+    RETURN_BINDER_STATUS_IF_ERROR(
+            aidl2legacy_AudioDevice_audio_device(deviceAidl, &deviceType, &address));
+
+    if (mAudioPolicyManager == nullptr) {
+        return binderStatusFromStatusT(NO_INIT);
+    }
+    if (!settingsAllowed()) {
+        return binderStatusFromStatusT(PERMISSION_DENIED);
+    }
+    if (uint32_t(streamToDriveAbs) >= AUDIO_STREAM_PUBLIC_CNT) {
+        return binderStatusFromStatusT(BAD_VALUE);
+    }
+    audio_utils::lock_guard _l(mMutex);
+    AutoCallerClear acc;
+    return binderStatusFromStatusT(
+            mAudioPolicyManager->setDeviceAbsoluteVolumeEnabled(deviceType, address.c_str(),
+                                                                enabled, streamToDriveAbs));
+}
+
 Status AudioPolicyService::initStreamVolume(AudioStreamType streamAidl,
                                             int32_t indexMinAidl,
                                             int32_t indexMaxAidl) {
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index cd010ab..73c4a15 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -79,6 +79,7 @@
 BINDER_METHOD_ENTRY(startInput) \
 BINDER_METHOD_ENTRY(stopInput) \
 BINDER_METHOD_ENTRY(releaseInput) \
+BINDER_METHOD_ENTRY(setDeviceAbsoluteVolumeEnabled) \
 BINDER_METHOD_ENTRY(initStreamVolume) \
 BINDER_METHOD_ENTRY(setStreamVolumeIndex) \
 BINDER_METHOD_ENTRY(getStreamVolumeIndex) \
@@ -868,6 +869,8 @@
 //            OR client has CAPTURE_AUDIO_OUTPUT privileged permission
 //    OR the client is the current InputMethodService
 //        AND a RTT call is active AND the source is VOICE_RECOGNITION
+//    OR The client is an active communication owner
+//        AND is on TOP or latest started
 //    OR Any client
 //        AND The assistant is not on TOP
 //        AND is on TOP or latest started
@@ -1032,7 +1035,12 @@
         bool isTopOrLatestAssistant = latestActiveAssistant == nullptr ? false :
             current->attributionSource.uid == latestActiveAssistant->attributionSource.uid;
 
-        auto canCaptureIfInCallOrCommunication = [&](const auto &recordClient) REQUIRES(mMutex) {
+        // TODO: b/339112720
+        // Refine this logic when we have the correct phone state owner UID. The current issue is
+        // that when a VoIP app uses the Telecom API to manage calls, mPhoneStateOwnerUid is
+        // AID_SYSTEM instead of the actual VoIP app UID, so isPhoneStateOwnerActive is not
+        // accurate here.
+        const bool canCaptureIfInCallOrCommunication = [&](const auto& recordClient) REQUIRES(
+                                                               mMutex) {
             uid_t recordUid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(
                 recordClient->attributionSource.uid));
             bool canCaptureCall = recordClient->canCaptureOutput;
@@ -1041,19 +1049,26 @@
                 || recordUid == mPhoneStateOwnerUid;
             return !(isInCall && !canCaptureCall)
                 && !(isInCommunication && !canCaptureCommunication);
-        };
+        }(current);
 
         // By default allow capture if:
         //     The assistant is not on TOP
-        //     AND is on TOP or latest started
-        //     AND there is no active privacy sensitive capture or call
+        //         AND is on TOP or latest started
+        //         AND there is no active privacy sensitive capture or call
         //             OR client has CAPTURE_AUDIO_OUTPUT privileged permission
-        bool allowSensitiveCapture =
+        //     OR the assistant is on TOP
+        //         AND is the ongoing communication owner
+        //         AND is on TOP or latest started
+        const bool allowSensitiveCapture =
             !isSensitiveActive || isTopOrLatestSensitive || current->canCaptureOutput;
-        bool allowCapture = !isAssistantOnTop
-                && (isTopOrLatestActive || isTopOrLatestSensitive)
-                && allowSensitiveCapture
-                && canCaptureIfInCallOrCommunication(current);
+        bool allowCapture = false;
+        if (!isAssistantOnTop) {
+            allowCapture = (isTopOrLatestActive || isTopOrLatestSensitive) &&
+                           allowSensitiveCapture && canCaptureIfInCallOrCommunication;
+        } else {
+            allowCapture = isInCommunication && isTopOrLatestSensitive &&
+                           canCaptureIfInCallOrCommunication;
+        }
 
         if (!current->hasOp()) {
             // Never allow capture if app op is denied
@@ -1076,7 +1091,7 @@
                     allowCapture = true;
                 }
             } else if (allowSensitiveCapture
-                    && canCaptureIfInCallOrCommunication(current)) {
+                    && canCaptureIfInCallOrCommunication) {
                 if (isTopOrLatestAssistant
                     && (source == AUDIO_SOURCE_VOICE_RECOGNITION
                         || source == AUDIO_SOURCE_HOTWORD)) {
@@ -1097,7 +1112,7 @@
                     allowCapture = true;
                 }
             } else if (allowSensitiveCapture
-                        && canCaptureIfInCallOrCommunication(current)) {
+                        && canCaptureIfInCallOrCommunication) {
                 if ((source == AUDIO_SOURCE_VOICE_RECOGNITION) || (source == AUDIO_SOURCE_HOTWORD))
                 {
                     allowCapture = true;
@@ -1112,7 +1127,7 @@
             //         Is on TOP AND the source is VOICE_RECOGNITION or HOTWORD
             if (!isAssistantOnTop
                     && allowSensitiveCapture
-                    && canCaptureIfInCallOrCommunication(current)) {
+                    && canCaptureIfInCallOrCommunication) {
                 allowCapture = true;
             }
             if (isA11yOnTop) {
@@ -1126,7 +1141,7 @@
             //     AND no call is active
             //         OR client has CAPTURE_AUDIO_OUTPUT privileged permission
             if (onlyHotwordActive
-                    && canCaptureIfInCallOrCommunication(current)) {
+                    && canCaptureIfInCallOrCommunication) {
                 allowCapture = true;
             }
         } else if (mUidPolicy->isCurrentImeUid(currentUid)) {
@@ -1310,6 +1325,7 @@
         case TRANSACTION_setPhoneState:
 //FIXME: Allow setForceUse calls from system apps until a better use case routing API is available
 //      case TRANSACTION_setForceUse:
+        case TRANSACTION_setDeviceAbsoluteVolumeEnabled:
         case TRANSACTION_initStreamVolume:
         case TRANSACTION_setStreamVolumeIndex:
         case TRANSACTION_setVolumeIndexForAttributes:
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index abc1dca..41b28c5 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -125,6 +125,9 @@
     binder::Status startInput(int32_t portId) override;
     binder::Status stopInput(int32_t portId) override;
     binder::Status releaseInput(int32_t portId) override;
+    binder::Status setDeviceAbsoluteVolumeEnabled(const AudioDevice& device,
+                                                  bool enabled,
+                                                  AudioStreamType streamToDriveAbs) override;
     binder::Status initStreamVolume(AudioStreamType stream, int32_t indexMin,
                                     int32_t indexMax) override;
     binder::Status setStreamVolumeIndex(AudioStreamType stream,
@@ -879,6 +882,9 @@
         status_t getAudioMixPort(const struct audio_port_v7 *devicePort,
                                  struct audio_port_v7 *port) override;
 
+        status_t setTracksInternalMute(
+                const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) override;
+
      private:
         AudioPolicyService *mAudioPolicyService;
     };
diff --git a/services/audiopolicy/tests/Android.bp b/services/audiopolicy/tests/Android.bp
index fc349ee..dc61115 100644
--- a/services/audiopolicy/tests/Android.bp
+++ b/services/audiopolicy/tests/Android.bp
@@ -40,6 +40,7 @@
     static_libs: [
         "android.media.audiopolicy-aconfig-cc",
         "audioclient-types-aidl-cpp",
+        "com.android.media.audioserver-aconfig-cc",
         "libaudiopolicycomponents",
         "libflagtest",
         "libgmock",
diff --git a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
index 7ef0266..30894c1 100644
--- a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
@@ -221,6 +221,15 @@
         return NO_ERROR;
     }
 
+    status_t setTracksInternalMute(
+            const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) override {
+        for (const auto& trackInternalMute : tracksInternalMute) {
+            mTracksInternalMute[(audio_port_handle_t)trackInternalMute.portId] =
+                    trackInternalMute.muted;
+        }
+        return NO_ERROR;
+    }
+
     void addSupportedFormat(audio_format_t format) {
         mSupportedFormats.insert(format);
     }
@@ -229,6 +238,11 @@
         mSupportedChannelMasks.insert(channelMask);
     }
 
+    bool getTrackInternalMute(audio_port_handle_t portId) {
+        auto it = mTracksInternalMute.find(portId);
+        return it == mTracksInternalMute.end() ? false : it->second;
+    }
+
 private:
     audio_module_handle_t mNextModuleHandle = AUDIO_MODULE_HANDLE_NONE + 1;
     audio_io_handle_t mNextIoHandle = AUDIO_IO_HANDLE_NONE + 1;
@@ -241,6 +255,7 @@
     std::vector<struct audio_port_v7> mDisconnectedDevicePorts;
     std::set<audio_format_t> mSupportedFormats;
     std::set<audio_channel_mask_t> mSupportedChannelMasks;
+    std::map<audio_port_handle_t, bool> mTracksInternalMute;
 };
 
 } // namespace android
diff --git a/services/audiopolicy/tests/AudioPolicyTestClient.h b/services/audiopolicy/tests/AudioPolicyTestClient.h
index e55e935..c15adcb 100644
--- a/services/audiopolicy/tests/AudioPolicyTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyTestClient.h
@@ -110,6 +110,11 @@
                              struct audio_port_v7 *mixPort __unused) override {
         return INVALID_OPERATION;
     }
+
+    status_t setTracksInternalMute(
+            const std::vector<media::TrackInternalMuteInfo>& /*tracksInternalMute*/) override {
+        return INVALID_OPERATION;
+    }
 };
 
 } // namespace android
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index f40a7d0..4f9e98e 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -29,6 +29,7 @@
 #include <android-base/properties.h>
 #include <android/content/AttributionSourceState.h>
 #include <android_media_audiopolicy.h>
+#include <com_android_media_audioserver.h>
 #include <flag_macros.h>
 #include <hardware/audio_effect.h>
 #include <media/AudioPolicy.h>
@@ -478,8 +479,8 @@
         MsdAudioPatchCount,
         AudioPolicyManagerTestMsd,
         ::testing::Values(
-                MsdAudioPatchCountSpecification(1u, "single"),
-                MsdAudioPatchCountSpecification(2u, "dual")
+                MsdAudioPatchCountSpecification(2u, "single"),
+                MsdAudioPatchCountSpecification(3u, "dual")
         ),
         [](const ::testing::TestParamInfo<MsdAudioPatchCountSpecification> &info) {
                 return std::get<MSD_AUDIO_PATCH_COUNT_NAME_INDEX>(info.param); }
@@ -506,7 +507,7 @@
     mConfig->addDevice(mMsdOutputDevice);
     mConfig->addDevice(mMsdInputDevice);
 
-    if (mExpectedAudioPatchCount == 2) {
+    if (mExpectedAudioPatchCount == 3) {
         // Add SPDIF device with PCM output profile as a second device for dual MSD audio patching.
         mSpdifDevice = new DeviceDescriptor(AUDIO_DEVICE_OUT_SPDIF);
         mSpdifDevice->addAudioProfile(pcmOutputProfile);
@@ -559,7 +560,7 @@
             addOutputProfile(primaryEncodedOutputProfile);
 
     mDefaultOutputDevice = mConfig->getDefaultOutputDevice();
-    if (mExpectedAudioPatchCount == 2) {
+    if (mExpectedAudioPatchCount == 3) {
         mSpdifDevice->addAudioProfile(dtsOutputProfile);
         primaryEncodedOutputProfile->addSupportedDevice(mSpdifDevice);
     }
@@ -608,7 +609,7 @@
     const PatchCountCheck patchCount = snapshotPatchCount();
     mManager->setForceUse(AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND,
             AUDIO_POLICY_FORCE_ENCODED_SURROUND_ALWAYS);
-    ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
+    ASSERT_EQ(mExpectedAudioPatchCount - 1, patchCount.deltaFromSnapshot());
 }
 
 TEST_P(AudioPolicyManagerTestMsd, PatchCreationSetReleaseMsdOutputPatches) {
@@ -616,15 +617,15 @@
     DeviceVector devices = mManager->getAvailableOutputDevices();
     // Remove MSD output device to avoid patching to itself
     devices.remove(mMsdOutputDevice);
-    ASSERT_EQ(mExpectedAudioPatchCount, devices.size());
+    ASSERT_EQ(mExpectedAudioPatchCount - 1, devices.size());
     mManager->setMsdOutputPatches(&devices);
-    ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
+    ASSERT_EQ(mExpectedAudioPatchCount - 1, patchCount.deltaFromSnapshot());
     // Dual patch: exercise creating one new audio patch and reusing another existing audio patch.
     DeviceVector singleDevice(devices[0]);
     mManager->releaseMsdOutputPatches(singleDevice);
-    ASSERT_EQ(mExpectedAudioPatchCount - 1, patchCount.deltaFromSnapshot());
+    ASSERT_EQ(mExpectedAudioPatchCount - 2, patchCount.deltaFromSnapshot());
     mManager->setMsdOutputPatches(&devices);
-    ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
+    ASSERT_EQ(mExpectedAudioPatchCount - 1, patchCount.deltaFromSnapshot());
     mManager->releaseMsdOutputPatches(devices);
     ASSERT_EQ(0, patchCount.deltaFromSnapshot());
 }
@@ -644,7 +645,7 @@
     getOutputForAttr(&selectedDeviceId,
             AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, k48000SamplingRate);
     ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
-    ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
+    ASSERT_EQ(mExpectedAudioPatchCount - 1, patchCount.deltaFromSnapshot());
 }
 
 TEST_P(AudioPolicyManagerTestMsd, GetOutputForAttrEncodedPlusPcmRoutesToMsd) {
@@ -667,7 +668,7 @@
     getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_DTS, AUDIO_CHANNEL_OUT_5POINT1,
             k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT);
     ASSERT_NE(selectedDeviceId, mMsdOutputDevice->getId());
-    ASSERT_EQ(0, patchCount.deltaFromSnapshot());
+    ASSERT_EQ(1, patchCount.deltaFromSnapshot());
 }
 
 TEST_P(AudioPolicyManagerTestMsd, GetOutputForAttrFormatSwitching) {
@@ -681,7 +682,7 @@
         ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
         ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
         mManager->releaseOutput(portId);
-        ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
+        ASSERT_EQ(mExpectedAudioPatchCount - 1, patchCount.deltaFromSnapshot());
     }
     {
         const PatchCountCheck patchCount = snapshotPatchCount();
@@ -690,7 +691,7 @@
         getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_DTS, AUDIO_CHANNEL_OUT_5POINT1,
                 k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, nullptr /*output*/, &portId);
         ASSERT_NE(selectedDeviceId, mMsdOutputDevice->getId());
-        ASSERT_EQ(-static_cast<int>(mExpectedAudioPatchCount), patchCount.deltaFromSnapshot());
+        ASSERT_EQ(-static_cast<int>(mExpectedAudioPatchCount) + 2, patchCount.deltaFromSnapshot());
         mManager->releaseOutput(portId);
         ASSERT_EQ(0, patchCount.deltaFromSnapshot());
     }
@@ -700,7 +701,7 @@
         getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1,
                 k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT);
         ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
-        ASSERT_EQ(0, patchCount.deltaFromSnapshot());
+        ASSERT_EQ(1, patchCount.deltaFromSnapshot());
     }
 }
 
@@ -1155,129 +1156,6 @@
                                                            "", "", AUDIO_FORMAT_LDAC));
 }
 
-TEST_F(AudioPolicyManagerTestWithConfigurationFile, BitPerfectPlayback) {
-    const audio_format_t bitPerfectFormat = AUDIO_FORMAT_PCM_16_BIT;
-    const audio_channel_mask_t bitPerfectChannelMask = AUDIO_CHANNEL_OUT_QUAD;
-    const uint32_t bitPerfectSampleRate = 48000;
-    mClient->addSupportedFormat(bitPerfectFormat);
-    mClient->addSupportedChannelMask(bitPerfectChannelMask);
-    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
-                                                           AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
-                                                           "", "", AUDIO_FORMAT_DEFAULT));
-    auto devices = mManager->getAvailableOutputDevices();
-    audio_port_handle_t usbPortId = AUDIO_PORT_HANDLE_NONE;
-    for (auto device : devices) {
-        if (device->type() == AUDIO_DEVICE_OUT_USB_DEVICE) {
-            usbPortId = device->getId();
-            break;
-        }
-    }
-    EXPECT_NE(AUDIO_PORT_HANDLE_NONE, usbPortId);
-
-    const uid_t uid = 1234;
-    const uid_t anotherUid = 5678;
-    const audio_attributes_t mediaAttr = {
-            .content_type = AUDIO_CONTENT_TYPE_MUSIC,
-            .usage = AUDIO_USAGE_MEDIA,
-    };
-
-    std::vector<audio_mixer_attributes_t> mixerAttributes;
-    EXPECT_EQ(NO_ERROR, mManager->getSupportedMixerAttributes(usbPortId, mixerAttributes));
-    EXPECT_GT(mixerAttributes.size(), 0);
-    size_t bitPerfectIndex = 0;
-    for (; bitPerfectIndex < mixerAttributes.size(); ++bitPerfectIndex) {
-        if (mixerAttributes[bitPerfectIndex].mixer_behavior == AUDIO_MIXER_BEHAVIOR_BIT_PERFECT) {
-            break;
-        }
-    }
-    EXPECT_LT(bitPerfectIndex, mixerAttributes.size());
-    EXPECT_EQ(bitPerfectFormat, mixerAttributes[bitPerfectIndex].config.format);
-    EXPECT_EQ(bitPerfectChannelMask, mixerAttributes[bitPerfectIndex].config.channel_mask);
-    EXPECT_EQ(bitPerfectSampleRate, mixerAttributes[bitPerfectIndex].config.sample_rate);
-    EXPECT_EQ(NO_ERROR,
-              mManager->setPreferredMixerAttributes(
-                      &mediaAttr, usbPortId, uid, &mixerAttributes[bitPerfectIndex]));
-
-    audio_io_handle_t bitPerfectOutput = AUDIO_IO_HANDLE_NONE;
-    audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
-    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
-    audio_port_handle_t bitPerfectPortId = AUDIO_PORT_HANDLE_NONE;
-    audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
-    bool isBitPerfect;
-
-    // When there is no active bit-perfect playback, the output selection will follow default
-    // routing strategy.
-    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
-            48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr, AUDIO_SESSION_NONE,
-            uid, &isBitPerfect);
-    EXPECT_FALSE(isBitPerfect);
-    EXPECT_NE(AUDIO_IO_HANDLE_NONE, output);
-    const auto outputDesc = mManager->getOutputs().valueFor(output);
-    EXPECT_NE(nullptr, outputDesc);
-    EXPECT_NE(AUDIO_OUTPUT_FLAG_BIT_PERFECT, outputDesc->mFlags & AUDIO_OUTPUT_FLAG_BIT_PERFECT);
-
-    // Start bit-perfect playback
-    getOutputForAttr(&selectedDeviceId, bitPerfectFormat, bitPerfectChannelMask,
-            bitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &bitPerfectOutput, &bitPerfectPortId,
-            mediaAttr, AUDIO_SESSION_NONE, uid, &isBitPerfect);
-    status_t status = mManager->startOutput(bitPerfectPortId);
-    if (status == DEAD_OBJECT) {
-        getOutputForAttr(&selectedDeviceId, bitPerfectFormat, bitPerfectChannelMask,
-                bitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &bitPerfectOutput, &bitPerfectPortId,
-                mediaAttr, AUDIO_SESSION_NONE, uid, &isBitPerfect);
-        status = mManager->startOutput(bitPerfectPortId);
-    }
-    EXPECT_EQ(NO_ERROR, status);
-    EXPECT_TRUE(isBitPerfect);
-    EXPECT_NE(AUDIO_IO_HANDLE_NONE, bitPerfectOutput);
-    const auto bitPerfectOutputDesc = mManager->getOutputs().valueFor(bitPerfectOutput);
-    EXPECT_NE(nullptr, bitPerfectOutputDesc);
-    EXPECT_EQ(AUDIO_OUTPUT_FLAG_BIT_PERFECT,
-              bitPerfectOutputDesc->mFlags & AUDIO_OUTPUT_FLAG_BIT_PERFECT);
-
-    // If the playback is from preferred mixer attributes owner but the request doesn't match
-    // preferred mixer attributes, it will not be bit-perfect.
-    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
-            48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr, AUDIO_SESSION_NONE,
-            uid, &isBitPerfect);
-    EXPECT_FALSE(isBitPerfect);
-    EXPECT_EQ(bitPerfectOutput, output);
-
-    // When bit-perfect playback is active, all other playback will be routed to bit-perfect output.
-    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
-            48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr, AUDIO_SESSION_NONE,
-            anotherUid, &isBitPerfect);
-    EXPECT_FALSE(isBitPerfect);
-    EXPECT_EQ(bitPerfectOutput, output);
-
-    const audio_attributes_t dtmfAttr = {
-            .content_type = AUDIO_CONTENT_TYPE_UNKNOWN,
-            .usage = AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
-    };
-    audio_io_handle_t dtmfOutput = AUDIO_IO_HANDLE_NONE;
-    selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
-    portId = AUDIO_PORT_HANDLE_NONE;
-    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
-            48000, AUDIO_OUTPUT_FLAG_NONE, &dtmfOutput, &portId, dtmfAttr,
-            AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
-    EXPECT_FALSE(isBitPerfect);
-    EXPECT_EQ(bitPerfectOutput, dtmfOutput);
-
-    // When configuration matches preferred mixer attributes, which is bit-perfect, but the client
-    // is not the owner of preferred mixer attributes, the playback will not be bit-perfect.
-    getOutputForAttr(&selectedDeviceId, bitPerfectFormat, bitPerfectChannelMask,
-            bitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr,
-            AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
-    EXPECT_FALSE(isBitPerfect);
-    EXPECT_EQ(bitPerfectOutput, output);
-
-    EXPECT_EQ(NO_ERROR,
-              mManager->clearPreferredMixerAttributes(&mediaAttr, usbPortId, uid));
-    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
-                                                           AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
-                                                           "", "", AUDIO_FORMAT_LDAC));
-}
-
 TEST_F(AudioPolicyManagerTestWithConfigurationFile, PreferExactConfigForInput) {
     const audio_channel_mask_t deviceChannelMask = AUDIO_CHANNEL_IN_3POINT1;
     mClient->addSupportedFormat(AUDIO_FORMAT_PCM_16_BIT);
@@ -3612,3 +3490,314 @@
     // unregister effect should succeed since effect shall have been restore on the client session
     ASSERT_EQ(NO_ERROR, mManager->unregisterEffect(effectId));
 }
+
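+// Test fixture that connects a USB output device, locates its bit-perfect mixer attributes
+// and registers them as the preferred mixer attributes for sMediaAttr and mUid.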
+class AudioPolicyManagerTestBitPerfectBase : public AudioPolicyManagerTestWithConfigurationFile {
+protected:
+    void SetUp() override;
+    void TearDown() override;
+
+    void startBitPerfectOutput();
+    void reset();
+    void getBitPerfectOutput(status_t expected);
+
+    const audio_format_t mBitPerfectFormat = AUDIO_FORMAT_PCM_16_BIT;
+    const audio_channel_mask_t mBitPerfectChannelMask = AUDIO_CHANNEL_OUT_STEREO;
+    const uint32_t mBitPerfectSampleRate = 48000;
+    const uid_t mUid = 1234;
+    audio_port_handle_t mUsbPortId = AUDIO_PORT_HANDLE_NONE;
+
+    audio_io_handle_t mBitPerfectOutput = AUDIO_IO_HANDLE_NONE;
+    audio_port_handle_t mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_port_handle_t mBitPerfectPortId = AUDIO_PORT_HANDLE_NONE;
+
+    static constexpr audio_attributes_t sMediaAttr = {
+            .content_type = AUDIO_CONTENT_TYPE_MUSIC,
+            .usage = AUDIO_USAGE_MEDIA,
+    };
+};
+
+void AudioPolicyManagerTestBitPerfectBase::SetUp() {
+    ASSERT_NO_FATAL_FAILURE(AudioPolicyManagerTestWithConfigurationFile::SetUp());
+
+    mClient->addSupportedFormat(mBitPerfectFormat);
+    mClient->addSupportedChannelMask(mBitPerfectChannelMask);
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+                                                           AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+                                                           "", "", AUDIO_FORMAT_DEFAULT));
+    auto devices = mManager->getAvailableOutputDevices();
+    mUsbPortId = AUDIO_PORT_HANDLE_NONE;
+    for (auto device : devices) {
+        if (device->type() == AUDIO_DEVICE_OUT_USB_DEVICE) {
+            mUsbPortId = device->getId();
+            break;
+        }
+    }
+    EXPECT_NE(AUDIO_PORT_HANDLE_NONE, mUsbPortId);
+
+    std::vector<audio_mixer_attributes_t> mixerAttributes;
+    EXPECT_EQ(NO_ERROR, mManager->getSupportedMixerAttributes(mUsbPortId, mixerAttributes));
+    EXPECT_GT(mixerAttributes.size(), 0);
+    size_t bitPerfectIndex = 0;
+    for (; bitPerfectIndex < mixerAttributes.size(); ++bitPerfectIndex) {
+        if (mixerAttributes[bitPerfectIndex].mixer_behavior == AUDIO_MIXER_BEHAVIOR_BIT_PERFECT) {
+            break;
+        }
+    }
+    EXPECT_LT(bitPerfectIndex, mixerAttributes.size());
+    EXPECT_EQ(mBitPerfectFormat, mixerAttributes[bitPerfectIndex].config.format);
+    EXPECT_EQ(mBitPerfectChannelMask, mixerAttributes[bitPerfectIndex].config.channel_mask);
+    EXPECT_EQ(mBitPerfectSampleRate, mixerAttributes[bitPerfectIndex].config.sample_rate);
+    EXPECT_EQ(NO_ERROR,
+              mManager->setPreferredMixerAttributes(
+                      &sMediaAttr, mUsbPortId, mUid, &mixerAttributes[bitPerfectIndex]));
+}
+
+void AudioPolicyManagerTestBitPerfectBase::TearDown() {
+    EXPECT_EQ(NO_ERROR,
+              mManager->clearPreferredMixerAttributes(&sMediaAttr, mUsbPortId, mUid));
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+                                                           AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+                                                           "", "", AUDIO_FORMAT_LDAC));
+
+    ASSERT_NO_FATAL_FAILURE(AudioPolicyManagerTestWithConfigurationFile::TearDown());
+}
+
+void AudioPolicyManagerTestBitPerfectBase::startBitPerfectOutput() {
+    reset();
+    bool isBitPerfect;
+
+    getOutputForAttr(&mSelectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+                     mBitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &mBitPerfectOutput,
+                     &mBitPerfectPortId, sMediaAttr, AUDIO_SESSION_NONE, mUid, &isBitPerfect);
+    status_t status = mManager->startOutput(mBitPerfectPortId);
+    if (status == DEAD_OBJECT) {
+        getOutputForAttr(&mSelectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+                         mBitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &mBitPerfectOutput,
+                         &mBitPerfectPortId, sMediaAttr, AUDIO_SESSION_NONE, mUid, &isBitPerfect);
+        status = mManager->startOutput(mBitPerfectPortId);
+    }
+    EXPECT_EQ(NO_ERROR, status);
+    EXPECT_TRUE(isBitPerfect);
+    EXPECT_NE(AUDIO_IO_HANDLE_NONE, mBitPerfectOutput);
+    const auto bitPerfectOutputDesc = mManager->getOutputs().valueFor(mBitPerfectOutput);
+    EXPECT_NE(nullptr, bitPerfectOutputDesc);
+    EXPECT_EQ(AUDIO_OUTPUT_FLAG_BIT_PERFECT,
+              bitPerfectOutputDesc->mFlags & AUDIO_OUTPUT_FLAG_BIT_PERFECT);
+}
+
+void AudioPolicyManagerTestBitPerfectBase::reset() {
+    mBitPerfectOutput = AUDIO_IO_HANDLE_NONE;
+    mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    mBitPerfectPortId = AUDIO_PORT_HANDLE_NONE;
+}
+
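+// Requests an output with AUDIO_OUTPUT_FLAG_BIT_PERFECT for the preferred mixer configuration
+// and verifies that getOutputForAttr returns the expected status.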
+void AudioPolicyManagerTestBitPerfectBase::getBitPerfectOutput(status_t expected) {
+    reset();
+    audio_stream_type_t stream = AUDIO_STREAM_DEFAULT;
+    AttributionSourceState attributionSource = createAttributionSourceState(mUid);
+    audio_config_t config = AUDIO_CONFIG_INITIALIZER;
+    config.sample_rate = mBitPerfectSampleRate;
+    config.channel_mask = mBitPerfectChannelMask;
+    config.format = mBitPerfectFormat;
+    audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_BIT_PERFECT;
+    AudioPolicyInterface::output_type_t outputType;
+    bool isSpatialized;
+    bool isBitPerfect;
+    EXPECT_EQ(expected,
+              mManager->getOutputForAttr(&sMediaAttr, &mBitPerfectOutput, AUDIO_SESSION_NONE,
+                                         &stream, attributionSource, &config, &flags,
+                                         &mSelectedDeviceId, &mBitPerfectPortId, {}, &outputType,
+                                         &isSpatialized, &isBitPerfect));
+}
+
+class AudioPolicyManagerTestBitPerfect : public AudioPolicyManagerTestBitPerfectBase {
+};
+
+TEST_F(AudioPolicyManagerTestBitPerfect, UseBitPerfectOutput) {
+    const uid_t anotherUid = 5678;
+    audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
+    bool isBitPerfect;
+
+    // When there is no active bit-perfect playback, the output selection will follow default
+    // routing strategy.
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_QUAD,
+                     48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, sMediaAttr,
+                     AUDIO_SESSION_NONE, mUid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_NE(AUDIO_IO_HANDLE_NONE, output);
+    const auto outputDesc = mManager->getOutputs().valueFor(output);
+    EXPECT_NE(nullptr, outputDesc);
+    EXPECT_NE(AUDIO_OUTPUT_FLAG_BIT_PERFECT, outputDesc->mFlags & AUDIO_OUTPUT_FLAG_BIT_PERFECT);
+
+    // Start bit-perfect playback
+    ASSERT_NO_FATAL_FAILURE(startBitPerfectOutput());
+
+    // If the playback is from preferred mixer attributes owner but the request doesn't match
+    // preferred mixer attributes, it will not be bit-perfect.
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_QUAD,
+                     48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, sMediaAttr,
+                     AUDIO_SESSION_NONE, mUid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(mBitPerfectOutput, output);
+
+    // When bit-perfect playback is active, all other playback will be routed to the bit-perfect
+    // output.
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+                     48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, sMediaAttr,
+                     AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(mBitPerfectOutput, output);
+
+    // When bit-perfect playback is active, DTMF will also be routed to the bit-perfect output.
+    const audio_attributes_t dtmfAttr = {
+            .content_type = AUDIO_CONTENT_TYPE_UNKNOWN,
+            .usage = AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
+    };
+    audio_io_handle_t dtmfOutput = AUDIO_IO_HANDLE_NONE;
+    selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    portId = AUDIO_PORT_HANDLE_NONE;
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+                     48000, AUDIO_OUTPUT_FLAG_NONE, &dtmfOutput, &portId, dtmfAttr,
+                     AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(mBitPerfectOutput, dtmfOutput);
+
+    // When the configuration matches the preferred mixer attributes, which are bit-perfect, but
+    // the client is not the owner of those attributes, the playback will not be bit-perfect.
+    getOutputForAttr(&selectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+                     mBitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, sMediaAttr,
+                     AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(mBitPerfectOutput, output);
+}
+
+TEST_F_WITH_FLAGS(
+        AudioPolicyManagerTestBitPerfect,
+        InternalMuteWhenBitPerfectClientIsActive,
+        REQUIRES_FLAGS_ENABLED(
+                ACONFIG_FLAG(com::android::media::audioserver,
+                             fix_concurrent_playback_behavior_with_bit_perfect_client))
+) {
+    ASSERT_NO_FATAL_FAILURE(startBitPerfectOutput());
+
+    // When bit-perfect playback is active, the system sound will be routed to the bit-perfect
+    // output. In this case, the system sound will be muted internally while the bit-perfect
+    // client will be played normally.
+    const uint32_t anotherSampleRate = 44100;
+    audio_port_handle_t systemSoundPortId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t systemSoundOutput = AUDIO_IO_HANDLE_NONE;
+    const audio_attributes_t systemSoundAttr = {
+            .content_type = AUDIO_CONTENT_TYPE_SONIFICATION,
+            .usage = AUDIO_USAGE_ASSISTANCE_SONIFICATION,
+    };
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    bool isBitPerfect;
+    getOutputForAttr(&selectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+                     anotherSampleRate, AUDIO_OUTPUT_FLAG_NONE, &systemSoundOutput,
+                     &systemSoundPortId, systemSoundAttr, AUDIO_SESSION_NONE, mUid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(mBitPerfectOutput, systemSoundOutput);
+    EXPECT_EQ(NO_ERROR, mManager->startOutput(systemSoundPortId));
+    EXPECT_TRUE(mClient->getTrackInternalMute(systemSoundPortId));
+    EXPECT_FALSE(mClient->getTrackInternalMute(mBitPerfectPortId));
+    EXPECT_EQ(NO_ERROR, mManager->stopOutput(systemSoundPortId));
+    EXPECT_FALSE(mClient->getTrackInternalMute(mBitPerfectPortId));
+
+    // When bit-perfect playback is active, the notification will be routed to the bit-perfect
+    // output. The notification sound will be played normally while the bit-perfect client will
+    // be muted internally.
+    audio_port_handle_t notificationPortId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t notificationOutput = AUDIO_IO_HANDLE_NONE;
+    const audio_attributes_t notificationAttr = {
+            .content_type = AUDIO_CONTENT_TYPE_SONIFICATION,
+            .usage = AUDIO_USAGE_NOTIFICATION,
+    };
+    getOutputForAttr(&selectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+                     anotherSampleRate, AUDIO_OUTPUT_FLAG_NONE, &notificationOutput,
+                     &notificationPortId, notificationAttr, AUDIO_SESSION_NONE, mUid,
+                     &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(mBitPerfectOutput, notificationOutput);
+    EXPECT_EQ(NO_ERROR, mManager->startOutput(notificationPortId));
+    EXPECT_FALSE(mClient->getTrackInternalMute(notificationPortId));
+    EXPECT_TRUE(mClient->getTrackInternalMute(mBitPerfectPortId));
+    EXPECT_EQ(NO_ERROR, mManager->stopOutput(notificationPortId));
+    EXPECT_FALSE(mClient->getTrackInternalMute(mBitPerfectPortId));
+
+    EXPECT_EQ(NO_ERROR, mManager->stopOutput(mBitPerfectPortId));
+}
+
+class AudioPolicyManagerTestBitPerfectPhoneMode : public AudioPolicyManagerTestBitPerfectBase,
+        public testing::WithParamInterface<audio_mode_t> {
+};
+
+TEST_P(AudioPolicyManagerTestBitPerfectPhoneMode, RejectBitPerfectWhenPhoneModeIsNotNormal) {
+    if (!com::android::media::audioserver::
+            fix_concurrent_playback_behavior_with_bit_perfect_client()) {
+        GTEST_SKIP()
+                << "Flag fix_concurrent_playback_behavior_with_bit_perfect_client is not enabled";
+    }
+
+    ASSERT_NO_FATAL_FAILURE(startBitPerfectOutput());
+
+    audio_mode_t mode = GetParam();
+    mManager->setPhoneState(mode);
+    // When the phone mode is not normal, the bit-perfect output will be reopened, so the old
+    // output handle is no longer valid.
+    EXPECT_EQ(nullptr, mManager->getOutputs().valueFor(mBitPerfectOutput));
+
+    // When the phone mode is not normal, a new bit-perfect request will be rejected.
+    ASSERT_NO_FATAL_FAILURE(getBitPerfectOutput(INVALID_OPERATION));
+
+    mManager->setPhoneState(AUDIO_MODE_NORMAL);
+}
+
+INSTANTIATE_TEST_CASE_P(
+        PhoneMode,
+        AudioPolicyManagerTestBitPerfectPhoneMode,
+        testing::Values(AUDIO_MODE_IN_CALL,
+                        AUDIO_MODE_RINGTONE,
+                        AUDIO_MODE_IN_COMMUNICATION,
+                        AUDIO_MODE_CALL_SCREEN)
+);
+
+class AudioPolicyManagerTestBitPerfectHigherPriorityUseCaseActive :
+        public AudioPolicyManagerTestBitPerfectBase,
+        public testing::WithParamInterface<audio_usage_t> {
+};
+
+TEST_P(AudioPolicyManagerTestBitPerfectHigherPriorityUseCaseActive,
+       RejectBitPerfectWhenHigherPriorityUseCaseIsActive) {
+    if (!com::android::media::audioserver::
+                fix_concurrent_playback_behavior_with_bit_perfect_client()) {
+        GTEST_SKIP()
+                << "Flag fix_concurrent_playback_behavior_with_bit_perfect_client is not enabled";
+    }
+
+    ASSERT_NO_FATAL_FAILURE(startBitPerfectOutput());
+
+    audio_attributes_t attr = {
+            .usage = GetParam(),
+            .content_type = AUDIO_CONTENT_TYPE_UNKNOWN
+    };
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(
+            getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+                   48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, attr));
+    EXPECT_NE(mBitPerfectOutput, output);
+    EXPECT_EQ(NO_ERROR, mManager->startOutput(portId));
+    // When a high priority use case is active, the bit-perfect output will be closed.
+    EXPECT_EQ(nullptr, mManager->getOutputs().valueFor(mBitPerfectOutput));
+
+    // When any higher priority use case is active, the bit-perfect request will be rejected.
+    ASSERT_NO_FATAL_FAILURE(getBitPerfectOutput(INVALID_OPERATION));
+}
+
+INSTANTIATE_TEST_CASE_P(
+        HigherPriorityUseCases,
+        AudioPolicyManagerTestBitPerfectHigherPriorityUseCaseActive,
+        testing::Values(AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE,
+                        AUDIO_USAGE_ALARM)
+);
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index daaeae6..38476a4 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -72,6 +72,7 @@
         "libsensorprivacy",
         "libstagefright",
         "libstagefright_foundation",
+        "libvendorsupport",
         "libxml2",
         "libyuv",
         "android.companion.virtual.virtualdevice_aidl-cpp",
@@ -87,6 +88,7 @@
         "android.hardware.common-V2-ndk",
         "android.hardware.common.fmq-V1-ndk",
         "camera_platform_flags_c_lib",
+        "com.android.window.flags.window-aconfig_flags_c_lib",
         "media_permission-aidl-cpp",
     ],
 
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index b34c268..7a2b434 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -82,10 +82,11 @@
 #include "CameraService.h"
 #include "api1/Camera2Client.h"
 #include "api2/CameraDeviceClient.h"
-#include "utils/CameraTraces.h"
-#include "utils/TagMonitor.h"
 #include "utils/CameraServiceProxyWrapper.h"
+#include "utils/CameraTraces.h"
 #include "utils/SessionConfigurationUtils.h"
+#include "utils/TagMonitor.h"
+#include "utils/Utils.h"
 
 namespace {
     const char* kPermissionServiceName = "permission";
@@ -165,6 +166,8 @@
 const std::string CameraService::kOfflineDevice("offline-");
 const std::string CameraService::kWatchAllClientsFlag("all");
 
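+// Sentinel device id used below when grouping concurrent camera combinations by owning device.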
+constexpr int32_t kInvalidDeviceId = -1;
+
 // Set to keep track of logged service error events.
 static std::set<std::string> sServiceErrorEventSet;
 
@@ -339,25 +342,6 @@
                 mappedCameraId, deviceId);
         i->handleBinderStatus(ret, "%s: Failed to trigger onTorchStatusChanged for %d:%d: %d",
                 __FUNCTION__, i->getListenerUid(), i->getListenerPid(), ret.exceptionCode());
-
-        // Only cameras of the default device can be remapped to a different camera (using
-        // remapCameraIds method), so do the following only if the camera is associated with the
-        // default device.
-        if (deviceId == kDefaultDeviceId) {
-            // For the default device, also trigger the torch callbacks for cameras that were
-            // remapped to the current cameraId for the specific package that this listener belongs
-            // to.
-            std::vector<std::string> remappedCameraIds =
-                    findOriginalIdsForRemappedCameraId(cameraId, i->getListenerUid());
-            for (auto &remappedCameraId: remappedCameraIds) {
-                ret = i->getListener()->onTorchStatusChanged(mapToInterface(status),
-                        remappedCameraId, kDefaultDeviceId);
-                i->handleBinderStatus(ret,
-                        "%s: Failed to trigger onTorchStatusChanged for %d:%d: %d",
-                        __FUNCTION__, i->getListenerUid(), i->getListenerPid(),
-                        ret.exceptionCode());
-            }
-        }
     }
 }
 
@@ -379,7 +363,8 @@
         if (vd_flags::camera_device_awareness()) {
             CameraMetadata cameraInfo;
             status_t res = mCameraProviderManager->getCameraCharacteristics(
-                    cameraId, false, &cameraInfo, false);
+                    cameraId, false, &cameraInfo,
+                    hardware::ICameraService::ROTATION_OVERRIDE_NONE);
             int32_t deviceId = kDefaultDeviceId;
             if (res != OK) {
                 ALOGW("%s: Not able to get camera characteristics for camera id %s",
@@ -442,8 +427,9 @@
         int facing = -1;
         int orientation = 0;
         int portraitRotation;
-        getDeviceVersion(cameraId, /*overrideToPortrait*/false, /*out*/&portraitRotation,
-                /*out*/&facing, /*out*/&orientation);
+        getDeviceVersion(cameraId,
+                /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+                /*out*/&portraitRotation, /*out*/&facing, /*out*/&orientation);
         if (facing == -1) {
             ALOGE("%s: Unable to get camera device \"%s\" facing", __FUNCTION__, cameraId.c_str());
             return;
@@ -775,7 +761,7 @@
 
     CameraMetadata cameraInfo;
     status_t res = mCameraProviderManager->getCameraCharacteristics(
-            cam_id, false, &cameraInfo, false);
+            cam_id, false, &cameraInfo, hardware::ICameraService::ROTATION_OVERRIDE_NONE);
     if (res != OK){
         ALOGE("%s: Not able to get camera characteristics for camera id %s",__FUNCTION__,
                 cam_id.c_str());
@@ -834,24 +820,6 @@
     return Status::ok();
 }
 
-Status CameraService::remapCameraIds(const hardware::CameraIdRemapping& cameraIdRemapping) {
-    if (!checkCallingPermission(toString16(sCameraInjectExternalCameraPermission))) {
-        const int pid = getCallingPid();
-        const int uid = getCallingUid();
-        ALOGE("%s: Permission Denial: can't configure camera ID mapping pid=%d, uid=%d",
-                __FUNCTION__, pid, uid);
-        return STATUS_ERROR(ERROR_PERMISSION_DENIED,
-                "Permission Denial: no permission to configure camera id mapping");
-    }
-    TCameraIdRemapping cameraIdRemappingMap{};
-    binder::Status parseStatus = parseCameraIdRemapping(cameraIdRemapping, &cameraIdRemappingMap);
-    if (!parseStatus.isOk()) {
-        return parseStatus;
-    }
-    remapCameraIds(cameraIdRemappingMap);
-    return Status::ok();
-}
-
 Status CameraService::createDefaultRequest(const std::string& unresolvedCameraId, int templateId,
         int32_t deviceId, int32_t devicePolicy,
         /* out */
@@ -869,7 +837,7 @@
     }
 
     std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
-            devicePolicy, getCallingUid());
+            devicePolicy);
     if (!cameraIdOptional.has_value()) {
         std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
                 unresolvedCameraId.c_str(), deviceId);
@@ -932,7 +900,7 @@
     }
 
     std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
-            devicePolicy, getCallingUid());
+            devicePolicy);
     if (!cameraIdOptional.has_value()) {
         std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
                 unresolvedCameraId.c_str(), deviceId);
@@ -1008,7 +976,7 @@
 }
 
 Status CameraService::getSessionCharacteristics(const std::string& unresolvedCameraId,
-        int targetSdkVersion, bool overrideToPortrait,
+        int targetSdkVersion, int rotationOverride,
         const SessionConfiguration& sessionConfiguration, int32_t deviceId, int32_t devicePolicy,
         /*out*/ CameraMetadata* outMetadata) {
     ATRACE_CALL();
@@ -1027,7 +995,7 @@
     }
 
     std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
-                                                                  devicePolicy, getCallingUid());
+                                                                  devicePolicy);
     if (!cameraIdOptional.has_value()) {
         std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
                                        unresolvedCameraId.c_str(), deviceId);
@@ -1065,7 +1033,7 @@
     }
 
     status_t ret = mCameraProviderManager->getSessionCharacteristics(
-            cameraId, sessionConfiguration, overrideForPerfClass, overrideToPortrait, outMetadata);
+            cameraId, sessionConfiguration, overrideForPerfClass, rotationOverride, outMetadata);
 
     switch (ret) {
         case OK:
@@ -1166,120 +1134,6 @@
     return Status::ok();
 }
 
-Status CameraService::parseCameraIdRemapping(
-        const hardware::CameraIdRemapping& cameraIdRemapping,
-        /* out */ TCameraIdRemapping* cameraIdRemappingMap) {
-    std::string packageName;
-    std::string cameraIdToReplace, updatedCameraId;
-    for (const auto& packageIdRemapping: cameraIdRemapping.packageIdRemappings) {
-        packageName = packageIdRemapping.packageName;
-        if (packageName.empty()) {
-            return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT,
-                    "CameraIdRemapping: Package name cannot be empty");
-        }
-        if (packageIdRemapping.cameraIdsToReplace.size()
-            != packageIdRemapping.updatedCameraIds.size()) {
-            return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
-                    "CameraIdRemapping: Mismatch in CameraId Remapping lists sizes for package %s",
-                    packageName.c_str());
-        }
-        for (size_t i = 0; i < packageIdRemapping.cameraIdsToReplace.size(); i++) {
-            cameraIdToReplace = packageIdRemapping.cameraIdsToReplace[i];
-            updatedCameraId = packageIdRemapping.updatedCameraIds[i];
-            if (cameraIdToReplace.empty() || updatedCameraId.empty()) {
-                return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
-                        "CameraIdRemapping: Camera Id cannot be empty for package %s",
-                        packageName.c_str());
-            }
-            if (cameraIdToReplace == updatedCameraId) {
-                return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
-                        "CameraIdRemapping: CameraIdToReplace cannot be the same"
-                        " as updatedCameraId for %s",
-                        packageName.c_str());
-            }
-
-            // Do not allow any camera remapping that involves a virtual camera.
-            auto [deviceIdForCameraToReplace, _] =
-                    mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(
-                            cameraIdToReplace);
-            if (deviceIdForCameraToReplace != kDefaultDeviceId) {
-                return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT,
-                        "CameraIdRemapping: CameraIdToReplace cannot be a virtual camera");
-            }
-            [[maybe_unused]] auto [deviceIdForUpdatedCamera, unusedMappedCameraId] =
-                    mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(updatedCameraId);
-            if (deviceIdForUpdatedCamera != kDefaultDeviceId) {
-                return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT,
-                        "CameraIdRemapping: UpdatedCameraId cannot be a virtual camera");
-            }
-
-            (*cameraIdRemappingMap)[packageName][cameraIdToReplace] = updatedCameraId;
-        }
-    }
-    return Status::ok();
-}
-
-void CameraService::remapCameraIds(const TCameraIdRemapping& cameraIdRemapping) {
-    // Acquire mServiceLock and prevent other clients from connecting
-    std::unique_ptr<AutoConditionLock> serviceLockWrapper =
-            AutoConditionLock::waitAndAcquire(mServiceLockWrapper);
-
-    // Collect all existing clients for camera Ids that are being
-    // remapped in the new cameraIdRemapping, but only if they were being used by a
-    // targeted packageName.
-    std::vector<sp<BasicClient>> clientsToDisconnect;
-    std::vector<std::string> cameraIdsToUpdate;
-    for (const auto& [packageName, injectionMap] : cameraIdRemapping) {
-        for (auto& [id0, id1] : injectionMap) {
-            ALOGI("%s: UPDATE:= %s: %s: %s", __FUNCTION__, packageName.c_str(),
-                    id0.c_str(), id1.c_str());
-            auto clientDescriptor = mActiveClientManager.get(id0);
-            if (clientDescriptor != nullptr) {
-                sp<BasicClient> clientSp = clientDescriptor->getValue();
-                if (clientSp->getPackageName() == packageName) {
-                    // This camera is being used by a targeted packageName and
-                    // being remapped to a new camera Id. We should disconnect it.
-                    clientsToDisconnect.push_back(clientSp);
-                    cameraIdsToUpdate.push_back(id0);
-                }
-            }
-        }
-    }
-
-    for (auto& clientSp : clientsToDisconnect) {
-        // Notify the clients about the disconnection.
-        clientSp->notifyError(hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISCONNECTED,
-                CaptureResultExtras{});
-    }
-
-    // Do not hold mServiceLock while disconnecting clients, but retain the condition
-    // blocking other clients from connecting in mServiceLockWrapper if held.
-    mServiceLock.unlock();
-
-    // Clear calling identity for disconnect() PID checks.
-    int64_t token = clearCallingIdentity();
-
-    // Disconnect clients.
-    for (auto& clientSp : clientsToDisconnect) {
-        // This also triggers a call to updateStatus() which also reads mCameraIdRemapping
-        // and requires mCameraIdRemappingLock.
-        clientSp->disconnect();
-    }
-
-    // Invoke destructors (which call disconnect()) now while we don't hold the mServiceLock.
-    clientsToDisconnect.clear();
-
-    restoreCallingIdentity(token);
-    mServiceLock.lock();
-
-    {
-        Mutex::Autolock lock(mCameraIdRemappingLock);
-        // Update mCameraIdRemapping.
-        mCameraIdRemapping.clear();
-        mCameraIdRemapping.insert(cameraIdRemapping.begin(), cameraIdRemapping.end());
-    }
-}
-
 Status CameraService::injectSessionParams(
         const std::string& cameraId,
         const CameraMetadata& sessionParams) {
@@ -1320,56 +1174,10 @@
     return Status::ok();
 }
 
-std::vector<std::string> CameraService::findOriginalIdsForRemappedCameraId(
-    const std::string& inputCameraId, int clientUid) {
-    std::string packageName = getPackageNameFromUid(clientUid);
-    std::vector<std::string> cameraIds;
-    Mutex::Autolock lock(mCameraIdRemappingLock);
-    if (auto packageMapIter = mCameraIdRemapping.find(packageName);
-        packageMapIter != mCameraIdRemapping.end()) {
-        for (auto& [id0, id1]: packageMapIter->second) {
-            if (id1 == inputCameraId) {
-                cameraIds.push_back(id0);
-            }
-        }
-    }
-    return cameraIds;
-}
-
-std::string CameraService::resolveCameraId(
-    const std::string& inputCameraId,
-    int clientUid,
-    const std::string& packageName) {
-    std::string packageNameVal = packageName;
-    if (packageName.empty()) {
-        packageNameVal = getPackageNameFromUid(clientUid);
-    }
-    if (clientUid < AID_APP_START || packageNameVal.empty()) {
-        // We shouldn't remap cameras for processes with system/vendor UIDs.
-        return inputCameraId;
-    }
-    Mutex::Autolock lock(mCameraIdRemappingLock);
-    if (auto packageMapIter = mCameraIdRemapping.find(packageNameVal);
-        packageMapIter != mCameraIdRemapping.end()) {
-        auto packageMap = packageMapIter->second;
-        if (auto replacementIdIter = packageMap.find(inputCameraId);
-            replacementIdIter != packageMap.end()) {
-            ALOGI("%s: resolveCameraId: remapping cameraId %s for %s to %s",
-                    __FUNCTION__, inputCameraId.c_str(),
-                    packageNameVal.c_str(),
-                    replacementIdIter->second.c_str());
-            return replacementIdIter->second;
-        }
-    }
-    return inputCameraId;
-}
-
 std::optional<std::string> CameraService::resolveCameraId(
         const std::string& inputCameraId,
         int32_t deviceId,
-        int32_t devicePolicy,
-        int clientUid,
-        const std::string& packageName) {
+        int32_t devicePolicy) {
     if ((deviceId == kDefaultDeviceId)
             || (devicePolicy == IVirtualDeviceManagerNative::DEVICE_POLICY_DEFAULT)) {
         auto [storedDeviceId, _] =
@@ -1381,13 +1189,13 @@
             ALOGE("%s: %s", __FUNCTION__, msg.c_str());
             return std::nullopt;
         }
-        return resolveCameraId(inputCameraId, clientUid, packageName);
+        return inputCameraId;
     }
 
     return mVirtualDeviceCameraIdMapper.getActualCameraId(deviceId, inputCameraId);
 }
 
-Status CameraService::getCameraInfo(int cameraId, bool overrideToPortrait, int32_t deviceId,
+Status CameraService::getCameraInfo(int cameraId, int rotationOverride, int32_t deviceId,
         int32_t devicePolicy, CameraInfo* cameraInfo) {
     ATRACE_CALL();
     Mutex::Autolock l(mServiceLock);
@@ -1423,7 +1231,7 @@
     Status ret = Status::ok();
     int portraitRotation;
     status_t err = mCameraProviderManager->getCameraInfo(
-            cameraIdStr, overrideToPortrait, &portraitRotation, cameraInfo);
+            cameraIdStr, rotationOverride, &portraitRotation, cameraInfo);
     if (err != OK) {
         ret = STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
                 "Error retrieving camera info from device %d: %s (%d)", cameraId,
@@ -1458,8 +1266,7 @@
         return std::string{};
     }
 
-    std::string unresolvedCameraId = (*cameraIds)[cameraIdInt];
-    return resolveCameraId(unresolvedCameraId, getCallingUid());
+    return (*cameraIds)[cameraIdInt];
 }
 
 std::string CameraService::cameraIdIntToStr(int cameraIdInt, int32_t deviceId,
@@ -1469,7 +1276,7 @@
 }
 
 Status CameraService::getCameraCharacteristics(const std::string& unresolvedCameraId,
-        int targetSdkVersion, bool overrideToPortrait, int32_t deviceId, int32_t devicePolicy,
+        int targetSdkVersion, int rotationOverride, int32_t deviceId, int32_t devicePolicy,
         CameraMetadata* cameraInfo) {
     ATRACE_CALL();
 
@@ -1486,7 +1293,7 @@
     }
 
     std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
-            devicePolicy, getCallingUid());
+            devicePolicy);
     if (!cameraIdOptional.has_value()) {
         std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
                 unresolvedCameraId.c_str(), deviceId);
@@ -1504,7 +1311,7 @@
             SessionConfigurationUtils::targetPerfClassPrimaryCamera(mPerfClassPrimaryCameraIds,
                     cameraId, targetSdkVersion);
     status_t res = mCameraProviderManager->getCameraCharacteristics(
-            cameraId, overrideForPerfClass, cameraInfo, overrideToPortrait);
+            cameraId, overrideForPerfClass, cameraInfo, rotationOverride);
     if (res != OK) {
         if (res == NAME_NOT_FOUND) {
             return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Unable to retrieve camera "
@@ -1528,7 +1335,7 @@
     Mutex::Autolock l(mServiceLock);
 
     std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
-            devicePolicy, getCallingUid());
+            devicePolicy);
     if (!cameraIdOptional.has_value()) {
         std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
                 unresolvedCameraId.c_str(), deviceId);
@@ -1600,7 +1407,8 @@
 }
 
 std::pair<int, IPCTransport> CameraService::getDeviceVersion(const std::string& cameraId,
-        bool overrideToPortrait, int* portraitRotation, int* facing, int* orientation) {
+        int rotationOverride, int* portraitRotation, int* facing,
+        int* orientation) {
     ATRACE_CALL();
 
     int deviceVersion = 0;
@@ -1618,7 +1426,7 @@
 
     hardware::CameraInfo info;
     if (facing) {
-        res = mCameraProviderManager->getCameraInfo(cameraId, overrideToPortrait,
+        res = mCameraProviderManager->getCameraInfo(cameraId, rotationOverride,
                 portraitRotation, &info);
         if (res != OK) {
             return std::make_pair(-1, IPCTransport::INVALID);
@@ -1654,7 +1462,7 @@
         const std::optional<std::string>& featureId,  const std::string& cameraId,
         int api1CameraId, int facing, int sensorOrientation, int clientPid, uid_t clientUid,
         int servicePid, std::pair<int, IPCTransport> deviceVersionAndTransport,
-        apiLevel effectiveApiLevel, bool overrideForPerfClass, bool overrideToPortrait,
+        apiLevel effectiveApiLevel, bool overrideForPerfClass, int rotationOverride,
         bool forceSlowJpegMode, const std::string& originalCameraId,
         /*out*/sp<BasicClient>* client) {
     // For HIDL devices
@@ -1690,10 +1498,10 @@
         *client = new Camera2Client(cameraService, tmp, cameraService->mCameraServiceProxyWrapper,
                 cameraService->mAttributionAndPermissionUtils, packageName, featureId, cameraId,
                 api1CameraId, facing, sensorOrientation,
-                clientPid, clientUid, servicePid, overrideForPerfClass, overrideToPortrait,
+                clientPid, clientUid, servicePid, overrideForPerfClass, rotationOverride,
                 forceSlowJpegMode);
-        ALOGI("%s: Camera1 API (legacy), override to portrait %d, forceSlowJpegMode %d",
-                __FUNCTION__, overrideToPortrait, forceSlowJpegMode);
+        ALOGI("%s: Camera1 API (legacy), rotationOverride %d, forceSlowJpegMode %d",
+                __FUNCTION__, rotationOverride, forceSlowJpegMode);
     } else { // Camera2 API route
         sp<hardware::camera2::ICameraDeviceCallbacks> tmp =
                 static_cast<hardware::camera2::ICameraDeviceCallbacks*>(cameraCb.get());
@@ -1701,8 +1509,8 @@
                 cameraService->mCameraServiceProxyWrapper,
                 cameraService->mAttributionAndPermissionUtils, packageName, systemNativeClient,
                 featureId, cameraId, facing, sensorOrientation, clientPid, clientUid, servicePid,
-                overrideForPerfClass, overrideToPortrait, originalCameraId);
-        ALOGI("%s: Camera2 API, override to portrait %d", __FUNCTION__, overrideToPortrait);
+                overrideForPerfClass, rotationOverride, originalCameraId);
+        ALOGI("%s: Camera2 API, rotationOverride %d", __FUNCTION__, rotationOverride);
     }
     return Status::ok();
 }
@@ -1791,7 +1599,8 @@
             sp<ICameraClient>{nullptr}, cameraIdStr, cameraId,
             kServiceName, /*systemNativeClient*/ false, {}, uid, USE_CALLING_PID,
             API_1, /*shimUpdateOnly*/ true, /*oomScoreOffset*/ 0,
-            /*targetSdkVersion*/ __ANDROID_API_FUTURE__, /*overrideToPortrait*/ true,
+            /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
+            /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT,
             /*forceSlowJpegMode*/false, cameraIdStr, /*out*/ tmp)
             ).isOk()) {
         ALOGE("%s: Error initializing shim metadata: %s", __FUNCTION__, ret.toString8().c_str());
@@ -1812,9 +1621,7 @@
         return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, "Parameters must not be null");
     }
 
-    std::string unresolvedCameraId = std::to_string(cameraId);
-    std::string cameraIdStr = resolveCameraId(unresolvedCameraId,
-            getCallingUid());
+    std::string cameraIdStr = std::to_string(cameraId);
 
     // Check if we already have parameters
     {
@@ -2315,7 +2122,7 @@
         int clientUid,
         int clientPid,
         int targetSdkVersion,
-        bool overrideToPortrait,
+        int rotationOverride,
         bool forceSlowJpegMode,
         int32_t deviceId,
         int32_t devicePolicy,
@@ -2336,7 +2143,7 @@
     ret = connectHelper<ICameraClient,Client>(cameraClient, cameraIdStr, api1CameraId,
             clientPackageName, /*systemNativeClient*/ false, {}, clientUid, clientPid, API_1,
             /*shimUpdateOnly*/ false, /*oomScoreOffset*/ 0, targetSdkVersion,
-            overrideToPortrait, forceSlowJpegMode, cameraIdStr, /*out*/client);
+            rotationOverride, forceSlowJpegMode, cameraIdStr, /*out*/client);
 
     if (!ret.isOk()) {
         logRejected(cameraIdStr, getCallingPid(), clientPackageName, toStdString(ret.toString8()));
@@ -2419,10 +2226,11 @@
         const std::string& clientPackageName,
         const std::optional<std::string>& clientFeatureId,
         int clientUid, int oomScoreOffset, int targetSdkVersion,
-        bool overrideToPortrait, int32_t deviceId, int32_t devicePolicy,
+        int rotationOverride, int32_t deviceId, int32_t devicePolicy,
         /*out*/
         sp<hardware::camera2::ICameraDeviceUser>* device) {
     ATRACE_CALL();
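+    // RAII helper that keeps this thread at elevated (real-time) priority while the camera
+    // device connection is set up.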
+    RunThreadWithRealtimePriority priorityBump;
     Status ret = Status::ok();
     sp<CameraDeviceClient> client = nullptr;
     std::string clientPackageNameAdj = clientPackageName;
@@ -2436,7 +2244,7 @@
     }
 
     std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
-            devicePolicy, callingUid, clientPackageNameAdj);
+            devicePolicy);
     if (!cameraIdOptional.has_value()) {
         std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
                 unresolvedCameraId.c_str(), deviceId);
@@ -2483,7 +2291,7 @@
     ret = connectHelper<hardware::camera2::ICameraDeviceCallbacks,CameraDeviceClient>(cameraCb,
             cameraId, /*api1CameraId*/-1, clientPackageNameAdj, systemNativeClient, clientFeatureId,
             clientUid, USE_CALLING_PID, API_2, /*shimUpdateOnly*/ false, oomScoreOffset,
-            targetSdkVersion, overrideToPortrait, /*forceSlowJpegMode*/false, unresolvedCameraId,
+            targetSdkVersion, rotationOverride, /*forceSlowJpegMode*/false, unresolvedCameraId,
             /*out*/client);
 
     if (!ret.isOk()) {
@@ -2597,8 +2405,8 @@
         int api1CameraId, const std::string& clientPackageNameMaybe, bool systemNativeClient,
         const std::optional<std::string>& clientFeatureId, int clientUid, int clientPid,
         apiLevel effectiveApiLevel, bool shimUpdateOnly, int oomScoreOffset, int targetSdkVersion,
-        bool overrideToPortrait, bool forceSlowJpegMode, const std::string& originalCameraId,
-        /*out*/sp<CLIENT>& device) {
+        int rotationOverride, bool forceSlowJpegMode,
+        const std::string& originalCameraId, /*out*/sp<CLIENT>& device) {
     binder::Status ret = binder::Status::ok();
 
     bool isNonSystemNdk = false;
@@ -2697,7 +2505,7 @@
 
         int portraitRotation;
         auto deviceVersionAndTransport =
-                getDeviceVersion(cameraId, overrideToPortrait, /*out*/&portraitRotation,
+                getDeviceVersion(cameraId, rotationOverride, /*out*/&portraitRotation,
                         /*out*/&facing, /*out*/&orientation);
         if (facing == -1) {
             ALOGE("%s: Unable to get camera device \"%s\"  facing", __FUNCTION__, cameraId.c_str());
@@ -2708,11 +2516,12 @@
         sp<BasicClient> tmp = nullptr;
         bool overrideForPerfClass = SessionConfigurationUtils::targetPerfClassPrimaryCamera(
                 mPerfClassPrimaryCameraIds, cameraId, targetSdkVersion);
+
         if(!(ret = makeClient(this, cameraCb, clientPackageName, systemNativeClient,
                 clientFeatureId, cameraId, api1CameraId, facing,
                 orientation, clientPid, clientUid, getpid(),
                 deviceVersionAndTransport, effectiveApiLevel, overrideForPerfClass,
-                overrideToPortrait, forceSlowJpegMode, originalCameraId,
+                rotationOverride, forceSlowJpegMode, originalCameraId,
                 /*out*/&tmp)).isOk()) {
             return ret;
         }
@@ -2776,7 +2585,7 @@
         CameraMetadata chars;
         bool rotateAndCropSupported = true;
         err = mCameraProviderManager->getCameraCharacteristics(cameraId, overrideForPerfClass,
-                &chars, overrideToPortrait);
+                &chars, rotationOverride);
         if (err == OK) {
             auto availableRotateCropEntry = chars.find(
                     ANDROID_SCALER_AVAILABLE_ROTATE_AND_CROP_MODES);
@@ -2792,7 +2601,8 @@
             // Set rotate-and-crop override behavior
             if (mOverrideRotateAndCropMode != ANDROID_SCALER_ROTATE_AND_CROP_AUTO) {
                 client->setRotateAndCropOverride(mOverrideRotateAndCropMode);
-            } else if (overrideToPortrait && portraitRotation != 0) {
+            } else if (rotationOverride != hardware::ICameraService::ROTATION_OVERRIDE_NONE &&
+                    portraitRotation != 0) {
                 uint8_t rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_AUTO;
                 switch (portraitRotation) {
                     case 90:
@@ -2808,6 +2618,8 @@
                         ALOGE("Unexpected portrait rotation: %d", portraitRotation);
                         break;
                 }
+                // Communicate the chosen rotate-and-crop mode to the client so that it can be
+                // sent to the HAL.
                 client->setRotateAndCropOverride(rotateAndCropMode);
             } else {
                 client->setRotateAndCropOverride(
@@ -3016,7 +2828,7 @@
 
     int uid = getCallingUid();
     std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
-            devicePolicy, uid);
+            devicePolicy);
     if (!cameraIdOptional.has_value()) {
         std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
                 unresolvedCameraId.c_str(), deviceId);
@@ -3153,7 +2965,7 @@
 
     int uid = getCallingUid();
     std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
-            devicePolicy, uid);
+            devicePolicy);
     if (!cameraIdOptional.has_value()) {
         std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
                 unresolvedCameraId.c_str(), deviceId);
@@ -3431,7 +3243,6 @@
     return Status::ok();
 }
 
-// TODO(b/291736219): This to be made device-aware.
 Status CameraService::getConcurrentCameraIds(
         std::vector<ConcurrentCameraIdCombination>* concurrentCameraIds) {
     ATRACE_CALL();
@@ -3451,7 +3262,8 @@
     std::vector<std::unordered_set<std::string>> concurrentCameraCombinations =
             mCameraProviderManager->getConcurrentCameraIds();
     for (auto &combination : concurrentCameraCombinations) {
-        std::vector<std::string> validCombination;
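+        // Each combination entry is a (mapped camera id, owner device id) pair; cameras from
+        // different devices are never mixed within one combination.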
+        std::vector<std::pair<std::string, int32_t>> validCombination;
+        int32_t firstDeviceId = kInvalidDeviceId;
         for (auto &cameraId : combination) {
             // if the camera state is not present, skip
             auto state = getCameraState(cameraId);
@@ -3466,7 +3278,17 @@
             if (shouldRejectSystemCameraConnection(cameraId)) {
                 continue;
             }
-            validCombination.push_back(cameraId);
+            auto [cameraOwnerDeviceId, mappedCameraId] =
+                    mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(cameraId);
+            if (firstDeviceId == kInvalidDeviceId) {
+                firstDeviceId = cameraOwnerDeviceId;
+            } else if (firstDeviceId != cameraOwnerDeviceId) {
+                // Found an invalid combination that contains cameras belonging to different
+                // devices; discard it.
+                validCombination.clear();
+                break;
+            }
+            validCombination.push_back({mappedCameraId, cameraOwnerDeviceId});
         }
         if (validCombination.size() != 0) {
             concurrentCameraIds->push_back(std::move(validCombination));
@@ -3477,7 +3299,8 @@
 
 Status CameraService::isConcurrentSessionConfigurationSupported(
         const std::vector<CameraIdAndSessionConfiguration>& cameraIdsAndSessionConfigurations,
-        int targetSdkVersion, /*out*/bool* isSupported) {
+        int targetSdkVersion, int32_t deviceId, int32_t devicePolicy,
+        /*out*/bool* isSupported) {
     if (!isSupported) {
         ALOGE("%s: isSupported is NULL", __FUNCTION__);
         return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, "isSupported is NULL");
@@ -3489,12 +3312,25 @@
                 "Camera subsystem is not available");
     }
 
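+    // Resolve every camera id for the caller's device context; reject the call if any id is
+    // unknown for that device.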
+    for (auto cameraIdAndSessionConfiguration : cameraIdsAndSessionConfigurations) {
+        std::optional<std::string> cameraIdOptional =
+                resolveCameraId(cameraIdAndSessionConfiguration.mCameraId, deviceId, devicePolicy);
+        if (!cameraIdOptional.has_value()) {
+            std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
+                    cameraIdAndSessionConfiguration.mCameraId.c_str(), deviceId);
+            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+        }
+        cameraIdAndSessionConfiguration.mCameraId = cameraIdOptional.value();
+    }
+
     // Check for camera permissions
     int callingPid = getCallingPid();
     int callingUid = getCallingUid();
-    // TODO(b/291736219): Pass deviceId owning the camera if we make this method device-aware.
     bool hasCameraPermission = ((callingPid == getpid()) ||
-            hasPermissionsForCamera(callingPid, callingUid, kDefaultDeviceId));
+            hasPermissionsForCamera(callingPid, callingUid,
+                    devicePolicy == IVirtualDeviceManagerNative::DEVICE_POLICY_DEFAULT
+                        ? kDefaultDeviceId : deviceId));
     if (!hasCameraPermission) {
         return STATUS_ERROR(ERROR_PERMISSION_DENIED,
                 "android.permission.CAMERA needed to call"
@@ -3594,8 +3430,7 @@
                 [this, &isVendorListener, &clientPid, &clientUid](const hardware::CameraStatus& s) {
                         std::string cameraId = s.cameraId;
                         std::optional<std::string> cameraIdOptional = resolveCameraId(s.cameraId,
-                                s.deviceId, IVirtualDeviceManagerNative::DEVICE_POLICY_CUSTOM,
-                                clientUid);
+                                s.deviceId, IVirtualDeviceManagerNative::DEVICE_POLICY_CUSTOM);
                         if (!cameraIdOptional.has_value()) {
                             std::string msg =
                                     fmt::sprintf(
@@ -3701,13 +3536,10 @@
     return ret;
 }
 
-Status CameraService::supportsCameraApi(const std::string& unresolvedCameraId, int apiVersion,
+Status CameraService::supportsCameraApi(const std::string& cameraId, int apiVersion,
         /*out*/ bool *isSupported) {
     ATRACE_CALL();
 
-    const std::string cameraId = resolveCameraId(
-            unresolvedCameraId, getCallingUid());
-
     ALOGV("%s: for camera ID = %s", __FUNCTION__, cameraId.c_str());
 
     switch (apiVersion) {
@@ -3721,7 +3553,10 @@
     }
 
     int portraitRotation;
-    auto deviceVersionAndTransport = getDeviceVersion(cameraId, false, &portraitRotation);
+    auto deviceVersionAndTransport =
+            getDeviceVersion(cameraId,
+                    /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+                    &portraitRotation);
     if (deviceVersionAndTransport.first == -1) {
         std::string msg = fmt::sprintf("Unknown camera ID %s", cameraId.c_str());
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
@@ -3766,13 +3601,10 @@
     return Status::ok();
 }
 
-Status CameraService::isHiddenPhysicalCamera(const std::string& unresolvedCameraId,
+Status CameraService::isHiddenPhysicalCamera(const std::string& cameraId,
         /*out*/ bool *isSupported) {
     ATRACE_CALL();
 
-    const std::string cameraId = resolveCameraId(unresolvedCameraId,
-            getCallingUid());
-
     ALOGV("%s: for camera ID = %s", __FUNCTION__, cameraId.c_str());
     *isSupported = mCameraProviderManager->isHiddenPhysicalCamera(cameraId);
 
@@ -4226,14 +4058,14 @@
         const std::string& cameraIdStr,
         int api1CameraId, int cameraFacing, int sensorOrientation,
         int clientPid, uid_t clientUid,
-        int servicePid, bool overrideToPortrait) :
+        int servicePid, int rotationOverride) :
         CameraService::BasicClient(cameraService,
                 IInterface::asBinder(cameraClient),
                 attributionAndPermissionUtils,
                 clientPackageName, systemNativeClient, clientFeatureId,
                 cameraIdStr, cameraFacing, sensorOrientation,
                 clientPid, clientUid,
-                servicePid, overrideToPortrait),
+                servicePid, rotationOverride),
         mCameraId(api1CameraId)
 {
     int callingPid = getCallingPid();
@@ -4264,7 +4096,7 @@
         const std::string& clientPackageName, bool nativeClient,
         const std::optional<std::string>& clientFeatureId, const std::string& cameraIdStr,
         int cameraFacing, int sensorOrientation, int clientPid, uid_t clientUid,
-        int servicePid, bool overrideToPortrait):
+        int servicePid, int rotationOverride):
         AttributionAndPermissionUtilsEncapsulator(attributionAndPermissionUtils),
         mDestructionStarted(false),
         mCameraIdStr(cameraIdStr), mCameraFacing(cameraFacing), mOrientation(sensorOrientation),
@@ -4273,7 +4105,7 @@
         mClientPid(clientPid), mClientUid(clientUid),
         mServicePid(servicePid),
         mDisconnected(false), mUidIsTrusted(false),
-        mOverrideToPortrait(overrideToPortrait),
+        mRotationOverride(rotationOverride),
         mAudioRestriction(hardware::camera2::ICameraDeviceUser::AUDIO_RESTRICTION_NONE),
         mRemoteBinder(remoteCallback),
         mOpsActive(false),
@@ -4444,8 +4276,10 @@
         // connection has been fully established and at that time camera muting
         // capabilities are unknown.
         if (!isUidActive || !isCameraPrivacyEnabled) {
-            ALOGI("Camera %s: Access for \"%s\" has been restricted",
-                    mCameraIdStr.c_str(), mClientPackageName.c_str());
+            ALOGI("Camera %s: Access for \"%s\" has been restricted. "
+                    "uid active: %s, privacy enabled: %s", mCameraIdStr.c_str(),
+                    mClientPackageName.c_str(), isUidActive ? "true" : "false",
+                    isCameraPrivacyEnabled ? "true" : "false");
             // Return the same error as for device policy manager rejection
             return -EACCES;
         }
@@ -5112,7 +4946,7 @@
 
 bool CameraService::SensorPrivacyPolicy::isCameraPrivacyEnabled(const String16& packageName) {
     if (!hasCameraPrivacyFeature()) {
-        return SensorPrivacyManager::DISABLED;
+        return false;
     }
     return mSpm.isCameraPrivacyEnabled(packageName);
 }
@@ -5802,7 +5636,7 @@
     if (vd_flags::camera_device_awareness() && status == StatusInternal::PRESENT) {
         CameraMetadata cameraInfo;
         status_t res = mCameraProviderManager->getCameraCharacteristics(
-                cameraId, false, &cameraInfo, false);
+                cameraId, false, &cameraInfo, hardware::ICameraService::ROTATION_OVERRIDE_NONE);
         if (res != OK) {
             ALOGW("%s: Not able to get camera characteristics for camera id %s",
                   __FUNCTION__, cameraId.c_str());
@@ -5876,26 +5710,6 @@
                             "%s: Failed to trigger onStatusChanged callback for %d:%d: %d",
                             __FUNCTION__, listener->getListenerUid(), listener->getListenerPid(),
                             ret.exceptionCode());
-
-                    // Only cameras of the default device can be remapped to a different camera
-                    // (using remapCameraIds method), so do the following only if the camera is
-                    // associated with the default device.
-                    if (deviceId == kDefaultDeviceId) {
-                        // For the default device, also trigger the callbacks for cameras that were
-                        // remapped to the current cameraId for the specific package that this
-                        // listener belongs to.
-                        std::vector<std::string> remappedCameraIds =
-                                findOriginalIdsForRemappedCameraId(cameraId,
-                                        listener->getListenerUid());
-                        for (auto &remappedCameraId: remappedCameraIds) {
-                            ret = listener->getListener()->onStatusChanged(
-                                    mapToInterface(status), remappedCameraId, kDefaultDeviceId);
-                            listener->handleBinderStatus(ret,
-                                    "%s: Failed to trigger onStatusChanged callback for %d:%d: %d",
-                                    __FUNCTION__, listener->getListenerUid(),
-                                    listener->getListenerPid(), ret.exceptionCode());
-                        }
-                    }
                 }
             });
 }
@@ -6126,8 +5940,6 @@
         return handleWatchCommand(args, in, out);
     } else if (args.size() >= 2 && args[0] == toString16("set-watchdog")) {
         return handleSetCameraServiceWatchdog(args);
-    } else if (args.size() >= 4 && args[0] == toString16("remap-camera-id")) {
-        return handleCameraIdRemapping(args, err);
     } else if (args.size() == 1 && args[0] == toString16("help")) {
         printHelp(out);
         return OK;
@@ -6136,23 +5948,6 @@
     return BAD_VALUE;
 }
 
-status_t CameraService::handleCameraIdRemapping(const Vector<String16>& args, int err) {
-    uid_t uid = IPCThreadState::self()->getCallingUid();
-    if (uid != AID_ROOT) {
-        dprintf(err, "Must be adb root\n");
-        return PERMISSION_DENIED;
-    }
-    if (args.size() != 4) {
-        dprintf(err, "Expected format: remap-camera-id <PACKAGE> <Id0> <Id1>\n");
-        return BAD_VALUE;
-    }
-    std::string packageName = toStdString(args[1]);
-    std::string cameraIdToReplace = toStdString(args[2]);
-    std::string cameraIdNew = toStdString(args[3]);
-    remapCameraIds({{packageName, {{cameraIdToReplace, cameraIdNew}}}});
-    return OK;
-}
-
 status_t CameraService::handleSetUidState(const Vector<String16>& args, int err) {
     std::string packageName = toStdString(args[1]);
 
@@ -6769,7 +6564,6 @@
         "  set-watchdog <VALUE> enables or disables the camera service watchdog\n"
         "      Valid values 0=disable, 1=enable\n"
         "  watch <start|stop|dump|print|clear> manages tag monitoring in connected clients\n"
-        "  remap-camera-id <PACKAGE> <Id0> <Id1> remaps camera ids. Must use adb root\n"
         "  help print this message\n");
 }
 
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 8dbd591..9998fb8 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -21,7 +21,6 @@
 #include <android/hardware/BnCameraService.h>
 #include <android/hardware/BnSensorPrivacyListener.h>
 #include <android/hardware/ICameraServiceListener.h>
-#include <android/hardware/CameraIdRemapping.h>
 #include <android/hardware/camera2/BnCameraInjectionSession.h>
 #include <android/hardware/camera2/ICameraInjectionCallback.h>
 
@@ -153,15 +152,15 @@
     // ICameraService
     // IMPORTANT: All binder calls that deal with logicalCameraId should use
     // resolveCameraId(logicalCameraId, deviceId, devicePolicy) to arrive at the correct
-    // cameraId to perform the operation on (in case of Id Remapping, or in case of contexts
+    // cameraId to perform the operation on (in case of contexts
     // associated with virtual devices).
     virtual binder::Status     getNumberOfCameras(int32_t type, int32_t deviceId,
             int32_t devicePolicy, int32_t* numCameras);
 
-    virtual binder::Status     getCameraInfo(int cameraId, bool overrideToPortrait,
+    virtual binder::Status     getCameraInfo(int cameraId, int rotationOverride,
             int32_t deviceId, int32_t devicePolicy, hardware::CameraInfo* cameraInfo) override;
     virtual binder::Status     getCameraCharacteristics(const std::string& cameraId,
-            int targetSdkVersion, bool overrideToPortrait, int32_t deviceId,
+            int targetSdkVersion, int rotationOverride, int32_t deviceId,
             int32_t devicePolicy, CameraMetadata* cameraInfo) override;
     virtual binder::Status     getCameraVendorTagDescriptor(
             /*out*/
@@ -173,14 +172,14 @@
     virtual binder::Status     connect(const sp<hardware::ICameraClient>& cameraClient,
             int32_t cameraId, const std::string& clientPackageName,
             int32_t clientUid, int clientPid, int targetSdkVersion,
-            bool overrideToPortrait, bool forceSlowJpegMode, int32_t deviceId,
+            int rotationOverride, bool forceSlowJpegMode, int32_t deviceId,
             int32_t devicePolicy, /*out*/ sp<hardware::ICamera>* device) override;
 
     virtual binder::Status     connectDevice(
             const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
             const std::string& cameraId,
             const std::string& clientPackageName, const std::optional<std::string>& clientFeatureId,
-            int32_t clientUid, int scoreOffset, int targetSdkVersion, bool overrideToPortrait,
+            int32_t clientUid, int scoreOffset, int targetSdkVersion, int rotationOverride,
             int32_t deviceId, int32_t devicePolicy,
             /*out*/
             sp<hardware::camera2::ICameraDeviceUser>* device);
@@ -191,14 +190,14 @@
     virtual binder::Status    removeListener(
             const sp<hardware::ICameraServiceListener>& listener);
 
-    // TODO(b/291736219): This to be made device-aware.
     virtual binder::Status getConcurrentCameraIds(
         /*out*/
         std::vector<hardware::camera2::utils::ConcurrentCameraIdCombination>* concurrentCameraIds);
 
     virtual binder::Status isConcurrentSessionConfigurationSupported(
         const std::vector<hardware::camera2::utils::CameraIdAndSessionConfiguration>& sessions,
-        int targetSdkVersion, /*out*/bool* supported);
+        int targetSdkVersion, int32_t deviceId, int32_t devicePolicy,
+        /*out*/bool* supported);
 
     virtual binder::Status    getLegacyParameters(
             int32_t cameraId,
@@ -243,9 +242,6 @@
     virtual binder::Status reportExtensionSessionStats(
             const hardware::CameraExtensionSessionStats& stats, std::string* sessionKey /*out*/);
 
-    virtual binder::Status remapCameraIds(const hardware::CameraIdRemapping&
-            cameraIdRemapping);
-
     virtual binder::Status injectSessionParams(
             const std::string& cameraId,
             const hardware::camera2::impl::CameraMetadataNative& sessionParams);
@@ -262,7 +258,7 @@
             /*out*/ bool* supported);
 
     virtual binder::Status getSessionCharacteristics(
-            const std::string& cameraId, int targetSdkVersion, bool overrideToPortrait,
+            const std::string& cameraId, int targetSdkVersion, int rotationOverride,
             const SessionConfiguration& sessionConfiguration, int32_t deviceId,
             int32_t devicePolicy, /*out*/ CameraMetadata* outMetadata);
 
@@ -307,7 +303,8 @@
     /////////////////////////////////////////////////////////////////////
     // CameraDeviceFactory functionality
     std::pair<int, IPCTransport>    getDeviceVersion(const std::string& cameraId,
-            bool overrideToPortrait, int* portraitRotation,
+            int rotationOverride,
+            int* portraitRotation,
             int* facing = nullptr, int* orientation = nullptr);
 
     /////////////////////////////////////////////////////////////////////
@@ -357,7 +354,7 @@
         }
 
         bool getOverrideToPortrait() const {
-            return mOverrideToPortrait;
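+            // True only when the rotation override requests portrait.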
+            return mRotationOverride == ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT;
         }
 
         // Disallows dumping over binder interface
@@ -461,7 +458,7 @@
                 int clientPid,
                 uid_t clientUid,
                 int servicePid,
-                bool overrideToPortrait);
+                int rotationOverride);
 
         virtual ~BasicClient();
 
@@ -484,7 +481,7 @@
         const pid_t                     mServicePid;
         bool                            mDisconnected;
         bool                            mUidIsTrusted;
-        bool                            mOverrideToPortrait;
+        int                             mRotationOverride;
 
         mutable Mutex                   mAudioRestrictionLock;
         int32_t                         mAudioRestriction;
@@ -576,7 +573,7 @@
                 int clientPid,
                 uid_t clientUid,
                 int servicePid,
-                bool overrideToPortrait);
+                int rotationOverride);
         ~Client();
 
         // return our camera client
@@ -1008,7 +1005,8 @@
             int api1CameraId, const std::string& clientPackageNameMaybe, bool systemNativeClient,
             const std::optional<std::string>& clientFeatureId, int clientUid, int clientPid,
             apiLevel effectiveApiLevel, bool shimUpdateOnly, int scoreOffset, int targetSdkVersion,
-            bool overrideToPortrait, bool forceSlowJpegMode, const std::string& originalCameraId,
+            int rotationOverride, bool forceSlowJpegMode,
+            const std::string& originalCameraId,
             /*out*/sp<CLIENT>& device);
 
     // Lock guarding camera service state
@@ -1041,43 +1039,9 @@
     mutable Mutex mCameraStatesLock;
 
     /**
-     * Mapping from packageName -> {cameraIdToReplace -> newCameraIdtoUse}.
-     *
-     * This specifies that for packageName, for every binder operation targeting
-     * cameraIdToReplace, use newCameraIdToUse instead.
-     */
-    typedef std::map<std::string, std::map<std::string, std::string>> TCameraIdRemapping;
-    TCameraIdRemapping mCameraIdRemapping{};
-    /** Mutex guarding mCameraIdRemapping. */
-    Mutex mCameraIdRemappingLock;
-
-    /** Parses cameraIdRemapping parcelable into the native cameraIdRemappingMap. */
-    binder::Status parseCameraIdRemapping(
-            const hardware::CameraIdRemapping& cameraIdRemapping,
-            /* out */ TCameraIdRemapping* cameraIdRemappingMap);
-
-    /**
-     * Resolve the (potentially remapped) camera id to use for packageName for the default device
-     * context.
-     *
-     * This returns the Camera id to use in case inputCameraId was remapped to a
-     * different id for the given packageName. Otherwise, it returns the inputCameraId.
-     *
-     * If the packageName is not provided, it will be inferred from the clientUid.
-     */
-    std::string resolveCameraId(
-            const std::string& inputCameraId,
-            int clientUid,
-            const std::string& packageName = "");
-
-    /**
      * Resolve the (potentially remapped) camera id for the given input camera id and the given
      * device id and device policy (for the device associated with the context of the caller).
      *
-     * For any context associated with the default device or a virtual device with default camera
-     * policy, this will return the actual camera id (in case inputCameraId was remapped using
-     * the remapCameraIds method).
-     *
      * For any context associated with a virtual device with custom camera policy, this will return
      * the actual camera id if inputCameraId corresponds to the mapped id of a virtual camera
      * (for virtual devices with custom camera policy, the back and front virtual cameras of that
@@ -1086,20 +1050,7 @@
     std::optional<std::string> resolveCameraId(
             const std::string& inputCameraId,
             int32_t deviceId,
-            int32_t devicePolicy,
-            int clientUid,
-            const std::string& packageName = "");
-
-    /**
-     * Updates the state of mCameraIdRemapping, while disconnecting active clients as necessary.
-     */
-    void remapCameraIds(const TCameraIdRemapping& cameraIdRemapping);
-
-    /**
-     * Finds the Camera Ids that were remapped to the inputCameraId for the given client.
-     */
-    std::vector<std::string> findOriginalIdsForRemappedCameraId(
-        const std::string& inputCameraId, int clientUid);
+            int32_t devicePolicy);
 
     // Circular buffer for storing event logging for dumps
     RingBuffer<std::string> mEventLog;
@@ -1549,7 +1500,7 @@
             const std::string& cameraId, int api1CameraId, int facing, int sensorOrientation,
             int clientPid, uid_t clientUid, int servicePid,
             std::pair<int, IPCTransport> deviceVersionAndIPCTransport, apiLevel effectiveApiLevel,
-            bool overrideForPerfClass, bool overrideToPortrait, bool forceSlowJpegMode,
+            bool overrideForPerfClass, int rotationOverride, bool forceSlowJpegMode,
             const std::string& originalCameraId,
             /*out*/ sp<BasicClient>* client);
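
Across CameraService.h the boolean overrideToPortrait parameters become an integer
rotationOverride, and the per-package camera ID remapping plumbing (remapCameraIds,
mCameraIdRemapping, and the clientUid-based resolveCameraId overload) is dropped.
For call sites that still carry the old boolean, the translation onto the
ICameraService constants is a two-value mapping; the helper below is only an
illustrative sketch, not part of this change:

    #include <android/hardware/ICameraService.h>

    // Illustrative only: map the legacy overrideToPortrait flag onto the
    // rotationOverride constants used by the updated interfaces.
    static int32_t toRotationOverride(bool overrideToPortrait) {
        using android::hardware::ICameraService;
        return overrideToPortrait
                ? ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT
                : ICameraService::ROTATION_OVERRIDE_NONE;
    }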
 
diff --git a/services/camera/libcameraservice/CameraServiceWatchdog.h b/services/camera/libcameraservice/CameraServiceWatchdog.h
index afc432d..165dece 100644
--- a/services/camera/libcameraservice/CameraServiceWatchdog.h
+++ b/services/camera/libcameraservice/CameraServiceWatchdog.h
@@ -26,7 +26,7 @@
  *   and single call monitoring differently. See function documentation for
  *   more details.
  * To disable/enable:
- *   - adb shell cmd media.camera set-cameraservice-watchdog [0/1]
+ *   - adb shell cmd media.camera set-watchdog [0/1]
  */
 #pragma once
 #include <chrono>
diff --git a/services/camera/libcameraservice/aidl/AidlCameraService.cpp b/services/camera/libcameraservice/aidl/AidlCameraService.cpp
index cb11023..2886942 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraService.cpp
+++ b/services/camera/libcameraservice/aidl/AidlCameraService.cpp
@@ -38,6 +38,7 @@
 using ::android::hardware::cameraservice::utils::conversion::aidl::cloneToAidl;
 using ::android::hardware::cameraservice::utils::conversion::aidl::convertToAidl;
 using ::android::hardware::cameraservice::utils::conversion::aidl::filterVndkKeys;
+using hardware::BnCameraService::ROTATION_OVERRIDE_NONE;
 using ::ndk::ScopedAStatus;
 
 // VNDK classes
@@ -90,7 +91,7 @@
     ::android::CameraMetadata cameraMetadata;
     UStatus ret = mCameraService->getCameraCharacteristics(in_cameraId,
                                                            mVndkVersion,
-                                                           /* overrideToPortrait= */ false,
+                                                           ROTATION_OVERRIDE_NONE,
                                                            kDefaultDeviceId,
                                                            /* devicePolicy= */ 0,
                                                            &cameraMetadata);
@@ -150,7 +151,7 @@
             hardware::ICameraService::USE_CALLING_UID,
             /* scoreOffset= */ 0,
             /* targetSdkVersion= */ __ANDROID_API_FUTURE__,
-            /* overrideToPortrait= */ false,
+            ROTATION_OVERRIDE_NONE,
             kDefaultDeviceId,
             /* devicePolicy= */ 0,
             &unstableDevice);
diff --git a/services/camera/libcameraservice/aidl/AidlUtils.cpp b/services/camera/libcameraservice/aidl/AidlUtils.cpp
index 48d78e1..d23566c 100644
--- a/services/camera/libcameraservice/aidl/AidlUtils.cpp
+++ b/services/camera/libcameraservice/aidl/AidlUtils.cpp
@@ -15,6 +15,7 @@
  */
 
 #define LOG_TAG "AidlUtils"
+//#define LOG_NDEBUG 0
 
 #include <aidl/AidlUtils.h>
 #include <aidl/ExtensionMetadataTags.h>
@@ -314,6 +315,7 @@
     if (vndkVersion == __ANDROID_API_FUTURE__) {
         // VNDK version derived from ro.board.api_level is a version code-name that
         // corresponds to the current SDK version.
+        ALOGV("%s: VNDK version is API FUTURE, not filtering any keys", __FUNCTION__);
         return OK;
     }
     const auto &apiLevelToKeys =
@@ -322,9 +324,14 @@
     // versions above the given one, need to have their keys filtered from the
     // metadata in order to avoid metadata invalidation.
     auto it = apiLevelToKeys.upper_bound(vndkVersion);
+    ALOGV("%s: VNDK version for filtering is %d", __FUNCTION__ , vndkVersion);
     while (it != apiLevelToKeys.end()) {
         for (const auto &key : it->second) {
             status_t res = metadata.erase(key);
+            // Should be okay to not use get_local_camera_metadata_tag_name
+            // since we're not filtering vendor tags
+            ALOGV("%s: Metadata key being filtered is %s", __FUNCTION__ ,
+                    get_camera_metadata_tag_name(key));
             if (res != OK) {
                 ALOGE("%s metadata key %d could not be erased", __FUNCTION__, key);
                 return res;
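
The filterVndkKeys() hunk above only adds verbose tracing around the existing
key-filtering loop; behavior is unchanged. ALOGV statements compile out while
LOG_NDEBUG stays at its default, which is why the define is added commented out
next to LOG_TAG. To see the per-key traces in a local debug build, the usual
pattern is:

    #define LOG_TAG "AidlUtils"
    #define LOG_NDEBUG 0   // local debugging only: enables the ALOGV traces added above
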
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 8ec2e4f..61577e4 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -66,13 +66,13 @@
         uid_t clientUid,
         int servicePid,
         bool overrideForPerfClass,
-        bool overrideToPortrait,
+        int rotationOverride,
         bool forceSlowJpegMode):
         Camera2ClientBase(cameraService, cameraClient, cameraServiceProxyWrapper,
                 attributionAndPermissionUtils, clientPackageName,
                 false/*systemNativeClient - since no ndk for api1*/, clientFeatureId,
                 cameraDeviceId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
-                clientUid, servicePid, overrideForPerfClass, overrideToPortrait,
+                clientUid, servicePid, overrideForPerfClass, rotationOverride,
                 /*legacyClient*/ true),
         mParameters(api1CameraId, cameraFacing),
         mLatestRequestIds(kMaxRequestIds),
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index 2654a25..a0c9f2d 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -114,7 +114,7 @@
             uid_t clientUid,
             int servicePid,
             bool overrideForPerfClass,
-            bool overrideToPortrait,
+            int rotationOverride,
             bool forceSlowJpegMode);
 
     virtual ~Camera2Client();
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 105d04f..2990099 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -71,7 +71,7 @@
         int clientPid,
         uid_t clientUid,
         int servicePid,
-        bool overrideToPortrait) :
+        int rotationOverride) :
     BasicClient(cameraService,
             IInterface::asBinder(remoteCallback),
             attributionAndPermissionUtils,
@@ -84,7 +84,7 @@
             clientPid,
             clientUid,
             servicePid,
-            overrideToPortrait),
+            rotationOverride),
     mRemoteCallback(remoteCallback) {
 }
 
@@ -104,13 +104,13 @@
         uid_t clientUid,
         int servicePid,
         bool overrideForPerfClass,
-        bool overrideToPortrait,
+        int rotationOverride,
         const std::string& originalCameraId) :
     Camera2ClientBase(cameraService, remoteCallback, cameraServiceProxyWrapper,
             attributionAndPermissionUtils, clientPackageName,
             systemNativeClient, clientFeatureId, cameraId, /*API1 camera ID*/ -1, cameraFacing,
             sensorOrientation, clientPid, clientUid, servicePid, overrideForPerfClass,
-            overrideToPortrait),
+            rotationOverride),
     mInputStream(),
     mStreamingRequestId(REQUEST_ID_NONE),
     mRequestIdCounter(0),
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 505c086..42f2752 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -61,7 +61,7 @@
             int clientPid,
             uid_t clientUid,
             int servicePid,
-            bool overrideToPortrait);
+            int rotationOverride);
 
     sp<hardware::camera2::ICameraDeviceCallbacks> mRemoteCallback;
 };
@@ -193,7 +193,7 @@
             uid_t clientUid,
             int servicePid,
             bool overrideForPerfClass,
-            bool overrideToPortrait,
+            int rotationOverride,
             const std::string& originalCameraId);
     virtual ~CameraDeviceClient();
 
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
index 82c3d6d..77de874 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
@@ -17,6 +17,7 @@
 #ifndef ANDROID_SERVERS_CAMERA_PHOTOGRAPHY_CAMERAOFFLINESESSIONCLIENT_H
 #define ANDROID_SERVERS_CAMERA_PHOTOGRAPHY_CAMERAOFFLINESESSIONCLIENT_H
 
+#include <android/hardware/ICameraService.h>
 #include <android/hardware/camera2/BnCameraOfflineSession.h>
 #include <android/hardware/camera2/ICameraDeviceCallbacks.h>
 #include "common/FrameProcessorBase.h"
@@ -59,7 +60,7 @@
                     // (v)ndk doesn't have offline session support
                     clientPackageName, /*overridePackageName*/false, clientFeatureId,
                     cameraIdStr, cameraFacing, sensorOrientation, clientPid, clientUid, servicePid,
-                    /*overrideToPortrait*/false),
+                    hardware::ICameraService::ROTATION_OVERRIDE_NONE),
             mRemoteCallback(remoteCallback), mOfflineSession(session),
             mCompositeStreamMap(offlineCompositeStreamMap) {}
 
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index 1bd0b85..8b41d00 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -517,6 +517,15 @@
     return false;
 }
 
+bool DepthCompositeStream::isDepthCompositeStreamInfo(const OutputStreamInfo& streamInfo) {
+    if ((streamInfo.dataSpace == static_cast<android_dataspace_t>(HAL_DATASPACE_DYNAMIC_DEPTH)) &&
+            (streamInfo.format == HAL_PIXEL_FORMAT_BLOB)) {
+        return true;
+    }
+
+    return false;
+}
+
 static bool setContains(std::unordered_set<int32_t> containerSet, int32_t value) {
     return containerSet.find(value) != containerSet.end();
 }
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.h b/services/camera/libcameraservice/api2/DepthCompositeStream.h
index f797f9c..3f8f6a2 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.h
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.h
@@ -46,6 +46,7 @@
     ~DepthCompositeStream() override;
 
     static bool isDepthCompositeStream(const sp<Surface> &surface);
+    static bool isDepthCompositeStreamInfo(const OutputStreamInfo& streamInfo);
 
     // CompositeStream overrides
     status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 68e9ad4..225d7f5 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -94,6 +94,11 @@
     mMainImageSurface.clear();
 }
 
+bool HeicCompositeStream::isHeicCompositeStreamInfo(const OutputStreamInfo& streamInfo) {
+    return ((streamInfo.dataSpace == static_cast<android_dataspace_t>(HAL_DATASPACE_HEIF)) &&
+            (streamInfo.format == HAL_PIXEL_FORMAT_BLOB));
+}
+
 bool HeicCompositeStream::isHeicCompositeStream(const sp<Surface> &surface) {
     ANativeWindow *anw = surface.get();
     status_t err;
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
index b539cdd..9cfd78b 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.h
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -42,6 +42,7 @@
     ~HeicCompositeStream() override;
 
     static bool isHeicCompositeStream(const sp<Surface> &surface);
+    static bool isHeicCompositeStreamInfo(const OutputStreamInfo& streamInfo);
 
     status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
index a1b9383..dadefcc 100644
--- a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
@@ -520,6 +520,15 @@
     return false;
 }
 
+bool JpegRCompositeStream::isJpegRCompositeStreamInfo(const OutputStreamInfo& streamInfo) {
+    if ((streamInfo.format == HAL_PIXEL_FORMAT_BLOB) &&
+            (streamInfo.dataSpace == static_cast<int>(kJpegRDataSpace))) {
+        return true;
+    }
+
+    return false;
+}
+
 void JpegRCompositeStream::deriveDynamicRangeAndDataspace(int64_t dynamicProfile,
         int64_t* /*out*/dynamicRange, int64_t* /*out*/dataSpace) {
     if ((dynamicRange == nullptr) || (dataSpace == nullptr)) {
@@ -832,8 +841,8 @@
     (*compositeOutput)[0].colorSpace =
         ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
 
-    if (CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(staticInfo,
-                streamInfo.dynamicRangeProfile,
+    if (CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(
+                staticInfo, dynamicRange,
                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD)) {
         compositeOutput->push_back({});
         (*compositeOutput)[1].width = streamInfo.width;
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.h b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
index 016d57c..6669739 100644
--- a/services/camera/libcameraservice/api2/JpegRCompositeStream.h
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
@@ -43,6 +43,7 @@
     ~JpegRCompositeStream() override;
 
     static bool isJpegRCompositeStream(const sp<Surface> &surface);
+    static bool isJpegRCompositeStreamInfo(const OutputStreamInfo& streamInfo);
 
     // CompositeStream overrides
     status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
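
The Depth, HEIC, and Jpeg/R composite stream headers each gain an
OutputStreamInfo-based companion to their existing Surface-based check, so a stream
can be classified from its format and dataspace alone, without querying a Surface
over binder. A combined dispatcher is the natural consumer of these helpers; the
function below is only a usage sketch (its name and the camera3 namespace
qualification are assumptions, not part of this change):

    // Sketch only: classify a stream as composite straight from OutputStreamInfo,
    // using the static helpers introduced above.
    static bool isAnyCompositeStreamInfo(const camera3::OutputStreamInfo& info) {
        return camera3::DepthCompositeStream::isDepthCompositeStreamInfo(info) ||
               camera3::HeicCompositeStream::isHeicCompositeStreamInfo(info) ||
               camera3::JpegRCompositeStream::isJpegRCompositeStreamInfo(info);
    }
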
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 2239c9f..352c6f8 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -27,8 +27,10 @@
 #include <gui/Surface.h>
 #include <gui/Surface.h>
 
+#include <android/hardware/ICameraService.h>
 #include <camera/CameraSessionStats.h>
 #include <camera/StringUtils.h>
+#include <com_android_window_flags.h>
 
 #include "common/Camera2ClientBase.h"
 
@@ -42,6 +44,8 @@
 
 using namespace camera2;
 
+namespace wm_flags = com::android::window::flags;
+
 // Interface used by CameraService
 
 template <typename TClientBase>
@@ -61,11 +65,11 @@
         uid_t clientUid,
         int servicePid,
         bool overrideForPerfClass,
-        bool overrideToPortrait,
+        int rotationOverride,
         bool legacyClient):
         TClientBase(cameraService, remoteCallback, attributionAndPermissionUtils, clientPackageName,
                 systemNativeClient, clientFeatureId, cameraId, api1CameraId, cameraFacing,
-                sensorOrientation, clientPid, clientUid, servicePid, overrideToPortrait),
+                sensorOrientation, clientPid, clientUid, servicePid, rotationOverride),
         mSharedCameraCallbacks(remoteCallback),
         mCameraServiceProxyWrapper(cameraServiceProxyWrapper),
         mDeviceActive(false), mApi1CameraId(api1CameraId)
@@ -117,14 +121,14 @@
                     new HidlCamera3Device(mCameraServiceProxyWrapper,
                             TClientBase::mAttributionAndPermissionUtils,
                             TClientBase::mCameraIdStr, mOverrideForPerfClass,
-                            TClientBase::mOverrideToPortrait, mLegacyClient);
+                            TClientBase::mRotationOverride, mLegacyClient);
             break;
         case IPCTransport::AIDL:
             mDevice =
                     new AidlCamera3Device(mCameraServiceProxyWrapper,
                             TClientBase::mAttributionAndPermissionUtils,
                             TClientBase::mCameraIdStr, mOverrideForPerfClass,
-                            TClientBase::mOverrideToPortrait, mLegacyClient);
+                            TClientBase::mRotationOverride, mLegacyClient);
              break;
         default:
             ALOGE("%s Invalid transport for camera id %s", __FUNCTION__,
@@ -339,8 +343,9 @@
 
 template <typename TClientBase>
 void Camera2ClientBase<TClientBase>::notifyPhysicalCameraChange(const std::string &physicalId) {
-    // We're only interested in this notification if overrideToPortrait is turned on.
-    if (!TClientBase::mOverrideToPortrait) {
+    using android::hardware::ICameraService;
+    // We're only interested in this notification if rotationOverride is turned on.
+    if (TClientBase::mRotationOverride == ICameraService::ROTATION_OVERRIDE_NONE) {
         return;
     }
 
@@ -350,8 +355,13 @@
     if (orientationEntry.count == 1) {
         int orientation = orientationEntry.data.i32[0];
         int rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_NONE;
-
-        if (orientation == 0 || orientation == 180) {
+        bool landscapeSensor =  (orientation == 0 || orientation == 180);
+        if (((TClientBase::mRotationOverride ==
+                ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT) && landscapeSensor) ||
+                        ((wm_flags::camera_compat_for_freeform() &&
+                                TClientBase::mRotationOverride ==
+                                ICameraService::ROTATION_OVERRIDE_ROTATION_ONLY)
+                                && !landscapeSensor)) {
             rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_90;
         }
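
The rewritten block above applies a 90-degree rotate-and-crop in two distinct cases:
a landscape-mounted sensor (orientation 0 or 180) under
ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT, or, when the camera_compat_for_freeform
window flag is enabled, a portrait-mounted sensor under
ROTATION_OVERRIDE_ROTATION_ONLY. Pulled out of the client class, the decision
reduces to the sketch below (function and parameter names are illustrative):

    // freeformCompat mirrors com::android::window::flags::camera_compat_for_freeform().
    static bool wantsNinetyDegreeRotateAndCrop(int rotationOverride, int sensorOrientation,
                                               bool freeformCompat) {
        using android::hardware::ICameraService;
        const bool landscapeSensor = (sensorOrientation == 0 || sensorOrientation == 180);
        if (rotationOverride == ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT) {
            return landscapeSensor;
        }
        if (freeformCompat &&
                rotationOverride == ICameraService::ROTATION_OVERRIDE_ROTATION_ONLY) {
            return !landscapeSensor;
        }
        return false;
    }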
 
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index c24f92b..c9d5735 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -64,7 +64,7 @@
                       uid_t clientUid,
                       int servicePid,
                       bool overrideForPerfClass,
-                      bool overrideToPortrait,
+                      int rotationOverride,
                       bool legacyClient = false);
     virtual ~Camera2ClientBase();
 
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 45c3a1f..6416c11 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -38,6 +38,7 @@
 #include <android/hidl/manager/1.2/IServiceManager.h>
 #include <hidl/ServiceManagement.h>
 #include <com_android_internal_camera_flags.h>
+#include <com_android_window_flags.h>
 #include <functional>
 #include <camera_metadata_hidden.h>
 #include <android-base/parseint.h>
@@ -62,6 +63,7 @@
 
 namespace flags = com::android::internal::camera::flags;
 namespace vd_flags = android::companion::virtualdevice::flags;
+namespace wm_flags = com::android::window::flags;
 
 namespace {
 const bool kEnableLazyHal(property_get_bool("ro.camera.enableLazyHal", false));
@@ -403,13 +405,14 @@
 }
 
 status_t CameraProviderManager::getCameraInfo(const std::string &id,
-        bool overrideToPortrait, int *portraitRotation, hardware::CameraInfo* info) const {
+         int rotationOverride, int *portraitRotation,
+         hardware::CameraInfo* info) const {
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
 
     auto deviceInfo = findDeviceInfoLocked(id);
     if (deviceInfo == nullptr) return NAME_NOT_FOUND;
 
-    return deviceInfo->getCameraInfo(overrideToPortrait, portraitRotation, info);
+    return deviceInfo->getCameraInfo(rotationOverride, portraitRotation, info);
 }
 
 status_t CameraProviderManager::isSessionConfigurationSupported(const std::string& id,
@@ -425,7 +428,8 @@
             bool overrideForPerfClass) {
         CameraMetadata metadata;
         this->getCameraCharacteristicsLocked(id, overrideForPerfClass,
-                                             &metadata, /*overrideToPortrait*/false);
+                                             &metadata,
+                                             hardware::ICameraService::ROTATION_OVERRIDE_NONE);
         return metadata;
     };
     return deviceInfo->isSessionConfigurationSupported(configuration,
@@ -464,7 +468,7 @@
 
 status_t CameraProviderManager::getSessionCharacteristics(
         const std::string& id, const SessionConfiguration& configuration, bool overrideForPerfClass,
-        bool overrideToPortrait, CameraMetadata* sessionCharacteristics /*out*/) const {
+        int rotationOverride, CameraMetadata* sessionCharacteristics /*out*/) const {
     if (!flags::feature_combination_query()) {
         return INVALID_OPERATION;
     }
@@ -475,11 +479,11 @@
         return NAME_NOT_FOUND;
     }
 
-    metadataGetter getMetadata = [this, overrideToPortrait](const std::string& id,
+    metadataGetter getMetadata = [this, rotationOverride](const std::string& id,
                                                             bool overrideForPerfClass) {
         CameraMetadata metadata;
         status_t ret = this->getCameraCharacteristicsLocked(id, overrideForPerfClass, &metadata,
-                                                            overrideToPortrait);
+                                                            rotationOverride);
         if (ret != OK) {
             ALOGE("%s: Could not get CameraCharacteristics for device %s", __FUNCTION__,
                   id.c_str());
@@ -508,10 +512,10 @@
 
 status_t CameraProviderManager::getCameraCharacteristics(const std::string &id,
         bool overrideForPerfClass, CameraMetadata* characteristics,
-        bool overrideToPortrait) const {
+        int rotationOverride) const {
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
     return getCameraCharacteristicsLocked(id, overrideForPerfClass, characteristics,
-            overrideToPortrait);
+            rotationOverride);
 }
 
 status_t CameraProviderManager::getHighestSupportedVersion(const std::string &id,
@@ -2471,8 +2475,9 @@
                 device->hasFlashUnit() ? "true" : "false");
         hardware::CameraInfo info;
         int portraitRotation;
-        status_t res = device->getCameraInfo(/*overrideToPortrait*/false, &portraitRotation,
-                &info);
+        status_t res = device->getCameraInfo(
+                /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+                &portraitRotation, &info);
         if (res != OK) {
             dprintf(fd, "   <Error reading camera info: %s (%d)>\n",
                     strerror(-res), res);
@@ -2483,7 +2488,7 @@
         }
         CameraMetadata info2;
         res = device->getCameraCharacteristics(true /*overrideForPerfClass*/, &info2,
-                /*overrideToPortrait*/false);
+                hardware::ICameraService::ROTATION_OVERRIDE_NONE);
         if (res == INVALID_OPERATION) {
             dprintf(fd, "  API2 not directly supported\n");
         } else if (res != OK) {
@@ -2761,10 +2766,15 @@
 }
 
 status_t CameraProviderManager::ProviderInfo::DeviceInfo3::getCameraInfo(
-        bool overrideToPortrait, int *portraitRotation,
+        int rotationOverride, int *portraitRotation,
         hardware::CameraInfo *info) const {
     if (info == nullptr) return BAD_VALUE;
 
+    bool freeform_compat_enabled = wm_flags::camera_compat_for_freeform();
+    if (!freeform_compat_enabled &&
+            rotationOverride > hardware::ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT) {
+        ALOGW("Camera compat freeform flag disabled but rotation override is %d", rotationOverride);
+    }
     camera_metadata_ro_entry facing =
             mCameraCharacteristics.find(ANDROID_LENS_FACING);
     if (facing.count == 1) {
@@ -2792,13 +2802,18 @@
         return NAME_NOT_FOUND;
     }
 
-    if (overrideToPortrait && (info->orientation == 0 || info->orientation == 180)) {
+    if (rotationOverride == hardware::ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT
+            && (info->orientation == 0 || info->orientation == 180)) {
         *portraitRotation = 90;
         if (info->facing == hardware::CAMERA_FACING_FRONT) {
             info->orientation = (360 + info->orientation - 90) % 360;
         } else {
             info->orientation = (360 + info->orientation + 90) % 360;
         }
+    } else if (freeform_compat_enabled &&
+            rotationOverride == hardware::ICameraService::ROTATION_OVERRIDE_ROTATION_ONLY
+            && (info->orientation == 90 || info->orientation == 270)) {
+        *portraitRotation = info->facing == hardware::CAMERA_FACING_BACK ? 90 : 270;
     } else {
         *portraitRotation = 0;
     }
@@ -2828,7 +2843,8 @@
 }
 
 status_t CameraProviderManager::ProviderInfo::DeviceInfo3::getCameraCharacteristics(
-        bool overrideForPerfClass, CameraMetadata *characteristics, bool overrideToPortrait) {
+        bool overrideForPerfClass, CameraMetadata *characteristics,
+        int rotationOverride) {
     if (characteristics == nullptr) return BAD_VALUE;
 
     if (!overrideForPerfClass && mCameraCharNoPCOverride != nullptr) {
@@ -2837,7 +2853,7 @@
         *characteristics = mCameraCharacteristics;
     }
 
-    if (overrideToPortrait) {
+    if (rotationOverride == hardware::ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT) {
         const auto &lensFacingEntry = characteristics->find(ANDROID_LENS_FACING);
         const auto &sensorOrientationEntry = characteristics->find(ANDROID_SENSOR_ORIENTATION);
         uint8_t lensFacing = lensFacingEntry.data.u8[0];
@@ -3198,11 +3214,11 @@
 
 status_t CameraProviderManager::getCameraCharacteristicsLocked(const std::string &id,
         bool overrideForPerfClass, CameraMetadata* characteristics,
-        bool overrideToPortrait) const {
+        int rotationOverride) const {
     auto deviceInfo = findDeviceInfoLocked(id);
     if (deviceInfo != nullptr) {
         return deviceInfo->getCameraCharacteristics(overrideForPerfClass, characteristics,
-                overrideToPortrait);
+                rotationOverride);
     }
 
     // Find hidden physical camera characteristics
@@ -3238,8 +3254,9 @@
 
         hardware::CameraInfo info;
         int portraitRotation;
-        status_t res = deviceInfo->getCameraInfo(/*overrideToPortrait*/false, &portraitRotation,
-                &info);
+        status_t res = deviceInfo->getCameraInfo(
+                /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+                &portraitRotation, &info);
         if (res != OK) {
             ALOGE("%s: Error reading camera info: %s (%d)", __FUNCTION__, strerror(-res), res);
             continue;
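
In DeviceInfo3::getCameraInfo() above, the portrait override keeps its existing
adjustment for landscape-mounted sensors: subtract 90 degrees for front-facing
cameras and add 90 for back-facing ones, modulo 360. The new
ROTATION_OVERRIDE_ROTATION_ONLY branch, gated on the freeform-compat flag, instead
reports a portraitRotation of 90 (back) or 270 (front) for portrait-mounted sensors
and leaves info->orientation untouched. The adjustment itself, with worked values:

    // Sketch of the portrait-override orientation adjustment (illustrative helper).
    static int adjustOrientationForPortrait(int orientation, bool facingFront) {
        // Only applied when the sensor is landscape-mounted (orientation 0 or 180).
        return facingFront ? (360 + orientation - 90) % 360
                           : (360 + orientation + 90) % 360;
    }
    // adjustOrientationForPortrait(0,   /*facingFront=*/ true)  == 270
    // adjustOrientationForPortrait(0,   /*facingFront=*/ false) == 90
    // adjustOrientationForPortrait(180, /*facingFront=*/ true)  == 90
    // adjustOrientationForPortrait(180, /*facingFront=*/ false) == 270
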
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index 248227d..4a64b44 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -292,7 +292,8 @@
      * Return the old camera API camera info
      */
     status_t getCameraInfo(const std::string &id,
-            bool overrideToPortrait, int *portraitRotation, hardware::CameraInfo* info) const;
+            int rotationOverride, int *portraitRotation,
+            hardware::CameraInfo* info) const;
 
     /**
      * Return API2 camera characteristics - returns NAME_NOT_FOUND if a device ID does
@@ -300,7 +301,7 @@
      */
     status_t getCameraCharacteristics(const std::string &id,
             bool overrideForPerfClass, CameraMetadata* characteristics,
-            bool overrideToPortrait) const;
+            int rotationOverride) const;
 
     status_t isConcurrentSessionConfigurationSupported(
             const std::vector<hardware::camera2::utils::CameraIdAndSessionConfiguration>
@@ -331,7 +332,7 @@
      status_t getSessionCharacteristics(const std::string& id,
             const SessionConfiguration &configuration,
             bool overrideForPerfClass,
-            bool overrideToPortrait,
+            int rotationOverride,
             CameraMetadata* sessionCharacteristics /*out*/) const;
 
     /**
@@ -626,7 +627,8 @@
             virtual status_t setTorchMode(bool enabled) = 0;
             virtual status_t turnOnTorchWithStrengthLevel(int32_t torchStrength) = 0;
             virtual status_t getTorchStrengthLevel(int32_t *torchStrength) = 0;
-            virtual status_t getCameraInfo(bool overrideToPortrait,
+            virtual status_t getCameraInfo(
+                    int rotationOverride,
                     int *portraitRotation,
                     hardware::CameraInfo *info) const = 0;
             virtual bool isAPI1Compatible() const = 0;
@@ -634,7 +636,7 @@
             virtual status_t getCameraCharacteristics(
                     [[maybe_unused]] bool overrideForPerfClass,
                     [[maybe_unused]] CameraMetadata *characteristics,
-                    [[maybe_unused]] bool overrideToPortrait) {
+                    [[maybe_unused]] int rotationOverride) {
                 return INVALID_OPERATION;
             }
             virtual status_t getPhysicalCameraCharacteristics(
@@ -705,7 +707,8 @@
             virtual status_t setTorchMode(bool enabled) = 0;
             virtual status_t turnOnTorchWithStrengthLevel(int32_t torchStrength) = 0;
             virtual status_t getTorchStrengthLevel(int32_t *torchStrength) = 0;
-            virtual status_t getCameraInfo(bool overrideToPortrait,
+            virtual status_t getCameraInfo(
+                    int rotationOverride,
                     int *portraitRotation,
                     hardware::CameraInfo *info) const override;
             virtual bool isAPI1Compatible() const override;
@@ -713,7 +716,7 @@
             virtual status_t getCameraCharacteristics(
                     bool overrideForPerfClass,
                     CameraMetadata *characteristics,
-                    bool overrideToPortrait) override;
+                    int rotationOverride) override;
             virtual status_t getPhysicalCameraCharacteristics(const std::string& physicalCameraId,
                     CameraMetadata *characteristics) const override;
             virtual status_t filterSmallJpegSizes() override;
@@ -917,7 +920,7 @@
         const hardware::camera::common::V1_0::TorchModeStatus&);
 
     status_t getCameraCharacteristicsLocked(const std::string &id, bool overrideForPerfClass,
-            CameraMetadata* characteristics, bool overrideToPortrait) const;
+            CameraMetadata* characteristics, int rotationOverride) const;
     void filterLogicalCameraIdsLocked(std::vector<std::string>& deviceIds) const;
 
     status_t getSystemCameraKindLocked(const std::string& id, SystemCameraKind *kind) const;
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
index 41e0cd1..e76b750 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
@@ -1001,16 +1001,16 @@
                 SessionConfigurationUtils::targetPerfClassPrimaryCamera(
                         perfClassPrimaryCameraIds, cameraId, targetSdkVersion);
         res = mManager->getCameraCharacteristicsLocked(cameraId, overrideForPerfClass, &deviceInfo,
-                /*overrideToPortrait*/false);
+                hardware::ICameraService::ROTATION_OVERRIDE_NONE);
         if (res != OK) {
             return res;
         }
         camera3::metadataGetter getMetadata =
                 [this](const std::string &id, bool overrideForPerfClass) {
                     CameraMetadata physicalDeviceInfo;
-                    mManager->getCameraCharacteristicsLocked(id, overrideForPerfClass,
-                                                   &physicalDeviceInfo,
-                                                   /*overrideToPortrait*/false);
+                    mManager->getCameraCharacteristicsLocked(
+                            id, overrideForPerfClass, &physicalDeviceInfo,
+                            hardware::ICameraService::ROTATION_OVERRIDE_NONE);
                     return physicalDeviceInfo;
                 };
         std::vector<std::string> physicalCameraIds;
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
index 6eaf41f..1e546fb 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
@@ -957,7 +957,7 @@
                 SessionConfigurationUtils::targetPerfClassPrimaryCamera(
                         perfClassPrimaryCameraIds, cameraId, targetSdkVersion);
         res = mManager->getCameraCharacteristicsLocked(cameraId, overrideForPerfClass, &deviceInfo,
-                /*overrideToPortrait*/false);
+                 hardware::ICameraService::ROTATION_OVERRIDE_NONE);
         if (res != OK) {
             return res;
         }
@@ -965,7 +965,7 @@
                 [this](const std::string &id, bool overrideForPerfClass) {
                     CameraMetadata physicalDeviceInfo;
                     mManager->getCameraCharacteristicsLocked(id, overrideForPerfClass,
-                            &physicalDeviceInfo, /*overrideToPortrait*/false);
+                            &physicalDeviceInfo, hardware::ICameraService::ROTATION_OVERRIDE_NONE);
                     return physicalDeviceInfo;
                 };
         std::vector<std::string> physicalCameraIds;
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index f4d8f7f..97cfdac 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -85,7 +85,7 @@
 
 Camera3Device::Camera3Device(std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
         std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
-        const std::string &id, bool overrideForPerfClass, bool overrideToPortrait,
+        const std::string &id, bool overrideForPerfClass, int rotationOverride,
         bool legacyClient):
         AttributionAndPermissionUtilsEncapsulator(attributionAndPermissionUtils),
         mCameraServiceProxyWrapper(cameraServiceProxyWrapper),
@@ -111,7 +111,7 @@
         mLastTemplateId(-1),
         mNeedFixupMonochromeTags(false),
         mOverrideForPerfClass(overrideForPerfClass),
-        mOverrideToPortrait(overrideToPortrait),
+        mRotationOverride(rotationOverride),
         mRotateAndCropOverride(ANDROID_SCALER_ROTATE_AND_CROP_NONE),
         mComposerOutput(false),
         mAutoframingOverride(ANDROID_CONTROL_AUTOFRAMING_OFF),
@@ -187,7 +187,7 @@
     /** Start up request queue thread */
     mRequestThread = createNewRequestThread(
             this, mStatusTracker, mInterface, sessionParamKeys,
-            mUseHalBufManager, mSupportCameraMute, mOverrideToPortrait,
+            mUseHalBufManager, mSupportCameraMute, mRotationOverride,
             mSupportZoomOverride);
     res = mRequestThread->run((std::string("C3Dev-") + mId + "-ReqQueue").c_str());
     if (res != OK) {
@@ -1398,7 +1398,7 @@
                 request->mRotateAndCropAuto = false;
             }
 
-            overrideAutoRotateAndCrop(request, mOverrideToPortrait, mRotateAndCropOverride);
+            overrideAutoRotateAndCrop(request, mRotationOverride, mRotateAndCropOverride);
         }
 
         if (autoframingSessionKey) {
@@ -2378,46 +2378,15 @@
     return ret;
 }
 
-Camera3Device::RunThreadWithRealtimePriority::RunThreadWithRealtimePriority(int tid) : mTid(tid),
-        mPreviousPolicy(sched_getscheduler(tid)) {
-    if (flags::surface_ipc()) {
-        auto res = sched_getparam(mTid, &mPreviousParams);
-        if (res != OK) {
-            ALOGE("Can't retrieve thread scheduler parameters: %s (%d)",
-                    strerror(-res), res);
-            return;
-        }
-
-        struct sched_param param = {0};
-        param.sched_priority = kRequestThreadPriority;
-
-        res = sched_setscheduler(mTid, SCHED_FIFO, &param);
-        if (res != OK) {
-            ALOGW("Can't set realtime priority for thread: %s (%d)",
-                    strerror(-res), res);
-        } else {
-            ALOGD("Set real time priority for thread (tid %d)", mTid);
-            mPolicyBumped = true;
-        }
-    }
-}
-
-Camera3Device::RunThreadWithRealtimePriority::~RunThreadWithRealtimePriority() {
-    if (mPolicyBumped && flags::surface_ipc()) {
-        auto res = sched_setscheduler(mTid, mPreviousPolicy, &mPreviousParams);
-        if (res != OK) {
-            ALOGE("Can't set regular priority for thread: %s (%d)",
-                    strerror(-res), res);
-        } else {
-            ALOGD("Set regular priority for thread (tid %d)", mTid);
-        }
-    }
-}
 
 status_t Camera3Device::configureStreamsLocked(int operatingMode,
         const CameraMetadata& sessionParams, bool notifyRequestThread) {
     ATRACE_CALL();
     status_t res;
+    // Stream/surface setup can include a lot of binder IPC. Raise the
+    // thread priority when running the binder IPC heavy configuration
+    // sequence.
+    RunThreadWithRealtimePriority priorityBump;
 
     if (mStatus != STATUS_UNCONFIGURED && mStatus != STATUS_CONFIGURED) {
         CLOGE("Not idle");
@@ -2610,50 +2579,43 @@
         mRequestThread->setHalBufferManagedStreams(mHalBufManagedStreamIds);
     }
 
-    {
-        // Stream/surface setup can include a lot of binder IPC. Raise the
-        // thread priority when running the binder IPC heavy configuration
-        // sequence.
-        RunThreadWithRealtimePriority priorityBump;
+    // Finish all stream configuration immediately.
+    // TODO: Try to relax this later back to lazy completion, which should be
+    // faster
 
-        // Finish all stream configuration immediately.
-        // TODO: Try to relax this later back to lazy completion, which should be
-        // faster
+    if (mInputStream != NULL && mInputStream->isConfiguring()) {
+        bool streamReConfigured = false;
+        res = mInputStream->finishConfiguration(&streamReConfigured);
+        if (res != OK) {
+            CLOGE("Can't finish configuring input stream %d: %s (%d)", mInputStream->getId(),
+                  strerror(-res), res);
+            cancelStreamsConfigurationLocked();
+            if ((res == NO_INIT || res == DEAD_OBJECT) && mInputStream->isAbandoned()) {
+                return DEAD_OBJECT;
+            }
+            return BAD_VALUE;
+        }
+        if (streamReConfigured) {
+            mInterface->onStreamReConfigured(mInputStream->getId());
+        }
+    }
 
-        if (mInputStream != NULL && mInputStream->isConfiguring()) {
+    for (size_t i = 0; i < mOutputStreams.size(); i++) {
+        sp<Camera3OutputStreamInterface> outputStream = mOutputStreams[i];
+        if (outputStream->isConfiguring() && !outputStream->isConsumerConfigurationDeferred()) {
             bool streamReConfigured = false;
-            res = mInputStream->finishConfiguration(&streamReConfigured);
+            res = outputStream->finishConfiguration(&streamReConfigured);
             if (res != OK) {
-                CLOGE("Can't finish configuring input stream %d: %s (%d)",
-                        mInputStream->getId(), strerror(-res), res);
+                CLOGE("Can't finish configuring output stream %d: %s (%d)", outputStream->getId(),
+                      strerror(-res), res);
                 cancelStreamsConfigurationLocked();
-                if ((res == NO_INIT || res == DEAD_OBJECT) && mInputStream->isAbandoned()) {
+                if ((res == NO_INIT || res == DEAD_OBJECT) && outputStream->isAbandoned()) {
                     return DEAD_OBJECT;
                 }
                 return BAD_VALUE;
             }
             if (streamReConfigured) {
-                mInterface->onStreamReConfigured(mInputStream->getId());
-            }
-        }
-
-        for (size_t i = 0; i < mOutputStreams.size(); i++) {
-            sp<Camera3OutputStreamInterface> outputStream = mOutputStreams[i];
-            if (outputStream->isConfiguring() && !outputStream->isConsumerConfigurationDeferred()) {
-                bool streamReConfigured = false;
-                res = outputStream->finishConfiguration(&streamReConfigured);
-                if (res != OK) {
-                    CLOGE("Can't finish configuring output stream %d: %s (%d)",
-                            outputStream->getId(), strerror(-res), res);
-                    cancelStreamsConfigurationLocked();
-                    if ((res == NO_INIT || res == DEAD_OBJECT) && outputStream->isAbandoned()) {
-                        return DEAD_OBJECT;
-                    }
-                    return BAD_VALUE;
-                }
-                if (streamReConfigured) {
-                    mInterface->onStreamReConfigured(outputStream->getId());
-                }
+                mInterface->onStreamReConfigured(outputStream->getId());
             }
         }
     }
@@ -2673,8 +2635,8 @@
     if (disableFifo != 1) {
         // Boost priority of request thread to SCHED_FIFO.
         pid_t requestThreadTid = mRequestThread->getTid();
-        res = SchedulingPolicyUtils::requestPriorityDirect(getpid(), requestThreadTid,
-                kRequestThreadPriority);
+        res = SchedulingPolicyUtils::requestPriorityDirect(
+                getpid(), requestThreadTid, RunThreadWithRealtimePriority::kRequestThreadPriority);
         if (res != OK) {
             ALOGW("Can't set realtime priority for request processing thread: %s (%d)",
                     strerror(-res), res);
@@ -3079,7 +3041,7 @@
         sp<HalInterface> interface, const Vector<int32_t>& sessionParamKeys,
         bool useHalBufManager,
         bool supportCameraMute,
-        bool overrideToPortrait,
+        int rotationOverride,
         bool supportSettingsOverride) :
         Thread(/*canCallJava*/false),
         mParent(parent),
@@ -3113,7 +3075,7 @@
         mLatestSessionParams(sessionParamKeys.size()),
         mUseHalBufManager(useHalBufManager),
         mSupportCameraMute(supportCameraMute),
-        mOverrideToPortrait(overrideToPortrait),
+        mRotationOverride(rotationOverride),
         mSupportSettingsOverride(supportSettingsOverride) {
     mStatusId = statusTracker->addComponent("RequestThread");
     mVndkVersion = getVNDKVersionFromProp(__ANDROID_API_FUTURE__);
@@ -3651,10 +3613,12 @@
         sp<CaptureRequest> captureRequest = nextRequest.captureRequest;
         captureRequest->mTestPatternChanged = overrideTestPattern(captureRequest);
         // Do not override rotate&crop for stream configurations that include
-        // SurfaceViews(HW_COMPOSER) output, unless mOverrideToPortrait is set.
+        // SurfaceViews(HW_COMPOSER) output, unless mRotationOverride is set.
         // The display rotation there will be compensated by NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY
-        captureRequest->mRotateAndCropChanged = (mComposerOutput && !mOverrideToPortrait) ? false :
-            overrideAutoRotateAndCrop(captureRequest);
+        using hardware::ICameraService::ROTATION_OVERRIDE_NONE;
+        captureRequest->mRotateAndCropChanged =
+                (mComposerOutput && (mRotationOverride == ROTATION_OVERRIDE_NONE)) ?
+                        false : overrideAutoRotateAndCrop(captureRequest);
         captureRequest->mAutoframingChanged = overrideAutoframing(captureRequest);
         if (flags::inject_session_params()) {
             injectSessionParams(captureRequest, mInjectedSessionParams);
@@ -3706,19 +3670,18 @@
         cleanUpFailedRequests(/*sendRequestError*/ true);
         // Check if any stream is abandoned.
         checkAndStopRepeatingRequest();
+        // Inform waitUntilRequestProcessed thread of a failed request ID
+        wakeupLatestRequest(/*failedRequestId*/true, latestRequestId);
         return true;
     } else if (res != OK) {
         cleanUpFailedRequests(/*sendRequestError*/ false);
+        // Inform waitUntilRequestProcessed thread of a failed request ID
+        wakeupLatestRequest(/*failedRequestId*/true, latestRequestId);
         return false;
     }
 
     // Inform waitUntilRequestProcessed thread of a new request ID
-    {
-        Mutex::Autolock al(mLatestRequestMutex);
-
-        mLatestRequestId = latestRequestId;
-        mLatestRequestSignal.signal();
-    }
+    wakeupLatestRequest(/*failedRequestId*/false, latestRequestId);
 
     // Submit a batch of requests to HAL.
     // Use flush lock only when submitting multilple requests in a batch.
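
With the hunk above, the request submission error paths and the success path all
funnel through the new wakeupLatestRequest() helper instead of taking
mLatestRequestMutex inline, so success and failure wake the same condition variable.
The waiting side is outside this excerpt; assuming it blocks on mLatestRequestSignal
and checks both ids, it would look roughly like the sketch below (every name not
visible in the diff is an assumption):

    // Assumed shape of the waiter that wakeupLatestRequest() unblocks; the real
    // waitUntilRequestProcessed logic is not shown in this change.
    status_t waitForRequestIdSketch(int32_t requestId, nsecs_t timeout) {
        Mutex::Autolock l(mLatestRequestMutex);
        while (mLatestRequestId != requestId && mLatestFailedRequestId < requestId) {
            status_t res = mLatestRequestSignal.waitRelative(mLatestRequestMutex, timeout);
            if (res != OK) return res;  // e.g. TIMED_OUT
        }
        return (mLatestFailedRequestId >= requestId) ? UNKNOWN_ERROR : OK;
    }
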
@@ -4473,12 +4436,7 @@
                         hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST,
                         captureRequest->mResultExtras);
             }
-            {
-                Mutex::Autolock al(mLatestRequestMutex);
-
-                mLatestFailedRequestId = captureRequest->mResultExtras.requestId;
-                mLatestRequestSignal.signal();
-            }
+            wakeupLatestRequest(/*failedRequestId*/true, captureRequest->mResultExtras.requestId);
         }
 
         // Remove yet-to-be submitted inflight request from inflightMap
@@ -4930,16 +4888,16 @@
 bool Camera3Device::RequestThread::overrideAutoRotateAndCrop(const sp<CaptureRequest> &request) {
     ATRACE_CALL();
     Mutex::Autolock l(mTriggerMutex);
-    return Camera3Device::overrideAutoRotateAndCrop(request, this->mOverrideToPortrait,
+    return Camera3Device::overrideAutoRotateAndCrop(request, this->mRotationOverride,
             this->mRotateAndCropOverride);
 }
 
 bool Camera3Device::overrideAutoRotateAndCrop(const sp<CaptureRequest> &request,
-        bool overrideToPortrait,
+        int rotationOverride,
         camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropOverride) {
     ATRACE_CALL();
 
-    if (overrideToPortrait) {
+    if (rotationOverride != hardware::ICameraService::ROTATION_OVERRIDE_NONE) {
         uint8_t rotateAndCrop_u8 = rotateAndCropOverride;
         CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
         metadata.update(ANDROID_SCALER_ROTATE_AND_CROP,
@@ -5140,6 +5098,20 @@
     return OK;
 }
 
+void  Camera3Device::RequestThread::wakeupLatestRequest(
+        bool latestRequestFailed,
+        int32_t latestRequestId) {
+    Mutex::Autolock al(mLatestRequestMutex);
+
+    if (latestRequestFailed) {
+        mLatestFailedRequestId = latestRequestId;
+    } else {
+        mLatestRequestId = latestRequestId;
+    }
+    mLatestRequestSignal.signal();
+}
+
+
 /**
  * PreparerThread inner class methods
  */
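
Two moves in Camera3Device.cpp are structural rather than functional, with one scope
change worth noting. The RunThreadWithRealtimePriority constructor and destructor are
deleted here (and the nested class definition is removed from Camera3Device.h below)
because the RAII helper, including kRequestThreadPriority, now lives outside
Camera3Device, apparently in a shared utility not shown in this excerpt; as a result,
configureStreamsLocked() declares the bump at function scope, so the SCHED_FIFO boost
covers the whole configuration sequence instead of only the finishConfiguration()
loop. The usage pattern itself is unchanged:

    {
        RunThreadWithRealtimePriority bump;   // raises the current thread to SCHED_FIFO
        doBinderHeavyWork();                  // illustrative placeholder for the scoped section
    }                                         // destructor restores the previous policy

The second move, wakeupLatestRequest(), simply centralizes the record-id-then-signal
pattern that was previously inlined at each wakeup site.
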
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index d646886..9f414e8 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -88,7 +88,7 @@
 
     explicit Camera3Device(std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
             std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
-            const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
+            const std::string& id, bool overrideForPerfClass, int rotationOverride,
             bool legacyClient = false);
 
     virtual ~Camera3Device();
@@ -373,8 +373,6 @@
     static const size_t        kInFlightWarnLimitHighSpeed = 256; // batch size 32 * pipe depth 8
     static const nsecs_t       kMinInflightDuration = 5000000000; // 5 s
     static const nsecs_t       kBaseGetBufferWait = 3000000000; // 3 sec.
-    // SCHED_FIFO priority for request submission thread in HFR mode
-    static const int           kRequestThreadPriority = 1;
 
     struct                     RequestTrigger;
     // minimal jpeg buffer size: 256KB + blob header
@@ -879,7 +877,7 @@
 
     // Override rotate_and_crop control if needed
     static bool    overrideAutoRotateAndCrop(const sp<CaptureRequest> &request /*out*/,
-            bool overrideToPortrait,
+            int rotationOverride,
             camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropOverride);
 
     // Override auto framing control if needed
@@ -916,7 +914,7 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait,
+                int rotationOverride,
                 bool supportSettingsOverride);
         ~RequestThread();
 
@@ -1038,6 +1036,11 @@
             const sp<CaptureRequest> &request,
             const CameraMetadata& injectedSessionParams);
 
+        /**
+         * Record the latest (or latest failed) request id and signal mLatestRequestSignal.
+         **/
+        void wakeupLatestRequest(bool latestRequestFailed, int32_t latestRequestId);
+
       protected:
 
         virtual bool threadLoop();
@@ -1226,7 +1229,7 @@
         bool               mUseHalBufManager = false;
         std::set<int32_t > mHalBufManagedStreamIds;
         const bool         mSupportCameraMute;
-        const bool         mOverrideToPortrait;
+        const bool         mRotationOverride;
         const bool         mSupportSettingsOverride;
         int32_t            mVndkVersion = -1;
     };
@@ -1237,7 +1240,7 @@
                 const Vector<int32_t>& /*sessionParamKeys*/,
                 bool /*useHalBufManager*/,
                 bool /*supportCameraMute*/,
-                bool /*overrideToPortrait*/,
+                int /*rotationOverride*/,
                 bool /*supportSettingsOverride*/) = 0;
 
     sp<RequestThread> mRequestThread;
@@ -1519,7 +1522,7 @@
 
     // Whether the camera framework overrides the device characteristics for
     // app compatibility reasons.
-    bool mOverrideToPortrait;
+    int mRotationOverride;
     camera_metadata_enum_android_scaler_rotate_and_crop_t mRotateAndCropOverride;
     bool mComposerOutput;
 
@@ -1630,34 +1633,6 @@
 
     void overrideStreamUseCaseLocked();
 
-    // An instance of this class will raise the scheduling policy of a given
-    // given thread to real time and keep it this way throughout the lifetime
-    // of the object. The thread scheduling policy will revert back to its original
-    // state after the instances is released. By default the implementation will
-    // raise the priority of the current thread unless clients explicitly specify
-    // another thread id.
-    // Client must avoid:
-    //  - Keeping an instance of this class for extended and long running operations.
-    //    This is only intended for short/temporarily priority bumps that mitigate
-    //    scheduling delays within critical camera paths.
-    //  - Allocating instances of this class on the memory heap unless clients have
-    //    complete control over the object lifetime. It is preferable to allocate
-    //    instances of this class on the stack instead.
-    //  - Nesting multiple instances of this class using the same default or same thread id.
-    class RunThreadWithRealtimePriority final {
-        public:
-            RunThreadWithRealtimePriority(int tid = gettid());
-            ~RunThreadWithRealtimePriority();
-
-            RunThreadWithRealtimePriority(const RunThreadWithRealtimePriority&) = delete;
-            RunThreadWithRealtimePriority& operator=(const RunThreadWithRealtimePriority&) = delete;
-
-        private:
-            int mTid;
-            int mPreviousPolicy;
-            bool mPolicyBumped = false;
-            struct sched_param mPreviousParams;
-    };
 
 }; // class Camera3Device
 
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index 89e08a1..31707ec 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -36,6 +36,7 @@
 #include <utils/SortedVector.h>
 #include <utils/Trace.h>
 
+#include <android/hardware/ICameraService.h>
 #include <android/hardware/camera2/ICameraDeviceCallbacks.h>
 
 #include <android/hardware/camera/device/3.4/ICameraDeviceCallback.h>
@@ -675,8 +676,9 @@
                     states.listener->notifyPhysicalCameraChange(physicalId);
                 }
                 states.activePhysicalId = physicalId;
-
-                if (!states.legacyClient && !states.overrideToPortrait) {
+                using hardware::ICameraService::ROTATION_OVERRIDE_NONE;
+                if (!states.legacyClient &&
+                        states.rotationOverride == ROTATION_OVERRIDE_NONE) {
                     auto deviceInfo = states.physicalDeviceInfoMap.find(physicalId);
                     if (deviceInfo != states.physicalDeviceInfoMap.end()) {
                         auto orientation = deviceInfo->second.find(ANDROID_SENSOR_ORIENTATION);
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.h b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
index 75864d7..21965f5 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
@@ -154,7 +154,7 @@
         bool legacyClient;
         nsecs_t& minFrameDuration;
         bool& isFixedFps;
-        bool overrideToPortrait;
+        int rotationOverride;
         std::string &activePhysicalId;
     };
 
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
index 7f30f5e..57297bc 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
@@ -173,10 +173,10 @@
 AidlCamera3Device::AidlCamera3Device(
         std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
         std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
-        const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
+        const std::string& id, bool overrideForPerfClass, int rotationOverride,
         bool legacyClient) :
         Camera3Device(cameraServiceProxyWrapper, attributionAndPermissionUtils, id,
-                overrideForPerfClass, overrideToPortrait, legacyClient) {
+                overrideForPerfClass, rotationOverride, legacyClient) {
     mCallbacks = ndk::SharedRefBase::make<AidlCameraDeviceCallbacks>(this);
 }
 
@@ -207,7 +207,7 @@
       return INVALID_OPERATION;
     }
     res = manager->getCameraCharacteristics(mId, mOverrideForPerfClass, &mDeviceInfo,
-            mOverrideToPortrait);
+            mRotationOverride);
     if (res != OK) {
         SET_ERR_L("Could not retrieve camera characteristics: %s (%d)", strerror(-res), res);
         session->close();
@@ -223,7 +223,7 @@
             // Do not override characteristics for physical cameras
             res = manager->getCameraCharacteristics(
                     physicalId, /*overrideForPerfClass*/false, &mPhysicalDeviceInfoMap[physicalId],
-                    mOverrideToPortrait);
+                    mRotationOverride);
             if (res != OK) {
                 SET_ERR_L("Could not retrieve camera %s characteristics: %s (%d)",
                         physicalId.c_str(), strerror(-res), res);
@@ -417,7 +417,7 @@
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
         *this, *(mInterface), mLegacyClient, mMinExpectedDuration, mIsFixedFps,
-        mOverrideToPortrait, mActivePhysicalId}, mResultMetadataQueue
+        mRotationOverride, mActivePhysicalId}, mResultMetadataQueue
     };
 
     for (const auto& result : results) {
@@ -459,7 +459,7 @@
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
         *this, *(mInterface), mLegacyClient, mMinExpectedDuration, mIsFixedFps,
-        mOverrideToPortrait, mActivePhysicalId}, mResultMetadataQueue
+        mRotationOverride, mActivePhysicalId}, mResultMetadataQueue
     };
     for (const auto& msg : msgs) {
         camera3::notify(states, msg, mSensorReadoutTimestampSupported);
@@ -1480,10 +1480,10 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait,
+                int rotationOverride,
                 bool supportSettingsOverride) :
           RequestThread(parent, statusTracker, interface, sessionParamKeys,
-                  useHalBufManager, supportCameraMute, overrideToPortrait,
+                  useHalBufManager, supportCameraMute, rotationOverride,
                   supportSettingsOverride) {}
 
 status_t AidlCamera3Device::AidlRequestThread::switchToOffline(
@@ -1714,10 +1714,10 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait,
+                int rotationOverride,
                 bool supportSettingsOverride) {
     return new AidlRequestThread(parent, statusTracker, interface, sessionParamKeys,
-            useHalBufManager, supportCameraMute, overrideToPortrait,
+            useHalBufManager, supportCameraMute, rotationOverride,
             supportSettingsOverride);
 };
 
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
index ac29bbc..abc3f9c 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
@@ -42,7 +42,7 @@
     explicit AidlCamera3Device(
             std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
             std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
-            const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
+            const std::string& id, bool overrideForPerfClass, int rotationOverride,
             bool legacyClient = false);
 
     virtual ~AidlCamera3Device() { }
@@ -184,7 +184,7 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait,
+                int rotationOverride,
                 bool supportSettingsOverride);
 
         status_t switchToOffline(
@@ -275,7 +275,7 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait,
+                int rotationOverride,
                 bool supportSettingsOverride) override;
 
     virtual sp<Camera3DeviceInjectionMethods>
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
index f8308df..cc32c2a 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
@@ -29,6 +29,7 @@
 
 #include <utils/Trace.h>
 
+#include <android/hardware/ICameraService.h>
 #include <android/hardware/camera2/ICameraDeviceCallbacks.h>
 #include <android/binder_ibinder_platform.h>
 #include <camera/StringUtils.h>
@@ -127,7 +128,7 @@
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
         *this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
-        /*overrideToPortrait*/false, activePhysicalId}, mResultMetadataQueue
+        hardware::ICameraService::ROTATION_OVERRIDE_NONE, activePhysicalId}, mResultMetadataQueue
     };
 
     std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -174,7 +175,7 @@
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
         *this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
-        /*overrideToPortrait*/false, activePhysicalId}, mResultMetadataQueue
+        hardware::ICameraService::ROTATION_OVERRIDE_NONE, activePhysicalId}, mResultMetadataQueue
     };
     for (const auto& msg : msgs) {
         camera3::notify(states, msg, mSensorReadoutTimestampSupported);
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
index f2e618f..9dacaf6 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
@@ -166,7 +166,7 @@
     }
 
     res = manager->getCameraCharacteristics(mId, mOverrideForPerfClass, &mDeviceInfo,
-            /*overrideToPortrait*/false);
+            hardware::ICameraService::ROTATION_OVERRIDE_NONE);
     if (res != OK) {
         SET_ERR_L("Could not retrieve camera characteristics: %s (%d)", strerror(-res), res);
         session->close();
@@ -181,7 +181,7 @@
             // Do not override characteristics for physical cameras
             res = manager->getCameraCharacteristics(
                     physicalId, /*overrideForPerfClass*/false, &mPhysicalDeviceInfoMap[physicalId],
-                    /*overrideToPortrait*/false);
+                    hardware::ICameraService::ROTATION_OVERRIDE_NONE);
             if (res != OK) {
                 SET_ERR_L("Could not retrieve camera %s characteristics: %s (%d)",
                         physicalId.c_str(), strerror(-res), res);
@@ -370,7 +370,7 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mOverrideToPortrait,
+        *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mRotationOverride,
         mActivePhysicalId}, mResultMetadataQueue
     };
 
@@ -433,7 +433,7 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mOverrideToPortrait,
+        *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mRotationOverride,
         mActivePhysicalId}, mResultMetadataQueue
     };
 
@@ -481,7 +481,7 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mOverrideToPortrait,
+        *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mRotationOverride,
         mActivePhysicalId}, mResultMetadataQueue
     };
     for (const auto& msg : msgs) {
@@ -717,10 +717,10 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait,
+                int rotationOverride,
                 bool supportSettingsOverride) {
         return new HidlRequestThread(parent, statusTracker, interface, sessionParamKeys,
-                useHalBufManager, supportCameraMute, overrideToPortrait,
+                useHalBufManager, supportCameraMute, rotationOverride,
                 supportSettingsOverride);
 };
 
@@ -1721,10 +1721,10 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait,
+                int rotationOverride,
                 bool supportSettingsOverride) :
           RequestThread(parent, statusTracker, interface, sessionParamKeys, useHalBufManager,
-                  supportCameraMute, overrideToPortrait, supportSettingsOverride) {}
+                  supportCameraMute, rotationOverride, supportSettingsOverride) {}
 
 status_t HidlCamera3Device::HidlRequestThread::switchToOffline(
         const std::vector<int32_t>& streamsToKeep,
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
index f11db5d..bcc4d80 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
@@ -34,10 +34,10 @@
     explicit HidlCamera3Device(
         std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
         std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
-        const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
+        const std::string& id, bool overrideForPerfClass, int rotationOverride,
         bool legacyClient = false) :
         Camera3Device(cameraServiceProxyWrapper, attributionAndPermissionUtils, id,
-                overrideForPerfClass, overrideToPortrait, legacyClient) { }
+                overrideForPerfClass, rotationOverride, legacyClient) { }
 
     virtual ~HidlCamera3Device() {}
 
@@ -179,7 +179,7 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait,
+                int rotationOverride,
                 bool supportSettingsOverride);
 
         status_t switchToOffline(
@@ -232,7 +232,7 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait,
+                int rotationOverride,
                 bool supportSettingsOverride) override;
 
     virtual sp<Camera3DeviceInjectionMethods>
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
index aa4b762..c26583e 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
@@ -22,6 +22,7 @@
 
 #include <utils/Trace.h>
 
+#include <android/hardware/ICameraService.h>
 #include <android/hardware/camera2/ICameraDeviceCallbacks.h>
 #include <camera/StringUtils.h>
 
@@ -108,7 +109,7 @@
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
         mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
-        /*overrideToPortrait*/false, activePhysicalId}, mResultMetadataQueue
+        hardware::ICameraService::ROTATION_OVERRIDE_NONE, activePhysicalId}, mResultMetadataQueue
     };
 
     std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -150,7 +151,7 @@
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
         mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
-        /*overrideToPortrait*/false, activePhysicalId}, mResultMetadataQueue
+        hardware::ICameraService::ROTATION_OVERRIDE_NONE, activePhysicalId}, mResultMetadataQueue
     };
 
     std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -187,7 +188,7 @@
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
         mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
-        /*overrideToPortrait*/false, activePhysicalId}, mResultMetadataQueue
+        hardware::ICameraService::ROTATION_OVERRIDE_NONE, activePhysicalId}, mResultMetadataQueue
     };
     for (const auto& msg : msgs) {
         camera3::notify(states, msg);
diff --git a/services/camera/libcameraservice/hidl/HidlCameraService.cpp b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
index 8f25ad6..d3b2a51 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraService.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
@@ -37,6 +37,7 @@
 
 using frameworks::cameraservice::service::V2_0::implementation::HidlCameraService;
 using hardware::hidl_vec;
+using hardware::BnCameraService::ROTATION_OVERRIDE_NONE;
 using hardware::cameraservice::utils::conversion::convertToHidl;
 using hardware::cameraservice::utils::conversion::B2HStatus;
 using hardware::Void;
@@ -69,7 +70,7 @@
     HStatus status = HStatus::NO_ERROR;
     binder::Status serviceRet =
         mAidlICameraService->getCameraCharacteristics(cameraId,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, ROTATION_OVERRIDE_NONE,
                 kDefaultDeviceId, 0, &cameraMetadata);
     HCameraMetadata hidlMetadata;
     if (!serviceRet.isOk()) {
@@ -121,7 +122,7 @@
     binder::Status serviceRet = mAidlICameraService->connectDevice(
             callbacks, cameraId, std::string(), {},
             hardware::ICameraService::USE_CALLING_UID, 0/*oomScoreOffset*/,
-            /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
+            /*targetSdkVersion*/__ANDROID_API_FUTURE__, ROTATION_OVERRIDE_NONE,
             kDefaultDeviceId, /*devicePolicy*/0, /*out*/&deviceRemote);
     HStatus status = HStatus::NO_ERROR;
     if (!serviceRet.isOk()) {
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
index 778b428..bce0faf 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
@@ -40,6 +40,9 @@
 using namespace hardware;
 using namespace std;
 
+using ICameraService::ROTATION_OVERRIDE_NONE;
+using ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT;
+
 const int32_t kPreviewThreshold = 8;
 const int32_t kNumRequestsTested = 8;
 const nsecs_t kPreviewTimeout = 5000000000;  // .5 [s.]
@@ -236,12 +239,12 @@
     mCameraService->getCameraVendorTagCache(&cache);
 
     CameraInfo cameraInfo;
-    mCameraService->getCameraInfo(cameraId, /*overrideToPortrait*/false, kDefaultDeviceId,
+    mCameraService->getCameraInfo(cameraId, ROTATION_OVERRIDE_NONE, kDefaultDeviceId,
             /*devicePolicy*/0, &cameraInfo);
 
     CameraMetadata metadata;
     mCameraService->getCameraCharacteristics(cameraIdStr,
-            /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
+            /*targetSdkVersion*/__ANDROID_API_FUTURE__, ROTATION_OVERRIDE_NONE,
             kDefaultDeviceId, /*devicePolicy*/0, &metadata);
 }
 
@@ -350,7 +353,8 @@
                                  android::CameraService::USE_CALLING_UID,
                                  android::CameraService::USE_CALLING_PID,
                                  /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
-                                 /*overrideToPortrait*/true, /*forceSlowJpegMode*/false,
+                                 ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT,
+                                 /*forceSlowJpegMode*/false,
                                  kDefaultDeviceId, /*devicePolicy*/0, &cameraDevice);
     if (!rc.isOk()) {
         // camera not connected
@@ -588,7 +592,8 @@
         sp<hardware::camera2::ICameraDeviceUser> device;
         mCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
                 android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/true,
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT,
                 kDefaultDeviceId, /*devicePolicy*/0, &device);
         if (device == nullptr) {
             continue;
diff --git a/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
index 30b4691..cf86a05 100644
--- a/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
@@ -230,7 +230,8 @@
         binder::Status status =
                 sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
                 android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                hardware::ICameraService::ROTATION_OVERRIDE_NONE,
                 kDefaultDeviceId, /*devicePolicy*/0, &device);
         AutoDisconnectDevice autoDisconnect(device);
         ASSERT_TRUE(!status.isOk()) << "connectDevice returned OK status";
@@ -245,7 +246,8 @@
         binder::Status status =
                 sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
                 android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                hardware::ICameraService::ROTATION_OVERRIDE_NONE,
                 kDefaultDeviceId, /*devicePolicy*/0, &device);
         AutoDisconnectDevice autoDisconnect(device);
         ASSERT_TRUE(status.isOk());
@@ -265,7 +267,8 @@
         binder::Status status =
                 sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
                 android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                hardware::ICameraService::ROTATION_OVERRIDE_NONE,
                 kDefaultDeviceId, /*devicePolicy*/0, &deviceA);
         AutoDisconnectDevice autoDisconnectA(deviceA);
         ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
@@ -273,7 +276,8 @@
         status =
                 sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
                 android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                hardware::ICameraService::ROTATION_OVERRIDE_NONE,
                 kDefaultDeviceId, /*devicePolicy*/0, &deviceB);
         AutoDisconnectDevice autoDisconnectB(deviceB);
         ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
@@ -295,7 +299,8 @@
         binder::Status status =
                 sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
                 android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                hardware::ICameraService::ROTATION_OVERRIDE_NONE,
                 kDefaultDeviceId, /*devicePolicy*/0, &deviceA);
         AutoDisconnectDevice autoDisconnectA(deviceA);
         ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
@@ -303,7 +308,8 @@
         status =
                 sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
                 android::CameraService::USE_CALLING_UID, 1/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                hardware::ICameraService::ROTATION_OVERRIDE_NONE,
                 kDefaultDeviceId, /*devicePolicy*/0, &deviceB);
         AutoDisconnectDevice autoDisconnectB(deviceB);
         ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index a7a2b5e..40ca276 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -21,6 +21,7 @@
 #include "../api2/HeicCompositeStream.h"
 #include "aidl/android/hardware/graphics/common/Dataspace.h"
 #include "api2/JpegRCompositeStream.h"
+#include "binder/Status.h"
 #include "common/CameraDeviceBase.h"
 #include "common/HalConversionsTemplated.h"
 #include "../CameraService.h"
@@ -679,6 +680,67 @@
     stream->useCase = static_cast<StreamUseCases>(streamInfo.streamUseCase);
 }
 
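+// Maps a single output stream into the HAL stream configuration. Composite
+// streams (depth, HEIC, Jpeg/R) may expand into several internal streams;
+// *streamIdx is advanced for every stream written and *earlyExit is set when
+// the composite stream is not supported so the caller can return early.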
+binder::Status mapStream(const OutputStreamInfo& streamInfo, bool isCompositeJpegRDisabled,
+        const CameraMetadata& deviceInfo, camera_stream_rotation_t rotation,
+        size_t* streamIdx/*out*/, const std::string &physicalId, int32_t groupId,
+        const std::string& logicalCameraId,
+        aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration /*out*/,
+        bool *earlyExit /*out*/) {
+    bool isDepthCompositeStream =
+            camera3::DepthCompositeStream::isDepthCompositeStreamInfo(streamInfo);
+    bool isHeicCompositeStream =
+            camera3::HeicCompositeStream::isHeicCompositeStreamInfo(streamInfo);
+    bool isJpegRCompositeStream =
+            camera3::JpegRCompositeStream::isJpegRCompositeStreamInfo(streamInfo) &&
+            !isCompositeJpegRDisabled;
+    if (isDepthCompositeStream || isHeicCompositeStream || isJpegRCompositeStream) {
+        // We need to take into account that composite streams can have
+        // additional internal camera streams.
+        std::vector<OutputStreamInfo> compositeStreams;
+        status_t ret;
+        if (isDepthCompositeStream) {
+            // TODO: Take care of composite streams.
+            ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
+                    deviceInfo, &compositeStreams);
+        } else if (isHeicCompositeStream) {
+            ret = camera3::HeicCompositeStream::getCompositeStreamInfo(streamInfo,
+                deviceInfo, &compositeStreams);
+        } else {
+            ret = camera3::JpegRCompositeStream::getCompositeStreamInfo(streamInfo,
+                deviceInfo, &compositeStreams);
+        }
+
+        if (ret != OK) {
+            std::string msg = fmt::sprintf(
+                    "Camera %s: Failed adding composite streams: %s (%d)",
+                    logicalCameraId.c_str(), strerror(-ret), ret);
+            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+        }
+
+        if (compositeStreams.size() == 0) {
+            // No internal streams means composite stream not
+            // supported.
+            *earlyExit = true;
+            return binder::Status::ok();
+        } else if (compositeStreams.size() > 1) {
+            size_t streamCount = streamConfiguration.streams.size() + compositeStreams.size() - 1;
+            streamConfiguration.streams.resize(streamCount);
+        }
+
+        for (const auto& compositeStream : compositeStreams) {
+            mapStreamInfo(compositeStream, rotation,
+                    physicalId, groupId,
+                    &streamConfiguration.streams[(*streamIdx)++]);
+        }
+    } else {
+        mapStreamInfo(streamInfo, rotation,
+                physicalId, groupId, &streamConfiguration.streams[(*streamIdx)++]);
+    }
+
+    return binder::Status::ok();
+}
+
 binder::Status
 convertToHALStreamCombination(
         const SessionConfiguration& sessionConfiguration,
@@ -831,8 +893,13 @@
                                 "Deferred surface sensor pixel modes not valid");
             }
             streamInfo.streamUseCase = streamUseCase;
-            mapStreamInfo(streamInfo, camera3::CAMERA_STREAM_ROTATION_0, physicalCameraId, groupId,
-                    &streamConfiguration.streams[streamIdx++]);
+            auto status = mapStream(streamInfo, isCompositeJpegRDisabled, deviceInfo,
+                    camera3::CAMERA_STREAM_ROTATION_0, &streamIdx, physicalCameraId, groupId,
+                    logicalCameraId, streamConfiguration, earlyExit);
+            if (*earlyExit || !status.isOk()) {
+                return status;
+            }
+
             isStreamInfoValid = true;
 
             if (numBufferProducers == 0) {
@@ -851,57 +918,11 @@
                 return res;
 
             if (!isStreamInfoValid) {
-                bool isDepthCompositeStream =
-                        camera3::DepthCompositeStream::isDepthCompositeStream(surface);
-                bool isHeicCompositeStream =
-                        camera3::HeicCompositeStream::isHeicCompositeStream(surface);
-                bool isJpegRCompositeStream =
-                        camera3::JpegRCompositeStream::isJpegRCompositeStream(surface) &&
-                        !isCompositeJpegRDisabled;
-                if (isDepthCompositeStream || isHeicCompositeStream || isJpegRCompositeStream) {
-                    // We need to take in to account that composite streams can have
-                    // additional internal camera streams.
-                    std::vector<OutputStreamInfo> compositeStreams;
-                    if (isDepthCompositeStream) {
-                      // TODO: Take care of composite streams.
-                        ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
-                                deviceInfo, &compositeStreams);
-                    } else if (isHeicCompositeStream) {
-                        ret = camera3::HeicCompositeStream::getCompositeStreamInfo(streamInfo,
-                            deviceInfo, &compositeStreams);
-                    } else {
-                        ret = camera3::JpegRCompositeStream::getCompositeStreamInfo(streamInfo,
-                            deviceInfo, &compositeStreams);
-                    }
-
-                    if (ret != OK) {
-                        std::string msg = fmt::sprintf(
-                                "Camera %s: Failed adding composite streams: %s (%d)",
-                                logicalCameraId.c_str(), strerror(-ret), ret);
-                        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
-                        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
-                    }
-
-                    if (compositeStreams.size() == 0) {
-                        // No internal streams means composite stream not
-                        // supported.
-                        *earlyExit = true;
-                        return binder::Status::ok();
-                    } else if (compositeStreams.size() > 1) {
-                        streamCount += compositeStreams.size() - 1;
-                        streamConfiguration.streams.resize(streamCount);
-                    }
-
-                    for (const auto& compositeStream : compositeStreams) {
-                        mapStreamInfo(compositeStream,
-                                static_cast<camera_stream_rotation_t> (it.getRotation()),
-                                physicalCameraId, groupId,
-                                &streamConfiguration.streams[streamIdx++]);
-                    }
-                } else {
-                    mapStreamInfo(streamInfo,
-                            static_cast<camera_stream_rotation_t> (it.getRotation()),
-                            physicalCameraId, groupId, &streamConfiguration.streams[streamIdx++]);
+                auto status = mapStream(streamInfo, isCompositeJpegRDisabled, deviceInfo,
+                        static_cast<camera_stream_rotation_t> (it.getRotation()), &streamIdx,
+                        physicalCameraId, groupId, logicalCameraId, streamConfiguration, earlyExit);
+                if (*earlyExit || !status.isOk()) {
+                    return status;
                 }
                 isStreamInfoValid = true;
             }
diff --git a/services/camera/libcameraservice/utils/Utils.cpp b/services/camera/libcameraservice/utils/Utils.cpp
index c8f5e86..76517dc 100644
--- a/services/camera/libcameraservice/utils/Utils.cpp
+++ b/services/camera/libcameraservice/utils/Utils.cpp
@@ -14,22 +14,27 @@
  * limitations under the License.
  */
 
+#define LOG_TAG "Camera3-Utils"
+
 #include "Utils.h"
 #include <android-base/properties.h>
 #include <com_android_internal_camera_flags.h>
-
+#include <utils/Errors.h>
+#include <utils/Log.h>
+#include <vendorsupport/api_level.h>
 
 namespace android {
 
-using namespace com::android::internal::camera::flags;
+namespace flags = com::android::internal::camera::flags;
 
-constexpr const char *LEGACY_VNDK_VERSION_PROP = "ro.vndk.version";
-constexpr const char *BOARD_API_LEVEL_PROP = "ro.board.api_level";
+namespace {
+constexpr const char* LEGACY_VNDK_VERSION_PROP = "ro.vndk.version";
+constexpr const char* BOARD_API_LEVEL_PROP = "ro.board.api_level";
 constexpr int MAX_VENDOR_API_LEVEL = 1000000;
 constexpr int FIRST_VNDK_VERSION = 202404;
 
-int getVNDKVersionFromProp(int defaultVersion) {
-    if (!com_android_internal_camera_flags_use_ro_board_api_level_for_vndk_version()) {
+int legacyGetVNDKVersionFromProp(int defaultVersion) {
+    if (!flags::use_ro_board_api_level_for_vndk_version()) {
         return base::GetIntProperty(LEGACY_VNDK_VERSION_PROP, defaultVersion);
     }
 
@@ -51,5 +56,56 @@
     vndkVersion = (vndkVersion - FIRST_VNDK_VERSION) / 100;
     return __ANDROID_API_V__ + vndkVersion;
 }
+}  // anonymous namespace
 
-} // namespace android
+int getVNDKVersionFromProp(int defaultVersion) {
+    if (!flags::use_system_api_for_vndk_version()) {
+        return legacyGetVNDKVersionFromProp(defaultVersion);
+    }
+
+    int vendorApiLevel = AVendorSupport_getVendorApiLevel();
+    if (vendorApiLevel == 0) {
+        // Couldn't find vendor API level, return default
+        return defaultVersion;
+    }
+
+    // Vendor API levels for Android V and above are of the format YYYYMM, starting with 202404.
+    // AVendorSupport_getSdkApiLevelOf maps them back to SDK API levels while leaving older
+    // values unchanged.
+    return AVendorSupport_getSdkApiLevelOf(vendorApiLevel);
+}
+
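+// When the realtime_priority_bump flag is enabled, saves the thread's current
+// scheduling parameters and switches it to SCHED_FIFO with
+// kRequestThreadPriority; the destructor restores the previous policy.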
+RunThreadWithRealtimePriority::RunThreadWithRealtimePriority(int tid)
+    : mTid(tid), mPreviousPolicy(sched_getscheduler(tid)) {
+    if (flags::realtime_priority_bump()) {
+        auto res = sched_getparam(mTid, &mPreviousParams);
+        if (res != OK) {
+            ALOGE("Can't retrieve thread scheduler parameters: %s (%d)", strerror(-res), res);
+            return;
+        }
+
+        struct sched_param param = {0};
+        param.sched_priority = kRequestThreadPriority;
+
+        res = sched_setscheduler(mTid, SCHED_FIFO, &param);
+        if (res != OK) {
+            ALOGW("Can't set realtime priority for thread: %s (%d)", strerror(-res), res);
+        } else {
+            ALOGD("Set real time priority for thread (tid %d)", mTid);
+            mPolicyBumped = true;
+        }
+    }
+}
+
+RunThreadWithRealtimePriority::~RunThreadWithRealtimePriority() {
+    if (mPolicyBumped && flags::realtime_priority_bump()) {
+        auto res = sched_setscheduler(mTid, mPreviousPolicy, &mPreviousParams);
+        if (res != OK) {
+            ALOGE("Can't set regular priority for thread: %s (%d)", strerror(-res), res);
+        } else {
+            ALOGD("Set regular priority for thread (tid %d)", mTid);
+        }
+    }
+}
+
+}  // namespace android
diff --git a/services/camera/libcameraservice/utils/Utils.h b/services/camera/libcameraservice/utils/Utils.h
index f8a107d..4e90871 100644
--- a/services/camera/libcameraservice/utils/Utils.h
+++ b/services/camera/libcameraservice/utils/Utils.h
@@ -17,6 +17,9 @@
 #ifndef ANDROID_SERVERS_CAMERA_UTILS_H
 #define ANDROID_SERVERS_CAMERA_UTILS_H
 
+#include <sched.h>
+#include <unistd.h>
+
 namespace android {
 
 /**
@@ -28,6 +31,40 @@
  */
 int getVNDKVersionFromProp(int defaultVersion);
 
+/**
+ * An instance of this class raises the scheduling policy of a given thread
+ * to real time and keeps it that way throughout the lifetime of the object.
+ * The thread scheduling policy reverts to its original state once the
+ * instance is released. By default the implementation raises the priority
+ * of the current thread unless clients explicitly specify another
+ * thread id.
+ * Clients must avoid:
+ *  - Keeping an instance of this class for extended and long-running operations.
+ *    This is only intended for short/temporary priority bumps that mitigate
+ *    scheduling delays within critical camera paths.
+ *  - Allocating instances of this class on the memory heap unless clients have
+ *    complete control over the object lifetime. It is preferable to allocate
+ *    instances of this class on the stack instead.
+ *  - Nesting multiple instances of this class using the same default or same thread id.
+ */
+class RunThreadWithRealtimePriority final {
+  public:
+    RunThreadWithRealtimePriority(int tid = gettid());
+    ~RunThreadWithRealtimePriority();
+
+    RunThreadWithRealtimePriority(const RunThreadWithRealtimePriority&) = delete;
+    RunThreadWithRealtimePriority& operator=(const RunThreadWithRealtimePriority&) = delete;
+
+    // SCHED_FIFO priority for request submission thread in HFR mode
+    static const int kRequestThreadPriority = 1;
+
+  private:
+    int mTid;
+    int mPreviousPolicy;
+    bool mPolicyBumped = false;
+    struct sched_param mPreviousParams;
+};
+
 } // namespace android
 
 #endif //ANDROID_SERVERS_CAMERA_UTILS_H
diff --git a/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.cpp b/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.cpp
index 1d0c5a2..22dd806 100644
--- a/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.cpp
+++ b/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.cpp
@@ -29,48 +29,46 @@
 void VirtualDeviceCameraIdMapper::addCamera(const std::string& cameraId,
         int32_t deviceId, const std::string& mappedCameraId) {
     if (!vd_flags::camera_device_awareness()) {
-        ALOGD("%s: Device-aware camera feature is not enabled", __func__);
+        ALOGV("%s: Device-aware camera feature is not enabled", __func__);
         return;
     }
 
     if (deviceId == kDefaultDeviceId) {
-        ALOGD("%s: Not adding entry for a camera of the default device", __func__);
+        ALOGV("%s: Not adding entry for a camera of the default device", __func__);
         return;
     }
 
-    ALOGD("%s: Adding camera %s for device %d with mapped id %s", __func__, cameraId.c_str(),
+    ALOGV("%s: Adding camera %s for device %d with mapped id %s", __func__, cameraId.c_str(),
           deviceId, mappedCameraId.c_str());
 
     std::scoped_lock lock(mLock);
     mDeviceIdMappedCameraIdPairToCameraIdMap[{deviceId, mappedCameraId}] = cameraId;
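+    // Also record the reverse mapping so lookups keyed by the HAL-visible
+    // camera id do not need a linear scan.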
+    mCameraIdToDeviceIdMappedCameraIdPairMap[cameraId] = {deviceId, mappedCameraId};
 }
 
 void VirtualDeviceCameraIdMapper::removeCamera(const std::string& cameraId) {
     if (!vd_flags::camera_device_awareness()) {
-        ALOGD("%s: Device-aware camera feature is not enabled", __func__);
+        ALOGV("%s: Device-aware camera feature is not enabled", __func__);
         return;
     }
 
+    auto deviceIdAndMappedCameraIdPair = getDeviceIdAndMappedCameraIdPair(cameraId);
+
     std::scoped_lock lock(mLock);
-    for (auto it = mDeviceIdMappedCameraIdPairToCameraIdMap.begin();
-         it != mDeviceIdMappedCameraIdPairToCameraIdMap.end(); ++it) {
-        if (it->first.second == cameraId) {
-            mDeviceIdMappedCameraIdPairToCameraIdMap.erase(it);
-            return;
-        }
-    }
+    mCameraIdToDeviceIdMappedCameraIdPairMap.erase(cameraId);
+    mDeviceIdMappedCameraIdPairToCameraIdMap.erase(deviceIdAndMappedCameraIdPair);
 }
 
 std::optional<std::string> VirtualDeviceCameraIdMapper::getActualCameraId(
         int32_t deviceId, const std::string& mappedCameraId) const {
     if (deviceId == kDefaultDeviceId) {
-        ALOGD("%s: Returning the camera id as the mapped camera id for camera %s, as it "
+        ALOGV("%s: Returning the camera id as the mapped camera id for camera %s, as it "
               "belongs to the default device", __func__, mappedCameraId.c_str());
         return mappedCameraId;
     }
 
     if (!vd_flags::camera_device_awareness()) {
-        ALOGD("%s: Device-aware camera feature is not enabled, returning the camera id as "
+        ALOGV("%s: Device-aware camera feature is not enabled, returning the camera id as "
               "the mapped camera id for camera %s", __func__, mappedCameraId.c_str());
         return mappedCameraId;
     }
@@ -79,7 +77,7 @@
     auto iterator = mDeviceIdMappedCameraIdPairToCameraIdMap.find(
             {deviceId, mappedCameraId});
     if (iterator == mDeviceIdMappedCameraIdPairToCameraIdMap.end()) {
-        ALOGW("%s: No entry found for device id %d and mapped camera id %s", __func__,
+        ALOGV("%s: No entry found for device id %d and mapped camera id %s", __func__,
               deviceId, mappedCameraId.c_str());
         return std::nullopt;
     }
@@ -89,19 +87,17 @@
 std::pair<int32_t, std::string> VirtualDeviceCameraIdMapper::getDeviceIdAndMappedCameraIdPair(
         const std::string& cameraId) const {
     if (!vd_flags::camera_device_awareness()) {
-        ALOGD("%s: Device-aware camera feature is not enabled", __func__);
+        ALOGV("%s: Device-aware camera feature is not enabled", __func__);
         return std::make_pair(kDefaultDeviceId, cameraId);
     }
 
     std::scoped_lock lock(mLock);
-    for (const auto& [deviceIdMappedCameraIdPair, actualCameraId]
-            : mDeviceIdMappedCameraIdPairToCameraIdMap) {
-        if (actualCameraId == cameraId) {
-            return deviceIdMappedCameraIdPair;
-        }
+    auto iterator = mCameraIdToDeviceIdMappedCameraIdPairMap.find(cameraId);
+    if (iterator != mCameraIdToDeviceIdMappedCameraIdPairMap.end()) {
+        return iterator->second;
     }
     ALOGV("%s: No device id and mapped camera id found for camera id %s, so it must belong "
-            "to the default device ? ", __func__, cameraId.c_str());
+            "to the default device?", __func__, cameraId.c_str());
     return std::make_pair(kDefaultDeviceId, cameraId);
 }
 
@@ -124,7 +120,7 @@
 std::optional<std::string> VirtualDeviceCameraIdMapper::getActualCameraId(
         int api1CameraId, int32_t deviceId) const {
     if (!vd_flags::camera_device_awareness()) {
-        ALOGD("%s: Device-aware camera feature is not enabled", __func__);
+        ALOGV("%s: Device-aware camera feature is not enabled", __func__);
         return std::nullopt;
     }
 
@@ -139,7 +135,7 @@
             matchingCameraIndex++;
         }
     }
-    ALOGW("%s: No entry found for device id %d and API 1 camera id %d", __func__,
+    ALOGV("%s: No entry found for device id %d and API 1 camera id %d", __func__,
           deviceId, api1CameraId);
     return std::nullopt;
 }
diff --git a/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.h b/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.h
index 96c0cb4..fdfde23 100644
--- a/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.h
+++ b/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.h
@@ -79,6 +79,9 @@
     // Map of (deviceId, app-visible cameraId) -> HAL-visible cameraId
     std::map<std::pair<int32_t, std::string>, std::string>
             mDeviceIdMappedCameraIdPairToCameraIdMap GUARDED_BY(mLock);
+    // Map of HAL-visible cameraId -> (deviceId, app-visible cameraId)
+    std::map<std::string, std::pair<int32_t, std::string>>
+            mCameraIdToDeviceIdMappedCameraIdPairMap GUARDED_BY(mLock);
 };
 
 } // namespace android
diff --git a/services/camera/virtualcamera/Android.bp b/services/camera/virtualcamera/Android.bp
index 90530f6..fc186fb 100644
--- a/services/camera/virtualcamera/Android.bp
+++ b/services/camera/virtualcamera/Android.bp
@@ -50,7 +50,6 @@
         "util/JpegUtil.cc",
         "util/MetadataUtil.cc",
         "util/Util.cc",
-        "util/TestPatternHelper.cc",
         "util/EglDisplayContext.cc",
         "util/EglFramebuffer.cc",
         "util/EglProgram.cc",
@@ -72,6 +71,7 @@
         "VirtualCameraStream.cc",
         "VirtualCameraService.cc",
         "VirtualCameraSessionContext.cc",
+        "VirtualCameraTestInstance.cc",
         "VirtualCameraRenderThread.cc",
     ],
     defaults: [
diff --git a/services/camera/virtualcamera/VirtualCameraDevice.cc b/services/camera/virtualcamera/VirtualCameraDevice.cc
index fe9e0ed..ba4ea6b 100644
--- a/services/camera/virtualcamera/VirtualCameraDevice.cc
+++ b/services/camera/virtualcamera/VirtualCameraDevice.cc
@@ -73,13 +73,14 @@
 
 constexpr int32_t kMaxJpegSize = 3 * 1024 * 1024 /*3MiB*/;
 
-constexpr int32_t kMinFps = 15;
-
 constexpr std::chrono::nanoseconds kMaxFrameDuration =
-    std::chrono::duration_cast<std::chrono::nanoseconds>(1e9ns / kMinFps);
+    std::chrono::duration_cast<std::chrono::nanoseconds>(
+        1e9ns / VirtualCameraDevice::kMinFps);
 
 constexpr uint8_t kPipelineMaxDepth = 2;
 
+constexpr int k30Fps = 30;
+
 constexpr MetadataBuilder::ControlRegion kDefaultEmptyControlRegion{};
 
 const std::array<Resolution, 5> kStandardJpegThumbnailSizes{
@@ -130,16 +131,20 @@
   std::set<FpsRange> availableRanges;
 
   for (const SupportedStreamConfiguration& config : configs) {
-    availableRanges.insert({.minFps = kMinFps, .maxFps = config.maxFps});
+    availableRanges.insert(
+        {.minFps = VirtualCameraDevice::kMinFps, .maxFps = config.maxFps});
     availableRanges.insert({.minFps = config.maxFps, .maxFps = config.maxFps});
   }
 
   if (std::any_of(configs.begin(), configs.end(),
                   [](const SupportedStreamConfiguration& config) {
-                    return config.maxFps >= 30;
+                    return config.maxFps >= k30Fps;
                   })) {
-    availableRanges.insert({.minFps = kMinFps, .maxFps = 30});
-    availableRanges.insert({.minFps = 30, .maxFps = 30});
+    // Extend the set of available ranges with (minFps <= 15, 30) & (30, 30) as
+    // required by CDD.
+    availableRanges.insert(
+        {.minFps = VirtualCameraDevice::kMinFps, .maxFps = k30Fps});
+    availableRanges.insert({.minFps = k30Fps, .maxFps = k30Fps});
   }
 
   return std::vector<FpsRange>(availableRanges.begin(), availableRanges.end());
diff --git a/services/camera/virtualcamera/VirtualCameraDevice.h b/services/camera/virtualcamera/VirtualCameraDevice.h
index cba0674..53dcc4d 100644
--- a/services/camera/virtualcamera/VirtualCameraDevice.h
+++ b/services/camera/virtualcamera/VirtualCameraDevice.h
@@ -126,6 +126,9 @@
   // Default JPEG orientation.
   static constexpr uint8_t kDefaultJpegOrientation = 0;
 
+  // Lowest min fps advertised in supported fps ranges.
+  static constexpr int kMinFps = 1;
+
   // Default Make and Model for Exif
   static constexpr char kDefaultMakeAndModel[] = "Android Virtual Camera";
 
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.cc b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
index ca62cce..9a5bd1e 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.cc
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
@@ -14,7 +14,6 @@
  * limitations under the License.
  */
 
-#include "hardware/gralloc.h"
 #define LOG_TAG "VirtualCameraRenderThread"
 #include "VirtualCameraRenderThread.h"
 
@@ -45,13 +44,13 @@
 #include "android-base/thread_annotations.h"
 #include "android/binder_auto_utils.h"
 #include "android/hardware_buffer.h"
+#include "hardware/gralloc.h"
 #include "system/camera_metadata.h"
 #include "ui/GraphicBuffer.h"
 #include "ui/Rect.h"
 #include "util/EglFramebuffer.h"
 #include "util/JpegUtil.h"
 #include "util/MetadataUtil.h"
-#include "util/TestPatternHelper.h"
 #include "util/Util.h"
 #include "utils/Errors.h"
 
@@ -278,6 +277,16 @@
   return app1Data;
 }
 
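+// Returns the longest frame duration (i.e. the longest time we may wait for a
+// new input frame), derived from the requested minimum fps, or from
+// VirtualCameraDevice::kMinFps when the request carries no fps range.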
+std::chrono::nanoseconds getMaxFrameDuration(
+    const RequestSettings& requestSettings) {
+  if (requestSettings.fpsRange.has_value()) {
+    return std::chrono::nanoseconds(static_cast<uint64_t>(
+        1e9 / std::max(1, requestSettings.fpsRange->minFps)));
+  }
+  return std::chrono::nanoseconds(
+      static_cast<uint64_t>(1e9 / VirtualCameraDevice::kMinFps));
+}
+
 }  // namespace
 
 CaptureRequestBuffer::CaptureRequestBuffer(int streamId, int bufferId,
@@ -300,12 +309,12 @@
 VirtualCameraRenderThread::VirtualCameraRenderThread(
     VirtualCameraSessionContext& sessionContext,
     const Resolution inputSurfaceSize, const Resolution reportedSensorSize,
-    std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback, bool testMode)
+    std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback)
     : mCameraDeviceCallback(cameraDeviceCallback),
       mInputSurfaceSize(inputSurfaceSize),
       mReportedSensorSize(reportedSensorSize),
-      mTestMode(testMode),
-      mSessionContext(sessionContext) {
+      mSessionContext(sessionContext),
+      mInputSurfaceFuture(mInputSurfacePromise.get_future()) {
 }
 
 VirtualCameraRenderThread::~VirtualCameraRenderThread() {
@@ -365,7 +374,7 @@
 }
 
 sp<Surface> VirtualCameraRenderThread::getInputSurface() {
-  return mInputSurfacePromise.get_future().get();
+  return mInputSurfaceFuture.get();
 }
 
 std::unique_ptr<ProcessCaptureRequestTask>
@@ -401,10 +410,6 @@
   mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(
       mInputSurfaceSize.width, mInputSurfaceSize.height);
 
-  sp<Surface> inputSurface = mEglSurfaceTexture->getSurface();
-  if (mTestMode) {
-    inputSurface->connect(NATIVE_WINDOW_API_CPU, false, nullptr);
-  }
   mInputSurfacePromise.set_value(mEglSurfaceTexture->getSurface());
 
   while (std::unique_ptr<ProcessCaptureRequestTask> task = dequeueTask()) {
@@ -422,9 +427,64 @@
 
 void VirtualCameraRenderThread::processCaptureRequest(
     const ProcessCaptureRequestTask& request) {
-  const std::chrono::nanoseconds timestamp =
+  std::chrono::nanoseconds timestamp =
       std::chrono::duration_cast<std::chrono::nanoseconds>(
           std::chrono::steady_clock::now().time_since_epoch());
+  const std::chrono::nanoseconds lastAcquisitionTimestamp(
+      mLastAcquisitionTimestampNanoseconds.exchange(timestamp.count(),
+                                                    std::memory_order_relaxed));
+
+  if (request.getRequestSettings().fpsRange) {
+    const int maxFps =
+        std::max(1, request.getRequestSettings().fpsRange->maxFps);
+    const std::chrono::nanoseconds minFrameDuration(
+        static_cast<uint64_t>(1e9 / maxFps));
+    const std::chrono::nanoseconds frameDuration =
+        timestamp - lastAcquisitionTimestamp;
+    if (frameDuration < minFrameDuration) {
+      // We're too fast for the configured maxFps, let's wait a bit.
+      const std::chrono::nanoseconds sleepTime =
+          minFrameDuration - frameDuration;
+      ALOGV("Current frame duration would be %" PRIu64
+            " ns, sleeping for %" PRIu64
+            " ns before updating texture to match maxFps %d",
+            static_cast<uint64_t>(frameDuration.count()),
+            static_cast<uint64_t>(sleepTime.count()), maxFps);
+
+      std::this_thread::sleep_for(sleepTime);
+      timestamp = std::chrono::duration_cast<std::chrono::nanoseconds>(
+          std::chrono::steady_clock::now().time_since_epoch());
+      mLastAcquisitionTimestampNanoseconds.store(timestamp.count(),
+                                                 std::memory_order_relaxed);
+    }
+  }
+
+  // Calculate the maximum amount of time we can afford to wait for the next frame.
+  const std::chrono::nanoseconds maxFrameDuration =
+      getMaxFrameDuration(request.getRequestSettings());
+  const std::chrono::nanoseconds elapsedDuration =
+      timestamp - lastAcquisitionTimestamp;
+  if (elapsedDuration < maxFrameDuration) {
+    // We can afford to wait for the next frame.
+    // Note that if there's already a new frame in the input Surface, the call
+    // below returns immediately.
+    bool gotNewFrame = mEglSurfaceTexture->waitForNextFrame(maxFrameDuration -
+                                                            elapsedDuration);
+    timestamp = std::chrono::duration_cast<std::chrono::nanoseconds>(
+        std::chrono::steady_clock::now().time_since_epoch());
+    if (!gotNewFrame) {
+      ALOGV(
+          "%s: No new frame received on input surface after waiting for "
+          "%" PRIu64 "ns, repeating last frame.",
+          __func__,
+          static_cast<uint64_t>((timestamp - lastAcquisitionTimestamp).count()));
+    }
+    mLastAcquisitionTimestampNanoseconds.store(timestamp.count(),
+                                               std::memory_order_relaxed);
+  }
+  // Acquire new (most recent) image from the Surface.
+  mEglSurfaceTexture->updateTexture();
 
   CaptureResult captureResult;
   captureResult.fmqResultSize = 0;
@@ -439,14 +499,6 @@
   const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
   captureResult.outputBuffers.resize(buffers.size());
 
-  if (mTestMode) {
-    // In test mode let's just render something to the Surface ourselves.
-    renderTestPatternYCbCr420(mEglSurfaceTexture->getSurface(),
-                              request.getFrameNumber());
-  }
-
-  mEglSurfaceTexture->updateTexture();
-
   for (int i = 0; i < buffers.size(); ++i) {
     const CaptureRequestBuffer& reqBuffer = buffers[i];
     StreamBuffer& resBuffer = captureResult.outputBuffers[i];
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.h b/services/camera/virtualcamera/VirtualCameraRenderThread.h
index e222d5b..b23c30c 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.h
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.h
@@ -17,6 +17,7 @@
 #ifndef ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERARENDERTHREAD_H
 #define ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERARENDERTHREAD_H
 
+#include <atomic>
 #include <cstdint>
 #include <deque>
 #include <future>
@@ -110,8 +111,7 @@
       Resolution reportedSensorSize,
       std::shared_ptr<
           ::aidl::android::hardware::camera::device::ICameraDeviceCallback>
-          cameraDeviceCallback,
-      bool testMode = false);
+          cameraDeviceCallback);
 
   ~VirtualCameraRenderThread();
 
@@ -183,7 +183,6 @@
 
   const Resolution mInputSurfaceSize;
   const Resolution mReportedSensorSize;
-  const int mTestMode;
 
   VirtualCameraSessionContext& mSessionContext;
 
@@ -195,6 +194,9 @@
   std::condition_variable mCondVar;
   volatile bool mPendingExit GUARDED_BY(mLock);
 
+  // Acquisition timestamp of last frame.
+  std::atomic<uint64_t> mLastAcquisitionTimestampNanoseconds;
+
   // EGL helpers - constructed and accessed only from rendering thread.
   std::unique_ptr<EglDisplayContext> mEglDisplayContext;
   std::unique_ptr<EglTextureProgram> mEglTextureYuvProgram;
@@ -202,6 +204,7 @@
   std::unique_ptr<EglSurfaceTexture> mEglSurfaceTexture;
 
   std::promise<sp<Surface>> mInputSurfacePromise;
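+  // Shared future backed by mInputSurfacePromise so that getInputSurface()
+  // can be queried repeatedly once the render thread has provided the surface.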
+  std::shared_future<sp<Surface>> mInputSurfaceFuture;
 };
 
 }  // namespace virtualcamera
diff --git a/services/camera/virtualcamera/VirtualCameraService.cc b/services/camera/virtualcamera/VirtualCameraService.cc
index b5b07f0..724ec62 100644
--- a/services/camera/virtualcamera/VirtualCameraService.cc
+++ b/services/camera/virtualcamera/VirtualCameraService.cc
@@ -30,10 +30,12 @@
 
 #include "VirtualCameraDevice.h"
 #include "VirtualCameraProvider.h"
+#include "VirtualCameraTestInstance.h"
 #include "aidl/android/companion/virtualcamera/Format.h"
 #include "aidl/android/companion/virtualcamera/LensFacing.h"
 #include "aidl/android/companion/virtualcamera/VirtualCameraConfiguration.h"
 #include "android/binder_auto_utils.h"
+#include "android/binder_interface_utils.h"
 #include "android/binder_libbinder.h"
 #include "android/binder_status.h"
 #include "binder/Status.h"
@@ -64,6 +66,7 @@
 constexpr int kVgaWidth = 640;
 constexpr int kVgaHeight = 480;
 constexpr int kMaxFps = 60;
+constexpr int kTestCameraDefaultInputFps = 30;
 constexpr char kEnableTestCameraCmd[] = "enable_test_camera";
 constexpr char kDisableTestCameraCmd[] = "disable_test_camera";
 constexpr char kHelp[] = "help";
@@ -75,6 +78,7 @@
      Options:
        --camera_id=(ID) - override numerical ID for test camera instance
        --lens_facing=(front|back|external) - specifies lens facing for test camera instance
+       --input_fps=(fps) - specifies input fps for test camera, valid values are from 1 to 1000
  * disable_test_camera
 )";
 constexpr char kCreateVirtualDevicePermission[] =
@@ -94,6 +98,13 @@
         Status::EX_ILLEGAL_ARGUMENT);
   }
 
+  if (configuration.virtualCameraCallback == nullptr) {
+    ALOGE("%s: Input configuration is missing virtual camera callback",
+          __func__);
+    return ndk::ScopedAStatus::fromServiceSpecificError(
+        Status::EX_ILLEGAL_ARGUMENT);
+  }
+
   for (const SupportedStreamConfiguration& config :
        configuration.supportedStreamConfigs) {
     if (!isFormatSupportedForInput(config.width, config.height,
@@ -411,6 +422,18 @@
     }
   }
 
+  std::optional<int> inputFps;
+  it = options.find("input_fps");
+  if (it != options.end()) {
+    inputFps = parseInt(it->second);
+    if (!inputFps.has_value() || inputFps.value() < 1 ||
+        inputFps.value() > 1000) {
+      dprintf(err, "Invalid input fps: %s, must be an integer in <1,1000> range.\n",
+              it->second.c_str());
+      return STATUS_BAD_VALUE;
+    }
+  }
+
   sp<BBinder> token = sp<BBinder>::make();
   mTestCameraToken.set(AIBinder_fromPlatformBinder(token));
 
@@ -418,9 +441,12 @@
   VirtualCameraConfiguration configuration;
   configuration.supportedStreamConfigs.push_back({.width = kVgaWidth,
                                                   .height = kVgaHeight,
-                                                  Format::YUV_420_888,
+                                                  Format::RGBA_8888,
                                                   .maxFps = kMaxFps});
   configuration.lensFacing = lensFacing.value_or(LensFacing::EXTERNAL);
+  configuration.virtualCameraCallback =
+      ndk::SharedRefBase::make<VirtualCameraTestInstance>(
+          inputFps.value_or(kTestCameraDefaultInputFps));
   registerCamera(mTestCameraToken, configuration, cameraId.value_or(sNextId++),
                  kDefaultDeviceId, &ret);
   if (ret) {
diff --git a/services/camera/virtualcamera/VirtualCameraSession.cc b/services/camera/virtualcamera/VirtualCameraSession.cc
index 28fa495..7f0adc3 100644
--- a/services/camera/virtualcamera/VirtualCameraSession.cc
+++ b/services/camera/virtualcamera/VirtualCameraSession.cc
@@ -66,7 +66,6 @@
 #include "util/EglProgram.h"
 #include "util/JpegUtil.h"
 #include "util/MetadataUtil.h"
-#include "util/TestPatternHelper.h"
 #include "util/Util.h"
 
 namespace android {
@@ -360,12 +359,9 @@
       return cameraStatus(Status::ILLEGAL_ARGUMENT);
     }
     if (mRenderThread == nullptr) {
-      // If there's no client callback, start camera in test mode.
-      const bool testMode = mVirtualCameraClientCallback == nullptr;
       mRenderThread = std::make_unique<VirtualCameraRenderThread>(
           mSessionContext, resolutionFromInputConfig(*inputConfig),
-          virtualCamera->getMaxInputResolution(), mCameraDeviceCallback,
-          testMode);
+          virtualCamera->getMaxInputResolution(), mCameraDeviceCallback);
       mRenderThread->start();
       inputSurface = mRenderThread->getInputSurface();
     }
diff --git a/services/camera/virtualcamera/VirtualCameraStream.cc b/services/camera/virtualcamera/VirtualCameraStream.cc
index 03da171..fad6cac 100644
--- a/services/camera/virtualcamera/VirtualCameraStream.cc
+++ b/services/camera/virtualcamera/VirtualCameraStream.cc
@@ -26,8 +26,6 @@
 
 #include "EGL/egl.h"
 #include "aidl/android/hardware/camera/device/Stream.h"
-#include "aidl/android/hardware/camera/device/StreamBuffer.h"
-#include "aidl/android/hardware/graphics/common/PixelFormat.h"
 #include "aidlcommonsupport/NativeHandle.h"
 #include "android/hardware_buffer.h"
 #include "cutils/native_handle.h"
@@ -39,52 +37,33 @@
 namespace virtualcamera {
 
 using ::aidl::android::hardware::camera::device::Stream;
-using ::aidl::android::hardware::camera::device::StreamBuffer;
 using ::aidl::android::hardware::common::NativeHandle;
-using ::aidl::android::hardware::graphics::common::PixelFormat;
 
 namespace {
 
-sp<GraphicBuffer> createBlobGraphicBuffer(GraphicBufferMapper& mapper,
-                                          buffer_handle_t bufferHandle) {
-  uint64_t allocationSize;
-  uint64_t usage;
-  uint64_t layerCount;
-  if (mapper.getAllocationSize(bufferHandle, &allocationSize) != NO_ERROR ||
-      mapper.getUsage(bufferHandle, &usage) != NO_ERROR ||
-      mapper.getLayerCount(bufferHandle, &layerCount) != NO_ERROR) {
-    ALOGE("Error fetching metadata for the imported BLOB buffer handle.");
-    return nullptr;
-  }
-
-  return sp<GraphicBuffer>::make(
-      bufferHandle, GraphicBuffer::HandleWrapMethod::TAKE_HANDLE,
-      allocationSize, /*height=*/1, static_cast<int>(ui::PixelFormat::BLOB),
-      layerCount, usage, 0);
-}
-
-sp<GraphicBuffer> createYCbCr420GraphicBuffer(GraphicBufferMapper& mapper,
-                                              buffer_handle_t bufferHandle) {
+sp<GraphicBuffer> createGraphicBuffer(GraphicBufferMapper& mapper,
+                                      const buffer_handle_t bufferHandle) {
   uint64_t width;
   uint64_t height;
   uint64_t usage;
   uint64_t layerCount;
+  ui::PixelFormat pixelFormat;
   if (mapper.getWidth(bufferHandle, &width) != NO_ERROR ||
       mapper.getHeight(bufferHandle, &height) != NO_ERROR ||
       mapper.getUsage(bufferHandle, &usage) != NO_ERROR ||
-      mapper.getLayerCount(bufferHandle, &layerCount) != NO_ERROR) {
+      mapper.getLayerCount(bufferHandle, &layerCount) != NO_ERROR ||
+      mapper.getPixelFormatRequested(bufferHandle, &pixelFormat) != NO_ERROR) {
     ALOGE("Error fetching metadata for the imported YCbCr420 buffer handle.");
     return nullptr;
   }
 
   return sp<GraphicBuffer>::make(
       bufferHandle, GraphicBuffer::HandleWrapMethod::TAKE_HANDLE, width, height,
-      static_cast<int>(ui::PixelFormat::YCBCR_420_888), /*layers=*/1, usage,
-      width);
+      static_cast<int>(pixelFormat), layerCount, usage, width);
 }
 
 std::shared_ptr<AHardwareBuffer> importBufferInternal(
-    const NativeHandle& aidlHandle, const Stream& streamConfig) {
+    const NativeHandle& aidlHandle) {
   if (aidlHandle.fds.empty()) {
     ALOGE("Empty handle - nothing to import");
     return nullptr;
@@ -103,12 +82,9 @@
     return nullptr;
   }
 
-  sp<GraphicBuffer> buf =
-      streamConfig.format == PixelFormat::BLOB
-          ? createBlobGraphicBuffer(mapper, bufferHandle)
-          : createYCbCr420GraphicBuffer(mapper, bufferHandle);
+  sp<GraphicBuffer> buf = createGraphicBuffer(mapper, bufferHandle);
 
-  if (buf->initCheck() != NO_ERROR) {
+  if (buf == nullptr || buf->initCheck() != NO_ERROR) {
     ALOGE("Imported graphic buffer is not correcly initialized.");
     return nullptr;
   }
@@ -128,7 +104,7 @@
 
 std::shared_ptr<AHardwareBuffer> VirtualCameraStream::importBuffer(
     const ::aidl::android::hardware::camera::device::StreamBuffer& buffer) {
-  auto hwBufferPtr = importBufferInternal(buffer.buffer, mStreamConfig);
+  auto hwBufferPtr = importBufferInternal(buffer.buffer);
   if (hwBufferPtr != nullptr) {
     std::lock_guard<std::mutex> lock(mLock);
     mBuffers.emplace(std::piecewise_construct,
diff --git a/services/camera/virtualcamera/VirtualCameraTestInstance.cc b/services/camera/virtualcamera/VirtualCameraTestInstance.cc
new file mode 100644
index 0000000..bae871d
--- /dev/null
+++ b/services/camera/virtualcamera/VirtualCameraTestInstance.cc
@@ -0,0 +1,154 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "VirtualCameraTestInstance"
+
+#include "VirtualCameraTestInstance.h"
+
+#include <atomic>
+#include <chrono>
+#include <memory>
+#include <mutex>
+#include <ratio>
+#include <thread>
+
+#include "GLES/gl.h"
+#include "android/binder_auto_utils.h"
+#include "android/native_window.h"
+#include "log/log.h"
+#include "util/EglDisplayContext.h"
+#include "util/EglProgram.h"
+
+namespace android {
+namespace companion {
+namespace virtualcamera {
+
+using ::aidl::android::companion::virtualcamera::Format;
+using ::aidl::android::view::Surface;
+using ::ndk::ScopedAStatus;
+
+namespace {
+
+std::shared_ptr<ANativeWindow> nativeWindowFromSurface(const Surface& surface) {
+  ANativeWindow* nativeWindow = surface.get();
+  if (nativeWindow != nullptr) {
+    ANativeWindow_acquire(nativeWindow);
+  }
+  return std::shared_ptr<ANativeWindow>(nativeWindow, ANativeWindow_release);
+}
+
+std::chrono::nanoseconds getCurrentTimestamp() {
+  return std::chrono::duration_cast<std::chrono::nanoseconds>(
+      std::chrono::steady_clock::now().time_since_epoch());
+}
+
+}  // namespace
+
+TestPatternRenderer::TestPatternRenderer(
+    std::shared_ptr<ANativeWindow> nativeWindow, int fps)
+    : mFps(fps), mNativeWindow(nativeWindow) {
+}
+
+void TestPatternRenderer::start() {
+  std::lock_guard<std::mutex> lock(mLock);
+  if (mRunning.exchange(true, std::memory_order_relaxed)) {
+    ALOGW("Render thread already started.");
+    return;
+  }
+  mThread =
+      std::thread(&TestPatternRenderer::renderThreadLoop, this, mNativeWindow);
+}
+
+void TestPatternRenderer::stop() {
+  std::lock_guard<std::mutex> lock(mLock);
+  if (!mRunning.exchange(false, std::memory_order_relaxed)) {
+    ALOGW("Render thread already stopped.");
+    return;
+  }
+  mThread.detach();
+  mRunning = false;
+}
+
+void TestPatternRenderer::renderThreadLoop(
+    std::shared_ptr<ANativeWindow> nativeWindow) {
+  // Prevent destruction of this instance until the thread terminates.
+  std::shared_ptr<TestPatternRenderer> thiz = shared_from_this();
+
+  ALOGV("Starting test client render loop");
+
+  EglDisplayContext eglDisplayContext(nativeWindow);
+  EglTestPatternProgram testPatternProgram;
+
+  const std::chrono::nanoseconds frameDuration(
+      static_cast<uint64_t>(1e9 / mFps));
+
+  std::chrono::nanoseconds lastFrameTs(0);
+  int frameNumber = 0;
+  while (mRunning) {
+    // Wait for appropriate amount of time to meet configured FPS.
+    std::chrono::nanoseconds ts = getCurrentTimestamp();
+    std::chrono::nanoseconds currentDuration = ts - lastFrameTs;
+    if (currentDuration < frameDuration) {
+      std::this_thread::sleep_for(frameDuration - currentDuration);
+    }
+
+    // Render the test pattern and update timestamp.
+    testPatternProgram.draw(frameNumber++);
+    eglDisplayContext.swapBuffers();
+    lastFrameTs = getCurrentTimestamp();
+  }
+
+  ALOGV("Terminating test client render loop");
+}
+
+VirtualCameraTestInstance::VirtualCameraTestInstance(const int fps)
+    : mFps(fps) {
+}
+
+ScopedAStatus VirtualCameraTestInstance::onStreamConfigured(
+    const int32_t streamId, const Surface& surface, const int32_t width,
+    const int32_t height, const Format pixelFormat) {
+  ALOGV("%s: streamId %d, %dx%d pixFmt=%s", __func__, streamId, width, height,
+        toString(pixelFormat).c_str());
+
+  std::lock_guard<std::mutex> lock(mLock);
+  mRenderer = std::make_shared<TestPatternRenderer>(
+      nativeWindowFromSurface(surface), mFps);
+  mRenderer->start();
+
+  return ScopedAStatus::ok();
+}
+
+ScopedAStatus VirtualCameraTestInstance::onProcessCaptureRequest(
+    const int32_t /*in_streamId*/, const int32_t /*in_frameId*/) {
+  return ScopedAStatus::ok();
+}
+
+ScopedAStatus VirtualCameraTestInstance::onStreamClosed(const int32_t streamId) {
+  ALOGV("%s: streamId %d", __func__, streamId);
+
+  std::lock_guard<std::mutex> lock(mLock);
+  if (mRenderer != nullptr) {
+    mRenderer->stop();
+    mRenderer.reset();
+  }
+  return ScopedAStatus::ok();
+}
+
+}  // namespace virtualcamera
+}  // namespace companion
+}  // namespace android
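
TestPatternRenderer above relies on std::enable_shared_from_this so that the detached render thread keeps the renderer alive after stop() returns and onStreamClosed() drops the last external reference. Below is a minimal, self-contained sketch of that ownership pattern with purely illustrative names; it captures the shared_ptr when the thread is launched (the patch captures it inside the loop instead), but the lifetime idea is the same.

    #include <atomic>
    #include <chrono>
    #include <cstdio>
    #include <memory>
    #include <thread>

    // Hypothetical stand-in for TestPatternRenderer; names are illustrative only.
    class Worker : public std::enable_shared_from_this<Worker> {
     public:
      void start() {
        running_ = true;
        // Capture a shared_ptr so the detached thread keeps *this alive even
        // after the last external reference is dropped.
        std::thread([self = shared_from_this()] { self->loop(); }).detach();
      }
      void stop() { running_ = false; }

     private:
      void loop() {
        while (running_) {
          std::this_thread::sleep_for(std::chrono::milliseconds(10));
        }
        std::printf("worker exiting\n");
      }
      std::atomic<bool> running_{false};
    };

    int main() {
      auto worker = std::make_shared<Worker>();
      worker->start();
      worker->stop();
      worker.reset();  // Safe: the detached thread still owns a reference.
      std::this_thread::sleep_for(std::chrono::milliseconds(50));
      return 0;
    }
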
diff --git a/services/camera/virtualcamera/VirtualCameraTestInstance.h b/services/camera/virtualcamera/VirtualCameraTestInstance.h
new file mode 100644
index 0000000..43e33d5
--- /dev/null
+++ b/services/camera/virtualcamera/VirtualCameraTestInstance.h
@@ -0,0 +1,90 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERATESTINSTANCE_H
+#define ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERATESTINSTANCE_H
+
+#include <atomic>
+#include <condition_variable>
+#include <memory>
+#include <thread>
+
+#include "aidl/android/companion/virtualcamera/BnVirtualCameraCallback.h"
+#include "android/native_window.h"
+#include "utils/Mutex.h"
+
+namespace android {
+namespace companion {
+namespace virtualcamera {
+
+// Wraps a render loop running in a dedicated thread, rendering a test pattern
+// to the provided Surface (a.k.a. native window) at the configured FPS.
+class TestPatternRenderer
+    : public std::enable_shared_from_this<TestPatternRenderer> {
+ public:
+  TestPatternRenderer(std::shared_ptr<ANativeWindow> nativeWindow, int fps);
+
+  // Start rendering.
+  void start() EXCLUDES(mLock);
+
+  // Stop rendering.
+  // The call returns immediately; the render thread may take some time
+  // (up to 1 frame) to finish rendering and terminate.
+  void stop() EXCLUDES(mLock);
+
+ private:
+  // Render thread entry point.
+  void renderThreadLoop(std::shared_ptr<ANativeWindow> nativeWindow);
+
+  const int mFps;
+
+  std::shared_ptr<ANativeWindow> mNativeWindow;
+
+  std::mutex mLock;
+  std::atomic_bool mRunning;
+  std::thread mThread GUARDED_BY(mLock);
+};
+
+// VirtualCamera callback implementation for test camera.
+//
+// For every stream configuration call, starts rendering a test pattern on the
+// provided surface.
+class VirtualCameraTestInstance
+    : public aidl::android::companion::virtualcamera::BnVirtualCameraCallback {
+ public:
+  explicit VirtualCameraTestInstance(int fps = 30);
+
+  ::ndk::ScopedAStatus onStreamConfigured(
+      int32_t streamId, const ::aidl::android::view::Surface& surface,
+      int32_t width, int32_t height,
+      ::aidl::android::companion::virtualcamera::Format pixelFormat) override
+      EXCLUDES(mLock);
+
+  ::ndk::ScopedAStatus onProcessCaptureRequest(int32_t in_streamId,
+                                               int32_t in_frameId) override;
+
+  ::ndk::ScopedAStatus onStreamClosed(int32_t streamId) override EXCLUDES(mLock);
+
+ private:
+  const int mFps;
+
+  std::mutex mLock;
+  std::shared_ptr<TestPatternRenderer> mRenderer GUARDED_BY(mLock);
+};
+
+}  // namespace virtualcamera
+}  // namespace companion
+}  // namespace android
+
+#endif  // ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERATESTINSTANCE_H
diff --git a/services/camera/virtualcamera/main.cc b/services/camera/virtualcamera/main.cc
index 43b0219..3db9d9c 100644
--- a/services/camera/virtualcamera/main.cc
+++ b/services/camera/virtualcamera/main.cc
@@ -38,7 +38,7 @@
 }  // namespace
 
 int main() {
-  ALOGI("CameraProvider: virtual webcam service is starting.");
+  ALOGI("virtual_camera service is starting.");
 
   ABinderProcess_setThreadPoolMaxThreadCount(HWBINDER_THREAD_COUNT);
 
diff --git a/services/camera/virtualcamera/tests/EglUtilTest.cc b/services/camera/virtualcamera/tests/EglUtilTest.cc
index 589e312..813be75 100644
--- a/services/camera/virtualcamera/tests/EglUtilTest.cc
+++ b/services/camera/virtualcamera/tests/EglUtilTest.cc
@@ -55,6 +55,11 @@
 };
 
 TEST_F(EglTest, EglTestPatternProgramSuccessfulInit) {
+  if (!isGlExtensionSupported(kGlExtYuvTarget)) {
+    GTEST_SKIP() << "Skipping test because of missing required GL extension "
+                 << kGlExtYuvTarget;
+  }
+
   EglTestPatternProgram eglTestPatternProgram;
 
   // Verify the shaders compiled and linked successfully.
diff --git a/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc b/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc
index 5927b05..50977d8 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc
@@ -15,6 +15,7 @@
  */
 
 #include <algorithm>
+#include <cstdint>
 #include <cstdio>
 #include <iterator>
 #include <memory>
@@ -75,6 +76,17 @@
 
 const VirtualCameraConfiguration kEmptyVirtualCameraConfiguration;
 
+class MockVirtualCameraCallback : public BnVirtualCameraCallback {
+ public:
+  MOCK_METHOD(ndk::ScopedAStatus, onStreamConfigured,
+              (int32_t, const ::aidl::android::view::Surface&, int, int,
+               ::aidl::android::companion::virtualcamera::Format pixelFormat),
+              (override));
+  MOCK_METHOD(ndk::ScopedAStatus, onProcessCaptureRequest, (int32_t, int32_t),
+              (override));
+  MOCK_METHOD(ndk::ScopedAStatus, onStreamClosed, (int32_t), (override));
+};
+
 VirtualCameraConfiguration createConfiguration(const int width, const int height,
                                                const Format format,
                                                const int maxFps) {
@@ -85,6 +97,8 @@
                                                   .maxFps = maxFps});
   configuration.sensorOrientation = kSensorOrientation;
   configuration.lensFacing = kLensFacing;
+  configuration.virtualCameraCallback =
+      ndk::SharedRefBase::make<MockVirtualCameraCallback>();
   return configuration;
 }
 
@@ -247,6 +261,24 @@
   EXPECT_THAT(getCameraIds(), IsEmpty());
 }
 
+TEST_F(VirtualCameraServiceTest,
+       ConfigurationWithoutVirtualCameraCallbackFails) {
+  sp<BBinder> token = sp<BBinder>::make();
+  ndk::SpAIBinder ndkToken(AIBinder_fromPlatformBinder(token));
+  bool aidlRet;
+
+  VirtualCameraConfiguration config =
+      createConfiguration(kVgaWidth, kVgaHeight, Format::RGBA_8888, kMaxFps);
+  config.virtualCameraCallback = nullptr;
+
+  ASSERT_FALSE(mCameraService
+                   ->registerCamera(ndkToken, config, kDefaultDeviceId, &aidlRet)
+                   .isOk());
+
+  EXPECT_FALSE(aidlRet);
+  EXPECT_THAT(getCameraIds(), IsEmpty());
+}
+
 TEST_F(VirtualCameraServiceTest, ConfigurationWithUnsupportedPixelFormatFails) {
   bool aidlRet;
 
@@ -456,6 +488,23 @@
               Eq(STATUS_BAD_VALUE));
 }
 
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithInputFps) {
+  EXPECT_THAT(execute_shell_command("enable_test_camera --input_fps=15"),
+              Eq(NO_ERROR));
+
+  std::vector<std::string> cameraIds = getCameraIds();
+  ASSERT_THAT(cameraIds, SizeIs(1));
+}
+
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithInvalidInputFps) {
+  EXPECT_THAT(execute_shell_command("enable_test_camera --input_fps=1001"),
+              Eq(STATUS_BAD_VALUE));
+  EXPECT_THAT(execute_shell_command("enable_test_camera --input_fps=0"),
+              Eq(STATUS_BAD_VALUE));
+  EXPECT_THAT(execute_shell_command("enable_test_camera --input_fps=foo"),
+              Eq(STATUS_BAD_VALUE));
+}
+
 }  // namespace
 }  // namespace virtualcamera
 }  // namespace companion
diff --git a/services/camera/virtualcamera/util/EglDisplayContext.cc b/services/camera/virtualcamera/util/EglDisplayContext.cc
index 6d343a2..166ac75 100644
--- a/services/camera/virtualcamera/util/EglDisplayContext.cc
+++ b/services/camera/virtualcamera/util/EglDisplayContext.cc
@@ -30,8 +30,9 @@
 namespace companion {
 namespace virtualcamera {
 
-EglDisplayContext::EglDisplayContext()
+EglDisplayContext::EglDisplayContext(std::shared_ptr<ANativeWindow> nativeWindow)
     : mEglDisplay(EGL_NO_DISPLAY),
+      mEglSurface(EGL_NO_SURFACE),
       mEglContext(EGL_NO_CONTEXT),
       mEglConfig(nullptr) {
   EGLBoolean result;
@@ -52,8 +53,10 @@
 
   EGLint numConfigs = 0;
   EGLint configAttribs[] = {
-      EGL_SURFACE_TYPE, EGL_PBUFFER_BIT, EGL_RENDERABLE_TYPE,
-      EGL_OPENGL_ES2_BIT, EGL_RED_SIZE, 8, EGL_GREEN_SIZE, 8, EGL_BLUE_SIZE, 8,
+      EGL_SURFACE_TYPE,
+      nativeWindow == nullptr ? EGL_PBUFFER_BIT : EGL_WINDOW_BIT,
+      EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT, EGL_RED_SIZE, 8, EGL_GREEN_SIZE,
+      8, EGL_BLUE_SIZE, 8,
       // no alpha
       EGL_NONE};
 
@@ -72,6 +75,14 @@
     return;
   }
 
+  if (nativeWindow != nullptr) {
+    mEglSurface = eglCreateWindowSurface(mEglDisplay, mEglConfig,
+                                         nativeWindow.get(), NULL);
+    if (mEglSurface == EGL_NO_SURFACE) {
+      ALOGE("eglCreateWindowSurface error: %#x", eglGetError());
+    }
+  }
+
   if (!makeCurrent()) {
     ALOGE(
         "Failed to set newly initialized EGLContext and EGLDisplay connection "
@@ -82,6 +93,9 @@
 }
 
 EglDisplayContext::~EglDisplayContext() {
+  if (mEglSurface != EGL_NO_SURFACE) {
+    eglDestroySurface(mEglDisplay, mEglSurface);
+  }
   if (mEglDisplay != EGL_NO_DISPLAY) {
     eglTerminate(mEglDisplay);
   }
@@ -99,8 +113,14 @@
   return mEglContext != EGL_NO_CONTEXT && mEglDisplay != EGL_NO_DISPLAY;
 }
 
+void EglDisplayContext::swapBuffers() const {
+  if (mEglSurface != EGL_NO_SURFACE) {
+    eglSwapBuffers(mEglDisplay, mEglSurface);
+  }
+}
+
 bool EglDisplayContext::makeCurrent() {
-  if (!eglMakeCurrent(mEglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, mEglContext)) {
+  if (!eglMakeCurrent(mEglDisplay, mEglSurface, mEglSurface, mEglContext)) {
     ALOGE("eglMakeCurrent failed: %#x", eglGetError());
     return false;
   }
diff --git a/services/camera/virtualcamera/util/EglDisplayContext.h b/services/camera/virtualcamera/util/EglDisplayContext.h
index 402ca3c..6dc3080 100644
--- a/services/camera/virtualcamera/util/EglDisplayContext.h
+++ b/services/camera/virtualcamera/util/EglDisplayContext.h
@@ -17,7 +17,10 @@
 #ifndef ANDROID_COMPANION_VIRTUALCAMERA_EGLDISPLAYCONTEXT_H
 #define ANDROID_COMPANION_VIRTUALCAMERA_EGLDISPLAYCONTEXT_H
 
+#include <memory>
+
 #include "EGL/egl.h"
+#include "system/window.h"
 
 namespace android {
 namespace companion {
@@ -30,7 +33,7 @@
 // out of scope.
 class EglDisplayContext {
  public:
-  EglDisplayContext();
+  EglDisplayContext(std::shared_ptr<ANativeWindow> nativeWindow = nullptr);
   ~EglDisplayContext();
 
   // Sets EGLDisplay & EGLContext for current thread.
@@ -44,8 +47,13 @@
   // EGLDisplay & EGLContext.
   bool isInitialized() const;
 
+  void swapBuffers() const;
+
  private:
+  std::shared_ptr<ANativeWindow> mNativeWindow;
+
   EGLDisplay mEglDisplay;
+  EGLSurface mEglSurface;
   EGLContext mEglContext;
   EGLConfig mEglConfig;
 };
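
With the constructor and swapBuffers() additions above, EglDisplayContext covers both the original offscreen (pbuffer) path and the new on-screen test-pattern path. A rough usage sketch, assuming a valid ANativeWindow obtained elsewhere (e.g. via nativeWindowFromSurface() in VirtualCameraTestInstance.cc); names outside this patch are illustrative:

    #include <memory>

    #include "util/EglDisplayContext.h"

    using ::android::companion::virtualcamera::EglDisplayContext;

    // Sketch only: renders a single frame to the given window.
    void renderOnce(std::shared_ptr<ANativeWindow> nativeWindow) {
      // A default-constructed context stays pbuffer-backed, as before;
      // passing a window makes the context render on screen instead.
      EglDisplayContext onscreen(nativeWindow);
      if (onscreen.makeCurrent()) {
        // ... issue GL draw calls here ...
        onscreen.swapBuffers();  // posts the frame; a no-op for pbuffer contexts
      }
    }
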
diff --git a/services/camera/virtualcamera/util/EglProgram.cc b/services/camera/virtualcamera/util/EglProgram.cc
index 7554a67..7498fbc 100644
--- a/services/camera/virtualcamera/util/EglProgram.cc
+++ b/services/camera/virtualcamera/util/EglProgram.cc
@@ -35,19 +35,28 @@
 
 constexpr char kGlExtYuvTarget[] = "GL_EXT_YUV_target";
 
-constexpr char kIdentityVertexShader[] = R"(
-    attribute vec4 vPosition;
+constexpr char kJuliaFractalVertexShader[] = R"(#version 300 es
+    in vec4 aPosition;
+    in vec2 aTextureCoord;
+    out vec2 vFractalCoord;
+    out vec2 vUVCoord;
     void main() {
-      gl_Position = vPosition;
+      gl_Position = aPosition;
+      vUVCoord = aTextureCoord;
+      vFractalCoord = vec2(aTextureCoord.x - 0.5, aTextureCoord.y - 0.5) * 4.0;
     })";
 
-constexpr char kJuliaFractalFragmentShader[] = R"(
+constexpr char kJuliaFractalFragmentShader[] = R"(#version 300 es
+    #extension GL_EXT_YUV_target : require
     precision mediump float;
-    uniform vec2 uResolution;
-    uniform vec2 uC;
-    uniform vec2 uUV;
+
     const float kIter = 64.0;
 
+    in vec2 vFractalCoord;
+    in vec2 vUVCoord;
+    out vec4 fragColor;
+    uniform vec2 uC;
+
     vec2 imSq(vec2 n){
       return vec2(pow(n.x,2.0)-pow(n.y,2.0), 2.0*n.x*n.y);
     }
@@ -62,9 +71,8 @@
     }
 
     void main() {
-      vec2 uv = vec2(gl_FragCoord.x / uResolution.x - 0.5, gl_FragCoord.y / uResolution.y - 0.5);
-      float juliaVal = julia(uv * 4.0, uC);
-      gl_FragColor = vec4( juliaVal,uUV.x,uUV.y,0.0);
+      float juliaVal = julia(vFractalCoord, uC);
+      fragColor = vec4(yuv_2_rgb(vec3(juliaVal, vUVCoord.x, vUVCoord.y), itu_601_full_range), 0.0);
     })";
 
 constexpr char kExternalTextureVertexShader[] = R"(#version 300 es
@@ -89,7 +97,7 @@
     })";
 
 constexpr char kExternalRgbaTextureFragmentShader[] = R"(#version 300 es
-    #extension GL_OES_EGL_image_external : require
+    #extension GL_OES_EGL_image_external_essl3 : require
     #extension GL_EXT_YUV_target : require
     precision mediump float;
     in vec2 vTextureCoord;
@@ -200,17 +208,28 @@
 }
 
 EglTestPatternProgram::EglTestPatternProgram() {
-  if (initialize(kIdentityVertexShader, kJuliaFractalFragmentShader)) {
+  if (initialize(kJuliaFractalVertexShader, kJuliaFractalFragmentShader)) {
     ALOGV("Successfully initialized EGL shaders for test pattern program.");
   } else {
     ALOGE("Test pattern EGL shader program initialization failed.");
   }
+
+  mCHandle = glGetUniformLocation(mProgram, "uC");
+  mPositionHandle = glGetAttribLocation(mProgram, "aPosition");
+  mTextureCoordHandle = glGetAttribLocation(mProgram, "aTextureCoord");
+
+  // Pass vertex array to draw.
+  glEnableVertexAttribArray(mPositionHandle);
+  // Prepare the triangle coordinate data.
+  glVertexAttribPointer(mPositionHandle, kCoordsPerVertex, GL_FLOAT, false,
+                        kSquareCoords.size(), kSquareCoords.data());
+
+  glEnableVertexAttribArray(mTextureCoordHandle);
+  glVertexAttribPointer(mTextureCoordHandle, 2, GL_FLOAT, false,
+                        kTextureCoords.size(), kTextureCoords.data());
 }
 
-bool EglTestPatternProgram::draw(int width, int height, int frameNumber) {
-  glViewport(0, 0, static_cast<GLsizei>(width), static_cast<GLsizei>(height));
-  checkEglError("glViewport");
-
+bool EglTestPatternProgram::draw(int frameNumber) {
   // Load compiled shader.
   glUseProgram(mProgram);
   checkEglError("glUseProgram");
@@ -219,28 +238,8 @@
   float time = float(frameNumber) / 120.0f;
   const std::complex<float> c(std::sin(time) * 0.78f, std::cos(time) * 0.78f);
 
-  // Pass uniform values to the shader.
-  int resolutionHandle = glGetUniformLocation(mProgram, "uResolution");
-  checkEglError("glGetUniformLocation -> uResolution");
-  glUniform2f(resolutionHandle, static_cast<float>(width),
-              static_cast<float>(height));
-  checkEglError("glUniform2f -> uResolution");
-
   // Pass "C" constant value determining the Julia set to the shader.
-  int cHandle = glGetUniformLocation(mProgram, "uC");
-  glUniform2f(cHandle, c.imag(), c.real());
-
-  // Pass chroma value to the shader.
-  int uvHandle = glGetUniformLocation(mProgram, "uUV");
-  glUniform2f(uvHandle, (c.imag() + 1.f) / 2.f, (c.real() + 1.f) / 2.f);
-
-  // Pass vertex array to draw.
-  int positionHandle = glGetAttribLocation(mProgram, "vPosition");
-  glEnableVertexAttribArray(positionHandle);
-
-  // Prepare the triangle coordinate data.
-  glVertexAttribPointer(positionHandle, kCoordsPerVertex, GL_FLOAT, false,
-                        kSquareCoords.size(), kSquareCoords.data());
+  glUniform2f(mCHandle, c.imag(), c.real());
 
   // Draw triangle strip forming a square filling the viewport.
   glDrawElements(GL_TRIANGLES, kDrawOrder.size(), GL_UNSIGNED_BYTE,
diff --git a/services/camera/virtualcamera/util/EglProgram.h b/services/camera/virtualcamera/util/EglProgram.h
index c695cbb..d09f11e 100644
--- a/services/camera/virtualcamera/util/EglProgram.h
+++ b/services/camera/virtualcamera/util/EglProgram.h
@@ -46,7 +46,12 @@
  public:
   EglTestPatternProgram();
 
-  bool draw(int width, int height, int frameNumber);
+  bool draw(int frameNumber);
+
+ private:
+  int mPositionHandle = -1;
+  int mTextureCoordHandle = -1;
+  int mCHandle = -1;
 };
 
 // Shader program to  draw texture.
diff --git a/services/camera/virtualcamera/util/EglSurfaceTexture.cc b/services/camera/virtualcamera/util/EglSurfaceTexture.cc
index 9f26e19..7de5020 100644
--- a/services/camera/virtualcamera/util/EglSurfaceTexture.cc
+++ b/services/camera/virtualcamera/util/EglSurfaceTexture.cc
@@ -15,6 +15,7 @@
  */
 
 // #define LOG_NDEBUG 0
+#include "utils/Timers.h"
 #define LOG_TAG "EglSurfaceTexture"
 
 #include <cstdint>
@@ -63,6 +64,11 @@
   return mGlConsumer->getCurrentBuffer();
 }
 
+bool EglSurfaceTexture::waitForNextFrame(const std::chrono::nanoseconds timeout) {
+  return mSurface->waitForNextFrame(mGlConsumer->getFrameNumber(),
+                                    static_cast<nsecs_t>(timeout.count()));
+}
+
 GLuint EglSurfaceTexture::updateTexture() {
   mGlConsumer->updateTexImage();
   return mTextureId;
diff --git a/services/camera/virtualcamera/util/EglSurfaceTexture.h b/services/camera/virtualcamera/util/EglSurfaceTexture.h
index faad7c4..b9c5126 100644
--- a/services/camera/virtualcamera/util/EglSurfaceTexture.h
+++ b/services/camera/virtualcamera/util/EglSurfaceTexture.h
@@ -17,6 +17,7 @@
 #ifndef ANDROID_COMPANION_VIRTUALCAMERA_EGLSURFACETEXTURE_H
 #define ANDROID_COMPANION_VIRTUALCAMERA_EGLSURFACETEXTURE_H
 
+#include <chrono>
 #include <cstdint>
 
 #include "GLES/gl.h"
@@ -51,6 +52,12 @@
   // Get height of surface / texture.
   uint32_t getHeight() const;
 
+  // Waits until the next frame is available in the surface or until the
+  // timeout expires.
+  //
+  // Returns false on timeout, true if a new frame arrived before the timeout.
+  bool waitForNextFrame(std::chrono::nanoseconds timeout);
+
   // Update the texture with the most recent submitted buffer.
   // Must be called on a thread with an EGL context.
   //
diff --git a/services/camera/virtualcamera/util/EglUtil.cc b/services/camera/virtualcamera/util/EglUtil.cc
index 481d8f0..1c685f1 100644
--- a/services/camera/virtualcamera/util/EglUtil.cc
+++ b/services/camera/virtualcamera/util/EglUtil.cc
@@ -20,6 +20,7 @@
 
 #include <cstring>
 
+#include "EglDisplayContext.h"
 #include "GLES/gl.h"
 #include "log/log.h"
 
@@ -27,6 +28,9 @@
 namespace companion {
 namespace virtualcamera {
 
+// Fallback lower bound for the maximum supported texture size (2048x2048),
+// used when the GL query fails.
+constexpr int kDefaultMaxTextureSize = 2048;
+
 bool checkEglError(const char* operation) {
   GLenum err = glGetError();
   if (err == GL_NO_ERROR) {
@@ -45,6 +49,20 @@
   return strstr(extensions, extension) != nullptr;
 }
 
+int getMaximumTextureSize() {
+  static const int kMaxTextureSize = [] {
+    EglDisplayContext displayContext;
+    displayContext.makeCurrent();
+    int maxTextureSize = -1;
+    glGetIntegerv(GL_MAX_TEXTURE_SIZE, &maxTextureSize);
+    return maxTextureSize;
+  }();
+  if (kMaxTextureSize <= 0) {
+    return kDefaultMaxTextureSize;
+  }
+  return kMaxTextureSize;
+}
+
 }  // namespace virtualcamera
 }  // namespace companion
 }  // namespace android
diff --git a/services/camera/virtualcamera/util/EglUtil.h b/services/camera/virtualcamera/util/EglUtil.h
index 71640e3..f339b4e 100644
--- a/services/camera/virtualcamera/util/EglUtil.h
+++ b/services/camera/virtualcamera/util/EglUtil.h
@@ -27,6 +27,8 @@
 // Returns true if the GL extension is supported, false otherwise.
 bool isGlExtensionSupported(const char* extension);
 
+int getMaximumTextureSize();
+
 }  // namespace virtualcamera
 }  // namespace companion
 }  // namespace android
diff --git a/services/camera/virtualcamera/util/TestPatternHelper.cc b/services/camera/virtualcamera/util/TestPatternHelper.cc
deleted file mode 100644
index 274996a..0000000
--- a/services/camera/virtualcamera/util/TestPatternHelper.cc
+++ /dev/null
@@ -1,152 +0,0 @@
-/*
- * Copyright (C) 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// #define LOG_NDEBUG 0
-
-#define LOG_TAG "TestPatternHelper"
-
-#include "TestPatternHelper.h"
-
-#include <complex>
-#include <cstdint>
-
-#include "log/log.h"
-#include "nativebase/nativebase.h"
-#include "system/graphics.h"
-#include "ui/GraphicBuffer.h"
-#include "utils/Errors.h"
-
-namespace android {
-namespace companion {
-namespace virtualcamera {
-
-namespace {
-
-using namespace std::chrono_literals;
-
-static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;
-
-uint8_t julia(const std::complex<float> n, const std::complex<float> c) {
-  std::complex<float> z = n;
-  for (int i = 0; i < 64; i++) {
-    z = z * z + c;
-    if (std::abs(z) > 2.0) return i * 4;
-  }
-  return 0xff;
-}
-
-uint8_t pixelToFractal(const int x, const int y, const int width,
-                       const int height, const std::complex<float> c) {
-  std::complex<float> n(float(x) / float(width) - 0.5,
-                        float(y) / float(height) - 0.5);
-  return julia(n * 5.f, c);
-}
-
-void renderTestPatternYcbCr420(const android_ycbcr& ycbr, const int width,
-                               const int height, const int frameNumber) {
-  float time = float(frameNumber) / 120.0f;
-  const std::complex<float> c(std::sin(time), std::cos(time));
-
-  uint8_t* y = reinterpret_cast<uint8_t*>(ycbr.y);
-  uint8_t* cb = reinterpret_cast<uint8_t*>(ycbr.cb);
-  uint8_t* cr = reinterpret_cast<uint8_t*>(ycbr.cr);
-
-  for (int row = 0; row < height; row++) {
-    for (int col = 0; col < width; col++) {
-      y[row * ycbr.ystride + col] =
-          pixelToFractal(col, row, width, height, c * 0.78f);
-    }
-  }
-
-  int cWidth = width / 2;
-  int cHeight = height / 2;
-  for (int row = 0; row < cHeight; row++) {
-    for (int col = 0; col < cWidth; col++) {
-      cb[row * ycbr.cstride + col * ycbr.chroma_step] =
-          static_cast<uint8_t>((float(col) / float(cWidth)) * 255.f);
-      cr[row * ycbr.cstride + col * ycbr.chroma_step] =
-          static_cast<uint8_t>((float(row) / float(cHeight)) * 255.f);
-    }
-  }
-}
-
-}  // namespace
-
-void renderTestPatternYCbCr420(sp<Surface> surface, int frameNumber) {
-  if (surface == nullptr) {
-    ALOGE("%s: null surface, skipping render", __func__);
-    return;
-  }
-
-  ANativeWindowBuffer* buffer;
-  int fenceFd;
-  int ret = ANativeWindow_dequeueBuffer(surface.get(), &buffer, &fenceFd);
-  if (ret != NO_ERROR) {
-    ALOGE(
-        "%s: Error while deuqueing buffer from surface, "
-        "ANativeWindow_dequeueBuffer returned %d",
-        __func__, ret);
-    return;
-  }
-
-  if (buffer == nullptr) {
-    ALOGE("%s: ANativeWindowBuffer is null after dequeing", __func__);
-    return;
-  }
-
-  sp<Fence> fence = sp<Fence>::make(fenceFd);
-  if (fence->isValid()) {
-    ret = fence->wait(kAcquireFenceTimeout.count());
-    if (ret != NO_ERROR) {
-      ALOGE("%s: Timeout while waiting for the fence to clear", __func__);
-      ANativeWindow_queueBuffer(surface.get(), buffer, fence->dup());
-      return;
-    }
-  }
-
-  sp<GraphicBuffer> gBuffer = GraphicBuffer::from(buffer);
-  android_ycbcr ycbr;
-
-  ret = gBuffer->lockAsyncYCbCr(GraphicBuffer::USAGE_SW_WRITE_OFTEN, &ycbr,
-                                fence->dup());
-  if (ret != NO_ERROR) {
-    ALOGE("%s: Failed to lock buffer retrieved from surface, ret %d", __func__,
-          ret);
-    return;
-  }
-
-  renderTestPatternYcbCr420(ycbr, gBuffer->getWidth(), gBuffer->getHeight(),
-                            frameNumber);
-
-  ret = gBuffer->unlock();
-  if (ret != NO_ERROR) {
-    ALOGE("%s: Failed to unlock buffer, ret %d", __func__, ret);
-    return;
-  }
-
-  ret = ANativeWindow_queueBuffer(surface.get(), buffer, /*fenceFd=*/-1);
-  if (ret != NO_ERROR) {
-    ALOGE(
-        "%s: Error while queing buffer to surface, ANativeWindow_queueBuffer "
-        "returned %d",
-        __func__, ret);
-    return;
-  }
-}
-
-}  // namespace virtualcamera
-}  // namespace companion
-}  // namespace android
diff --git a/services/camera/virtualcamera/util/TestPatternHelper.h b/services/camera/virtualcamera/util/TestPatternHelper.h
deleted file mode 100644
index f842b29..0000000
--- a/services/camera/virtualcamera/util/TestPatternHelper.h
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright (C) 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_COMPANION_VIRTUALCAMERA_TESTPATTERNHELPER_H
-#define ANDROID_COMPANION_VIRTUALCAMERA_TESTPATTERNHELPER_H
-
-#include "gui/Surface.h"
-
-namespace android {
-namespace companion {
-namespace virtualcamera {
-
-// Helper function for rendering test pattern into Surface.
-void renderTestPatternYCbCr420(sp<Surface> surface, int frameNumber);
-
-}  // namespace virtualcamera
-}  // namespace companion
-}  // namespace android
-
-#endif  // ANDROID_COMPANION_VIRTUALCAMERA_TESTPATTERNHELPER_H
diff --git a/services/camera/virtualcamera/util/Util.cc b/services/camera/virtualcamera/util/Util.cc
index 0c607d7..4aff60f 100644
--- a/services/camera/virtualcamera/util/Util.cc
+++ b/services/camera/virtualcamera/util/Util.cc
@@ -23,6 +23,7 @@
 #include <cstdint>
 #include <memory>
 
+#include "EglUtil.h"
 #include "android/hardware_buffer.h"
 #include "jpeglib.h"
 #include "ui/GraphicBuffer.h"
@@ -35,11 +36,6 @@
 using ::aidl::android::companion::virtualcamera::Format;
 using ::aidl::android::hardware::common::NativeHandle;
 
-// Lower bound for maximal supported texture size is at least 2048x2048
-// but on most platforms will be more.
-// TODO(b/301023410) - Query actual max texture size.
-constexpr int kMaxTextureSize = 2048;
-constexpr int kLibJpegDctSize = DCTSIZE;
 constexpr int kMaxFpsUpperLimit = 60;
 
 constexpr std::array<Format, 2> kSupportedFormats{Format::YUV_420_888,
@@ -69,7 +65,6 @@
   if (gBuffer == nullptr) {
     return;
   }
-  gBuffer->unlock();
   status_t status = gBuffer->unlock();
   if (status != NO_ERROR) {
     ALOGE("Failed to unlock graphic buffer: %s", statusToString(status).c_str());
@@ -141,8 +136,9 @@
     return false;
   }
 
-  if (width <= 0 || height <= 0 || width > kMaxTextureSize ||
-      height > kMaxTextureSize) {
+  int maxTextureSize = getMaximumTextureSize();
+  if (width <= 0 || height <= 0 || width > maxTextureSize ||
+      height > maxTextureSize) {
     return false;
   }
 
diff --git a/services/mediametrics/include/mediametricsservice/AudioTypes.h b/services/mediametrics/include/mediametricsservice/AudioTypes.h
index b5fe28b..59654bf 100644
--- a/services/mediametrics/include/mediametricsservice/AudioTypes.h
+++ b/services/mediametrics/include/mediametricsservice/AudioTypes.h
@@ -18,6 +18,7 @@
 
 #include <string>
 #include <unordered_map>
+#include <vector>
 
 namespace android::mediametrics::types {
 
diff --git a/services/mediaresourcemanager/ResourceManagerMetrics.cpp b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
index cd00937..a8a1de1 100644
--- a/services/mediaresourcemanager/ResourceManagerMetrics.cpp
+++ b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
@@ -109,23 +109,17 @@
     return CodecBucketUnspecified;
 }
 
-static bool getLogMessage(int hwCount, int swCount, std::stringstream& logMsg) {
-    bool update = false;
-    logMsg.clear();
+static std::string getLogMessage(const std::string& firstKey, const long& firstValue,
+                                 const std::string& secondKey, const long& secondValue) {
 
-    if (hwCount > 0) {
-        logMsg << " HW: " << hwCount;
-        update = true;
+    std::stringstream logMsg;
+    if (firstValue > 0) {
+        logMsg << firstKey << firstValue;
     }
-    if (swCount > 0) {
-        logMsg << " SW: " << swCount;
-        update = true;
+    if (secondValue > 0) {
+        logMsg << secondKey << secondValue;
     }
-
-    if (update) {
-        logMsg << " ] ";
-    }
-    return update;
+    return logMsg.str();
 }
 
 ResourceManagerMetrics::ResourceManagerMetrics(const sp<ProcessInfoInterface>& processInfo) {
@@ -364,6 +358,15 @@
     std::scoped_lock lock(mLock);
     // post MediaCodecConcurrentUsageReported for this terminated pid.
     pushConcurrentUsageReport(pid, uid);
+    // Remove all the metrics associated with this process.
+    std::map<int32_t, ConcurrentCodecs>::iterator it1 = mProcessConcurrentCodecsMap.find(pid);
+    if (it1 != mProcessConcurrentCodecsMap.end()) {
+        mProcessConcurrentCodecsMap.erase(it1);
+    }
+    std::map<int32_t, PixelCount>::iterator it2 = mProcessPixelsMap.find(pid);
+    if (it2 != mProcessPixelsMap.end()) {
+        mProcessPixelsMap.erase(it2);
+    }
 }
 
 void ResourceManagerMetrics::pushConcurrentUsageReport(int32_t pid, uid_t uid) {
@@ -400,24 +403,30 @@
 
     std::stringstream peakCodecLog;
     peakCodecLog << "Peak { ";
-    std::stringstream logMsg;
-    if (getLogMessage(peakHwAudioEncoderCount, peakSwAudioEncoderCount, logMsg)) {
-        peakCodecLog << "AudioEnc[" << logMsg.str();
+    std::string logMsg;
+    logMsg = getLogMessage(" HW: ", peakHwAudioEncoderCount, " SW: ", peakSwAudioEncoderCount);
+    if (!logMsg.empty()) {
+        peakCodecLog << "AudioEnc[ " << logMsg << " ] ";
     }
-    if (getLogMessage(peakHwAudioDecoderCount, peakSwAudioDecoderCount, logMsg)) {
-        peakCodecLog << "AudioDec[" << logMsg.str();
+    logMsg = getLogMessage(" HW: ", peakHwAudioDecoderCount, " SW: ", peakSwAudioDecoderCount);
+    if (!logMsg.empty()) {
+        peakCodecLog << "AudioDec[" << logMsg << " ] ";
     }
-    if (getLogMessage(peakHwVideoEncoderCount, peakSwVideoEncoderCount, logMsg)) {
-        peakCodecLog << "VideoEnc[" << logMsg.str();
+    logMsg = getLogMessage(" HW: ", peakHwVideoEncoderCount, " SW: ", peakSwVideoEncoderCount);
+    if (!logMsg.empty()) {
+        peakCodecLog << "VideoEnc[" << logMsg << " ] ";
     }
-    if (getLogMessage(peakHwVideoDecoderCount, peakSwVideoDecoderCount, logMsg)) {
-        peakCodecLog << "VideoDec[" << logMsg.str();
+    logMsg = getLogMessage(" HW: ", peakHwVideoDecoderCount, " SW: ", peakSwVideoDecoderCount);
+    if (!logMsg.empty()) {
+        peakCodecLog << "VideoDec[" << logMsg << " ] ";
     }
-    if (getLogMessage(peakHwImageEncoderCount, peakSwImageEncoderCount, logMsg)) {
-        peakCodecLog << "ImageEnc[" << logMsg.str();
+    logMsg = getLogMessage(" HW: ", peakHwImageEncoderCount, " SW: ", peakSwImageEncoderCount);
+    if (!logMsg.empty()) {
+        peakCodecLog << "ImageEnc[" << logMsg << " ] ";
     }
-    if (getLogMessage(peakHwImageDecoderCount, peakSwImageDecoderCount, logMsg)) {
-        peakCodecLog << "ImageDec[" << logMsg.str();
+    logMsg = getLogMessage(" HW: ", peakHwImageDecoderCount, " SW: ", peakSwImageDecoderCount);
+    if (!logMsg.empty()) {
+        peakCodecLog << "ImageDec[" << logMsg << " ] ";
     }
     peakCodecLog << "}";
 
@@ -705,4 +714,114 @@
     return 0;
 }
 
+static std::string getConcurrentInstanceCount(const std::map<std::string, int>& resourceMap) {
+    if (resourceMap.empty()) {
+        return "";
+    }
+    std::stringstream concurrentInstanceInfo;
+    for (const auto& [name, count] : resourceMap) {
+        if (count > 0) {
+            concurrentInstanceInfo << "      Name: " << name << " Instances: " << count << "\n";
+        }
+    }
+
+    std::string info = concurrentInstanceInfo.str();
+    if (info.empty()) {
+        return "";
+    }
+    return "    Current Concurrent Codec Instances:\n" + info;
+}
+
+static std::string getAppsPixelCount(const std::map<int32_t, PixelCount>& pixelMap) {
+    if (pixelMap.empty()) {
+        return "";
+    }
+    std::stringstream pixelInfo;
+    for (const auto& [pid, pixelCount] : pixelMap) {
+        std::string logMsg = getLogMessage(" Current Pixels: ", pixelCount.mCurrent,
+                                           " Peak Pixels: ", pixelCount.mPeak);
+        if (!logMsg.empty()) {
+            pixelInfo  << "      PID[" << pid << "]: {" << logMsg << " }\n";
+        }
+    }
+
+    return "    Applications Pixel Usage:\n" + pixelInfo.str();
+}
+
+static std::string getCodecUsageMetrics(const ConcurrentCodecsMap& codecsMap) {
+    int peakHwAudioEncoderCount = codecsMap[HwAudioEncoder];
+    int peakHwAudioDecoderCount = codecsMap[HwAudioDecoder];
+    int peakHwVideoEncoderCount = codecsMap[HwVideoEncoder];
+    int peakHwVideoDecoderCount = codecsMap[HwVideoDecoder];
+    int peakHwImageEncoderCount = codecsMap[HwImageEncoder];
+    int peakHwImageDecoderCount = codecsMap[HwImageDecoder];
+    int peakSwAudioEncoderCount = codecsMap[SwAudioEncoder];
+    int peakSwAudioDecoderCount = codecsMap[SwAudioDecoder];
+    int peakSwVideoEncoderCount = codecsMap[SwVideoEncoder];
+    int peakSwVideoDecoderCount = codecsMap[SwVideoDecoder];
+    int peakSwImageEncoderCount = codecsMap[SwImageEncoder];
+    int peakSwImageDecoderCount = codecsMap[SwImageDecoder];
+    std::stringstream usageMetrics;
+    std::string logMsg;
+    logMsg = getLogMessage(" HW: ", peakHwAudioEncoderCount, " SW: ", peakSwAudioEncoderCount);
+    if (!logMsg.empty()) {
+        usageMetrics << "AudioEnc[" << logMsg << " ] ";
+    }
+    logMsg = getLogMessage(" HW: ", peakHwAudioDecoderCount, " SW: ", peakSwAudioDecoderCount);
+    if (!logMsg.empty()) {
+        usageMetrics << "AudioDec[" << logMsg << " ] ";
+    }
+    logMsg = getLogMessage(" HW: ", peakHwVideoEncoderCount, " SW: ", peakSwVideoEncoderCount);
+    if (!logMsg.empty()) {
+        usageMetrics << "VideoEnc[" << logMsg << " ] ";
+    }
+    logMsg = getLogMessage(" HW: ", peakHwVideoDecoderCount, " SW: ", peakSwVideoDecoderCount);
+    if (!logMsg.empty()) {
+        usageMetrics << "VideoDec[" << logMsg << " ] ";
+    }
+    logMsg = getLogMessage(" HW: ", peakHwImageEncoderCount, " SW: ", peakSwImageEncoderCount);
+    if (!logMsg.empty()) {
+        usageMetrics << "ImageEnc[" << logMsg << " ] ";
+    }
+    logMsg = getLogMessage(" HW: ", peakHwImageDecoderCount, " SW: ", peakSwImageDecoderCount);
+    if (!logMsg.empty()) {
+        usageMetrics << "ImageDec[" << logMsg << " ] ";
+    }
+
+    return usageMetrics.str();
+}
+
+static std::string getAppsCodecUsageMetrics(
+        const std::map<int32_t, ConcurrentCodecs>& processCodecsMap) {
+    if (processCodecsMap.empty()) {
+        return "";
+    }
+    std::stringstream codecUsage;
+    std::string info;
+    for (const auto& [pid, codecMap] : processCodecsMap) {
+        codecUsage << "      PID[" << pid << "]: ";
+        info = getCodecUsageMetrics(codecMap.mCurrent);
+        if (!info.empty()) {
+            codecUsage << "Current Codec Usage: { " << info << "} ";
+        }
+        info = getCodecUsageMetrics(codecMap.mPeak);
+        if (!info.empty()) {
+            codecUsage << "Peak Codec Usage: { " << info << "}";
+        }
+        codecUsage << "\n";
+    }
+
+    return "    Applications Codec Usage:\n" + codecUsage.str();
+}
+
+
+std::string ResourceManagerMetrics::dump() const {
+    std::string metricsLog("  Metrics logs:\n");
+    metricsLog += getConcurrentInstanceCount(mConcurrentResourceCountMap);
+    metricsLog += getAppsPixelCount(mProcessPixelsMap);
+    metricsLog += getAppsCodecUsageMetrics(mProcessConcurrentCodecsMap);
+
+    return metricsLog;
+}
+
 } // namespace android
diff --git a/services/mediaresourcemanager/ResourceManagerMetrics.h b/services/mediaresourcemanager/ResourceManagerMetrics.h
index 7a5a89f..9904f7d 100644
--- a/services/mediaresourcemanager/ResourceManagerMetrics.h
+++ b/services/mediaresourcemanager/ResourceManagerMetrics.h
@@ -171,6 +171,9 @@
     // Get the current concurrent pixel count (associated with the video codecs) for the process.
     long getCurrentConcurrentPixelCount(int pid) const;
 
+    // Retrieves the metrics log as a string for the service dump.
+    std::string dump() const;
+
 private:
     ResourceManagerMetrics(const ResourceManagerMetrics&) = delete;
     ResourceManagerMetrics(ResourceManagerMetrics&&) = delete;
@@ -204,9 +207,9 @@
     // Map of resources (name) and number of concurrent instances
     std::map<std::string, int> mConcurrentResourceCountMap;
 
-    // Map of concurrent codes by CodecBucket across the system.
+    // Map of concurrent codecs by CodecBucket across the system.
     ConcurrentCodecsMap mConcurrentCodecsMap;
-    // Map of concurrent and peak codes by CodecBucket for each process/application.
+    // Map of concurrent and peak codecs by CodecBucket for each process/application.
     std::map<int32_t, ConcurrentCodecs> mProcessConcurrentCodecsMap;
 
     // Uid Observer to monitor the application termination.
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index d37d893..9c2fb7c 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -108,10 +108,17 @@
         serviceLog = mServiceLog->toString("    " /* linePrefix */);
     }
 
-    // Get all the resource (and overload pid) logs
+    // Get the resource (and overload pid) log.
     std::string resourceLog;
     getResourceDump(resourceLog);
 
+    // Get the metrics log.
+    std::string metricsLog;
+    {
+        std::scoped_lock lock{mLock};
+        metricsLog = mResourceManagerMetrics->dump();
+    }
+
     const size_t SIZE = 256;
     char buffer[SIZE];
     snprintf(buffer, SIZE, "ResourceManagerService: %p\n", this);
@@ -123,11 +130,16 @@
             supportsSecureWithNonSecureCodec);
     result.append(buffer);
 
+    // Add resource log.
     result.append(resourceLog.c_str());
 
+    // Add service log.
     result.append("  Events logs (most recent at top):\n");
     result.append(serviceLog);
 
+    // Add metrics log.
+    result.append(metricsLog.c_str());
+
     write(fd, result.c_str(), result.size());
     return OK;
 }
diff --git a/services/mediaresourcemanager/fuzzer/Android.bp b/services/mediaresourcemanager/fuzzer/Android.bp
index 5bac062..3f04f69 100644
--- a/services/mediaresourcemanager/fuzzer/Android.bp
+++ b/services/mediaresourcemanager/fuzzer/Android.bp
@@ -47,7 +47,7 @@
     ],
     fuzz_config: {
         cc: [
-            "android-media-fuzzing-reports@google.com",
+            "girishshetty@google.com",
         ],
         componentid: 155276,
         hotlists: [
diff --git a/services/mediaresourcemanager/test/Android.bp b/services/mediaresourcemanager/test/Android.bp
index 6a64823..5dfec30 100644
--- a/services/mediaresourcemanager/test/Android.bp
+++ b/services/mediaresourcemanager/test/Android.bp
@@ -29,6 +29,9 @@
         "libactivitymanager_aidl",
         "server_configurable_flags",
     ],
+    defaults: [
+        "aconfig_lib_cc_static_link.defaults",
+    ],
     include_dirs: [
         "frameworks/av/include",
         "frameworks/av/services/mediaresourcemanager",
diff --git a/services/oboeservice/AAudioServiceEndpointPlay.cpp b/services/oboeservice/AAudioServiceEndpointPlay.cpp
index 637405d..5d6e2ae 100644
--- a/services/oboeservice/AAudioServiceEndpointPlay.cpp
+++ b/services/oboeservice/AAudioServiceEndpointPlay.cpp
@@ -88,7 +88,8 @@
                 }
 
                 aaudio_stream_state_t state = clientStream->getState();
-                if (state == AAUDIO_STREAM_STATE_STOPPING) {
+                if (state == AAUDIO_STREAM_STATE_STOPPING ||
+                    state == AAUDIO_STREAM_STATE_PAUSING) {
                     allowUnderflow = false; // just read what is already in the FIFO
                 } else if (state != AAUDIO_STREAM_STATE_STARTED) {
                     continue; // this stream is not running so skip it.
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index dc70c79..4e46bbf 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -22,6 +22,7 @@
 #include <iostream>
 #include <mutex>
 
+#include <com_android_media_aaudio.h>
 #include <media/MediaMetricsItem.h>
 #include <media/TypeConverter.h>
 #include <mediautils/SchedulingPolicyService.h>
@@ -219,7 +220,7 @@
     return closeAndClear();
 }
 
-aaudio_result_t AAudioServiceStreamBase::startDevice() {
+aaudio_result_t AAudioServiceStreamBase::startDevice_l() {
     mClientHandle = AUDIO_PORT_HANDLE_NONE;
     sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
     if (endpoint == nullptr) {
@@ -274,7 +275,7 @@
     mAtomicStreamTimestamp.clear();
 
     mClientHandle = AUDIO_PORT_HANDLE_NONE;
-    result = startDevice();
+    result = startDevice_l();
     if (result != AAUDIO_OK) goto error;
 
     // This should happen at the end of the start.
@@ -307,6 +308,8 @@
             .set(AMEDIAMETRICS_PROP_STATUS, (int32_t)result)
             .record(); });
 
+    setState(AAUDIO_STREAM_STATE_PAUSING);
+
     sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
     if (endpoint == nullptr) {
         ALOGE("%s() has no endpoint", __func__);
@@ -518,6 +521,18 @@
                                                        : exitStandby_l(param->mParcelable);
                     standbyTime = AudioClock::getNanoseconds() + IDLE_TIMEOUT_NANOS;
                 } break;
+                case START_CLIENT: {
+                    auto param = (StartClientParam *) command->parameter.get();
+                    command->result = param == nullptr ? AAUDIO_ERROR_ILLEGAL_ARGUMENT
+                                                       : startClient_l(param->mClient,
+                                                                       param->mAttr,
+                                                                       param->mClientHandle);
+                } break;
+                case STOP_CLIENT: {
+                    auto param = (StopClientParam *) command->parameter.get();
+                    command->result = param == nullptr ? AAUDIO_ERROR_ILLEGAL_ARGUMENT
+                                                       : stopClient_l(param->mClientHandle);
+                } break;
                 default:
                     ALOGE("Invalid command op code: %d", command->operationCode);
                     break;
@@ -730,6 +745,26 @@
     return mCommandQueue.sendCommand(command);
 }
 
+aaudio_result_t AAudioServiceStreamBase::sendStartClientCommand(const android::AudioClient &client,
+                                                                const audio_attributes_t *attr,
+                                                                audio_port_handle_t *clientHandle) {
+    auto command = std::make_shared<AAudioCommand>(
+            START_CLIENT,
+            std::make_shared<StartClientParam>(client, attr, clientHandle),
+            true /*waitForReply*/,
+            TIMEOUT_NANOS);
+    return mCommandQueue.sendCommand(command);
+}
+
+aaudio_result_t AAudioServiceStreamBase::sendStopClientCommand(audio_port_handle_t clientHandle) {
+    auto command = std::make_shared<AAudioCommand>(
+            STOP_CLIENT,
+            std::make_shared<StopClientParam>(clientHandle),
+            true /*waitForReply*/,
+            TIMEOUT_NANOS);
+    return mCommandQueue.sendCommand(command);
+}
+
 void AAudioServiceStreamBase::onVolumeChanged(float volume) {
     sendServiceEvent(AAUDIO_SERVICE_EVENT_VOLUME, volume);
 }
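
The new sendStartClientCommand()/sendStopClientCommand() helpers above reuse the stream's existing command-queue pattern: the caller packs its arguments into a parameter object, posts an opcode with waitForReply=true, and blocks until the command thread has executed the matching *_l handler. Below is a self-contained sketch of that round-trip, assuming nothing beyond standard C++; CommandQueue, Command and StartParam are illustrative stand-ins, not the actual AAudio types.

    // Minimal sketch of a blocking command queue with typed parameter payloads.
    // Illustrative only; this is not the AAudio command-queue implementation.
    #include <condition_variable>
    #include <cstdio>
    #include <deque>
    #include <memory>
    #include <mutex>
    #include <thread>

    struct CommandParam { virtual ~CommandParam() = default; };

    struct StartParam : CommandParam {            // analogous to StartClientParam
        explicit StartParam(int handle) : clientHandle(handle) {}
        int clientHandle;
    };

    struct Command {
        enum Op { START_CLIENT, STOP_CLIENT } op = START_CLIENT;
        std::shared_ptr<CommandParam> param;
        int result = 0;
        bool done = false;
        std::mutex lock;
        std::condition_variable cond;
    };

    class CommandQueue {
    public:
        // Caller side: post a command and block until the worker has processed it.
        int sendCommand(const std::shared_ptr<Command>& cmd) {
            {
                std::lock_guard<std::mutex> guard(mLock);
                mCommands.push_back(cmd);
            }
            mAvailable.notify_one();
            std::unique_lock<std::mutex> waitLock(cmd->lock);
            cmd->cond.wait(waitLock, [&] { return cmd->done; });
            return cmd->result;
        }

        // Worker side: pop the next command, or return null after stop().
        std::shared_ptr<Command> waitForCommand() {
            std::unique_lock<std::mutex> guard(mLock);
            mAvailable.wait(guard, [&] { return !mCommands.empty() || mStopped; });
            if (mCommands.empty()) return nullptr;
            auto cmd = mCommands.front();
            mCommands.pop_front();
            return cmd;
        }

        void stop() {
            { std::lock_guard<std::mutex> guard(mLock); mStopped = true; }
            mAvailable.notify_all();
        }

    private:
        std::mutex mLock;
        std::condition_variable mAvailable;
        std::deque<std::shared_ptr<Command>> mCommands;
        bool mStopped = false;
    };

    int main() {
        CommandQueue queue;
        std::thread worker([&] {
            while (auto cmd = queue.waitForCommand()) {
                if (cmd->op == Command::START_CLIENT) {
                    auto* p = static_cast<StartParam*>(cmd->param.get());
                    cmd->result = (p != nullptr) ? p->clientHandle : -1;
                }
                std::lock_guard<std::mutex> guard(cmd->lock);
                cmd->done = true;
                cmd->cond.notify_one();
            }
        });

        auto cmd = std::make_shared<Command>();
        cmd->op = Command::START_CLIENT;
        cmd->param = std::make_shared<StartParam>(42);
        std::printf("result = %d\n", queue.sendCommand(cmd));   // prints "result = 42"

        queue.stop();
        worker.join();
        return 0;
    }

The point of routing through the queue is that the *_l handlers always run on the command thread, which matches the REQUIRES(mLock) contract declared on them in the header changes below.
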
diff --git a/services/oboeservice/AAudioServiceStreamBase.h b/services/oboeservice/AAudioServiceStreamBase.h
index 96a6d44..8057f87 100644
--- a/services/oboeservice/AAudioServiceStreamBase.h
+++ b/services/oboeservice/AAudioServiceStreamBase.h
@@ -279,7 +279,7 @@
      * Device specific startup.
      * @return AAUDIO_OK or negative error.
      */
-    virtual aaudio_result_t startDevice();
+    virtual aaudio_result_t startDevice_l() REQUIRES(mLock);
 
     aaudio_result_t writeUpMessageQueue(AAudioServiceMessage *command)
             EXCLUDES(mUpMessageQueueLock);
@@ -288,6 +288,12 @@
 
     aaudio_result_t sendXRunCount(int32_t xRunCount);
 
+    aaudio_result_t sendStartClientCommand(const android::AudioClient& client,
+                                           const audio_attributes_t *attr,
+                                           audio_port_handle_t *clientHandle) EXCLUDES(mLock);
+
+    aaudio_result_t sendStopClientCommand(audio_port_handle_t clientHandle) EXCLUDES(mLock);
+
     /**
      * @param positionFrames
      * @param timeNanos
@@ -342,6 +348,40 @@
     }
     virtual void reportData_l() REQUIRES(mLock) { return; }
 
+    class StartClientParam : public AAudioCommandParam {
+    public:
+        StartClientParam(const android::AudioClient& client, const audio_attributes_t* attr,
+                         audio_port_handle_t* clientHandle)
+                : AAudioCommandParam(), mClient(client), mAttr(attr), mClientHandle(clientHandle) {
+        }
+        ~StartClientParam() override = default;
+
+        android::AudioClient mClient;
+        const audio_attributes_t* mAttr;
+        audio_port_handle_t* mClientHandle;
+    };
+    virtual aaudio_result_t startClient_l(
+            const android::AudioClient& client,
+            const audio_attributes_t *attr __unused,
+            audio_port_handle_t *clientHandle __unused) REQUIRES(mLock) {
+        ALOGD("AAudioServiceStreamBase::startClient_l(%p, ...) AAUDIO_ERROR_UNAVAILABLE", &client);
+        return AAUDIO_ERROR_UNAVAILABLE;
+    }
+
+    class StopClientParam : public AAudioCommandParam {
+    public:
+        explicit StopClientParam(audio_port_handle_t clientHandle)
+                : AAudioCommandParam(), mClientHandle(clientHandle) {
+        }
+        ~StopClientParam() override = default;
+
+        audio_port_handle_t mClientHandle;
+    };
+    virtual aaudio_result_t stopClient_l(audio_port_handle_t clientHandle) REQUIRES(mLock) {
+        ALOGD("AAudioServiceStreamBase::stopClient_l(%d) AAUDIO_ERROR_UNAVAILABLE", clientHandle);
+        return AAUDIO_ERROR_UNAVAILABLE;
+    }
+
     pid_t                   mRegisteredClientThread = ILLEGAL_THREAD_ID;
 
     std::mutex              mUpMessageQueueLock;
@@ -358,6 +398,8 @@
         UNREGISTER_AUDIO_THREAD,
         GET_DESCRIPTION,
         EXIT_STANDBY,
+        START_CLIENT,
+        STOP_CLIENT,
     };
     AAudioThread            mCommandThread;
     std::atomic_bool        mThreadEnabled{false};
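
The _l renames together with the REQUIRES(mLock)/EXCLUDES(mLock) annotations let Clang's -Wthread-safety analysis check the locking contract at compile time: *_l methods may only be called with mLock held, while the send*Command() wrappers must be called without it. A minimal standalone illustration of those annotations follows; the macro definitions mirror the usual thread_annotations.h wrappers, and GuardedCounter is a made-up example class, not part of this patch.

    // Standalone illustration of Clang thread-safety annotations (-Wthread-safety).
    #include <mutex>

    #define CAPABILITY(x)   __attribute__((capability(x)))
    #define GUARDED_BY(x)   __attribute__((guarded_by(x)))
    #define REQUIRES(...)   __attribute__((requires_capability(__VA_ARGS__)))
    #define EXCLUDES(...)   __attribute__((locks_excluded(__VA_ARGS__)))
    #define ACQUIRE(...)    __attribute__((acquire_capability(__VA_ARGS__)))
    #define RELEASE(...)    __attribute__((release_capability(__VA_ARGS__)))

    // Minimal annotated mutex wrapper so the analysis knows what "mLock" is.
    class CAPABILITY("mutex") Mutex {
    public:
        void lock() ACQUIRE() { mImpl.lock(); }
        void unlock() RELEASE() { mImpl.unlock(); }
    private:
        std::mutex mImpl;
    };

    class GuardedCounter {
    public:
        // Public entry point, called WITHOUT the lock held (like sendStartClientCommand()).
        void increment() EXCLUDES(mLock) {
            mLock.lock();
            increment_l();              // OK: mLock is held here.
            mLock.unlock();
        }

    private:
        // Internal helper, callable only WITH the lock held (like startClient_l()).
        void increment_l() REQUIRES(mLock) { ++mValue; }

        Mutex mLock;
        int mValue GUARDED_BY(mLock) = 0;
    };

    int main() {
        GuardedCounter counter;
        counter.increment();    // compiles cleanly under -Wthread-safety
        return 0;
    }

With this contract in place, calling a *_l method without the lock, or dropping the annotation from an override such as startDevice_l(), surfaces as a -Wthread-safety warning in builds that enable the analysis rather than as a latent race.
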
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.cpp b/services/oboeservice/AAudioServiceStreamMMAP.cpp
index 89f6e33..fc53949 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.cpp
+++ b/services/oboeservice/AAudioServiceStreamMMAP.cpp
@@ -24,6 +24,7 @@
 #include <iostream>
 #include <stdint.h>
 
+#include <com_android_media_aaudio.h>
 #include <utils/String16.h>
 #include <media/nbaio/AudioStreamOutSink.h>
 #include <media/MmapStreamInterface.h>
@@ -83,11 +84,12 @@
 }
 
 // Start the flow of data.
-aaudio_result_t AAudioServiceStreamMMAP::startDevice() {
-    aaudio_result_t result = AAudioServiceStreamBase::startDevice();
+aaudio_result_t AAudioServiceStreamMMAP::startDevice_l() {
+    aaudio_result_t result = AAudioServiceStreamBase::startDevice_l();
     if (!mInService && result == AAUDIO_OK) {
         // Note that this can sometimes take 200 to 300 msec for a cold start!
-        result = startClient(mMmapClient, nullptr /*const audio_attributes_t* */, &mClientHandle);
+        result = startClient_l(
+                mMmapClient, nullptr /*const audio_attributes_t* */, &mClientHandle);
     }
     return result;
 }
@@ -100,7 +102,7 @@
     aaudio_result_t result = AAudioServiceStreamBase::pause_l();
     // TODO put before base::pause()?
     if (!mInService) {
-        (void) stopClient(mClientHandle);
+        (void) stopClient_l(mClientHandle);
     }
     return result;
 }
@@ -112,7 +114,7 @@
     aaudio_result_t result = AAudioServiceStreamBase::stop_l();
     // TODO put before base::stop()?
     if (!mInService) {
-        (void) stopClient(mClientHandle);
+        (void) stopClient_l(mClientHandle);
     }
     return result;
 }
@@ -149,6 +151,37 @@
 aaudio_result_t AAudioServiceStreamMMAP::startClient(const android::AudioClient& client,
                                                      const audio_attributes_t *attr,
                                                      audio_port_handle_t *clientHandle) {
+    if (com::android::media::aaudio::start_stop_client_from_command_thread()) {
+        return sendStartClientCommand(client, attr, clientHandle);
+    } else {
+        sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
+        if (endpoint == nullptr) {
+            ALOGE("%s() has no endpoint", __func__);
+            return AAUDIO_ERROR_INVALID_STATE;
+        }
+        // Start the client on behalf of the application. Generate a new port handle.
+        aaudio_result_t result = endpoint->startClient(client, attr, clientHandle);
+        return result;
+    }
+}
+
+aaudio_result_t AAudioServiceStreamMMAP::stopClient(audio_port_handle_t clientHandle) {
+    if (com::android::media::aaudio::start_stop_client_from_command_thread()) {
+        return sendStopClientCommand(clientHandle);
+    } else {
+        sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
+        if (endpoint == nullptr) {
+            ALOGE("%s() has no endpoint", __func__);
+            return AAUDIO_ERROR_INVALID_STATE;
+        }
+        aaudio_result_t result = endpoint->stopClient(clientHandle);
+        return result;
+    }
+}
+
+aaudio_result_t AAudioServiceStreamMMAP::startClient_l(const android::AudioClient& client,
+                                                       const audio_attributes_t *attr,
+                                                       audio_port_handle_t *clientHandle) {
     sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
     if (endpoint == nullptr) {
         ALOGE("%s() has no endpoint", __func__);
@@ -159,7 +192,7 @@
     return result;
 }
 
-aaudio_result_t AAudioServiceStreamMMAP::stopClient(audio_port_handle_t clientHandle) {
+aaudio_result_t AAudioServiceStreamMMAP::stopClient_l(audio_port_handle_t clientHandle) {
     sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
     if (endpoint == nullptr) {
         ALOGE("%s() has no endpoint", __func__);
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.h b/services/oboeservice/AAudioServiceStreamMMAP.h
index 42032d7..f4ce83d 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.h
+++ b/services/oboeservice/AAudioServiceStreamMMAP.h
@@ -93,7 +93,13 @@
      * Device specific startup.
      * @return AAUDIO_OK or negative error.
      */
-    aaudio_result_t startDevice() override;
+    aaudio_result_t startDevice_l() REQUIRES(mLock) override;
+
+    aaudio_result_t startClient_l(const android::AudioClient& client,
+                                  const audio_attributes_t *attr,
+                                  audio_port_handle_t *clientHandle) REQUIRES(mLock) override;
+
+    aaudio_result_t stopClient_l(audio_port_handle_t clientHandle) REQUIRES(mLock) override;
 
 private: